Merge manifest refactor work
Matt Mackall
r2836:e78cad1f merge default
archival.py
@@ -1,174 +1,174 @@
# archival.py - revision archival for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of
# the GNU General Public License, incorporated herein by reference.

from demandload import *
from i18n import gettext as _
from node import *
demandload(globals(), 'cStringIO os stat tarfile time util zipfile')

def tidyprefix(dest, prefix, suffixes):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.'''

    if prefix:
        prefix = prefix.replace('\\', '/')
    else:
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        for sfx in suffixes:
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise util.Abort(_('archive prefix contains illegal components'))
    return prefix

class tarit:
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

    def __init__(self, dest, prefix, mtime, kind=''):
        self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
                                                '.tgz', 'tbz2'])
        self.mtime = mtime
        if isinstance(dest, str):
            self.z = tarfile.open(dest, mode='w:'+kind)
        else:
            self.z = tarfile.open(mode='w|'+kind, fileobj=dest)

    def addfile(self, name, mode, data):
        i = tarfile.TarInfo(self.prefix + name)
        i.mtime = self.mtime
        i.size = len(data)
        i.mode = mode
        self.z.addfile(i, cStringIO.StringIO(data))

    def done(self):
        self.z.close()

class tellable:
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        return getattr(self.fp, key)

    def write(self, s):
        self.fp.write(s)
        self.offset += len(s)

    def tell(self):
        return self.offset

class zipit:
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, prefix, mtime, compress=True):
        self.prefix = tidyprefix(dest, prefix, ('.zip',))
        if not isinstance(dest, str):
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, data):
        i = zipfile.ZipInfo(self.prefix + name, self.date_time)
        i.compress_type = self.z.compression
        i.flag_bits = 0x08
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        i.external_attr = (mode | stat.S_IFREG) << 16L
        self.z.writestr(i, data)

    def done(self):
        self.z.close()

class fileit:
    '''write archive as files in directory.'''

    def __init__(self, name, prefix, mtime):
        if prefix:
            raise util.Abort(_('cannot give prefix when archiving to files'))
        self.basedir = name
        self.dirs = {}
        self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
                       getattr(os, 'O_BINARY', 0) |
                       getattr(os, 'O_NOFOLLOW', 0))

    def addfile(self, name, mode, data):
        destfile = os.path.join(self.basedir, name)
        destdir = os.path.dirname(destfile)
        if destdir not in self.dirs:
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            self.dirs[destdir] = 1
        os.fdopen(os.open(destfile, self.oflags, mode), 'wb').write(data)

    def done(self):
        pass

archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
    'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
    'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
    'zip': zipit,
    }

def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix=None, mtime=None):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.'''

    def write(name, mode, data):
        if matchfn and not matchfn(name): return
        if decode:
            fp = cStringIO.StringIO()
            repo.wwrite(name, data, fp)
            data = fp.getvalue()
        archiver.addfile(name, mode, data)

    change = repo.changelog.read(node)
    mn = change[0]
    archiver = archivers[kind](dest, prefix, mtime or change[2][0])
    mf = repo.manifest.read(mn).items()
    mff = repo.manifest.readflags(mn)
    mf.sort()
    write('.hg_archival.txt', 0644,
          'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
    for filename, filenode in mf:
-        write(filename, mff[filename] and 0755 or 0644,
+        write(filename, mff.execf(filename) and 0755 or 0644,
              repo.file(filename).read(filenode))
    archiver.done()
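The archivers table above maps archive kinds ('files', 'tar', 'tbz2', 'tgz', 'uzip', 'zip') onto writer classes, and archive() drives whichever one is selected; the only functional change in this hunk is that the manifest flags returned by repo.manifest.readflags() are now queried through an execf() method instead of by indexing. A minimal, hypothetical usage sketch follows; the package-style imports, repository path, archive name, and prefix are assumptions for illustration, not part of the change.

# Hypothetical usage of archive() as defined above; the repository path
# and output name are made up.
from mercurial import ui, hg, archival

u = ui.ui()
repo = hg.repository(u, '/path/to/repo')     # assumed existing repository
tip = repo.lookup('tip')

# 'tgz' is dispatched through the archivers table to tarit(..., kind='gz');
# every member is stored under the 'myproject/' prefix inside the archive.
archival.archive(repo, 'myproject.tgz', tip, 'tgz', prefix='myproject/')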
commands.py
@@ -1,3686 +1,3687 @@
# commands.py - command processing for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb")
demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
demandload(globals(), "fnmatch mdiff random signal tempfile time")
demandload(globals(), "traceback errno socket version struct atexit sets bz2")
demandload(globals(), "archival cStringIO changegroup email.Parser")
demandload(globals(), "hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted, unknown = repo.changes()
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def filterfiles(filters, files):
    l = [x for x in files if x in filters]

    for t in filters:
        if t and t[-1] != "/":
            t += "/"
        l += [x for x in files if x.startswith(t)]
    return l

def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def matchpats(repo, pats=[], opts={}, head=''):
    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)

def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    exact = dict(zip(files, files))
    def walk():
        for src, fn in repo.walk(node=node, files=files, match=matchfn,
                                 badmatch=badmatch):
            yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
    return files, matchfn, walk()

def walk(repo, pats, opts, node=None, head='', badmatch=None):
    files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
    for r in results:
        yield r

def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


    files, matchfn, anypats = matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if repo.changelog.count() == 0:
        return [], False, matchfn

    if follow:
        p = repo.dirstate.parents()[0]
        if p == nullid:
            ui.warn(_('No working directory revision; defaulting to tip\n'))
            start = 'tip'
        else:
            start = repo.changelog.rev(p)
        defrange = '%s:0' % start
    else:
        defrange = 'tip:0'
    revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
    wanted = {}
    slowpath = anypats
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        class followfilter:
            def __init__(self, onlyfirst=False):
                self.startrev = -1
                self.roots = []
                self.onlyfirst = onlyfirst

            def match(self, rev):
                def realparents(rev):
                    if self.onlyfirst:
                        return repo.changelog.parentrevs(rev)[0:1]
                    else:
                        return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))

                if self.startrev == -1:
                    self.startrev = rev
                    return True

                if rev > self.startrev:
                    # forward: all descendants
                    if not self.roots:
                        self.roots.append(self.startrev)
                    for parent in realparents(rev):
                        if parent in self.roots:
                            self.roots.append(rev)
                            return True
                else:
                    # backwards: all parents
                    if not self.roots:
                        self.roots.extend(realparents(self.startrev))
                    if rev in self.roots:
                        self.roots.remove(rev)
                        self.roots.extend(realparents(rev))
                        return True

                return False

        if follow and not files:
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if rev not in wanted:
                    return False
                return ff.match(rev)
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn
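The walkchangerevs() docstring above defines a small event protocol: 'window' resets state, 'add' delivers file names out of order, 'iter' revisits the revisions in order for display. The sketch below shows the shape of a typical consumer loop; it is illustrative only, written as if it sat next to the code above so it can reuse this module's names (ui, hg, short), and the repository path is made up.

# Illustrative consumer of walkchangerevs(); gathers per-rev file lists on
# the out-of-order 'add' pass, then displays on the in-order 'iter' pass.
u = ui.ui()
repo = hg.repository(u, '/path/to/repo')     # hypothetical repository
opts = {'rev': [], 'include': [], 'exclude': [],
        'follow': False, 'follow_first': False}
changeiter, getchange, matchfn = walkchangerevs(u, repo, [], opts)

collected = {}
for st, rev, data in changeiter:
    if st == 'window':
        collected.clear()                 # new window: reset gathered state
    elif st == 'add':
        collected[rev] = data             # out-of-order pass: gather file names
    elif st == 'iter':                    # in-order pass: display
        print rev, short(repo.changelog.node(rev)), collected.get(rev)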

revrangesep = ':'

def revfix(repo, val, defval):
    '''turn user-level id of changeset into rev number.
    user-level id can be tag, changeset, rev number, or negative rev
    number relative to number of revs (-1 is tip, etc).'''
    if not val:
        return defval
    try:
        num = int(val)
        if str(num) != val:
            raise ValueError
        if num < 0:
            num += repo.changelog.count()
        if num < 0:
            num = 0
        elif num >= repo.changelog.count():
            raise ValueError
    except ValueError:
        try:
            num = repo.changelog.rev(repo.lookup(val))
        except KeyError:
            raise util.Abort(_('invalid revision identifier %s'), val)
    return num

def revpair(ui, repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''
    if not revs:
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        start = revs[0]
        if revrangesep in start:
            start, end = start.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, start, None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    if end is not None: end = repo.lookup(str(end))
    return repo.lookup(str(start)), end

def revrange(ui, repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    seen = {}
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)
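revfix() and revrange() above turn user-level revision specifications (numbers, negative indexes, tags and other identifiers, and ':'-separated ranges) into concrete revision numbers. A small illustrative expansion, reusing the u and repo objects from the previous sketch and assuming a hypothetical repository with six changesets numbered 0 through 5:

# Illustrative only; outputs depend on the assumed six-changeset repository.
list(revrange(u, repo, ['3:1']))   # ['3', '2', '1']  - descending range
list(revrange(u, repo, ['-2:']))   # ['4', '5']       - '-2' counts from the end,
                                   #                    an empty end defaults to tip
revfix(repo, '-1', None)           # 5  - negative index relative to the rev count
revfix(repo, 'tip', None)          # 5  - non-numeric ids fall back to repo.lookup()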

def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])

def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)

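make_filename() above implements a small %-escape language for output file names: %H/%R/%h for the node, %b for the repository basename, %r for a zero-padded revision, %N and %n for totals and padded sequence numbers, and %s/%d/%p for the path. An illustrative expansion, reusing repo from the earlier sketch; the pattern, counts, and resulting name are made up:

# Illustrative expansion of an output-file pattern; node, repo root and
# counts are hypothetical.
tip = repo.lookup('tip')
pat = '%b-r%r-%n-of-%N.patch'
make_filename(repo, pat, tip, total=10, seqno=3, revwidth=3,
              pathname='src/module.py')
# With repo.root ending in 'myproject' and tip at revision 5 this yields
# 'myproject-r005-03-of-10.patch'; an unknown escape raises util.Abort.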
def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists"), filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        if compress:
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
        fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)

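write_bundle() above emits an 'HG10' magic followed by a bz2-compressed changegroup, or 'HG10UN' followed by the raw chunks. A minimal sketch, under that assumption, of telling the two apart when reading a bundle back; the helper name is hypothetical:

# Minimal sketch: inspect the magic written by write_bundle() above.
def bundle_is_compressed(path):
    fh = open(path, 'rb')
    try:
        magic = fh.read(6)
    finally:
        fh.close()
    if magic == 'HG10UN':
        return False              # 'HG10UN' + raw changegroup chunks
    if magic.startswith('HG10'):
        return True               # 'HG10' + bz2-compressed changegroup
    raise ValueError('not an HG10 bundle: %r' % magic)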
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    if not node1:
        node1 = repo.dirstate.parents()[0]
    # reading the data for node1 early allows it to play nicely
    # with repo.changes and the revlog cache.
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        _date2 = util.datestr(change[2])
        def date2(f):
            return _date2
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        tz = util.makedate()[1]
        _date2 = util.datestr()
        def date2(f):
            try:
                return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
            except OSError, err:
                if err.errno != errno.ENOENT: raise
                return _date2
        def read(f):
            return repo.wread(f)

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    ignorewsamount = opts.get('ignore_space_change') or \
                     diffopts['ignorewsamount']
    ignoreblanklines = opts.get('ignore_blank_lines') or \
                     diffopts['ignoreblanklines']

    all = modified + added + removed
    all.sort()
    for f in all:
        to = None
        tn = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        if f not in removed:
            tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))

def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])

        parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
                   for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        if self.ui.verbose:
            self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
        else:
            self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.changes(log.parents(changenode)[0], changenode)
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")

def show_changeset(ui, repo, opts):
    '''show one changeset. uses template or regular display. caller
    can pass in 'style' and 'template' options in opts.'''

    tmpl = opts.get('template')
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl: tmpl = templater.parsestring(tmpl)
    mapfile = opts.get('style') or ui.config('ui', 'style')
    if tmpl or mapfile:
        if mapfile:
            if not os.path.isfile(mapfile):
                mapname = templater.templatepath('map-cmdline.' + mapfile)
                if not mapname: mapname = templater.templatepath(mapfile)
                if mapname: mapfile = mapname
        try:
            t = templater.changeset_templater(ui, repo, mapfile)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo)

624 def setremoteconfig(ui, opts):
624 def setremoteconfig(ui, opts):
625 "copy remote options to ui tree"
625 "copy remote options to ui tree"
626 if opts.get('ssh'):
626 if opts.get('ssh'):
627 ui.setconfig("ui", "ssh", opts['ssh'])
627 ui.setconfig("ui", "ssh", opts['ssh'])
628 if opts.get('remotecmd'):
628 if opts.get('remotecmd'):
629 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
629 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
630
630
631 def show_version(ui):
631 def show_version(ui):
632 """output version and copyright information"""
632 """output version and copyright information"""
633 ui.write(_("Mercurial Distributed SCM (version %s)\n")
633 ui.write(_("Mercurial Distributed SCM (version %s)\n")
634 % version.get_version())
634 % version.get_version())
635 ui.status(_(
635 ui.status(_(
636 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
636 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
637 "This is free software; see the source for copying conditions. "
637 "This is free software; see the source for copying conditions. "
638 "There is NO\nwarranty; "
638 "There is NO\nwarranty; "
639 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
639 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
640 ))
640 ))
641
641
642 def help_(ui, name=None, with_version=False):
642 def help_(ui, name=None, with_version=False):
643 """show help for a command, extension, or list of commands
643 """show help for a command, extension, or list of commands
644
644
645 With no arguments, print a list of commands and short help.
645 With no arguments, print a list of commands and short help.
646
646
647 Given a command name, print help for that command.
647 Given a command name, print help for that command.
648
648
649 Given an extension name, print help for that extension, and the
649 Given an extension name, print help for that extension, and the
650 commands it provides."""
650 commands it provides."""
651 option_lists = []
651 option_lists = []
652
652
653 def helpcmd(name):
653 def helpcmd(name):
654 if with_version:
654 if with_version:
655 show_version(ui)
655 show_version(ui)
656 ui.write('\n')
656 ui.write('\n')
657 aliases, i = findcmd(name)
657 aliases, i = findcmd(name)
658 # synopsis
658 # synopsis
659 ui.write("%s\n\n" % i[2])
659 ui.write("%s\n\n" % i[2])
660
660
661 # description
661 # description
662 doc = i[0].__doc__
662 doc = i[0].__doc__
663 if not doc:
663 if not doc:
664 doc = _("(No help text available)")
664 doc = _("(No help text available)")
665 if ui.quiet:
665 if ui.quiet:
666 doc = doc.splitlines(0)[0]
666 doc = doc.splitlines(0)[0]
667 ui.write("%s\n" % doc.rstrip())
667 ui.write("%s\n" % doc.rstrip())
668
668
669 if not ui.quiet:
669 if not ui.quiet:
670 # aliases
670 # aliases
671 if len(aliases) > 1:
671 if len(aliases) > 1:
672 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
672 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
673
673
674 # options
674 # options
675 if i[1]:
675 if i[1]:
676 option_lists.append(("options", i[1]))
676 option_lists.append(("options", i[1]))
677
677
678 def helplist(select=None):
678 def helplist(select=None):
679 h = {}
679 h = {}
680 cmds = {}
680 cmds = {}
681 for c, e in table.items():
681 for c, e in table.items():
682 f = c.split("|", 1)[0]
682 f = c.split("|", 1)[0]
683 if select and not select(f):
683 if select and not select(f):
684 continue
684 continue
685 if name == "shortlist" and not f.startswith("^"):
685 if name == "shortlist" and not f.startswith("^"):
686 continue
686 continue
687 f = f.lstrip("^")
687 f = f.lstrip("^")
688 if not ui.debugflag and f.startswith("debug"):
688 if not ui.debugflag and f.startswith("debug"):
689 continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helpext(name):
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        try:
            helpcmd(name)
        except UnknownCommand:
            helpext(name)

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

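# Editorial note (not part of the original source): the option table above
# lines up its columns with Python's "*" width specifier. A minimal sketch of
# the same idiom, with made-up option text:
#
#   >>> "%2s%s" % ("-v", " --verbose")
#   '-v --verbose'
#   >>> " %-*s %s" % (14, "-v --verbose", "enable additional output")
#   ' -v --verbose   enable additional output'
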
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files (DEPRECATED)

    (DEPRECATED)
    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    This command is now deprecated and will be removed in a future
    release. Please use add and remove --after instead.
    """
    ui.warn(_('(the addremove command is deprecated; use add and remove '
              '--after instead)\n'))
    return addremove_lock(ui, repo, pats, opts)

def addremove_lock(ui, repo, pats, opts, wlock=None):
    add, remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            add.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            remove.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not opts.get('dry_run'):
        repo.add(add, wlock=wlock)
        repo.remove(remove, wlock=wlock)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        try:
            return ucache[rev]
        except:
            u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
            ucache[rev] = u
            return u

    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])

    for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate()
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

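# Editorial example (not in the original file): typical annotate invocations
# built from the options handled above; the columns are combined in opmap
# order (user, number, changeset, date).
#
#   hg annotate foo.c              # revision number only (the default)
#   hg annotate -u -n -d foo.c     # user, revision number and date per line
#   hg annotate -a binary.dat      # force annotation of a binary file
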
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        node, p2 = repo.dirstate.parents()
        if p2 != nullid:
            raise util.Abort(_('uncommitted merge - please provide a '
                               'specific revision'))

    dest = make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)

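# Editorial example (not in the original file): archive invocations using the
# types and the -r/-t/-p options documented above; names are illustrative.
#
#   hg archive ../project-snapshot                  # "files": a plain directory
#   hg archive -r 1000 -t tgz project-1000.tar.gz   # gzipped tarball of rev 1000
#   hg archive -t zip -p 'project-%h' project.zip   # prefix from a format string
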
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            n = _lookup(repo, hex(op1))
            hg.merge(repo, n)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

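# Editorial example (not in the original file): backing out a non-tip
# changeset, with and without the --merge behaviour described above.
#
#   hg backout 2050            # commit a reversal; leaves a new head to merge
#   hg backout --merge 2050    # also merge the new head with the old parent
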
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    cg = repo.changegroup(o, 'bundle')
    write_bundle(cg, fname)

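# Editorial example (not in the original file): moving changesets between
# repositories by file, as described above; paths are illustrative.
#
#   hg bundle changes.hg ../upstream     # changesets missing from ../upstream
#   hg unbundle changes.hg               # apply the file in the other repo
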
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'] or "-1")
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
        fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())

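# Editorial example (not in the original file): the output format string
# combines the export rules with the %s/%d/%p additions listed above.
#
#   hg cat -r 1000 Makefile                 # print Makefile as of revision 1000
#   hg cat -r 1000 -o '%s.r%R' Makefile     # write it to Makefile.r1000
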
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc will be created on the remote side. Look at the help text
    for the pull command for important details about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    hg.clone(ui, ui.expandpath(source), dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])

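# Editorial example (not in the original file): local and partial clones per
# the docstring above; paths are illustrative.
#
#   hg clone ../project project-work         # hardlinked where possible
#   hg clone --pull ../project project-afs   # force a pull-based copy
#   hg clone -r 1000 ../project project-old  # history up to revision 1000 only
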
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = logmessage(opts)

    if opts['addremove']:
        addremove_lock(ui, repo, pats, opts)
    fns, match, anypats = matchpats(repo, pats, opts)
    if pats:
        modified, added, removed, deleted, unknown = (
            repo.changes(files=fns, match=match))
        files = modified + added + removed
    else:
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))

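# Editorial example (not in the original file):
#
#   hg commit -m 'fix walk pattern handling' mercurial/commands.py
#   hg commit                  # no message given: an editor is opened
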
def docopy(ui, repo, pats, opts, wlock):
    # called with the repo lock held
    cwd = repo.getcwd()
    errors = 0
    copied = []
    targets = {}

    def okaytocopy(abs, rel, exact):
        reasons = {'?': _('is not managed'),
                   'a': _('has been marked for add'),
                   'r': _('has been marked for remove')}
        state = repo.dirstate.state(abs)
        reason = reasons.get(state)
        if reason:
            if state == 'a':
                origsrc = repo.dirstate.copied(abs)
                if origsrc is not None:
                    return origsrc
            if exact:
                ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
        else:
            return abs

    def copy(origsrc, abssrc, relsrc, target, exact):
        abstarget = util.canonpath(repo.root, cwd, target)
        reltarget = util.pathto(cwd, abstarget)
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, abssrc, prevsrc))
            return
        if (not opts['after'] and os.path.exists(reltarget) or
            opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return
            if not opts['after'] and not opts.get('dry_run'):
                os.unlink(reltarget)
        if opts['after']:
            if not os.path.exists(reltarget):
                return
        else:
            targetdir = os.path.dirname(reltarget) or '.'
            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                os.makedirs(targetdir)
            try:
                restore = repo.dirstate.state(abstarget) == 'r'
                if restore and not opts.get('dry_run'):
                    repo.undelete([abstarget], wlock)
                try:
                    if not opts.get('dry_run'):
                        shutil.copyfile(relsrc, reltarget)
                        shutil.copymode(relsrc, reltarget)
                    restore = False
                finally:
                    if restore:
                        repo.remove([abstarget], wlock)
            except shutil.Error, inst:
                raise util.Abort(str(inst))
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    errors += 1
                    return
        if ui.verbose or not exact:
            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
        targets[abstarget] = abssrc
        if abstarget != origsrc and not opts.get('dry_run'):
            repo.copy(origsrc, abstarget, wlock)
        copied.append((abssrc, relsrc, exact))

    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, p[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest, os.path.basename(p))
        else:
            res = lambda p: dest
        return res

    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest, os.path.basename(p))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, s[0][striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest, p[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest, os.path.basename(p))
                else:
                    res = lambda p: dest
        return res


    pats = list(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
        raise util.Abort(_('with multiple sources, destination must be an '
                           'existing directory'))
    if opts['after']:
        tfn = targetpathafterfn
    else:
        tfn = targetpathfn
    copylist = []
    for pat in pats:
        srcs = []
        for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
            origsrc = okaytocopy(abssrc, relsrc, exact)
            if origsrc:
                srcs.append((origsrc, abssrc, relsrc, exact))
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    for targetpath, srcs in copylist:
        for origsrc, abssrc, relsrc, exact in srcs:
            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)

    if errors:
        ui.warn(_('(consider using --after)\n'))
    return errors, copied

def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    return errs

def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
    a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))

def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = findcmd(cmd)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    clist = findpossible(cmd).keys()
    clist.sort()
    ui.write("%s\n" % "\n".join(clist))

def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    if not rev:
        rev = repo.changelog.tip()
    else:
        rev = repo.lookup(rev)
    change = repo.changelog.read(rev)
    n = change[0]
    files = repo.manifest.readflags(n)
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files.iteritems())

def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)

def debugconfig(ui, repo, *values):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    if values:
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig():
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    ui.write(value, '\n')
        else:
            ui.write('%s=%s\n' % (sectname, value))

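# Editorial example (not in the original file): the three lookup modes handled
# above (all items, one section, one section.name).
#
#   hg debugconfig                # every item printed as section.name=value
#   hg debugconfig ui             # all items in the [ui] section
#   hg debugconfig ui.username    # just the value of ui.username
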
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    if not rev2:
        rev2 = hex(nullid)

    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))

def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    for file_ in keys:
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    time.strftime("%x %X",
                                  time.localtime(dc[file_][3])), file_))
    for f in repo.dirstate.copies:
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))

def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s'), rev)

def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in range(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))

def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("digraph G {\n")
    for i in range(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

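# Editorial note (not in the original file): for a three-revision linear
# history the loop above emits one parent -> child edge per revision, with
# the null parent printed as -1:
#
#   digraph G {
#           -1 -> 0
#           0 -> 1
#           1 -> 2
#   }
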
def debugrename(ui, repo, file, rev=None):
    """dump rename information"""
    r = repo.file(relpath(repo, [file])[0])
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            m = repo.manifest.read(change[0])
            n = m[relpath(repo, [file])[0]]
        except (hg.RepoError, KeyError):
            n = r.lookup(rev)
    else:
        n = r.tip()
    m = r.renamed(n)
    if m:
        ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
    else:
        ui.write(_("not renamed\n"))

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(walk(repo, pats, opts))
    if not items:
        return
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

1489 def diff(ui, repo, *pats, **opts):
1489 def diff(ui, repo, *pats, **opts):
1490 """diff repository (or selected files)
1490 """diff repository (or selected files)
1491
1491
1492 Show differences between revisions for the specified files.
1492 Show differences between revisions for the specified files.
1493
1493
1494 Differences between files are shown using the unified diff format.
1494 Differences between files are shown using the unified diff format.
1495
1495
1496 When two revision arguments are given, then changes are shown
1496 When two revision arguments are given, then changes are shown
1497 between those revisions. If only one revision is specified then
1497 between those revisions. If only one revision is specified then
1498 that revision is compared to the working directory, and, when no
1498 that revision is compared to the working directory, and, when no
1499 revisions are specified, the working directory files are compared
1499 revisions are specified, the working directory files are compared
1500 to its parent.
1500 to its parent.
1501
1501
1502 Without the -a option, diff will avoid generating diffs of files
1502 Without the -a option, diff will avoid generating diffs of files
1503 it detects as binary. With -a, diff will generate a diff anyway,
1503 it detects as binary. With -a, diff will generate a diff anyway,
1504 probably with undesirable results.
1504 probably with undesirable results.
1505 """
1505 """
1506 node1, node2 = revpair(ui, repo, opts['rev'])
1506 node1, node2 = revpair(ui, repo, opts['rev'])
1507
1507
1508 fns, matchfn, anypats = matchpats(repo, pats, opts)
1508 fns, matchfn, anypats = matchpats(repo, pats, opts)
1509
1509
1510 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1510 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1511 text=opts['text'], opts=opts)
1511 text=opts['text'], opts=opts)
1512
1512
1513 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1513 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1514 node = repo.lookup(changeset)
1514 node = repo.lookup(changeset)
1515 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1515 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1516 if opts['switch_parent']:
1516 if opts['switch_parent']:
1517 parents.reverse()
1517 parents.reverse()
1518 prev = (parents and parents[0]) or nullid
1518 prev = (parents and parents[0]) or nullid
1519 change = repo.changelog.read(node)
1519 change = repo.changelog.read(node)
1520
1520
1521 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1521 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1522 revwidth=revwidth)
1522 revwidth=revwidth)
1523 if fp != sys.stdout:
1523 if fp != sys.stdout:
1524 ui.note("%s\n" % fp.name)
1524 ui.note("%s\n" % fp.name)
1525
1525
1526 fp.write("# HG changeset patch\n")
1526 fp.write("# HG changeset patch\n")
1527 fp.write("# User %s\n" % change[1])
1527 fp.write("# User %s\n" % change[1])
1528 fp.write("# Date %d %d\n" % change[2])
1528 fp.write("# Date %d %d\n" % change[2])
1529 fp.write("# Node ID %s\n" % hex(node))
1529 fp.write("# Node ID %s\n" % hex(node))
1530 fp.write("# Parent %s\n" % hex(prev))
1530 fp.write("# Parent %s\n" % hex(prev))
1531 if len(parents) > 1:
1531 if len(parents) > 1:
1532 fp.write("# Parent %s\n" % hex(parents[1]))
1532 fp.write("# Parent %s\n" % hex(parents[1]))
1533 fp.write(change[4].rstrip())
1533 fp.write(change[4].rstrip())
1534 fp.write("\n\n")
1534 fp.write("\n\n")
1535
1535
1536 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1536 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1537 if fp != sys.stdout:
1537 if fp != sys.stdout:
1538 fp.close()
1538 fp.close()
1539
1539
1540 def export(ui, repo, *changesets, **opts):
1540 def export(ui, repo, *changesets, **opts):
1541 """dump the header and diffs for one or more changesets
1541 """dump the header and diffs for one or more changesets
1542
1542
1543 Print the changeset header and diffs for one or more revisions.
1543 Print the changeset header and diffs for one or more revisions.
1544
1544
1545 The information shown in the changeset header is: author,
1545 The information shown in the changeset header is: author,
1546 changeset hash, parent and commit comment.
1546 changeset hash, parent and commit comment.
1547
1547
1548 Output may be to a file, in which case the name of the file is
1548 Output may be to a file, in which case the name of the file is
1549 given using a format string. The formatting rules are as follows:
1549 given using a format string. The formatting rules are as follows:
1550
1550
1551 %% literal "%" character
1551 %% literal "%" character
1552 %H changeset hash (40 bytes of hexadecimal)
1552 %H changeset hash (40 bytes of hexadecimal)
1553 %N number of patches being generated
1553 %N number of patches being generated
1554 %R changeset revision number
1554 %R changeset revision number
1555 %b basename of the exporting repository
1555 %b basename of the exporting repository
1556 %h short-form changeset hash (12 bytes of hexadecimal)
1556 %h short-form changeset hash (12 bytes of hexadecimal)
1557 %n zero-padded sequence number, starting at 1
1557 %n zero-padded sequence number, starting at 1
1558 %r zero-padded changeset revision number
1558 %r zero-padded changeset revision number
1559
1559
1560 Without the -a option, export will avoid generating diffs of files
1560 Without the -a option, export will avoid generating diffs of files
1561 it detects as binary. With -a, export will generate a diff anyway,
1561 it detects as binary. With -a, export will generate a diff anyway,
1562 probably with undesirable results.
1562 probably with undesirable results.
1563
1563
1564 With the --switch-parent option, the diff will be against the second
1564 With the --switch-parent option, the diff will be against the second
1565 parent. It can be useful to review a merge.
1565 parent. It can be useful to review a merge.
1566 """
1566 """
1567 if not changesets:
1567 if not changesets:
1568 raise util.Abort(_("export requires at least one changeset"))
1568 raise util.Abort(_("export requires at least one changeset"))
1569 seqno = 0
1569 seqno = 0
1570 revs = list(revrange(ui, repo, changesets))
1570 revs = list(revrange(ui, repo, changesets))
1571 total = len(revs)
1571 total = len(revs)
1572 revwidth = max(map(len, revs))
1572 revwidth = max(map(len, revs))
1573 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1573 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1574 ui.note(msg)
1574 ui.note(msg)
1575 for cset in revs:
1575 for cset in revs:
1576 seqno += 1
1576 seqno += 1
1577 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1577 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1578
1578
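# Illustrative sketch, not part of the original source: roughly how the
# format escapes documented in the export docstring above can be expanded
# for one changeset.  The helper name and sample values are hypothetical;
# the real expansion is done by make_file() elsewhere in this module.
def _sketch_expand_export_name(pat, node_hex, seqno, total, rev, revwidth):
    # node_hex is the 40-character changeset hash, rev the revision number
    # as a string; %b (repository basename) is left untouched here.
    expander = {'%': '%',
                'H': node_hex,
                'h': node_hex[:12],
                'N': str(total),
                'n': ('%%0%dd' % len(str(total))) % seqno,
                'R': rev,
                'r': rev.zfill(revwidth)}
    out, i = [], 0
    while i < len(pat):
        if pat[i] == '%' and i + 1 < len(pat):
            out.append(expander.get(pat[i + 1], '%' + pat[i + 1]))
            i += 2
        else:
            out.append(pat[i])
            i += 1
    return ''.join(out)

# e.g. pat='r%r-of-%N.patch', rev='7', total=12, revwidth=2 gives
# 'r07-of-12.patch'.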
1579 def forget(ui, repo, *pats, **opts):
1579 def forget(ui, repo, *pats, **opts):
1580 """don't add the specified files on the next commit (DEPRECATED)
1580 """don't add the specified files on the next commit (DEPRECATED)
1581
1581
1582 (DEPRECATED)
1582 (DEPRECATED)
1583 Undo an 'hg add' scheduled for the next commit.
1583 Undo an 'hg add' scheduled for the next commit.
1584
1584
1585 This command is now deprecated and will be removed in a future
1585 This command is now deprecated and will be removed in a future
1586 release. Please use revert instead.
1586 release. Please use revert instead.
1587 """
1587 """
1588 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1588 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1589 forget = []
1589 forget = []
1590 for src, abs, rel, exact in walk(repo, pats, opts):
1590 for src, abs, rel, exact in walk(repo, pats, opts):
1591 if repo.dirstate.state(abs) == 'a':
1591 if repo.dirstate.state(abs) == 'a':
1592 forget.append(abs)
1592 forget.append(abs)
1593 if ui.verbose or not exact:
1593 if ui.verbose or not exact:
1594 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1594 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1595 repo.forget(forget)
1595 repo.forget(forget)
1596
1596
1597 def grep(ui, repo, pattern, *pats, **opts):
1597 def grep(ui, repo, pattern, *pats, **opts):
1598 """search for a pattern in specified files and revisions
1598 """search for a pattern in specified files and revisions
1599
1599
1600 Search revisions of files for a regular expression.
1600 Search revisions of files for a regular expression.
1601
1601
1602 This command behaves differently than Unix grep. It only accepts
1602 This command behaves differently than Unix grep. It only accepts
1603 Python/Perl regexps. It searches repository history, not the
1603 Python/Perl regexps. It searches repository history, not the
1604 working directory. It always prints the revision number in which
1604 working directory. It always prints the revision number in which
1605 a match appears.
1605 a match appears.
1606
1606
1607 By default, grep only prints output for the first revision of a
1607 By default, grep only prints output for the first revision of a
1608 file in which it finds a match. To get it to print every revision
1608 file in which it finds a match. To get it to print every revision
1609 that contains a change in match status ("-" for a match that
1609 that contains a change in match status ("-" for a match that
1610 becomes a non-match, or "+" for a non-match that becomes a match),
1610 becomes a non-match, or "+" for a non-match that becomes a match),
1611 use the --all flag.
1611 use the --all flag.
1612 """
1612 """
1613 reflags = 0
1613 reflags = 0
1614 if opts['ignore_case']:
1614 if opts['ignore_case']:
1615 reflags |= re.I
1615 reflags |= re.I
1616 regexp = re.compile(pattern, reflags)
1616 regexp = re.compile(pattern, reflags)
1617 sep, eol = ':', '\n'
1617 sep, eol = ':', '\n'
1618 if opts['print0']:
1618 if opts['print0']:
1619 sep = eol = '\0'
1619 sep = eol = '\0'
1620
1620
1621 fcache = {}
1621 fcache = {}
1622 def getfile(fn):
1622 def getfile(fn):
1623 if fn not in fcache:
1623 if fn not in fcache:
1624 fcache[fn] = repo.file(fn)
1624 fcache[fn] = repo.file(fn)
1625 return fcache[fn]
1625 return fcache[fn]
1626
1626
1627 def matchlines(body):
1627 def matchlines(body):
1628 begin = 0
1628 begin = 0
1629 linenum = 0
1629 linenum = 0
1630 while True:
1630 while True:
1631 match = regexp.search(body, begin)
1631 match = regexp.search(body, begin)
1632 if not match:
1632 if not match:
1633 break
1633 break
1634 mstart, mend = match.span()
1634 mstart, mend = match.span()
1635 linenum += body.count('\n', begin, mstart) + 1
1635 linenum += body.count('\n', begin, mstart) + 1
1636 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1636 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1637 lend = body.find('\n', mend)
1637 lend = body.find('\n', mend)
1638 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1638 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1639 begin = lend + 1
1639 begin = lend + 1
1640
1640
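# Illustrative sketch, not part of the original source: an equivalent, more
# explicit formulation of what matchlines() above yields -- a 1-based line
# number, the match's start and end columns within that line, and the line
# text -- reporting at most one match per line, as the generator above does.
def _sketch_matchlines(regexp, body):
    for linenum, line in enumerate(body.splitlines()):
        m = regexp.search(line)
        if m:
            yield linenum + 1, m.start(), m.end(), line

# e.g. with re.compile('ba') and "foo\nbar\nbaz\n" both versions yield
# (2, 0, 2, 'bar') and (3, 0, 2, 'baz').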
1641 class linestate(object):
1641 class linestate(object):
1642 def __init__(self, line, linenum, colstart, colend):
1642 def __init__(self, line, linenum, colstart, colend):
1643 self.line = line
1643 self.line = line
1644 self.linenum = linenum
1644 self.linenum = linenum
1645 self.colstart = colstart
1645 self.colstart = colstart
1646 self.colend = colend
1646 self.colend = colend
1647 def __eq__(self, other):
1647 def __eq__(self, other):
1648 return self.line == other.line
1648 return self.line == other.line
1649 def __hash__(self):
1649 def __hash__(self):
1650 return hash(self.line)
1650 return hash(self.line)
1651
1651
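# Illustrative sketch, not part of the original source: linestate compares
# and hashes on the line text alone, so the symmetric difference of two
# revisions' match sets (taken in display() below with sets.Set, the
# pre-Python-2.4 spelling of the built-in set used here) is exactly the set
# of lines whose match status changed -- what --all reports with "+"/"-".
def _sketch_changed_matches(old_lines, new_lines):
    old, new = set(old_lines), set(new_lines)
    return sorted(new - old), sorted(old - new)   # ("+" lines, "-" lines)

# _sketch_changed_matches(['x = spam()'], ['x = spam()', 'y = eggs()'])
# returns (['y = eggs()'], [])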
1652 matches = {}
1652 matches = {}
1653 def grepbody(fn, rev, body):
1653 def grepbody(fn, rev, body):
1654 matches[rev].setdefault(fn, {})
1654 matches[rev].setdefault(fn, {})
1655 m = matches[rev][fn]
1655 m = matches[rev][fn]
1656 for lnum, cstart, cend, line in matchlines(body):
1656 for lnum, cstart, cend, line in matchlines(body):
1657 s = linestate(line, lnum, cstart, cend)
1657 s = linestate(line, lnum, cstart, cend)
1658 m[s] = s
1658 m[s] = s
1659
1659
1660 # FIXME: prev isn't used, why ?
1660 # FIXME: prev isn't used, why ?
1661 prev = {}
1661 prev = {}
1662 ucache = {}
1662 ucache = {}
1663 def display(fn, rev, states, prevstates):
1663 def display(fn, rev, states, prevstates):
1664 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1664 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1665 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1665 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1666 counts = {'-': 0, '+': 0}
1666 counts = {'-': 0, '+': 0}
1667 filerevmatches = {}
1667 filerevmatches = {}
1668 for l in diff:
1668 for l in diff:
1669 if incrementing or not opts['all']:
1669 if incrementing or not opts['all']:
1670 change = ((l in prevstates) and '-') or '+'
1670 change = ((l in prevstates) and '-') or '+'
1671 r = rev
1671 r = rev
1672 else:
1672 else:
1673 change = ((l in states) and '-') or '+'
1673 change = ((l in states) and '-') or '+'
1674 r = prev[fn]
1674 r = prev[fn]
1675 cols = [fn, str(r)]
1675 cols = [fn, str(r)]
1676 if opts['line_number']:
1676 if opts['line_number']:
1677 cols.append(str(l.linenum))
1677 cols.append(str(l.linenum))
1678 if opts['all']:
1678 if opts['all']:
1679 cols.append(change)
1679 cols.append(change)
1680 if opts['user']:
1680 if opts['user']:
1681 cols.append(trimuser(ui, getchange(rev)[1], rev,
1681 cols.append(trimuser(ui, getchange(rev)[1], rev,
1682 ucache))
1682 ucache))
1683 if opts['files_with_matches']:
1683 if opts['files_with_matches']:
1684 c = (fn, rev)
1684 c = (fn, rev)
1685 if c in filerevmatches:
1685 if c in filerevmatches:
1686 continue
1686 continue
1687 filerevmatches[c] = 1
1687 filerevmatches[c] = 1
1688 else:
1688 else:
1689 cols.append(l.line)
1689 cols.append(l.line)
1690 ui.write(sep.join(cols), eol)
1690 ui.write(sep.join(cols), eol)
1691 counts[change] += 1
1691 counts[change] += 1
1692 return counts['+'], counts['-']
1692 return counts['+'], counts['-']
1693
1693
1694 fstate = {}
1694 fstate = {}
1695 skip = {}
1695 skip = {}
1696 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1696 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1697 count = 0
1697 count = 0
1698 incrementing = False
1698 incrementing = False
1699 for st, rev, fns in changeiter:
1699 for st, rev, fns in changeiter:
1700 if st == 'window':
1700 if st == 'window':
1701 incrementing = rev
1701 incrementing = rev
1702 matches.clear()
1702 matches.clear()
1703 elif st == 'add':
1703 elif st == 'add':
1704 change = repo.changelog.read(repo.lookup(str(rev)))
1704 change = repo.changelog.read(repo.lookup(str(rev)))
1705 mf = repo.manifest.read(change[0])
1705 mf = repo.manifest.read(change[0])
1706 matches[rev] = {}
1706 matches[rev] = {}
1707 for fn in fns:
1707 for fn in fns:
1708 if fn in skip:
1708 if fn in skip:
1709 continue
1709 continue
1710 fstate.setdefault(fn, {})
1710 fstate.setdefault(fn, {})
1711 try:
1711 try:
1712 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1712 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1713 except KeyError:
1713 except KeyError:
1714 pass
1714 pass
1715 elif st == 'iter':
1715 elif st == 'iter':
1716 states = matches[rev].items()
1716 states = matches[rev].items()
1717 states.sort()
1717 states.sort()
1718 for fn, m in states:
1718 for fn, m in states:
1719 if fn in skip:
1719 if fn in skip:
1720 continue
1720 continue
1721 if incrementing or not opts['all'] or fstate[fn]:
1721 if incrementing or not opts['all'] or fstate[fn]:
1722 pos, neg = display(fn, rev, m, fstate[fn])
1722 pos, neg = display(fn, rev, m, fstate[fn])
1723 count += pos + neg
1723 count += pos + neg
1724 if pos and not opts['all']:
1724 if pos and not opts['all']:
1725 skip[fn] = True
1725 skip[fn] = True
1726 fstate[fn] = m
1726 fstate[fn] = m
1727 prev[fn] = rev
1727 prev[fn] = rev
1728
1728
1729 if not incrementing:
1729 if not incrementing:
1730 fstate = fstate.items()
1730 fstate = fstate.items()
1731 fstate.sort()
1731 fstate.sort()
1732 for fn, state in fstate:
1732 for fn, state in fstate:
1733 if fn in skip:
1733 if fn in skip:
1734 continue
1734 continue
1735 display(fn, rev, {}, state)
1735 display(fn, rev, {}, state)
1736 return (count == 0 and 1) or 0
1736 return (count == 0 and 1) or 0
1737
1737
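# Illustrative sketch, not part of the original source: the changeiter
# returned by walkchangerevs() is consumed as a small event stream, as in
# grep() above and log() below.  The event meanings in the comments are
# inferred from those two callers; the handler names are hypothetical.
def _sketch_consume_changeiter(changeiter, start_window, collect, emit):
    for st, rev, fns in changeiter:
        if st == 'window':
            start_window(rev)    # a new batch of revisions begins
        elif st == 'add':
            collect(rev, fns)    # gather per-revision data for the files fns
        elif st == 'iter':
            emit(rev, fns)       # rev can now be displayed in window order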
1738 def heads(ui, repo, **opts):
1738 def heads(ui, repo, **opts):
1739 """show current repository heads
1739 """show current repository heads
1740
1740
1741 Show all repository head changesets.
1741 Show all repository head changesets.
1742
1742
1743 Repository "heads" are changesets that don't have child
1743 Repository "heads" are changesets that don't have child
1744 changesets. They are where development generally takes place and
1744 changesets. They are where development generally takes place and
1745 are the usual targets for update and merge operations.
1745 are the usual targets for update and merge operations.
1746 """
1746 """
1747 if opts['rev']:
1747 if opts['rev']:
1748 heads = repo.heads(repo.lookup(opts['rev']))
1748 heads = repo.heads(repo.lookup(opts['rev']))
1749 else:
1749 else:
1750 heads = repo.heads()
1750 heads = repo.heads()
1751 br = None
1751 br = None
1752 if opts['branches']:
1752 if opts['branches']:
1753 br = repo.branchlookup(heads)
1753 br = repo.branchlookup(heads)
1754 displayer = show_changeset(ui, repo, opts)
1754 displayer = show_changeset(ui, repo, opts)
1755 for n in heads:
1755 for n in heads:
1756 displayer.show(changenode=n, brinfo=br)
1756 displayer.show(changenode=n, brinfo=br)
1757
1757
1758 def identify(ui, repo):
1758 def identify(ui, repo):
1759 """print information about the working copy
1759 """print information about the working copy
1760
1760
1761 Print a short summary of the current state of the repo.
1761 Print a short summary of the current state of the repo.
1762
1762
1763 This summary identifies the repository state using one or two parent
1763 This summary identifies the repository state using one or two parent
1764 hash identifiers, followed by a "+" if there are uncommitted changes
1764 hash identifiers, followed by a "+" if there are uncommitted changes
1765 in the working directory, followed by a list of tags for this revision.
1765 in the working directory, followed by a list of tags for this revision.
1766 """
1766 """
1767 parents = [p for p in repo.dirstate.parents() if p != nullid]
1767 parents = [p for p in repo.dirstate.parents() if p != nullid]
1768 if not parents:
1768 if not parents:
1769 ui.write(_("unknown\n"))
1769 ui.write(_("unknown\n"))
1770 return
1770 return
1771
1771
1772 hexfunc = ui.verbose and hex or short
1772 hexfunc = ui.verbose and hex or short
1773 modified, added, removed, deleted, unknown = repo.changes()
1773 modified, added, removed, deleted, unknown = repo.changes()
1774 output = ["%s%s" %
1774 output = ["%s%s" %
1775 ('+'.join([hexfunc(parent) for parent in parents]),
1775 ('+'.join([hexfunc(parent) for parent in parents]),
1776 (modified or added or removed or deleted) and "+" or "")]
1776 (modified or added or removed or deleted) and "+" or "")]
1777
1777
1778 if not ui.quiet:
1778 if not ui.quiet:
1779 # multiple tags for a single parent separated by '/'
1779 # multiple tags for a single parent separated by '/'
1780 parenttags = ['/'.join(tags)
1780 parenttags = ['/'.join(tags)
1781 for tags in map(repo.nodetags, parents) if tags]
1781 for tags in map(repo.nodetags, parents) if tags]
1782 # tags for multiple parents separated by ' + '
1782 # tags for multiple parents separated by ' + '
1783 if parenttags:
1783 if parenttags:
1784 output.append(' + '.join(parenttags))
1784 output.append(' + '.join(parenttags))
1785
1785
1786 ui.write("%s\n" % ' '.join(output))
1786 ui.write("%s\n" % ' '.join(output))
1787
1787
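# Illustrative sketch, not part of the original source: how the one-line
# summary printed by identify() above is put together.  The hash, dirty
# flag and tag list are hypothetical sample values.
def _sketch_identify_line(parent_hashes, dirty, tags):
    out = ['+'.join(parent_hashes) + (dirty and '+' or '')]
    if tags:
        out.append(' + '.join(tags))   # one '/'-joined tag group per parent
    return ' '.join(out)

# _sketch_identify_line(['c0ffee123456'], True, ['tip'])
# returns 'c0ffee123456+ tip'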
1788 def import_(ui, repo, patch1, *patches, **opts):
1788 def import_(ui, repo, patch1, *patches, **opts):
1789 """import an ordered set of patches
1789 """import an ordered set of patches
1790
1790
1791 Import a list of patches and commit them individually.
1791 Import a list of patches and commit them individually.
1792
1792
1793 If there are outstanding changes in the working directory, import
1793 If there are outstanding changes in the working directory, import
1794 will abort unless given the -f flag.
1794 will abort unless given the -f flag.
1795
1795
1796 You can import a patch straight from a mail message. Even patches
1796 You can import a patch straight from a mail message. Even patches
1797 as attachments work (body part must be type text/plain or
1797 as attachments work (body part must be type text/plain or
1798 text/x-patch to be used). The From and Subject headers of the email
1798 text/x-patch to be used). The From and Subject headers of the email
1799 message are used as the default committer and commit message. All
1799 message are used as the default committer and commit message. All
1800 text/plain body parts before the first diff are added to the commit
1800 text/plain body parts before the first diff are added to the commit
1801 message.
1801 message.
1802
1802
1803 If the imported patch was generated by hg export, the user and
1803 If the imported patch was generated by hg export, the user and
1804 description from the patch override values from the message headers and
1804 description from the patch override values from the message headers and
1805 body. Values given on the command line with -m and -u override these.
1805 body. Values given on the command line with -m and -u override these.
1806
1806
1807 To read a patch from standard input, use patch name "-".
1807 To read a patch from standard input, use patch name "-".
1808 """
1808 """
1809 patches = (patch1,) + patches
1809 patches = (patch1,) + patches
1810
1810
1811 if not opts['force']:
1811 if not opts['force']:
1812 bail_if_changed(repo)
1812 bail_if_changed(repo)
1813
1813
1814 d = opts["base"]
1814 d = opts["base"]
1815 strip = opts["strip"]
1815 strip = opts["strip"]
1816
1816
1817 mailre = re.compile(r'(?:From |[\w-]+:)')
1817 mailre = re.compile(r'(?:From |[\w-]+:)')
1818
1818
1819 # attempt to detect the start of a patch
1819 # attempt to detect the start of a patch
1820 # (this heuristic is borrowed from quilt)
1820 # (this heuristic is borrowed from quilt)
1821 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1821 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1822 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1822 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1823 '(---|\*\*\*)[ \t])', re.MULTILINE)
1823 '(---|\*\*\*)[ \t])', re.MULTILINE)
1824
1824
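# Illustrative sketch, not part of the original source: exercising the
# quilt-derived heuristic above.  diffre matches the first line that looks
# like the start of a patch, so everything before match.start(0) can be
# treated as commit message text.  The sample payloads are hypothetical.
def _sketch_find_patch_start(diffre, payload):
    m = diffre.search(payload)
    if m:
        return m.start(0)
    return -1

# With the regexp defined above,
#   _sketch_find_patch_start(diffre, "fix a bug\n\ndiff -r 12ab foo.c\n")
# returns 11 (the offset of the "diff " line), and a payload with no
# Index:/diff/RCS/---/*** line returns -1.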
1825 for patch in patches:
1825 for patch in patches:
1826 pf = os.path.join(d, patch)
1826 pf = os.path.join(d, patch)
1827
1827
1828 message = None
1828 message = None
1829 user = None
1829 user = None
1830 date = None
1830 date = None
1831 hgpatch = False
1831 hgpatch = False
1832
1832
1833 p = email.Parser.Parser()
1833 p = email.Parser.Parser()
1834 if pf == '-':
1834 if pf == '-':
1835 msg = p.parse(sys.stdin)
1835 msg = p.parse(sys.stdin)
1836 ui.status(_("applying patch from stdin\n"))
1836 ui.status(_("applying patch from stdin\n"))
1837 else:
1837 else:
1838 msg = p.parse(file(pf))
1838 msg = p.parse(file(pf))
1839 ui.status(_("applying %s\n") % patch)
1839 ui.status(_("applying %s\n") % patch)
1840
1840
1841 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1841 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1842 tmpfp = os.fdopen(fd, 'w')
1842 tmpfp = os.fdopen(fd, 'w')
1843 try:
1843 try:
1844 message = msg['Subject']
1844 message = msg['Subject']
1845 if message:
1845 if message:
1846 message = message.replace('\n\t', ' ')
1846 message = message.replace('\n\t', ' ')
1847 ui.debug('Subject: %s\n' % message)
1847 ui.debug('Subject: %s\n' % message)
1848 user = msg['From']
1848 user = msg['From']
1849 if user:
1849 if user:
1850 ui.debug('From: %s\n' % user)
1850 ui.debug('From: %s\n' % user)
1851 diffs_seen = 0
1851 diffs_seen = 0
1852 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1852 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1853 for part in msg.walk():
1853 for part in msg.walk():
1854 content_type = part.get_content_type()
1854 content_type = part.get_content_type()
1855 ui.debug('Content-Type: %s\n' % content_type)
1855 ui.debug('Content-Type: %s\n' % content_type)
1856 if content_type not in ok_types:
1856 if content_type not in ok_types:
1857 continue
1857 continue
1858 payload = part.get_payload(decode=True)
1858 payload = part.get_payload(decode=True)
1859 m = diffre.search(payload)
1859 m = diffre.search(payload)
1860 if m:
1860 if m:
1861 ui.debug(_('found patch at byte %d\n') % m.start(0))
1861 ui.debug(_('found patch at byte %d\n') % m.start(0))
1862 diffs_seen += 1
1862 diffs_seen += 1
1863 hgpatch = False
1863 hgpatch = False
1864 fp = cStringIO.StringIO()
1864 fp = cStringIO.StringIO()
1865 if message:
1865 if message:
1866 fp.write(message)
1866 fp.write(message)
1867 fp.write('\n')
1867 fp.write('\n')
1868 for line in payload[:m.start(0)].splitlines():
1868 for line in payload[:m.start(0)].splitlines():
1869 if line.startswith('# HG changeset patch'):
1869 if line.startswith('# HG changeset patch'):
1870 ui.debug(_('patch generated by hg export\n'))
1870 ui.debug(_('patch generated by hg export\n'))
1871 hgpatch = True
1871 hgpatch = True
1872 # drop earlier commit message content
1872 # drop earlier commit message content
1873 fp.seek(0)
1873 fp.seek(0)
1874 fp.truncate()
1874 fp.truncate()
1875 elif hgpatch:
1875 elif hgpatch:
1876 if line.startswith('# User '):
1876 if line.startswith('# User '):
1877 user = line[7:]
1877 user = line[7:]
1878 ui.debug('From: %s\n' % user)
1878 ui.debug('From: %s\n' % user)
1879 elif line.startswith("# Date "):
1879 elif line.startswith("# Date "):
1880 date = line[7:]
1880 date = line[7:]
1881 if not line.startswith('# '):
1881 if not line.startswith('# '):
1882 fp.write(line)
1882 fp.write(line)
1883 fp.write('\n')
1883 fp.write('\n')
1884 message = fp.getvalue()
1884 message = fp.getvalue()
1885 if tmpfp:
1885 if tmpfp:
1886 tmpfp.write(payload)
1886 tmpfp.write(payload)
1887 if not payload.endswith('\n'):
1887 if not payload.endswith('\n'):
1888 tmpfp.write('\n')
1888 tmpfp.write('\n')
1889 elif not diffs_seen and message and content_type == 'text/plain':
1889 elif not diffs_seen and message and content_type == 'text/plain':
1890 message += '\n' + payload
1890 message += '\n' + payload
1891
1891
1892 if opts['message']:
1892 if opts['message']:
1893 # pickup the cmdline msg
1893 # pickup the cmdline msg
1894 message = opts['message']
1894 message = opts['message']
1895 elif message:
1895 elif message:
1896 # pickup the patch msg
1896 # pickup the patch msg
1897 message = message.strip()
1897 message = message.strip()
1898 else:
1898 else:
1899 # launch the editor
1899 # launch the editor
1900 message = None
1900 message = None
1901 ui.debug(_('message:\n%s\n') % message)
1901 ui.debug(_('message:\n%s\n') % message)
1902
1902
1903 tmpfp.close()
1903 tmpfp.close()
1904 if not diffs_seen:
1904 if not diffs_seen:
1905 raise util.Abort(_('no diffs found'))
1905 raise util.Abort(_('no diffs found'))
1906
1906
1907 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1907 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1908 if len(files) > 0:
1908 if len(files) > 0:
1909 cfiles = files
1909 cfiles = files
1910 cwd = repo.getcwd()
1910 cwd = repo.getcwd()
1911 if cwd:
1911 if cwd:
1912 cfiles = [util.pathto(cwd, f) for f in files]
1912 cfiles = [util.pathto(cwd, f) for f in files]
1913 addremove_lock(ui, repo, cfiles, {})
1913 addremove_lock(ui, repo, cfiles, {})
1914 repo.commit(files, message, user, date)
1914 repo.commit(files, message, user, date)
1915 finally:
1915 finally:
1916 os.unlink(tmpname)
1916 os.unlink(tmpname)
1917
1917
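# Illustrative sketch, not part of the original source: a standalone version
# of the "# HG changeset patch" header handling performed in the loop above,
# applied to a hypothetical list of lines taken from before the diff.
def _sketch_parse_hgpatch(lines):
    user = date = None
    message = []
    hgpatch = False
    for line in lines:
        if line.startswith('# HG changeset patch'):
            hgpatch = True
            message = []                  # drop earlier message content
        elif hgpatch and line.startswith('# User '):
            user = line[7:]
        elif hgpatch and line.startswith('# Date '):
            date = line[7:]
        if not line.startswith('# '):
            message.append(line)          # '# ...' header lines stay out
    return user, date, '\n'.join(message)

# _sketch_parse_hgpatch(['# HG changeset patch', '# User jane@example.com',
#                        '# Date 0 0', 'fix the frobnicator'])
# returns ('jane@example.com', '0 0', 'fix the frobnicator')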
1918 def incoming(ui, repo, source="default", **opts):
1918 def incoming(ui, repo, source="default", **opts):
1919 """show new changesets found in source
1919 """show new changesets found in source
1920
1920
1921 Show new changesets found in the specified path/URL or the default
1921 Show new changesets found in the specified path/URL or the default
1922 pull location. These are the changesets that would be pulled if a pull
1922 pull location. These are the changesets that would be pulled if a pull
1923 was requested.
1923 was requested.
1924
1924
1925 For remote repositories, using --bundle avoids downloading the changesets
1925 For remote repositories, using --bundle avoids downloading the changesets
1926 twice if the incoming is followed by a pull.
1926 twice if the incoming is followed by a pull.
1927
1927
1928 See pull for valid source format details.
1928 See pull for valid source format details.
1929 """
1929 """
1930 source = ui.expandpath(source)
1930 source = ui.expandpath(source)
1931 setremoteconfig(ui, opts)
1931 setremoteconfig(ui, opts)
1932
1932
1933 other = hg.repository(ui, source)
1933 other = hg.repository(ui, source)
1934 incoming = repo.findincoming(other, force=opts["force"])
1934 incoming = repo.findincoming(other, force=opts["force"])
1935 if not incoming:
1935 if not incoming:
1936 ui.status(_("no changes found\n"))
1936 ui.status(_("no changes found\n"))
1937 return
1937 return
1938
1938
1939 cleanup = None
1939 cleanup = None
1940 try:
1940 try:
1941 fname = opts["bundle"]
1941 fname = opts["bundle"]
1942 if fname or not other.local():
1942 if fname or not other.local():
1943 # create a bundle (uncompressed if other repo is not local)
1943 # create a bundle (uncompressed if other repo is not local)
1944 cg = other.changegroup(incoming, "incoming")
1944 cg = other.changegroup(incoming, "incoming")
1945 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1945 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1946 # keep written bundle?
1946 # keep written bundle?
1947 if opts["bundle"]:
1947 if opts["bundle"]:
1948 cleanup = None
1948 cleanup = None
1949 if not other.local():
1949 if not other.local():
1950 # use the created uncompressed bundlerepo
1950 # use the created uncompressed bundlerepo
1951 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1951 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1952
1952
1953 revs = None
1953 revs = None
1954 if opts['rev']:
1954 if opts['rev']:
1955 revs = [other.lookup(rev) for rev in opts['rev']]
1955 revs = [other.lookup(rev) for rev in opts['rev']]
1956 o = other.changelog.nodesbetween(incoming, revs)[0]
1956 o = other.changelog.nodesbetween(incoming, revs)[0]
1957 if opts['newest_first']:
1957 if opts['newest_first']:
1958 o.reverse()
1958 o.reverse()
1959 displayer = show_changeset(ui, other, opts)
1959 displayer = show_changeset(ui, other, opts)
1960 for n in o:
1960 for n in o:
1961 parents = [p for p in other.changelog.parents(n) if p != nullid]
1961 parents = [p for p in other.changelog.parents(n) if p != nullid]
1962 if opts['no_merges'] and len(parents) == 2:
1962 if opts['no_merges'] and len(parents) == 2:
1963 continue
1963 continue
1964 displayer.show(changenode=n)
1964 displayer.show(changenode=n)
1965 if opts['patch']:
1965 if opts['patch']:
1966 prev = (parents and parents[0]) or nullid
1966 prev = (parents and parents[0]) or nullid
1967 dodiff(ui, ui, other, prev, n)
1967 dodiff(ui, ui, other, prev, n)
1968 ui.write("\n")
1968 ui.write("\n")
1969 finally:
1969 finally:
1970 if hasattr(other, 'close'):
1970 if hasattr(other, 'close'):
1971 other.close()
1971 other.close()
1972 if cleanup:
1972 if cleanup:
1973 os.unlink(cleanup)
1973 os.unlink(cleanup)
1974
1974
1975 def init(ui, dest=".", **opts):
1975 def init(ui, dest=".", **opts):
1976 """create a new repository in the given directory
1976 """create a new repository in the given directory
1977
1977
1978 Initialize a new repository in the given directory. If the given
1978 Initialize a new repository in the given directory. If the given
1979 directory does not exist, it is created.
1979 directory does not exist, it is created.
1980
1980
1981 If no directory is given, the current directory is used.
1981 If no directory is given, the current directory is used.
1982
1982
1983 It is possible to specify an ssh:// URL as the destination.
1983 It is possible to specify an ssh:// URL as the destination.
1984 Look at the help text for the pull command for important details
1984 Look at the help text for the pull command for important details
1985 about ssh:// URLs.
1985 about ssh:// URLs.
1986 """
1986 """
1987 setremoteconfig(ui, opts)
1987 setremoteconfig(ui, opts)
1988 hg.repository(ui, dest, create=1)
1988 hg.repository(ui, dest, create=1)
1989
1989
1990 def locate(ui, repo, *pats, **opts):
1990 def locate(ui, repo, *pats, **opts):
1991 """locate files matching specific patterns
1991 """locate files matching specific patterns
1992
1992
1993 Print all files under Mercurial control whose names match the
1993 Print all files under Mercurial control whose names match the
1994 given patterns.
1994 given patterns.
1995
1995
1996 This command searches the current directory and its
1996 This command searches the current directory and its
1997 subdirectories. To search an entire repository, move to the root
1997 subdirectories. To search an entire repository, move to the root
1998 of the repository.
1998 of the repository.
1999
1999
2000 If no patterns are given to match, this command prints all file
2000 If no patterns are given to match, this command prints all file
2001 names.
2001 names.
2002
2002
2003 If you want to feed the output of this command into the "xargs"
2003 If you want to feed the output of this command into the "xargs"
2004 command, use the "-0" option to both this command and "xargs".
2004 command, use the "-0" option to both this command and "xargs".
2005 This will avoid the problem of "xargs" treating single filenames
2005 This will avoid the problem of "xargs" treating single filenames
2006 that contain white space as multiple filenames.
2006 that contain white space as multiple filenames.
2007 """
2007 """
2008 end = opts['print0'] and '\0' or '\n'
2008 end = opts['print0'] and '\0' or '\n'
2009 rev = opts['rev']
2009 rev = opts['rev']
2010 if rev:
2010 if rev:
2011 node = repo.lookup(rev)
2011 node = repo.lookup(rev)
2012 else:
2012 else:
2013 node = None
2013 node = None
2014
2014
2015 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2015 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2016 head='(?:.*/|)'):
2016 head='(?:.*/|)'):
2017 if not node and repo.dirstate.state(abs) == '?':
2017 if not node and repo.dirstate.state(abs) == '?':
2018 continue
2018 continue
2019 if opts['fullpath']:
2019 if opts['fullpath']:
2020 ui.write(os.path.join(repo.root, abs), end)
2020 ui.write(os.path.join(repo.root, abs), end)
2021 else:
2021 else:
2022 ui.write(((pats and rel) or abs), end)
2022 ui.write(((pats and rel) or abs), end)
2023
2023
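# Illustrative sketch, not part of the original source: consuming the
# NUL-separated output that the end='\0' case above produces for -0/--print0,
# avoiding the whitespace-splitting problem described in the docstring.
def _sketch_split_print0(data):
    # a trailing NUL would otherwise produce a spurious empty name
    return [name for name in data.split('\0') if name]

# _sketch_split_print0('a file.txt\0dir/other name.c\0')
# returns ['a file.txt', 'dir/other name.c']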
2024 def log(ui, repo, *pats, **opts):
2024 def log(ui, repo, *pats, **opts):
2025 """show revision history of entire repository or files
2025 """show revision history of entire repository or files
2026
2026
2027 Print the revision history of the specified files or the entire
2027 Print the revision history of the specified files or the entire
2028 project.
2028 project.
2029
2029
2030 File history is shown without following rename or copy history of
2030 File history is shown without following rename or copy history of
2031 files. Use -f/--follow with a file name to follow history across
2031 files. Use -f/--follow with a file name to follow history across
2032 renames and copies. --follow without a file name will only show
2032 renames and copies. --follow without a file name will only show
2033 ancestors or descendants of the starting revision. --follow-first
2033 ancestors or descendants of the starting revision. --follow-first
2034 only follows the first parent of merge revisions.
2034 only follows the first parent of merge revisions.
2035
2035
2036 If no revision range is specified, the default is tip:0 unless
2036 If no revision range is specified, the default is tip:0 unless
2037 --follow is set, in which case the working directory parent is
2037 --follow is set, in which case the working directory parent is
2038 used as the starting revision.
2038 used as the starting revision.
2039
2039
2040 By default this command outputs: changeset id and hash, tags,
2040 By default this command outputs: changeset id and hash, tags,
2041 non-trivial parents, user, date and time, and a summary for each
2041 non-trivial parents, user, date and time, and a summary for each
2042 commit. When the -v/--verbose switch is used, the list of changed
2042 commit. When the -v/--verbose switch is used, the list of changed
2043 files and the full commit message are shown.
2043 files and the full commit message are shown.
2044 """
2044 """
2045 class dui(object):
2045 class dui(object):
2046 # Implement and delegate some ui protocol. Save hunks of
2046 # Implement and delegate some ui protocol. Save hunks of
2047 # output for later display in the desired order.
2047 # output for later display in the desired order.
2048 def __init__(self, ui):
2048 def __init__(self, ui):
2049 self.ui = ui
2049 self.ui = ui
2050 self.hunk = {}
2050 self.hunk = {}
2051 self.header = {}
2051 self.header = {}
2052 def bump(self, rev):
2052 def bump(self, rev):
2053 self.rev = rev
2053 self.rev = rev
2054 self.hunk[rev] = []
2054 self.hunk[rev] = []
2055 self.header[rev] = []
2055 self.header[rev] = []
2056 def note(self, *args):
2056 def note(self, *args):
2057 if self.verbose:
2057 if self.verbose:
2058 self.write(*args)
2058 self.write(*args)
2059 def status(self, *args):
2059 def status(self, *args):
2060 if not self.quiet:
2060 if not self.quiet:
2061 self.write(*args)
2061 self.write(*args)
2062 def write(self, *args):
2062 def write(self, *args):
2063 self.hunk[self.rev].append(args)
2063 self.hunk[self.rev].append(args)
2064 def write_header(self, *args):
2064 def write_header(self, *args):
2065 self.header[self.rev].append(args)
2065 self.header[self.rev].append(args)
2066 def debug(self, *args):
2066 def debug(self, *args):
2067 if self.debugflag:
2067 if self.debugflag:
2068 self.write(*args)
2068 self.write(*args)
2069 def __getattr__(self, key):
2069 def __getattr__(self, key):
2070 return getattr(self.ui, key)
2070 return getattr(self.ui, key)
2071
2071
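# Illustrative sketch, not part of the original source: the buffering
# pattern used by dui above in miniature -- output is captured per revision
# and replayed later in whatever order the caller wants.  Names are
# hypothetical.
class _SketchBufferedOutput(object):
    def __init__(self):
        self.hunks = {}
        self.key = None
    def bump(self, key):
        self.key = key
        self.hunks[key] = []
    def write(self, text):
        self.hunks[self.key].append(text)
    def replay(self, keys, real_write):
        for key in keys:
            for text in self.hunks.get(key, []):
                real_write(text)

# buf = _SketchBufferedOutput(); buf.bump(3); buf.write('rev 3\n')
# buf.bump(1); buf.write('rev 1\n')
# buf.replay([1, 3], sys.stdout.write)   # prints rev 1 before rev 3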
2072 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2072 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2073
2073
2074 if opts['limit']:
2074 if opts['limit']:
2075 try:
2075 try:
2076 limit = int(opts['limit'])
2076 limit = int(opts['limit'])
2077 except ValueError:
2077 except ValueError:
2078 raise util.Abort(_('limit must be a positive integer'))
2078 raise util.Abort(_('limit must be a positive integer'))
2079 if limit <= 0: raise util.Abort(_('limit must be positive'))
2079 if limit <= 0: raise util.Abort(_('limit must be positive'))
2080 else:
2080 else:
2081 limit = sys.maxint
2081 limit = sys.maxint
2082 count = 0
2082 count = 0
2083
2083
2084 displayer = show_changeset(ui, repo, opts)
2084 displayer = show_changeset(ui, repo, opts)
2085 for st, rev, fns in changeiter:
2085 for st, rev, fns in changeiter:
2086 if st == 'window':
2086 if st == 'window':
2087 du = dui(ui)
2087 du = dui(ui)
2088 displayer.ui = du
2088 displayer.ui = du
2089 elif st == 'add':
2089 elif st == 'add':
2090 du.bump(rev)
2090 du.bump(rev)
2091 changenode = repo.changelog.node(rev)
2091 changenode = repo.changelog.node(rev)
2092 parents = [p for p in repo.changelog.parents(changenode)
2092 parents = [p for p in repo.changelog.parents(changenode)
2093 if p != nullid]
2093 if p != nullid]
2094 if opts['no_merges'] and len(parents) == 2:
2094 if opts['no_merges'] and len(parents) == 2:
2095 continue
2095 continue
2096 if opts['only_merges'] and len(parents) != 2:
2096 if opts['only_merges'] and len(parents) != 2:
2097 continue
2097 continue
2098
2098
2099 if opts['keyword']:
2099 if opts['keyword']:
2100 changes = getchange(rev)
2100 changes = getchange(rev)
2101 miss = 0
2101 miss = 0
2102 for k in [kw.lower() for kw in opts['keyword']]:
2102 for k in [kw.lower() for kw in opts['keyword']]:
2103 if not (k in changes[1].lower() or
2103 if not (k in changes[1].lower() or
2104 k in changes[4].lower() or
2104 k in changes[4].lower() or
2105 k in " ".join(changes[3][:20]).lower()):
2105 k in " ".join(changes[3][:20]).lower()):
2106 miss = 1
2106 miss = 1
2107 break
2107 break
2108 if miss:
2108 if miss:
2109 continue
2109 continue
2110
2110
2111 br = None
2111 br = None
2112 if opts['branches']:
2112 if opts['branches']:
2113 br = repo.branchlookup([repo.changelog.node(rev)])
2113 br = repo.branchlookup([repo.changelog.node(rev)])
2114
2114
2115 displayer.show(rev, brinfo=br)
2115 displayer.show(rev, brinfo=br)
2116 if opts['patch']:
2116 if opts['patch']:
2117 prev = (parents and parents[0]) or nullid
2117 prev = (parents and parents[0]) or nullid
2118 dodiff(du, du, repo, prev, changenode, match=matchfn)
2118 dodiff(du, du, repo, prev, changenode, match=matchfn)
2119 du.write("\n\n")
2119 du.write("\n\n")
2120 elif st == 'iter':
2120 elif st == 'iter':
2121 if count == limit: break
2121 if count == limit: break
2122 if du.header[rev]:
2122 if du.header[rev]:
2123 for args in du.header[rev]:
2123 for args in du.header[rev]:
2124 ui.write_header(*args)
2124 ui.write_header(*args)
2125 if du.hunk[rev]:
2125 if du.hunk[rev]:
2126 count += 1
2126 count += 1
2127 for args in du.hunk[rev]:
2127 for args in du.hunk[rev]:
2128 ui.write(*args)
2128 ui.write(*args)
2129
2129
2130 def manifest(ui, repo, rev=None):
2130 def manifest(ui, repo, rev=None):
2131 """output the latest or given revision of the project manifest
2131 """output the latest or given revision of the project manifest
2132
2132
2133 Print a list of version controlled files for the given revision.
2133 Print a list of version controlled files for the given revision.
2134
2134
2135 The manifest is the list of files being version controlled. If no revision
2135 The manifest is the list of files being version controlled. If no revision
2136 is given then the tip is used.
2136 is given then the tip is used.
2137 """
2137 """
2138 if rev:
2138 if rev:
2139 try:
2139 try:
2140 # assume all revision numbers are for changesets
2140 # assume all revision numbers are for changesets
2141 n = repo.lookup(rev)
2141 n = repo.lookup(rev)
2142 change = repo.changelog.read(n)
2142 change = repo.changelog.read(n)
2143 n = change[0]
2143 n = change[0]
2144 except hg.RepoError:
2144 except hg.RepoError:
2145 n = repo.manifest.lookup(rev)
2145 n = repo.manifest.lookup(rev)
2146 else:
2146 else:
2147 n = repo.manifest.tip()
2147 n = repo.manifest.tip()
2148 m = repo.manifest.read(n)
2148 m = repo.manifest.read(n)
2149 mf = repo.manifest.readflags(n)
2149 mf = repo.manifest.readflags(n)
2150 files = m.keys()
2150 files = m.keys()
2151 files.sort()
2151 files.sort()
2152
2152
2153 for f in files:
2153 for f in files:
2154 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2154 ui.write("%40s %3s %s\n" % (hex(m[f]),
2155 mf.execf(f) and "755" or "644", f))
2155
2156
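# Illustrative note, not part of the original source: the hunk above is the
# manifest-flags change carried by this merge -- the executable bit moves
# from a plain mapping lookup (mf[f], the old line) to a method call
# (mf.execf(f), the new lines).  A minimal stand-in with both interfaces,
# for experimentation only; it is not the real manifest flags object.
class _SketchFlags(object):
    def __init__(self, flags):
        self.flags = flags            # e.g. {'bin/hg': True, 'README': False}
    def __getitem__(self, f):         # old-style access
        return self.flags.get(f, False)
    def execf(self, f):               # new-style access
        return self.flags.get(f, False)

# With either access style the ui.write() above prints "755" for
# executable files and "644" otherwise.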
2156 def merge(ui, repo, node=None, force=None, branch=None):
2157 def merge(ui, repo, node=None, force=None, branch=None):
2157 """Merge working directory with another revision
2158 """Merge working directory with another revision
2158
2159
2159 Merge the contents of the current working directory and the
2160 Merge the contents of the current working directory and the
2160 requested revision. Files that changed between either parent are
2161 requested revision. Files that changed between either parent are
2161 marked as changed for the next commit and a commit must be
2162 marked as changed for the next commit and a commit must be
2162 performed before any further updates are allowed.
2163 performed before any further updates are allowed.
2163 """
2164 """
2164
2165
2165 node = _lookup(repo, node, branch)
2166 node = _lookup(repo, node, branch)
2166 return hg.merge(repo, node, force=force)
2167 return hg.merge(repo, node, force=force)
2167
2168
2168 def outgoing(ui, repo, dest=None, **opts):
2169 def outgoing(ui, repo, dest=None, **opts):
2169 """show changesets not found in destination
2170 """show changesets not found in destination
2170
2171
2171 Show changesets not found in the specified destination repository or
2172 Show changesets not found in the specified destination repository or
2172 the default push location. These are the changesets that would be pushed
2173 the default push location. These are the changesets that would be pushed
2173 if a push was requested.
2174 if a push was requested.
2174
2175
2175 See pull for valid destination format details.
2176 See pull for valid destination format details.
2176 """
2177 """
2177 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2178 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2178 setremoteconfig(ui, opts)
2179 setremoteconfig(ui, opts)
2179 revs = None
2180 revs = None
2180 if opts['rev']:
2181 if opts['rev']:
2181 revs = [repo.lookup(rev) for rev in opts['rev']]
2182 revs = [repo.lookup(rev) for rev in opts['rev']]
2182
2183
2183 other = hg.repository(ui, dest)
2184 other = hg.repository(ui, dest)
2184 o = repo.findoutgoing(other, force=opts['force'])
2185 o = repo.findoutgoing(other, force=opts['force'])
2185 if not o:
2186 if not o:
2186 ui.status(_("no changes found\n"))
2187 ui.status(_("no changes found\n"))
2187 return
2188 return
2188 o = repo.changelog.nodesbetween(o, revs)[0]
2189 o = repo.changelog.nodesbetween(o, revs)[0]
2189 if opts['newest_first']:
2190 if opts['newest_first']:
2190 o.reverse()
2191 o.reverse()
2191 displayer = show_changeset(ui, repo, opts)
2192 displayer = show_changeset(ui, repo, opts)
2192 for n in o:
2193 for n in o:
2193 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2194 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2194 if opts['no_merges'] and len(parents) == 2:
2195 if opts['no_merges'] and len(parents) == 2:
2195 continue
2196 continue
2196 displayer.show(changenode=n)
2197 displayer.show(changenode=n)
2197 if opts['patch']:
2198 if opts['patch']:
2198 prev = (parents and parents[0]) or nullid
2199 prev = (parents and parents[0]) or nullid
2199 dodiff(ui, ui, repo, prev, n)
2200 dodiff(ui, ui, repo, prev, n)
2200 ui.write("\n")
2201 ui.write("\n")
2201
2202
2202 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2203 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2203 """show the parents of the working dir or revision
2204 """show the parents of the working dir or revision
2204
2205
2205 Print the working directory's parent revisions.
2206 Print the working directory's parent revisions.
2206 """
2207 """
2207 # legacy
2208 # legacy
2208 if file_ and not rev:
2209 if file_ and not rev:
2209 try:
2210 try:
2210 rev = repo.lookup(file_)
2211 rev = repo.lookup(file_)
2211 file_ = None
2212 file_ = None
2212 except hg.RepoError:
2213 except hg.RepoError:
2213 pass
2214 pass
2214 else:
2215 else:
2215 ui.warn(_("'hg parent REV' is deprecated, "
2216 ui.warn(_("'hg parent REV' is deprecated, "
2216 "please use 'hg parents -r REV instead\n"))
2217 "please use 'hg parents -r REV instead\n"))
2217
2218
2218 if rev:
2219 if rev:
2219 if file_:
2220 if file_:
2220 ctx = repo.filectx(file_, changeid=rev)
2221 ctx = repo.filectx(file_, changeid=rev)
2221 else:
2222 else:
2222 ctx = repo.changectx(rev)
2223 ctx = repo.changectx(rev)
2223 p = [cp.node() for cp in ctx.parents()]
2224 p = [cp.node() for cp in ctx.parents()]
2224 else:
2225 else:
2225 p = repo.dirstate.parents()
2226 p = repo.dirstate.parents()
2226
2227
2227 br = None
2228 br = None
2228 if branches is not None:
2229 if branches is not None:
2229 br = repo.branchlookup(p)
2230 br = repo.branchlookup(p)
2230 displayer = show_changeset(ui, repo, opts)
2231 displayer = show_changeset(ui, repo, opts)
2231 for n in p:
2232 for n in p:
2232 if n != nullid:
2233 if n != nullid:
2233 displayer.show(changenode=n, brinfo=br)
2234 displayer.show(changenode=n, brinfo=br)
2234
2235
2235 def paths(ui, repo, search=None):
2236 def paths(ui, repo, search=None):
2236 """show definition of symbolic path names
2237 """show definition of symbolic path names
2237
2238
2238 Show definition of symbolic path name NAME. If no name is given, show
2239 Show definition of symbolic path name NAME. If no name is given, show
2239 definition of available names.
2240 definition of available names.
2240
2241
2241 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2242 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2242 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2243 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2243 """
2244 """
2244 if search:
2245 if search:
2245 for name, path in ui.configitems("paths"):
2246 for name, path in ui.configitems("paths"):
2246 if name == search:
2247 if name == search:
2247 ui.write("%s\n" % path)
2248 ui.write("%s\n" % path)
2248 return
2249 return
2249 ui.warn(_("not found!\n"))
2250 ui.warn(_("not found!\n"))
2250 return 1
2251 return 1
2251 else:
2252 else:
2252 for name, path in ui.configitems("paths"):
2253 for name, path in ui.configitems("paths"):
2253 ui.write("%s = %s\n" % (name, path))
2254 ui.write("%s = %s\n" % (name, path))
2254
2255
2255 def postincoming(ui, repo, modheads, optupdate):
2256 def postincoming(ui, repo, modheads, optupdate):
2256 if modheads == 0:
2257 if modheads == 0:
2257 return
2258 return
2258 if optupdate:
2259 if optupdate:
2259 if modheads == 1:
2260 if modheads == 1:
2260 return hg.update(repo, repo.changelog.tip()) # update
2261 return hg.update(repo, repo.changelog.tip()) # update
2261 else:
2262 else:
2262 ui.status(_("not updating, since new heads added\n"))
2263 ui.status(_("not updating, since new heads added\n"))
2263 if modheads > 1:
2264 if modheads > 1:
2264 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2265 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2265 else:
2266 else:
2266 ui.status(_("(run 'hg update' to get a working copy)\n"))
2267 ui.status(_("(run 'hg update' to get a working copy)\n"))
2267
2268
2268 def pull(ui, repo, source="default", **opts):
2269 def pull(ui, repo, source="default", **opts):
2269 """pull changes from the specified source
2270 """pull changes from the specified source
2270
2271
2271 Pull changes from a remote repository to a local one.
2272 Pull changes from a remote repository to a local one.
2272
2273
2273 This finds all changes from the repository at the specified path
2274 This finds all changes from the repository at the specified path
2274 or URL and adds them to the local repository. By default, this
2275 or URL and adds them to the local repository. By default, this
2275 does not update the copy of the project in the working directory.
2276 does not update the copy of the project in the working directory.
2276
2277
2277 Valid URLs are of the form:
2278 Valid URLs are of the form:
2278
2279
2279 local/filesystem/path
2280 local/filesystem/path
2280 http://[user@]host[:port]/[path]
2281 http://[user@]host[:port]/[path]
2281 https://[user@]host[:port]/[path]
2282 https://[user@]host[:port]/[path]
2282 ssh://[user@]host[:port]/[path]
2283 ssh://[user@]host[:port]/[path]
2283
2284
2284 Some notes about using SSH with Mercurial:
2285 Some notes about using SSH with Mercurial:
2285 - SSH requires an accessible shell account on the destination machine
2286 - SSH requires an accessible shell account on the destination machine
2286 and a copy of hg in the remote path or specified with remotecmd.
2287 and a copy of hg in the remote path or specified with remotecmd.
2287 - path is relative to the remote user's home directory by default.
2288 - path is relative to the remote user's home directory by default.
2288 Use an extra slash at the start of a path to specify an absolute path:
2289 Use an extra slash at the start of a path to specify an absolute path:
2289 ssh://example.com//tmp/repository
2290 ssh://example.com//tmp/repository
2290 - Mercurial doesn't use its own compression via SSH; the right thing
2291 - Mercurial doesn't use its own compression via SSH; the right thing
2291 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2292 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2292 Host *.mylocalnetwork.example.com
2293 Host *.mylocalnetwork.example.com
2293 Compression off
2294 Compression off
2294 Host *
2295 Host *
2295 Compression on
2296 Compression on
2296 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2297 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2297 with the --ssh command line option.
2298 with the --ssh command line option.
2298 """
2299 """
2299 source = ui.expandpath(source)
2300 source = ui.expandpath(source)
2300 setremoteconfig(ui, opts)
2301 setremoteconfig(ui, opts)
2301
2302
2302 other = hg.repository(ui, source)
2303 other = hg.repository(ui, source)
2303 ui.status(_('pulling from %s\n') % (source))
2304 ui.status(_('pulling from %s\n') % (source))
2304 revs = None
2305 revs = None
2305 if opts['rev'] and not other.local():
2306 if opts['rev'] and not other.local():
2306 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2307 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2307 elif opts['rev']:
2308 elif opts['rev']:
2308 revs = [other.lookup(rev) for rev in opts['rev']]
2309 revs = [other.lookup(rev) for rev in opts['rev']]
2309 modheads = repo.pull(other, heads=revs, force=opts['force'])
2310 modheads = repo.pull(other, heads=revs, force=opts['force'])
2310 return postincoming(ui, repo, modheads, opts['update'])
2311 return postincoming(ui, repo, modheads, opts['update'])
2311
2312
2312 def push(ui, repo, dest=None, **opts):
2313 def push(ui, repo, dest=None, **opts):
2313 """push changes to the specified destination
2314 """push changes to the specified destination
2314
2315
2315 Push changes from the local repository to the given destination.
2316 Push changes from the local repository to the given destination.
2316
2317
2317 This is the symmetrical operation for pull. It helps to move
2318 This is the symmetrical operation for pull. It helps to move
2318 changes from the current repository to a different one. If the
2319 changes from the current repository to a different one. If the
2319 destination is local this is identical to a pull in that directory
2320 destination is local this is identical to a pull in that directory
2320 from the current one.
2321 from the current one.
2321
2322
2322 By default, push will refuse to run if it detects the result would
2323 By default, push will refuse to run if it detects the result would
2323 increase the number of remote heads. This generally indicates that
2324 increase the number of remote heads. This generally indicates that
2324 the client has forgotten to sync and merge before pushing.
2325 the client has forgotten to sync and merge before pushing.
2325
2326
2326 Valid URLs are of the form:
2327 Valid URLs are of the form:
2327
2328
2328 local/filesystem/path
2329 local/filesystem/path
2329 ssh://[user@]host[:port]/[path]
2330 ssh://[user@]host[:port]/[path]
2330
2331
2331 Look at the help text for the pull command for important details
2332 Look at the help text for the pull command for important details
2332 about ssh:// URLs.
2333 about ssh:// URLs.
2333
2334
2334 Pushing to http:// and https:// URLs is possible, too, if this
2335 Pushing to http:// and https:// URLs is possible, too, if this
2335 feature is enabled on the remote Mercurial server.
2336 feature is enabled on the remote Mercurial server.
2336 """
2337 """
2337 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2338 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2338 setremoteconfig(ui, opts)
2339 setremoteconfig(ui, opts)
2339
2340
2340 other = hg.repository(ui, dest)
2341 other = hg.repository(ui, dest)
2341 ui.status(_('pushing to %s\n') % (dest))
2342 ui.status(_('pushing to %s\n') % (dest))
2342 revs = None
2343 revs = None
2343 if opts['rev']:
2344 if opts['rev']:
2344 revs = [repo.lookup(rev) for rev in opts['rev']]
2345 revs = [repo.lookup(rev) for rev in opts['rev']]
2345 r = repo.push(other, opts['force'], revs=revs)
2346 r = repo.push(other, opts['force'], revs=revs)
2346 return r == 0
2347 return r == 0
2347
2348
2348 def rawcommit(ui, repo, *flist, **rc):
2349 def rawcommit(ui, repo, *flist, **rc):
2349 """raw commit interface (DEPRECATED)
2350 """raw commit interface (DEPRECATED)
2350
2351
2351 (DEPRECATED)
2352 (DEPRECATED)
2352 Low-level commit, for use in helper scripts.
2353 Low-level commit, for use in helper scripts.
2353
2354
2354 This command is not intended to be used by normal users, as it is
2355 This command is not intended to be used by normal users, as it is
2355 primarily useful for importing from other SCMs.
2356 primarily useful for importing from other SCMs.
2356
2357
2357 This command is now deprecated and will be removed in a future
2358 This command is now deprecated and will be removed in a future
2358 release. Please use debugsetparents and commit instead.
2359 release. Please use debugsetparents and commit instead.
2359 """
2360 """
2360
2361
2361 ui.warn(_("(the rawcommit command is deprecated)\n"))
2362 ui.warn(_("(the rawcommit command is deprecated)\n"))
2362
2363
2363 message = rc['message']
2364 message = rc['message']
2364 if not message and rc['logfile']:
2365 if not message and rc['logfile']:
2365 try:
2366 try:
2366 message = open(rc['logfile']).read()
2367 message = open(rc['logfile']).read()
2367 except IOError:
2368 except IOError:
2368 pass
2369 pass
2369 if not message and not rc['logfile']:
2370 if not message and not rc['logfile']:
2370 raise util.Abort(_("missing commit message"))
2371 raise util.Abort(_("missing commit message"))
2371
2372
2372 files = relpath(repo, list(flist))
2373 files = relpath(repo, list(flist))
2373 if rc['files']:
2374 if rc['files']:
2374 files += open(rc['files']).read().splitlines()
2375 files += open(rc['files']).read().splitlines()
2375
2376
2376 rc['parent'] = map(repo.lookup, rc['parent'])
2377 rc['parent'] = map(repo.lookup, rc['parent'])
2377
2378
2378 try:
2379 try:
2379 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2380 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2380 except ValueError, inst:
2381 except ValueError, inst:
2381 raise util.Abort(str(inst))
2382 raise util.Abort(str(inst))
2382
2383
2383 def recover(ui, repo):
2384 def recover(ui, repo):
2384 """roll back an interrupted transaction
2385 """roll back an interrupted transaction
2385
2386
2386 Recover from an interrupted commit or pull.
2387 Recover from an interrupted commit or pull.
2387
2388
2388 This command tries to fix the repository status after an interrupted
2389 This command tries to fix the repository status after an interrupted
2389 operation. It should only be necessary when Mercurial suggests it.
2390 operation. It should only be necessary when Mercurial suggests it.
2390 """
2391 """
2391 if repo.recover():
2392 if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])
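    # Note on the checks above: a file is refused (with a warning when it was
    # named explicitly) if it is still present but --after was given, modified
    # without --force, marked for add without --force, or simply not tracked;
    # files already marked as removed are skipped silently. As an illustrative
    # example, 'hg remove -f newfile' on a file that is only marked for add
    # takes the forget path rather than the remove path.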

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record renamed files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs
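# rename is a thin wrapper: docopy() records the copies, and the copied
# sources are then removed under the same working-directory lock taken
# above; with --dry-run that removal is skipped.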

def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state. If the working directory has two parents, you must
    explicitly specify the revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to
    their contents as of a specific revision. This can be helpful to
    "roll back" some or all of a change that should not have been
    committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    parent, p2 = repo.dirstate.parents()
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    elif p2 != nullid:
        raise util.Abort(_('working dir has two parents; '
                           'you must specify the revision to revert to'))
    else:
        node = parent
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.changes(match=names.has_key, wlock=wlock)
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )
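    # Reading the dispatch table: each row names a file state and gives the
    # action and backup behaviour depending on whether the file exists in the
    # target manifest. For instance, the first row says a locally modified
    # file is reverted (with a .orig backup) when the target still has it,
    # and removed (also backed up) when it does not.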

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    shutil.copyfile(rel, bakname)
                    shutil.copymode(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r
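    # With --dry-run the loop above still classifies files and prints the
    # corresponding messages, but no .orig backups are written and the final
    # dirstate/revert block is skipped, so the function falls through and
    # returns None.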

def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_('no repo found'))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_('no repo found'))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)
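        # Parent side of the daemon handshake: the re-spawned child inherits
        # --daemon-pipefds and writes a single byte to wfd once its HTTP
        # server has been created, so the os.read() above blocks until the
        # server is actually listening before the parent exits.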

    try:
        httpd = hgweb.server.create_server(ui, repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)
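        # Child side of the handshake: by this point create_server() has
        # already succeeded, so the child acknowledges with a single byte,
        # flushes its buffers and re-points stdin/stdout/stderr at the null
        # device before entering serve_forever() below.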

    httpd.serve_forever()

def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored are
    not listed unless -c (clean), -i (ignored) or -A is given.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previously added file was copied from here
    """

    all = opts['all']

    files, matchfn, anypats = matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.status(files=files, match=matchfn,
                             list_ignored=all or opts['ignored'],
                             list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)
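        # e.g. for the 'modified' entry with the default terminator the
        # format is "M %s\n", and with --no-status just "%s\n"; --print0
        # swaps the newline for a NUL byte.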

        for f in changes:
            ui.write(format % f)
            if ((all or opts.get('copies')) and not opts.get('no_status')
                and opt == 'added' and repo.dirstate.copies.has_key(f)):
                ui.write('  %s%s' % (repo.dirstate.copies[f], end))

def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if rev_:
        r = hex(repo.lookup(rev_))
    else:
        p1, p2 = repo.dirstate.parents()
        if p1 == nullid:
            raise util.Abort(_('no revision to tag'))
        if p2 != nullid:
            raise util.Abort(_('outstanding uncommitted merges'))
        r = hex(p1)

    repo.tag(name, r, opts['local'], opts['message'], opts['user'],
             opts['date'])

def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    for t, n in l:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            r = "    ?:?"
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            ui.write("%-30s %s\n" % (t, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    n = repo.changelog.tip()
    br = None
    if opts['branches']:
        br = repo.branchlookup([n])
    show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
    if opts['patch']:
        dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    header = f.read(6)
    if not header.startswith("HG"):
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
    elif not header.startswith("HG10"):
        raise util.Abort(_("%s: unknown bundle version") % fname)
    elif header == "HG10BZ":
        def generator(f):
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")
            for chunk in f:
                yield zd.decompress(chunk)
    elif header == "HG10UN":
        def generator(f):
            for chunk in f:
                yield chunk
    else:
        raise util.Abort(_("%s: unknown bundle compression type")
                         % fname)
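    # The six-byte header read above selects the decoder: "HG10BZ" streams
    # the remaining chunks through bz2, priming the decompressor with the
    # "BZ" magic that is not repeated in the payload, while "HG10UN" passes
    # chunks through untouched; any other "HG10" variant ends up in the
    # unknown-compression error above.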
    gen = generator(util.filechunkiter(f, 4096))
    modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
                                   'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])

def undo(ui, repo):
    """undo the last commit or pull (DEPRECATED)

    (DEPRECATED)
    This command is now deprecated and will be removed in a future
    release. Please use the rollback command instead. For usage
    instructions, see the rollback command.
    """
    ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
    repo.rollback()

def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    node = _lookup(repo, node, branch)
    if merge:
        ui.warn(_('(the -m/--merge option is deprecated; '
                  'use the merge command instead)\n'))
        return hg.merge(repo, node, force=force)
    elif clean:
        return hg.clean(repo, node)
    else:
        return hg.update(repo, node)

def _lookup(repo, node, branch=None):
    if branch:
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            repo.ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
            raise util.Abort("")
        if len(found) == 1:
            node = found[0]
            repo.ui.warn(_("Using head %s for branch %s\n")
                         % (short(node), branch))
        else:
            raise util.Abort(_("branch %s not found\n") % (branch))
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return node
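    # Branch resolution above: -b/--branch maps the branch name to its heads.
    # More than one head is ambiguous and aborts after listing them, exactly
    # one head is used (with a note about which one), and no match aborts.
    # Without a branch, the node argument (or the changelog tip) is looked up
    # directly.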

def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

# Command options and aliases are listed here, alphabetically

table = {
    "^add":
        (add,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg add [OPTION]... [FILE]...')),
    "debugaddremove|addremove":
        (addremove,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg backout [OPTION]... REV')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated'))],
         _('hg bundle FILE DEST')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read the commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk":
        (debugwalk,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [-a] [-o OUTFILESPEC] REV...')),
    "debugforget|forget":
        (forget,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg forget [OPTION]... FILE...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('b', 'branches', None, _('show branches')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-b] [-r <rev>]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))],
         _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg incoming [-p] [-n] [-M] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         [('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('b', 'branches', None, _('show branches')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('k', 'keyword', [], _('search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg log [OPTION]... [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "merge":
        (merge,
         [('b', 'branch', '', _('merge with head of a specific branch')),
          ('f', 'force', None, _('force a merge with outstanding changes'))],
         _('hg merge [-b TAG] [-f] [REV]')),
    "outgoing|out": (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('b', 'branches', None, _('show branches')),
          ('r', 'rev', '', _('show parents from the specified rev')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-b] [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update the working directory to tip after pull')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('d', 'date', '', _('date code')),
          ('u', 'user', '', _('user')),
          ('F', 'files', '', _('file list')),
          ('m', 'message', '', _('commit message')),
          ('l', 'logfile', '', _('commit message file'))],
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
3229 [('A', 'after', None, _('record remove that has already occurred')),
3230 [('A', 'after', None, _('record remove that has already occurred')),
3230 ('f', 'force', None, _('remove file even if modified')),
3231 ('f', 'force', None, _('remove file even if modified')),
3231 ('I', 'include', [], _('include names matching the given patterns')),
3232 ('I', 'include', [], _('include names matching the given patterns')),
3232 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3233 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3233 _('hg remove [OPTION]... FILE...')),
3234 _('hg remove [OPTION]... FILE...')),
3234 "rename|mv":
3235 "rename|mv":
3235 (rename,
3236 (rename,
3236 [('A', 'after', None, _('record a rename that has already occurred')),
3237 [('A', 'after', None, _('record a rename that has already occurred')),
3237 ('f', 'force', None,
3238 ('f', 'force', None,
3238 _('forcibly copy over an existing managed file')),
3239 _('forcibly copy over an existing managed file')),
3239 ('I', 'include', [], _('include names matching the given patterns')),
3240 ('I', 'include', [], _('include names matching the given patterns')),
3240 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3241 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3241 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3242 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3242 _('hg rename [OPTION]... SOURCE... DEST')),
3243 _('hg rename [OPTION]... SOURCE... DEST')),
3243 "^revert":
3244 "^revert":
3244 (revert,
3245 (revert,
3245 [('r', 'rev', '', _('revision to revert to')),
3246 [('r', 'rev', '', _('revision to revert to')),
3246 ('', 'no-backup', None, _('do not save backup copies of files')),
3247 ('', 'no-backup', None, _('do not save backup copies of files')),
3247 ('I', 'include', [], _('include names matching given patterns')),
3248 ('I', 'include', [], _('include names matching given patterns')),
3248 ('X', 'exclude', [], _('exclude names matching given patterns')),
3249 ('X', 'exclude', [], _('exclude names matching given patterns')),
3249 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3250 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3250 _('hg revert [-r REV] [NAME]...')),
3251 _('hg revert [-r REV] [NAME]...')),
3251 "rollback": (rollback, [], _('hg rollback')),
3252 "rollback": (rollback, [], _('hg rollback')),
3252 "root": (root, [], _('hg root')),
3253 "root": (root, [], _('hg root')),
3253 "^serve":
3254 "^serve":
3254 (serve,
3255 (serve,
3255 [('A', 'accesslog', '', _('name of access log file to write to')),
3256 [('A', 'accesslog', '', _('name of access log file to write to')),
3256 ('d', 'daemon', None, _('run server in background')),
3257 ('d', 'daemon', None, _('run server in background')),
3257 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3258 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3258 ('E', 'errorlog', '', _('name of error log file to write to')),
3259 ('E', 'errorlog', '', _('name of error log file to write to')),
3259 ('p', 'port', 0, _('port to use (default: 8000)')),
3260 ('p', 'port', 0, _('port to use (default: 8000)')),
3260 ('a', 'address', '', _('address to use')),
3261 ('a', 'address', '', _('address to use')),
3261 ('n', 'name', '',
3262 ('n', 'name', '',
3262 _('name to show in web pages (default: working dir)')),
3263 _('name to show in web pages (default: working dir)')),
3263 ('', 'webdir-conf', '', _('name of the webdir config file'
3264 ('', 'webdir-conf', '', _('name of the webdir config file'
3264 ' (serve more than one repo)')),
3265 ' (serve more than one repo)')),
3265 ('', 'pid-file', '', _('name of file to write process ID to')),
3266 ('', 'pid-file', '', _('name of file to write process ID to')),
3266 ('', 'stdio', None, _('for remote clients')),
3267 ('', 'stdio', None, _('for remote clients')),
3267 ('t', 'templates', '', _('web templates to use')),
3268 ('t', 'templates', '', _('web templates to use')),
3268 ('', 'style', '', _('template style to use')),
3269 ('', 'style', '', _('template style to use')),
3269 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3270 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3270 _('hg serve [OPTION]...')),
3271 _('hg serve [OPTION]...')),
3271 "^status|st":
3272 "^status|st":
3272 (status,
3273 (status,
3273 [('A', 'all', None, _('show status of all files')),
3274 [('A', 'all', None, _('show status of all files')),
3274 ('m', 'modified', None, _('show only modified files')),
3275 ('m', 'modified', None, _('show only modified files')),
3275 ('a', 'added', None, _('show only added files')),
3276 ('a', 'added', None, _('show only added files')),
3276 ('r', 'removed', None, _('show only removed files')),
3277 ('r', 'removed', None, _('show only removed files')),
3277 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3278 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3278 ('c', 'clean', None, _('show only files without changes')),
3279 ('c', 'clean', None, _('show only files without changes')),
3279 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3280 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3280 ('i', 'ignored', None, _('show ignored files')),
3281 ('i', 'ignored', None, _('show ignored files')),
3281 ('n', 'no-status', None, _('hide status prefix')),
3282 ('n', 'no-status', None, _('hide status prefix')),
3282 ('C', 'copies', None, _('show source of copied files')),
3283 ('C', 'copies', None, _('show source of copied files')),
3283 ('0', 'print0', None,
3284 ('0', 'print0', None,
3284 _('end filenames with NUL, for use with xargs')),
3285 _('end filenames with NUL, for use with xargs')),
3285 ('I', 'include', [], _('include names matching the given patterns')),
3286 ('I', 'include', [], _('include names matching the given patterns')),
3286 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3287 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3287 _('hg status [OPTION]... [FILE]...')),
3288 _('hg status [OPTION]... [FILE]...')),
3288 "tag":
3289 "tag":
3289 (tag,
3290 (tag,
3290 [('l', 'local', None, _('make the tag local')),
3291 [('l', 'local', None, _('make the tag local')),
3291 ('m', 'message', '', _('message for tag commit log entry')),
3292 ('m', 'message', '', _('message for tag commit log entry')),
3292 ('d', 'date', '', _('record datecode as commit date')),
3293 ('d', 'date', '', _('record datecode as commit date')),
3293 ('u', 'user', '', _('record user as committer')),
3294 ('u', 'user', '', _('record user as committer')),
3294 ('r', 'rev', '', _('revision to tag'))],
3295 ('r', 'rev', '', _('revision to tag'))],
3295 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3296 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3296 "tags": (tags, [], _('hg tags')),
3297 "tags": (tags, [], _('hg tags')),
3297 "tip":
3298 "tip":
3298 (tip,
3299 (tip,
3299 [('b', 'branches', None, _('show branches')),
3300 [('b', 'branches', None, _('show branches')),
3300 ('', 'style', '', _('display using template map file')),
3301 ('', 'style', '', _('display using template map file')),
3301 ('p', 'patch', None, _('show patch')),
3302 ('p', 'patch', None, _('show patch')),
3302 ('', 'template', '', _('display with template'))],
3303 ('', 'template', '', _('display with template'))],
3303 _('hg tip [-b] [-p]')),
3304 _('hg tip [-b] [-p]')),
3304 "unbundle":
3305 "unbundle":
3305 (unbundle,
3306 (unbundle,
3306 [('u', 'update', None,
3307 [('u', 'update', None,
3307 _('update the working directory to tip after unbundle'))],
3308 _('update the working directory to tip after unbundle'))],
3308 _('hg unbundle [-u] FILE')),
3309 _('hg unbundle [-u] FILE')),
3309 "debugundo|undo": (undo, [], _('hg undo')),
3310 "debugundo|undo": (undo, [], _('hg undo')),
3310 "^update|up|checkout|co":
3311 "^update|up|checkout|co":
3311 (update,
3312 (update,
3312 [('b', 'branch', '', _('checkout the head of a specific branch')),
3313 [('b', 'branch', '', _('checkout the head of a specific branch')),
3313 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3314 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3314 ('C', 'clean', None, _('overwrite locally modified files')),
3315 ('C', 'clean', None, _('overwrite locally modified files')),
3315 ('f', 'force', None, _('force a merge with outstanding changes'))],
3316 ('f', 'force', None, _('force a merge with outstanding changes'))],
3316 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3317 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3317 "verify": (verify, [], _('hg verify')),
3318 "verify": (verify, [], _('hg verify')),
3318 "version": (show_version, [], _('hg version')),
3319 "version": (show_version, [], _('hg version')),
3319 }
3320 }
3320
3321
3321 globalopts = [
3322 globalopts = [
3322 ('R', 'repository', '',
3323 ('R', 'repository', '',
3323 _('repository root directory or symbolic path name')),
3324 _('repository root directory or symbolic path name')),
3324 ('', 'cwd', '', _('change working directory')),
3325 ('', 'cwd', '', _('change working directory')),
3325 ('y', 'noninteractive', None,
3326 ('y', 'noninteractive', None,
3326 _('do not prompt, assume \'yes\' for any required answers')),
3327 _('do not prompt, assume \'yes\' for any required answers')),
3327 ('q', 'quiet', None, _('suppress output')),
3328 ('q', 'quiet', None, _('suppress output')),
3328 ('v', 'verbose', None, _('enable additional output')),
3329 ('v', 'verbose', None, _('enable additional output')),
3329 ('', 'config', [], _('set/override config option')),
3330 ('', 'config', [], _('set/override config option')),
3330 ('', 'debug', None, _('enable debugging output')),
3331 ('', 'debug', None, _('enable debugging output')),
3331 ('', 'debugger', None, _('start debugger')),
3332 ('', 'debugger', None, _('start debugger')),
3332 ('', 'lsprof', None, _('print improved command execution profile')),
3333 ('', 'lsprof', None, _('print improved command execution profile')),
3333 ('', 'traceback', None, _('print traceback on exception')),
3334 ('', 'traceback', None, _('print traceback on exception')),
3334 ('', 'time', None, _('time how long the command takes')),
3335 ('', 'time', None, _('time how long the command takes')),
3335 ('', 'profile', None, _('print command execution profile')),
3336 ('', 'profile', None, _('print command execution profile')),
3336 ('', 'version', None, _('output version information and exit')),
3337 ('', 'version', None, _('output version information and exit')),
3337 ('h', 'help', None, _('display help and exit')),
3338 ('h', 'help', None, _('display help and exit')),
3338 ]
3339 ]
3339
3340
3340 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3341 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3341 " debugindex debugindexdot")
3342 " debugindex debugindexdot")
3342 optionalrepo = ("paths serve debugconfig")
3343 optionalrepo = ("paths serve debugconfig")
3343
3344
3344 def findpossible(cmd):
3345 def findpossible(cmd):
3345 """
3346 """
3346 Return cmd -> (aliases, command table entry)
3347 Return cmd -> (aliases, command table entry)
3347 for each matching command.
3348 for each matching command.
3348 Return debug commands (or their aliases) only if no normal command matches.
3349 Return debug commands (or their aliases) only if no normal command matches.
3349 """
3350 """
3350 choice = {}
3351 choice = {}
3351 debugchoice = {}
3352 debugchoice = {}
3352 for e in table.keys():
3353 for e in table.keys():
3353 aliases = e.lstrip("^").split("|")
3354 aliases = e.lstrip("^").split("|")
3354 found = None
3355 found = None
3355 if cmd in aliases:
3356 if cmd in aliases:
3356 found = cmd
3357 found = cmd
3357 else:
3358 else:
3358 for a in aliases:
3359 for a in aliases:
3359 if a.startswith(cmd):
3360 if a.startswith(cmd):
3360 found = a
3361 found = a
3361 break
3362 break
3362 if found is not None:
3363 if found is not None:
3363 if aliases[0].startswith("debug"):
3364 if aliases[0].startswith("debug"):
3364 debugchoice[found] = (aliases, table[e])
3365 debugchoice[found] = (aliases, table[e])
3365 else:
3366 else:
3366 choice[found] = (aliases, table[e])
3367 choice[found] = (aliases, table[e])
3367
3368
3368 if not choice and debugchoice:
3369 if not choice and debugchoice:
3369 choice = debugchoice
3370 choice = debugchoice
3370
3371
3371 return choice
3372 return choice
3372
3373
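# Illustrative note (not part of the original source): findpossible matches a
# command prefix against every alias in the table above. For example,
# findpossible("lo") would return roughly
#   {'log': (['log', 'history'], table['^log|history']),
#    'locate': (['locate'], <the locate entry>)}
# and findcmd("lo") below would then raise AmbiguousCommand('lo', ['locate', 'log']).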
3373 def findcmd(cmd):
3374 def findcmd(cmd):
3374 """Return (aliases, command table entry) for command string."""
3375 """Return (aliases, command table entry) for command string."""
3375 choice = findpossible(cmd)
3376 choice = findpossible(cmd)
3376
3377
3377 if choice.has_key(cmd):
3378 if choice.has_key(cmd):
3378 return choice[cmd]
3379 return choice[cmd]
3379
3380
3380 if len(choice) > 1:
3381 if len(choice) > 1:
3381 clist = choice.keys()
3382 clist = choice.keys()
3382 clist.sort()
3383 clist.sort()
3383 raise AmbiguousCommand(cmd, clist)
3384 raise AmbiguousCommand(cmd, clist)
3384
3385
3385 if choice:
3386 if choice:
3386 return choice.values()[0]
3387 return choice.values()[0]
3387
3388
3388 raise UnknownCommand(cmd)
3389 raise UnknownCommand(cmd)
3389
3390
3390 def catchterm(*args):
3391 def catchterm(*args):
3391 raise util.SignalInterrupt
3392 raise util.SignalInterrupt
3392
3393
3393 def run():
3394 def run():
3394 sys.exit(dispatch(sys.argv[1:]))
3395 sys.exit(dispatch(sys.argv[1:]))
3395
3396
3396 class ParseError(Exception):
3397 class ParseError(Exception):
3397 """Exception raised on errors in parsing the command line."""
3398 """Exception raised on errors in parsing the command line."""
3398
3399
3399 def parse(ui, args):
3400 def parse(ui, args):
3400 options = {}
3401 options = {}
3401 cmdoptions = {}
3402 cmdoptions = {}
3402
3403
3403 try:
3404 try:
3404 args = fancyopts.fancyopts(args, globalopts, options)
3405 args = fancyopts.fancyopts(args, globalopts, options)
3405 except fancyopts.getopt.GetoptError, inst:
3406 except fancyopts.getopt.GetoptError, inst:
3406 raise ParseError(None, inst)
3407 raise ParseError(None, inst)
3407
3408
3408 if args:
3409 if args:
3409 cmd, args = args[0], args[1:]
3410 cmd, args = args[0], args[1:]
3410 aliases, i = findcmd(cmd)
3411 aliases, i = findcmd(cmd)
3411 cmd = aliases[0]
3412 cmd = aliases[0]
3412 defaults = ui.config("defaults", cmd)
3413 defaults = ui.config("defaults", cmd)
3413 if defaults:
3414 if defaults:
3414 args = defaults.split() + args
3415 args = defaults.split() + args
3415 c = list(i[1])
3416 c = list(i[1])
3416 else:
3417 else:
3417 cmd = None
3418 cmd = None
3418 c = []
3419 c = []
3419
3420
3420 # combine global options into local
3421 # combine global options into local
3421 for o in globalopts:
3422 for o in globalopts:
3422 c.append((o[0], o[1], options[o[1]], o[3]))
3423 c.append((o[0], o[1], options[o[1]], o[3]))
3423
3424
3424 try:
3425 try:
3425 args = fancyopts.fancyopts(args, c, cmdoptions)
3426 args = fancyopts.fancyopts(args, c, cmdoptions)
3426 except fancyopts.getopt.GetoptError, inst:
3427 except fancyopts.getopt.GetoptError, inst:
3427 raise ParseError(cmd, inst)
3428 raise ParseError(cmd, inst)
3428
3429
3429 # separate global options back out
3430 # separate global options back out
3430 for o in globalopts:
3431 for o in globalopts:
3431 n = o[1]
3432 n = o[1]
3432 options[n] = cmdoptions[n]
3433 options[n] = cmdoptions[n]
3433 del cmdoptions[n]
3434 del cmdoptions[n]
3434
3435
3435 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3436 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3436
3437
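# Illustrative sketch (hypothetical values, not in the original source): for a
# command line such as ['log', '-l', '5', 'foo.py'], parse() returns roughly
#   ('log', <the log command function>, ['foo.py'],
#    {... global options, e.g. 'verbose': None ...},
#    {... command options, e.g. 'limit': '5' ...})
# i.e. (cmd, func, args, options, cmdoptions) as consumed by dispatch() below.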
3437 external = {}
3438 external = {}
3438
3439
3439 def findext(name):
3440 def findext(name):
3440 '''return module with given extension name'''
3441 '''return module with given extension name'''
3441 try:
3442 try:
3442 return sys.modules[external[name]]
3443 return sys.modules[external[name]]
3443 except KeyError:
3444 except KeyError:
3444 for k, v in external.iteritems():
3445 for k, v in external.iteritems():
3445 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3446 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3446 return sys.modules[v]
3447 return sys.modules[v]
3447 raise KeyError(name)
3448 raise KeyError(name)
3448
3449
3449 def dispatch(args):
3450 def dispatch(args):
3450 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3451 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3451 num = getattr(signal, name, None)
3452 num = getattr(signal, name, None)
3452 if num: signal.signal(num, catchterm)
3453 if num: signal.signal(num, catchterm)
3453
3454
3454 try:
3455 try:
3455 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3456 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3456 except util.Abort, inst:
3457 except util.Abort, inst:
3457 sys.stderr.write(_("abort: %s\n") % inst)
3458 sys.stderr.write(_("abort: %s\n") % inst)
3458 return -1
3459 return -1
3459
3460
3460 for ext_name, load_from_name in u.extensions():
3461 for ext_name, load_from_name in u.extensions():
3461 try:
3462 try:
3462 if load_from_name:
3463 if load_from_name:
3463 # the module will be loaded in sys.modules
3464 # the module will be loaded in sys.modules
3464 # choose a unique name so that it doesn't
3465 # choose a unique name so that it doesn't
3465 # conflict with other modules
3466 # conflict with other modules
3466 module_name = "hgext_%s" % ext_name.replace('.', '_')
3467 module_name = "hgext_%s" % ext_name.replace('.', '_')
3467 mod = imp.load_source(module_name, load_from_name)
3468 mod = imp.load_source(module_name, load_from_name)
3468 else:
3469 else:
3469 def importh(name):
3470 def importh(name):
3470 mod = __import__(name)
3471 mod = __import__(name)
3471 components = name.split('.')
3472 components = name.split('.')
3472 for comp in components[1:]:
3473 for comp in components[1:]:
3473 mod = getattr(mod, comp)
3474 mod = getattr(mod, comp)
3474 return mod
3475 return mod
3475 try:
3476 try:
3476 mod = importh("hgext.%s" % ext_name)
3477 mod = importh("hgext.%s" % ext_name)
3477 except ImportError:
3478 except ImportError:
3478 mod = importh(ext_name)
3479 mod = importh(ext_name)
3479 external[ext_name] = mod.__name__
3480 external[ext_name] = mod.__name__
3480 except (util.SignalInterrupt, KeyboardInterrupt):
3481 except (util.SignalInterrupt, KeyboardInterrupt):
3481 raise
3482 raise
3482 except Exception, inst:
3483 except Exception, inst:
3483 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3484 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3484 if u.print_exc():
3485 if u.print_exc():
3485 return 1
3486 return 1
3486
3487
3487 for name in external.itervalues():
3488 for name in external.itervalues():
3488 mod = sys.modules[name]
3489 mod = sys.modules[name]
3489 uisetup = getattr(mod, 'uisetup', None)
3490 uisetup = getattr(mod, 'uisetup', None)
3490 if uisetup:
3491 if uisetup:
3491 uisetup(u)
3492 uisetup(u)
3492 cmdtable = getattr(mod, 'cmdtable', {})
3493 cmdtable = getattr(mod, 'cmdtable', {})
3493 for t in cmdtable:
3494 for t in cmdtable:
3494 if t in table:
3495 if t in table:
3495 u.warn(_("module %s overrides %s\n") % (name, t))
3496 u.warn(_("module %s overrides %s\n") % (name, t))
3496 table.update(cmdtable)
3497 table.update(cmdtable)
3497
3498
3498 try:
3499 try:
3499 cmd, func, args, options, cmdoptions = parse(u, args)
3500 cmd, func, args, options, cmdoptions = parse(u, args)
3500 if options["time"]:
3501 if options["time"]:
3501 def get_times():
3502 def get_times():
3502 t = os.times()
3503 t = os.times()
3503 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3504 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3504 t = (t[0], t[1], t[2], t[3], time.clock())
3505 t = (t[0], t[1], t[2], t[3], time.clock())
3505 return t
3506 return t
3506 s = get_times()
3507 s = get_times()
3507 def print_time():
3508 def print_time():
3508 t = get_times()
3509 t = get_times()
3509 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3510 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3510 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3511 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3511 atexit.register(print_time)
3512 atexit.register(print_time)
3512
3513
3513 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3514 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3514 not options["noninteractive"], options["traceback"],
3515 not options["noninteractive"], options["traceback"],
3515 options["config"])
3516 options["config"])
3516
3517
3517 # enter the debugger before command execution
3518 # enter the debugger before command execution
3518 if options['debugger']:
3519 if options['debugger']:
3519 pdb.set_trace()
3520 pdb.set_trace()
3520
3521
3521 try:
3522 try:
3522 if options['cwd']:
3523 if options['cwd']:
3523 try:
3524 try:
3524 os.chdir(options['cwd'])
3525 os.chdir(options['cwd'])
3525 except OSError, inst:
3526 except OSError, inst:
3526 raise util.Abort('%s: %s' %
3527 raise util.Abort('%s: %s' %
3527 (options['cwd'], inst.strerror))
3528 (options['cwd'], inst.strerror))
3528
3529
3529 path = u.expandpath(options["repository"]) or ""
3530 path = u.expandpath(options["repository"]) or ""
3530 repo = path and hg.repository(u, path=path) or None
3531 repo = path and hg.repository(u, path=path) or None
3531
3532
3532 if options['help']:
3533 if options['help']:
3533 return help_(u, cmd, options['version'])
3534 return help_(u, cmd, options['version'])
3534 elif options['version']:
3535 elif options['version']:
3535 return show_version(u)
3536 return show_version(u)
3536 elif not cmd:
3537 elif not cmd:
3537 return help_(u, 'shortlist')
3538 return help_(u, 'shortlist')
3538
3539
3539 if cmd not in norepo.split():
3540 if cmd not in norepo.split():
3540 try:
3541 try:
3541 if not repo:
3542 if not repo:
3542 repo = hg.repository(u, path=path)
3543 repo = hg.repository(u, path=path)
3543 u = repo.ui
3544 u = repo.ui
3544 for name in external.itervalues():
3545 for name in external.itervalues():
3545 mod = sys.modules[name]
3546 mod = sys.modules[name]
3546 if hasattr(mod, 'reposetup'):
3547 if hasattr(mod, 'reposetup'):
3547 mod.reposetup(u, repo)
3548 mod.reposetup(u, repo)
3548 except hg.RepoError:
3549 except hg.RepoError:
3549 if cmd not in optionalrepo.split():
3550 if cmd not in optionalrepo.split():
3550 raise
3551 raise
3551 d = lambda: func(u, repo, *args, **cmdoptions)
3552 d = lambda: func(u, repo, *args, **cmdoptions)
3552 else:
3553 else:
3553 d = lambda: func(u, *args, **cmdoptions)
3554 d = lambda: func(u, *args, **cmdoptions)
3554
3555
3555 # reupdate the options, repo/.hg/hgrc may have changed them
3556 # reupdate the options, repo/.hg/hgrc may have changed them
3556 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3557 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3557 not options["noninteractive"], options["traceback"],
3558 not options["noninteractive"], options["traceback"],
3558 options["config"])
3559 options["config"])
3559
3560
3560 try:
3561 try:
3561 if options['profile']:
3562 if options['profile']:
3562 import hotshot, hotshot.stats
3563 import hotshot, hotshot.stats
3563 prof = hotshot.Profile("hg.prof")
3564 prof = hotshot.Profile("hg.prof")
3564 try:
3565 try:
3565 try:
3566 try:
3566 return prof.runcall(d)
3567 return prof.runcall(d)
3567 except:
3568 except:
3568 try:
3569 try:
3569 u.warn(_('exception raised - generating '
3570 u.warn(_('exception raised - generating '
3570 'profile anyway\n'))
3571 'profile anyway\n'))
3571 except:
3572 except:
3572 pass
3573 pass
3573 raise
3574 raise
3574 finally:
3575 finally:
3575 prof.close()
3576 prof.close()
3576 stats = hotshot.stats.load("hg.prof")
3577 stats = hotshot.stats.load("hg.prof")
3577 stats.strip_dirs()
3578 stats.strip_dirs()
3578 stats.sort_stats('time', 'calls')
3579 stats.sort_stats('time', 'calls')
3579 stats.print_stats(40)
3580 stats.print_stats(40)
3580 elif options['lsprof']:
3581 elif options['lsprof']:
3581 try:
3582 try:
3582 from mercurial import lsprof
3583 from mercurial import lsprof
3583 except ImportError:
3584 except ImportError:
3584 raise util.Abort(_(
3585 raise util.Abort(_(
3585 'lsprof not available - install from '
3586 'lsprof not available - install from '
3586 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3587 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3587 p = lsprof.Profiler()
3588 p = lsprof.Profiler()
3588 p.enable(subcalls=True)
3589 p.enable(subcalls=True)
3589 try:
3590 try:
3590 return d()
3591 return d()
3591 finally:
3592 finally:
3592 p.disable()
3593 p.disable()
3593 stats = lsprof.Stats(p.getstats())
3594 stats = lsprof.Stats(p.getstats())
3594 stats.sort()
3595 stats.sort()
3595 stats.pprint(top=10, file=sys.stderr, climit=5)
3596 stats.pprint(top=10, file=sys.stderr, climit=5)
3596 else:
3597 else:
3597 return d()
3598 return d()
3598 finally:
3599 finally:
3599 u.flush()
3600 u.flush()
3600 except:
3601 except:
3601 # enter the debugger when we hit an exception
3602 # enter the debugger when we hit an exception
3602 if options['debugger']:
3603 if options['debugger']:
3603 pdb.post_mortem(sys.exc_info()[2])
3604 pdb.post_mortem(sys.exc_info()[2])
3604 u.print_exc()
3605 u.print_exc()
3605 raise
3606 raise
3606 except ParseError, inst:
3607 except ParseError, inst:
3607 if inst.args[0]:
3608 if inst.args[0]:
3608 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3609 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3609 help_(u, inst.args[0])
3610 help_(u, inst.args[0])
3610 else:
3611 else:
3611 u.warn(_("hg: %s\n") % inst.args[1])
3612 u.warn(_("hg: %s\n") % inst.args[1])
3612 help_(u, 'shortlist')
3613 help_(u, 'shortlist')
3613 except AmbiguousCommand, inst:
3614 except AmbiguousCommand, inst:
3614 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3615 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3615 (inst.args[0], " ".join(inst.args[1])))
3616 (inst.args[0], " ".join(inst.args[1])))
3616 except UnknownCommand, inst:
3617 except UnknownCommand, inst:
3617 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3618 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3618 help_(u, 'shortlist')
3619 help_(u, 'shortlist')
3619 except hg.RepoError, inst:
3620 except hg.RepoError, inst:
3620 u.warn(_("abort: %s!\n") % inst)
3621 u.warn(_("abort: %s!\n") % inst)
3621 except lock.LockHeld, inst:
3622 except lock.LockHeld, inst:
3622 if inst.errno == errno.ETIMEDOUT:
3623 if inst.errno == errno.ETIMEDOUT:
3623 reason = _('timed out waiting for lock held by %s') % inst.locker
3624 reason = _('timed out waiting for lock held by %s') % inst.locker
3624 else:
3625 else:
3625 reason = _('lock held by %s') % inst.locker
3626 reason = _('lock held by %s') % inst.locker
3626 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3627 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3627 except lock.LockUnavailable, inst:
3628 except lock.LockUnavailable, inst:
3628 u.warn(_("abort: could not lock %s: %s\n") %
3629 u.warn(_("abort: could not lock %s: %s\n") %
3629 (inst.desc or inst.filename, inst.strerror))
3630 (inst.desc or inst.filename, inst.strerror))
3630 except revlog.RevlogError, inst:
3631 except revlog.RevlogError, inst:
3631 u.warn(_("abort: "), inst, "!\n")
3632 u.warn(_("abort: "), inst, "!\n")
3632 except util.SignalInterrupt:
3633 except util.SignalInterrupt:
3633 u.warn(_("killed!\n"))
3634 u.warn(_("killed!\n"))
3634 except KeyboardInterrupt:
3635 except KeyboardInterrupt:
3635 try:
3636 try:
3636 u.warn(_("interrupted!\n"))
3637 u.warn(_("interrupted!\n"))
3637 except IOError, inst:
3638 except IOError, inst:
3638 if inst.errno == errno.EPIPE:
3639 if inst.errno == errno.EPIPE:
3639 if u.debugflag:
3640 if u.debugflag:
3640 u.warn(_("\nbroken pipe\n"))
3641 u.warn(_("\nbroken pipe\n"))
3641 else:
3642 else:
3642 raise
3643 raise
3643 except IOError, inst:
3644 except IOError, inst:
3644 if hasattr(inst, "code"):
3645 if hasattr(inst, "code"):
3645 u.warn(_("abort: %s\n") % inst)
3646 u.warn(_("abort: %s\n") % inst)
3646 elif hasattr(inst, "reason"):
3647 elif hasattr(inst, "reason"):
3647 u.warn(_("abort: error: %s\n") % inst.reason[1])
3648 u.warn(_("abort: error: %s\n") % inst.reason[1])
3648 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3649 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3649 if u.debugflag:
3650 if u.debugflag:
3650 u.warn(_("broken pipe\n"))
3651 u.warn(_("broken pipe\n"))
3651 elif getattr(inst, "strerror", None):
3652 elif getattr(inst, "strerror", None):
3652 if getattr(inst, "filename", None):
3653 if getattr(inst, "filename", None):
3653 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3654 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3654 else:
3655 else:
3655 u.warn(_("abort: %s\n") % inst.strerror)
3656 u.warn(_("abort: %s\n") % inst.strerror)
3656 else:
3657 else:
3657 raise
3658 raise
3658 except OSError, inst:
3659 except OSError, inst:
3659 if hasattr(inst, "filename"):
3660 if hasattr(inst, "filename"):
3660 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3661 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3661 else:
3662 else:
3662 u.warn(_("abort: %s\n") % inst.strerror)
3663 u.warn(_("abort: %s\n") % inst.strerror)
3663 except util.Abort, inst:
3664 except util.Abort, inst:
3664 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3665 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3665 except TypeError, inst:
3666 except TypeError, inst:
3666 # was this an argument error?
3667 # was this an argument error?
3667 tb = traceback.extract_tb(sys.exc_info()[2])
3668 tb = traceback.extract_tb(sys.exc_info()[2])
3668 if len(tb) > 2: # no
3669 if len(tb) > 2: # no
3669 raise
3670 raise
3670 u.debug(inst, "\n")
3671 u.debug(inst, "\n")
3671 u.warn(_("%s: invalid arguments\n") % cmd)
3672 u.warn(_("%s: invalid arguments\n") % cmd)
3672 help_(u, cmd)
3673 help_(u, cmd)
3673 except SystemExit, inst:
3674 except SystemExit, inst:
3674 # Commands shouldn't sys.exit directly, but give a return code.
3675 # Commands shouldn't sys.exit directly, but give a return code.
3675 # Just in case, catch this and pass the exit code to the caller.
3676 # Just in case, catch this and pass the exit code to the caller.
3676 return inst.code
3677 return inst.code
3677 except:
3678 except:
3678 u.warn(_("** unknown exception encountered, details follow\n"))
3679 u.warn(_("** unknown exception encountered, details follow\n"))
3679 u.warn(_("** report bug details to "
3680 u.warn(_("** report bug details to "
3680 "http://www.selenic.com/mercurial/bts\n"))
3681 "http://www.selenic.com/mercurial/bts\n"))
3681 u.warn(_("** or mercurial@selenic.com\n"))
3682 u.warn(_("** or mercurial@selenic.com\n"))
3682 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3683 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3683 % version.get_version())
3684 % version.get_version())
3684 raise
3685 raise
3685
3686
3686 return -1
3687 return -1
@@ -1,491 +1,491 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "struct os time bisect stat util re errno")
13 demandload(globals(), "struct os time bisect stat util re errno")
14
14
15 class dirstate(object):
15 class dirstate(object):
16 format = ">cllll"
16 format = ">cllll"
17
17
18 def __init__(self, opener, ui, root):
18 def __init__(self, opener, ui, root):
19 self.opener = opener
19 self.opener = opener
20 self.root = root
20 self.root = root
21 self.dirty = 0
21 self.dirty = 0
22 self.ui = ui
22 self.ui = ui
23 self.map = None
23 self.map = None
24 self.pl = None
24 self.pl = None
25 self.copies = {}
25 self.copies = {}
26 self.ignorefunc = None
26 self.ignorefunc = None
27 self.blockignore = False
27 self.blockignore = False
28
28
29 def wjoin(self, f):
29 def wjoin(self, f):
30 return os.path.join(self.root, f)
30 return os.path.join(self.root, f)
31
31
32 def getcwd(self):
32 def getcwd(self):
33 cwd = os.getcwd()
33 cwd = os.getcwd()
34 if cwd == self.root: return ''
34 if cwd == self.root: return ''
35 return cwd[len(self.root) + 1:]
35 return cwd[len(self.root) + 1:]
36
36
37 def hgignore(self):
37 def hgignore(self):
38 '''return the contents of .hgignore files as a list of patterns.
38 '''return the contents of .hgignore files as a list of patterns.
39
39
40 the files parsed for patterns include:
40 the files parsed for patterns include:
41 .hgignore in the repository root
41 .hgignore in the repository root
42 any additional files specified in the [ui] section of ~/.hgrc
42 any additional files specified in the [ui] section of ~/.hgrc
43
43
44 trailing white space is dropped.
44 trailing white space is dropped.
45 the escape character is backslash.
45 the escape character is backslash.
46 comments start with #.
46 comments start with #.
47 empty lines are skipped.
47 empty lines are skipped.
48
48
49 lines can be of the following formats:
49 lines can be of the following formats:
50
50
51 syntax: regexp # defaults following lines to non-rooted regexps
51 syntax: regexp # defaults following lines to non-rooted regexps
52 syntax: glob # defaults following lines to non-rooted globs
52 syntax: glob # defaults following lines to non-rooted globs
53 re:pattern # non-rooted regular expression
53 re:pattern # non-rooted regular expression
54 glob:pattern # non-rooted glob
54 glob:pattern # non-rooted glob
55 pattern # pattern of the current default type'''
55 pattern # pattern of the current default type'''
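# Illustrative example of a .hgignore file using the syntax described in the
# docstring above (file contents shown here as comments; not part of the
# original source):
#
#     syntax: glob
#     *.pyc
#     build/*
#     syntax: regexp
#     ^\.pc/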
56 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
56 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
57 def parselines(fp):
57 def parselines(fp):
58 for line in fp:
58 for line in fp:
59 escape = False
59 escape = False
60 for i in xrange(len(line)):
60 for i in xrange(len(line)):
61 if escape: escape = False
61 if escape: escape = False
62 elif line[i] == '\\': escape = True
62 elif line[i] == '\\': escape = True
63 elif line[i] == '#': break
63 elif line[i] == '#': break
64 line = line[:i].rstrip()
64 line = line[:i].rstrip()
65 if line: yield line
65 if line: yield line
66 repoignore = self.wjoin('.hgignore')
66 repoignore = self.wjoin('.hgignore')
67 files = [repoignore]
67 files = [repoignore]
68 files.extend(self.ui.hgignorefiles())
68 files.extend(self.ui.hgignorefiles())
69 pats = {}
69 pats = {}
70 for f in files:
70 for f in files:
71 try:
71 try:
72 pats[f] = []
72 pats[f] = []
73 fp = open(f)
73 fp = open(f)
74 syntax = 'relre:'
74 syntax = 'relre:'
75 for line in parselines(fp):
75 for line in parselines(fp):
76 if line.startswith('syntax:'):
76 if line.startswith('syntax:'):
77 s = line[7:].strip()
77 s = line[7:].strip()
78 try:
78 try:
79 syntax = syntaxes[s]
79 syntax = syntaxes[s]
80 except KeyError:
80 except KeyError:
81 self.ui.warn(_("%s: ignoring invalid "
81 self.ui.warn(_("%s: ignoring invalid "
82 "syntax '%s'\n") % (f, s))
82 "syntax '%s'\n") % (f, s))
83 continue
83 continue
84 pat = syntax + line
84 pat = syntax + line
85 for s in syntaxes.values():
85 for s in syntaxes.values():
86 if line.startswith(s):
86 if line.startswith(s):
87 pat = line
87 pat = line
88 break
88 break
89 pats[f].append(pat)
89 pats[f].append(pat)
90 except IOError, inst:
90 except IOError, inst:
91 if f != repoignore:
91 if f != repoignore:
92 self.ui.warn(_("skipping unreadable ignore file"
92 self.ui.warn(_("skipping unreadable ignore file"
93 " '%s': %s\n") % (f, inst.strerror))
93 " '%s': %s\n") % (f, inst.strerror))
94 return pats
94 return pats
95
95
96 def ignore(self, fn):
96 def ignore(self, fn):
97 '''default match function used by dirstate and
97 '''default match function used by dirstate and
98 localrepository. this honours the repository .hgignore file
98 localrepository. this honours the repository .hgignore file
99 and any other files specified in the [ui] section of .hgrc.'''
99 and any other files specified in the [ui] section of .hgrc.'''
100 if self.blockignore:
100 if self.blockignore:
101 return False
101 return False
102 if not self.ignorefunc:
102 if not self.ignorefunc:
103 ignore = self.hgignore()
103 ignore = self.hgignore()
104 allpats = []
104 allpats = []
105 [allpats.extend(patlist) for patlist in ignore.values()]
105 [allpats.extend(patlist) for patlist in ignore.values()]
106 if allpats:
106 if allpats:
107 try:
107 try:
108 files, self.ignorefunc, anypats = (
108 files, self.ignorefunc, anypats = (
109 util.matcher(self.root, inc=allpats, src='.hgignore'))
109 util.matcher(self.root, inc=allpats, src='.hgignore'))
110 except util.Abort:
110 except util.Abort:
111 # Re-raise an exception where the src is the right file
111 # Re-raise an exception where the src is the right file
112 for f, patlist in ignore.items():
112 for f, patlist in ignore.items():
113 files, self.ignorefunc, anypats = (
113 files, self.ignorefunc, anypats = (
114 util.matcher(self.root, inc=patlist, src=f))
114 util.matcher(self.root, inc=patlist, src=f))
115 else:
115 else:
116 self.ignorefunc = util.never
116 self.ignorefunc = util.never
117 return self.ignorefunc(fn)
117 return self.ignorefunc(fn)
118
118
119 def __del__(self):
119 def __del__(self):
120 if self.dirty:
120 if self.dirty:
121 self.write()
121 self.write()
122
122
123 def __getitem__(self, key):
123 def __getitem__(self, key):
124 try:
124 try:
125 return self.map[key]
125 return self.map[key]
126 except TypeError:
126 except TypeError:
127 self.lazyread()
127 self.lazyread()
128 return self[key]
128 return self[key]
129
129
130 def __contains__(self, key):
130 def __contains__(self, key):
131 self.lazyread()
131 self.lazyread()
132 return key in self.map
132 return key in self.map
133
133
134 def parents(self):
134 def parents(self):
135 self.lazyread()
135 self.lazyread()
136 return self.pl
136 return self.pl
137
137
138 def markdirty(self):
138 def markdirty(self):
139 if not self.dirty:
139 if not self.dirty:
140 self.dirty = 1
140 self.dirty = 1
141
141
142 def setparents(self, p1, p2=nullid):
142 def setparents(self, p1, p2=nullid):
143 self.lazyread()
143 self.lazyread()
144 self.markdirty()
144 self.markdirty()
145 self.pl = p1, p2
145 self.pl = p1, p2
146
146
147 def state(self, key):
147 def state(self, key):
148 try:
148 try:
149 return self[key][0]
149 return self[key][0]
150 except KeyError:
150 except KeyError:
151 return "?"
151 return "?"
152
152
153 def lazyread(self):
153 def lazyread(self):
154 if self.map is None:
154 if self.map is None:
155 self.read()
155 self.read()
156
156
157 def parse(self, st):
157 def parse(self, st):
158 self.pl = [st[:20], st[20: 40]]
158 self.pl = [st[:20], st[20: 40]]
159
159
160 # deref fields so they will be local in loop
160 # deref fields so they will be local in loop
161 map = self.map
161 map = self.map
162 copies = self.copies
162 copies = self.copies
163 format = self.format
163 format = self.format
164 unpack = struct.unpack
164 unpack = struct.unpack
165
165
166 pos = 40
166 pos = 40
167 e_size = struct.calcsize(format)
167 e_size = struct.calcsize(format)
168
168
169 while pos < len(st):
169 while pos < len(st):
170 newpos = pos + e_size
170 newpos = pos + e_size
171 e = unpack(format, st[pos:newpos])
171 e = unpack(format, st[pos:newpos])
172 l = e[4]
172 l = e[4]
173 pos = newpos
173 pos = newpos
174 newpos = pos + l
174 newpos = pos + l
175 f = st[pos:newpos]
175 f = st[pos:newpos]
176 if '\0' in f:
176 if '\0' in f:
177 f, c = f.split('\0')
177 f, c = f.split('\0')
178 copies[f] = c
178 copies[f] = c
179 map[f] = e[:4]
179 map[f] = e[:4]
180 pos = newpos
180 pos = newpos
181
181
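# Illustrative note (hypothetical values, not in the original source): the
# dirstate file begins with the two 20-byte parent nodes, followed by one
# record per tracked file packed with self.format (">cllll"), i.e.
#   struct.pack(">cllll", state, mode, size, mtime, len(name)) + name
# where a copied file stores "dest\0source" in the name field, as parse()
# above and write() below both assume.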
182 def read(self):
182 def read(self):
183 self.map = {}
183 self.map = {}
184 self.pl = [nullid, nullid]
184 self.pl = [nullid, nullid]
185 try:
185 try:
186 st = self.opener("dirstate").read()
186 st = self.opener("dirstate").read()
187 if st:
187 if st:
188 self.parse(st)
188 self.parse(st)
189 except IOError, err:
189 except IOError, err:
190 if err.errno != errno.ENOENT: raise
190 if err.errno != errno.ENOENT: raise
191
191
192 def copy(self, source, dest):
192 def copy(self, source, dest):
193 self.lazyread()
193 self.lazyread()
194 self.markdirty()
194 self.markdirty()
195 self.copies[dest] = source
195 self.copies[dest] = source
196
196
197 def copied(self, file):
197 def copied(self, file):
198 return self.copies.get(file, None)
198 return self.copies.get(file, None)
199
199
200 def update(self, files, state, **kw):
200 def update(self, files, state, **kw):
201 ''' current states:
201 ''' current states:
202 n normal
202 n normal
203 m needs merging
203 m needs merging
204 r marked for removal
204 r marked for removal
205 a marked for addition'''
205 a marked for addition'''
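# Illustrative sketch (hypothetical values, not in the original source): after
# update(['foo.py'], 'n') an entry looks roughly like
#   self.map['foo.py'] = ('n', 0100644, 1024, 1130000000)
# i.e. (state, st_mode, st_size, st_mtime) as stored by the loop below.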
206
206
207 if not files: return
207 if not files: return
208 self.lazyread()
208 self.lazyread()
209 self.markdirty()
209 self.markdirty()
210 for f in files:
210 for f in files:
211 if state == "r":
211 if state == "r":
212 self.map[f] = ('r', 0, 0, 0)
212 self.map[f] = ('r', 0, 0, 0)
213 else:
213 else:
214 s = os.lstat(self.wjoin(f))
214 s = os.lstat(self.wjoin(f))
215 st_size = kw.get('st_size', s.st_size)
215 st_size = kw.get('st_size', s.st_size)
216 st_mtime = kw.get('st_mtime', s.st_mtime)
216 st_mtime = kw.get('st_mtime', s.st_mtime)
217 self.map[f] = (state, s.st_mode, st_size, st_mtime)
217 self.map[f] = (state, s.st_mode, st_size, st_mtime)
218 if self.copies.has_key(f):
218 if self.copies.has_key(f):
219 del self.copies[f]
219 del self.copies[f]
220
220
221 def forget(self, files):
221 def forget(self, files):
222 if not files: return
222 if not files: return
223 self.lazyread()
223 self.lazyread()
224 self.markdirty()
224 self.markdirty()
225 for f in files:
225 for f in files:
226 try:
226 try:
227 del self.map[f]
227 del self.map[f]
228 except KeyError:
228 except KeyError:
229 self.ui.warn(_("not in dirstate: %s!\n") % f)
229 self.ui.warn(_("not in dirstate: %s!\n") % f)
230 pass
230 pass
231
231
232 def clear(self):
232 def clear(self):
233 self.map = {}
233 self.map = {}
234 self.copies = {}
234 self.copies = {}
235 self.markdirty()
235 self.markdirty()
236
236
237 def rebuild(self, parent, files):
237 def rebuild(self, parent, files):
238 self.clear()
238 self.clear()
239 umask = os.umask(0)
239 umask = os.umask(0)
240 os.umask(umask)
240 os.umask(umask)
241 for f, mode in files:
241 for f in files:
242 if mode:
242 if files.execf(f):
243 self.map[f] = ('n', ~umask, -1, 0)
243 self.map[f] = ('n', ~umask, -1, 0)
244 else:
244 else:
245 self.map[f] = ('n', ~umask & 0666, -1, 0)
245 self.map[f] = ('n', ~umask & 0666, -1, 0)
246 self.pl = (parent, nullid)
246 self.pl = (parent, nullid)
247 self.markdirty()
247 self.markdirty()
248
248
249 def write(self):
249 def write(self):
250 if not self.dirty:
250 if not self.dirty:
251 return
251 return
252 st = self.opener("dirstate", "w", atomic=True)
252 st = self.opener("dirstate", "w", atomic=True)
253 st.write("".join(self.pl))
253 st.write("".join(self.pl))
254 for f, e in self.map.items():
254 for f, e in self.map.items():
255 c = self.copied(f)
255 c = self.copied(f)
256 if c:
256 if c:
257 f = f + "\0" + c
257 f = f + "\0" + c
258 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
258 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
259 st.write(e + f)
259 st.write(e + f)
260 self.dirty = 0
260 self.dirty = 0
261
261
262 def filterfiles(self, files):
262 def filterfiles(self, files):
263 ret = {}
263 ret = {}
264 unknown = []
264 unknown = []
265
265
266 for x in files:
266 for x in files:
267 if x == '.':
267 if x == '.':
268 return self.map.copy()
268 return self.map.copy()
269 if x not in self.map:
269 if x not in self.map:
270 unknown.append(x)
270 unknown.append(x)
271 else:
271 else:
272 ret[x] = self.map[x]
272 ret[x] = self.map[x]
273
273
274 if not unknown:
274 if not unknown:
275 return ret
275 return ret
276
276
277 b = self.map.keys()
277 b = self.map.keys()
278 b.sort()
278 b.sort()
279 blen = len(b)
279 blen = len(b)
280
280
281 for x in unknown:
281 for x in unknown:
282 bs = bisect.bisect(b, "%s%s" % (x, '/'))
282 bs = bisect.bisect(b, "%s%s" % (x, '/'))
283 while bs < blen:
283 while bs < blen:
284 s = b[bs]
284 s = b[bs]
285 if len(s) > len(x) and s.startswith(x):
285 if len(s) > len(x) and s.startswith(x):
286 ret[s] = self.map[s]
286 ret[s] = self.map[s]
287 else:
287 else:
288 break
288 break
289 bs += 1
289 bs += 1
290 return ret
290 return ret
291
291
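# Illustrative note (not part of the original source): filterfiles above
# narrows the dirstate map to the requested names. For example,
# filterfiles(['subdir']) returns the entries under 'subdir/' (found via
# bisect on the sorted key list), or the entry for 'subdir' itself if it is a
# tracked file, while filterfiles(['.']) returns a copy of the whole map.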
292 def supported_type(self, f, st, verbose=False):
292 def supported_type(self, f, st, verbose=False):
293 if stat.S_ISREG(st.st_mode):
293 if stat.S_ISREG(st.st_mode):
294 return True
294 return True
295 if verbose:
295 if verbose:
296 kind = 'unknown'
296 kind = 'unknown'
297 if stat.S_ISCHR(st.st_mode): kind = _('character device')
297 if stat.S_ISCHR(st.st_mode): kind = _('character device')
298 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
298 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
299 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
299 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
300 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
300 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
301 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
301 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
302 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
302 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
303 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
303 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
304 util.pathto(self.getcwd(), f),
304 util.pathto(self.getcwd(), f),
305 kind))
305 kind))
306 return False
306 return False
307
307
308 def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
308 def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
309 badmatch=None):
309 badmatch=None):
310 self.lazyread()
310 self.lazyread()
311
311
312 # walk all files by default
312 # walk all files by default
313 if not files:
313 if not files:
314 files = [self.root]
314 files = [self.root]
315 if not dc:
315 if not dc:
316 dc = self.map.copy()
316 dc = self.map.copy()
317 elif not dc:
317 elif not dc:
318 dc = self.filterfiles(files)
318 dc = self.filterfiles(files)
319
319
320 def statmatch(file_, stat):
320 def statmatch(file_, stat):
321 file_ = util.pconvert(file_)
321 file_ = util.pconvert(file_)
322 if not ignored and file_ not in dc and self.ignore(file_):
322 if not ignored and file_ not in dc and self.ignore(file_):
323 return False
323 return False
324 return match(file_)
324 return match(file_)
325
325
326 return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
326 return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
327 badmatch=badmatch)
327 badmatch=badmatch)
328
328
329 def walk(self, files=None, match=util.always, dc=None, badmatch=None):
329 def walk(self, files=None, match=util.always, dc=None, badmatch=None):
330 # filter out the stat
330 # filter out the stat
331 for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
331 for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
332 yield src, f
332 yield src, f
333
333
334 # walk recursively through the directory tree, finding all files
334 # walk recursively through the directory tree, finding all files
335 # matched by the statmatch function
335 # matched by the statmatch function
336 #
336 #
337 # results are yielded in a tuple (src, filename, st), where src
337 # results are yielded in a tuple (src, filename, st), where src
338 # is one of:
338 # is one of:
339 # 'f' the file was found in the directory tree
339 # 'f' the file was found in the directory tree
340 # 'm' the file was only in the dirstate and not in the tree
340 # 'm' the file was only in the dirstate and not in the tree
341 # and st is the stat result if the file was found in the directory.
341 # and st is the stat result if the file was found in the directory.
342 #
342 #
343 # dc is an optional arg for the current dirstate. dc is not modified
343 # dc is an optional arg for the current dirstate. dc is not modified
344 # directly by this function, but might be modified by your statmatch call.
344 # directly by this function, but might be modified by your statmatch call.
345 #
345 #
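# Illustrative examples of the tuples described above (hypothetical paths,
# not in the original source):
#   ('f', 'subdir/file.py', <lstat result>)   file found in the working dir
#   ('m', 'removed.py', None)                 file known only to the dirstate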
346 def walkhelper(self, files, statmatch, dc, badmatch=None):
346 def walkhelper(self, files, statmatch, dc, badmatch=None):
347 # self.root may end with a path separator when self.root == '/'
347 # self.root may end with a path separator when self.root == '/'
348 common_prefix_len = len(self.root)
348 common_prefix_len = len(self.root)
349 if not self.root.endswith('/'):
349 if not self.root.endswith('/'):
350 common_prefix_len += 1
350 common_prefix_len += 1
351 # recursion free walker, faster than os.walk.
351 # recursion free walker, faster than os.walk.
352 def findfiles(s):
352 def findfiles(s):
353 work = [s]
353 work = [s]
354 while work:
354 while work:
355 top = work.pop()
355 top = work.pop()
356 names = os.listdir(top)
356 names = os.listdir(top)
357 names.sort()
357 names.sort()
358 # nd is the top of the repository dir tree
358 # nd is the top of the repository dir tree
359 nd = util.normpath(top[common_prefix_len:])
359 nd = util.normpath(top[common_prefix_len:])
360 if nd == '.':
360 if nd == '.':
361 nd = ''
361 nd = ''
362 else:
362 else:
363 # do not recurse into a repo contained in this
363 # do not recurse into a repo contained in this
364 # one. use bisect to find the .hg directory so this
364 # one. use bisect to find the .hg directory so this
365 # stays fast in big directories.
365 # stays fast in big directories.
366 hg = bisect.bisect_left(names, '.hg')
366 hg = bisect.bisect_left(names, '.hg')
367 if hg < len(names) and names[hg] == '.hg':
367 if hg < len(names) and names[hg] == '.hg':
368 if os.path.isdir(os.path.join(top, '.hg')):
368 if os.path.isdir(os.path.join(top, '.hg')):
369 continue
369 continue
370 for f in names:
370 for f in names:
371 np = util.pconvert(os.path.join(nd, f))
371 np = util.pconvert(os.path.join(nd, f))
372 if seen(np):
372 if seen(np):
373 continue
373 continue
374 p = os.path.join(top, f)
374 p = os.path.join(top, f)
375 # don't trip over symlinks
375 # don't trip over symlinks
376 st = os.lstat(p)
376 st = os.lstat(p)
377 if stat.S_ISDIR(st.st_mode):
377 if stat.S_ISDIR(st.st_mode):
378 ds = os.path.join(nd, f +'/')
378 ds = os.path.join(nd, f +'/')
379 if statmatch(ds, st):
379 if statmatch(ds, st):
380 work.append(p)
380 work.append(p)
381 if statmatch(np, st) and np in dc:
381 if statmatch(np, st) and np in dc:
382 yield 'm', np, st
382 yield 'm', np, st
383 elif statmatch(np, st):
383 elif statmatch(np, st):
384 if self.supported_type(np, st):
384 if self.supported_type(np, st):
385 yield 'f', np, st
385 yield 'f', np, st
386 elif np in dc:
386 elif np in dc:
387 yield 'm', np, st
387 yield 'm', np, st
388
388
389 known = {'.hg': 1}
389 known = {'.hg': 1}
390 def seen(fn):
390 def seen(fn):
391 if fn in known: return True
391 if fn in known: return True
392 known[fn] = 1
392 known[fn] = 1
393
393
394 # step one, find all files that match our criteria
394 # step one, find all files that match our criteria
395 files.sort()
395 files.sort()
396 for ff in util.unique(files):
396 for ff in util.unique(files):
397 f = self.wjoin(ff)
397 f = self.wjoin(ff)
398 try:
398 try:
399 st = os.lstat(f)
399 st = os.lstat(f)
400 except OSError, inst:
400 except OSError, inst:
401 nf = util.normpath(ff)
401 nf = util.normpath(ff)
402 found = False
402 found = False
403 for fn in dc:
403 for fn in dc:
404 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
404 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
405 found = True
405 found = True
406 break
406 break
407 if not found:
407 if not found:
408 if inst.errno != errno.ENOENT or not badmatch:
408 if inst.errno != errno.ENOENT or not badmatch:
409 self.ui.warn('%s: %s\n' % (
409 self.ui.warn('%s: %s\n' % (
410 util.pathto(self.getcwd(), ff),
410 util.pathto(self.getcwd(), ff),
411 inst.strerror))
411 inst.strerror))
412 elif badmatch and badmatch(ff) and statmatch(ff, None):
412 elif badmatch and badmatch(ff) and statmatch(ff, None):
413 yield 'b', ff, None
413 yield 'b', ff, None
414 continue
414 continue
415 if stat.S_ISDIR(st.st_mode):
415 if stat.S_ISDIR(st.st_mode):
416 cmp1 = (lambda x, y: cmp(x[1], y[1]))
416 cmp1 = (lambda x, y: cmp(x[1], y[1]))
417 sorted_ = [ x for x in findfiles(f) ]
417 sorted_ = [ x for x in findfiles(f) ]
418 sorted_.sort(cmp1)
418 sorted_.sort(cmp1)
419 for e in sorted_:
419 for e in sorted_:
420 yield e
420 yield e
421 else:
421 else:
422 ff = util.normpath(ff)
422 ff = util.normpath(ff)
423 if seen(ff):
423 if seen(ff):
424 continue
424 continue
425 self.blockignore = True
425 self.blockignore = True
426 if statmatch(ff, st):
426 if statmatch(ff, st):
427 if self.supported_type(ff, st, verbose=True):
427 if self.supported_type(ff, st, verbose=True):
428 yield 'f', ff, st
428 yield 'f', ff, st
429 elif ff in dc:
429 elif ff in dc:
430 yield 'm', ff, st
430 yield 'm', ff, st
431 self.blockignore = False
431 self.blockignore = False
432
432
433 # step two: run through anything left in the dc hash and yield it
433 # step two: run through anything left in the dc hash and yield it
434 # if we haven't already seen it
434 # if we haven't already seen it
435 ks = dc.keys()
435 ks = dc.keys()
436 ks.sort()
436 ks.sort()
437 for k in ks:
437 for k in ks:
438 if not seen(k) and (statmatch(k, None)):
438 if not seen(k) and (statmatch(k, None)):
439 yield 'm', k, None
439 yield 'm', k, None
440
440
441 def status(self, files=None, match=util.always, list_ignored=False,
441 def status(self, files=None, match=util.always, list_ignored=False,
442 list_clean=False):
442 list_clean=False):
443 lookup, modified, added, unknown, ignored = [], [], [], [], []
443 lookup, modified, added, unknown, ignored = [], [], [], [], []
444 removed, deleted, clean = [], [], []
444 removed, deleted, clean = [], [], []
445
445
446 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
446 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
447 try:
447 try:
448 type_, mode, size, time = self[fn]
448 type_, mode, size, time = self[fn]
449 except KeyError:
449 except KeyError:
450 if list_ignored and self.ignore(fn):
450 if list_ignored and self.ignore(fn):
451 ignored.append(fn)
451 ignored.append(fn)
452 else:
452 else:
453 unknown.append(fn)
453 unknown.append(fn)
454 continue
454 continue
455 if src == 'm':
455 if src == 'm':
456 nonexistent = True
456 nonexistent = True
457 if not st:
457 if not st:
458 try:
458 try:
459 st = os.lstat(self.wjoin(fn))
459 st = os.lstat(self.wjoin(fn))
460 except OSError, inst:
460 except OSError, inst:
461 if inst.errno != errno.ENOENT:
461 if inst.errno != errno.ENOENT:
462 raise
462 raise
463 st = None
463 st = None
464 # We need to re-check that it is a valid file
464 # We need to re-check that it is a valid file
465 if st and self.supported_type(fn, st):
465 if st and self.supported_type(fn, st):
466 nonexistent = False
466 nonexistent = False
467 # XXX: what to do with files no longer present in the fs
467 # XXX: what to do with files no longer present in the fs
468 # that are not marked removed in the dirstate?
468 # that are not marked removed in the dirstate?
469 if nonexistent and type_ in "nm":
469 if nonexistent and type_ in "nm":
470 deleted.append(fn)
470 deleted.append(fn)
471 continue
471 continue
472 # check the common case first
472 # check the common case first
473 if type_ == 'n':
473 if type_ == 'n':
474 if not st:
474 if not st:
475 st = os.lstat(self.wjoin(fn))
475 st = os.lstat(self.wjoin(fn))
476 if size >= 0 and (size != st.st_size
476 if size >= 0 and (size != st.st_size
477 or (mode ^ st.st_mode) & 0100):
477 or (mode ^ st.st_mode) & 0100):
478 modified.append(fn)
478 modified.append(fn)
479 elif time != st.st_mtime:
479 elif time != st.st_mtime:
480 lookup.append(fn)
480 lookup.append(fn)
481 elif list_clean:
481 elif list_clean:
482 clean.append(fn)
482 clean.append(fn)
483 elif type_ == 'm':
483 elif type_ == 'm':
484 modified.append(fn)
484 modified.append(fn)
485 elif type_ == 'a':
485 elif type_ == 'a':
486 added.append(fn)
486 added.append(fn)
487 elif type_ == 'r':
487 elif type_ == 'r':
488 removed.append(fn)
488 removed.append(fn)
489
489
490 return (lookup, modified, added, removed, deleted, unknown, ignored,
490 return (lookup, modified, added, removed, deleted, unknown, ignored,
491 clean)
491 clean)
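
For files recorded in state 'n', status() above decides between modified, lookup, and clean purely from the stat fields cached in the dirstate. A sketch of that test in isolation (the function name and argument layout are illustrative, not the dirstate API):

    import os, stat

    def classify_normal(size, mode, mtime, path):
        # recorded size/mode/mtime come from the dirstate entry, the rest from disk
        st = os.lstat(path)
        if size >= 0 and (size != st.st_size or (mode ^ st.st_mode) & stat.S_IXUSR):
            return 'modified'   # size or exec bit differs: definitely changed
        if mtime != st.st_mtime:
            return 'lookup'     # timestamp differs: contents must be compared later
        return 'clean'

Files that land in 'lookup' are the ones the caller re-reads and compares byte for byte (see fcmp() in localrepo.status() below).
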
@@ -1,1765 +1,1764 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("no repo found"))
30 raise repo.RepoError(_("no repo found"))
31 path = p
31 path = p
32 self.path = os.path.join(path, ".hg")
32 self.path = os.path.join(path, ".hg")
33
33
34 if not create and not os.path.isdir(self.path):
34 if not create and not os.path.isdir(self.path):
35 raise repo.RepoError(_("repository %s not found") % path)
35 raise repo.RepoError(_("repository %s not found") % path)
36
36
37 self.root = os.path.abspath(path)
37 self.root = os.path.abspath(path)
38 self.origroot = path
38 self.origroot = path
39 self.ui = ui.ui(parentui=parentui)
39 self.ui = ui.ui(parentui=parentui)
40 self.opener = util.opener(self.path)
40 self.opener = util.opener(self.path)
41 self.wopener = util.opener(self.root)
41 self.wopener = util.opener(self.root)
42
42
43 try:
43 try:
44 self.ui.readconfig(self.join("hgrc"), self.root)
44 self.ui.readconfig(self.join("hgrc"), self.root)
45 except IOError:
45 except IOError:
46 pass
46 pass
47
47
48 v = self.ui.revlogopts
48 v = self.ui.revlogopts
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 fl = v.get('flags', None)
51 fl = v.get('flags', None)
52 flags = 0
52 flags = 0
53 if fl != None:
53 if fl != None:
54 for x in fl.split():
54 for x in fl.split():
55 flags |= revlog.flagstr(x)
55 flags |= revlog.flagstr(x)
56 elif self.revlogv1:
56 elif self.revlogv1:
57 flags = revlog.REVLOG_DEFAULT_FLAGS
57 flags = revlog.REVLOG_DEFAULT_FLAGS
58
58
59 v = self.revlogversion | flags
59 v = self.revlogversion | flags
60 self.manifest = manifest.manifest(self.opener, v)
60 self.manifest = manifest.manifest(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
62
62
63 # the changelog might not have the inline index flag
63 # the changelog might not have the inline index flag
64 # on. If the format of the changelog is the same as found in
64 # on. If the format of the changelog is the same as found in
65 # .hgrc, apply any flags found in the .hgrc as well.
65 # .hgrc, apply any flags found in the .hgrc as well.
66 # Otherwise, just version from the changelog
66 # Otherwise, just version from the changelog
67 v = self.changelog.version
67 v = self.changelog.version
68 if v == self.revlogversion:
68 if v == self.revlogversion:
69 v |= flags
69 v |= flags
70 self.revlogversion = v
70 self.revlogversion = v
71
71
72 self.tagscache = None
72 self.tagscache = None
73 self.nodetagscache = None
73 self.nodetagscache = None
74 self.encodepats = None
74 self.encodepats = None
75 self.decodepats = None
75 self.decodepats = None
76 self.transhandle = None
76 self.transhandle = None
77
77
78 if create:
78 if create:
79 if not os.path.exists(path):
79 if not os.path.exists(path):
80 os.mkdir(path)
80 os.mkdir(path)
81 os.mkdir(self.path)
81 os.mkdir(self.path)
82 os.mkdir(self.join("data"))
82 os.mkdir(self.join("data"))
83
83
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85
85
86 def url(self):
86 def url(self):
87 return 'file:' + self.root
87 return 'file:' + self.root
88
88
89 def hook(self, name, throw=False, **args):
89 def hook(self, name, throw=False, **args):
90 def callhook(hname, funcname):
90 def callhook(hname, funcname):
91 '''call python hook. hook is callable object, looked up as
91 '''call python hook. hook is callable object, looked up as
92 name in python module. if callable returns "true", hook
92 name in python module. if callable returns "true", hook
93 fails, else passes. if hook raises exception, treated as
93 fails, else passes. if hook raises exception, treated as
94 hook failure. exception propagates if throw is "true".
94 hook failure. exception propagates if throw is "true".
95
95
96 reason for "true" meaning "hook failed" is so that
96 reason for "true" meaning "hook failed" is so that
97 unmodified commands (e.g. mercurial.commands.update) can
97 unmodified commands (e.g. mercurial.commands.update) can
98 be run as hooks without wrappers to convert return values.'''
98 be run as hooks without wrappers to convert return values.'''
99
99
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
101 d = funcname.rfind('.')
101 d = funcname.rfind('.')
102 if d == -1:
102 if d == -1:
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
104 % (hname, funcname))
104 % (hname, funcname))
105 modname = funcname[:d]
105 modname = funcname[:d]
106 try:
106 try:
107 obj = __import__(modname)
107 obj = __import__(modname)
108 except ImportError:
108 except ImportError:
109 try:
109 try:
110 # extensions are loaded with hgext_ prefix
110 # extensions are loaded with hgext_ prefix
111 obj = __import__("hgext_%s" % modname)
111 obj = __import__("hgext_%s" % modname)
112 except ImportError:
112 except ImportError:
113 raise util.Abort(_('%s hook is invalid '
113 raise util.Abort(_('%s hook is invalid '
114 '(import of "%s" failed)') %
114 '(import of "%s" failed)') %
115 (hname, modname))
115 (hname, modname))
116 try:
116 try:
117 for p in funcname.split('.')[1:]:
117 for p in funcname.split('.')[1:]:
118 obj = getattr(obj, p)
118 obj = getattr(obj, p)
119 except AttributeError, err:
119 except AttributeError, err:
120 raise util.Abort(_('%s hook is invalid '
120 raise util.Abort(_('%s hook is invalid '
121 '("%s" is not defined)') %
121 '("%s" is not defined)') %
122 (hname, funcname))
122 (hname, funcname))
123 if not callable(obj):
123 if not callable(obj):
124 raise util.Abort(_('%s hook is invalid '
124 raise util.Abort(_('%s hook is invalid '
125 '("%s" is not callable)') %
125 '("%s" is not callable)') %
126 (hname, funcname))
126 (hname, funcname))
127 try:
127 try:
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
129 except (KeyboardInterrupt, util.SignalInterrupt):
129 except (KeyboardInterrupt, util.SignalInterrupt):
130 raise
130 raise
131 except Exception, exc:
131 except Exception, exc:
132 if isinstance(exc, util.Abort):
132 if isinstance(exc, util.Abort):
133 self.ui.warn(_('error: %s hook failed: %s\n') %
133 self.ui.warn(_('error: %s hook failed: %s\n') %
134 (hname, exc.args[0] % exc.args[1:]))
134 (hname, exc.args[0] % exc.args[1:]))
135 else:
135 else:
136 self.ui.warn(_('error: %s hook raised an exception: '
136 self.ui.warn(_('error: %s hook raised an exception: '
137 '%s\n') % (hname, exc))
137 '%s\n') % (hname, exc))
138 if throw:
138 if throw:
139 raise
139 raise
140 self.ui.print_exc()
140 self.ui.print_exc()
141 return True
141 return True
142 if r:
142 if r:
143 if throw:
143 if throw:
144 raise util.Abort(_('%s hook failed') % hname)
144 raise util.Abort(_('%s hook failed') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
146 return r
146 return r
147
147
148 def runhook(name, cmd):
148 def runhook(name, cmd):
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
151 r = util.system(cmd, environ=env, cwd=self.root)
151 r = util.system(cmd, environ=env, cwd=self.root)
152 if r:
152 if r:
153 desc, r = util.explain_exit(r)
153 desc, r = util.explain_exit(r)
154 if throw:
154 if throw:
155 raise util.Abort(_('%s hook %s') % (name, desc))
155 raise util.Abort(_('%s hook %s') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
157 return r
157 return r
158
158
159 r = False
159 r = False
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
161 if hname.split(".", 1)[0] == name and cmd]
161 if hname.split(".", 1)[0] == name and cmd]
162 hooks.sort()
162 hooks.sort()
163 for hname, cmd in hooks:
163 for hname, cmd in hooks:
164 if cmd.startswith('python:'):
164 if cmd.startswith('python:'):
165 r = callhook(hname, cmd[7:].strip()) or r
165 r = callhook(hname, cmd[7:].strip()) or r
166 else:
166 else:
167 r = runhook(hname, cmd) or r
167 r = runhook(hname, cmd) or r
168 return r
168 return r
169
169
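
hook() above looks up python hooks by dotted name after a 'python:' prefix and treats a true return value as failure; shell hooks receive the keyword arguments as HG_* environment variables via runhook(). A hedged example of wiring one up (the section and hook names follow the code above; 'myhooks' and the hook body are illustrative):

    # .hg/hgrc -- hook names are matched on the part before the first dot
    #   [hooks]
    #   pretxncommit.nomerge = python:myhooks.deny_merges
    #   commit = echo committed $HG_NODE

    # myhooks.py
    def deny_merges(ui, repo, hooktype, node=None, parent1=None, parent2=None, **kwargs):
        # pretxncommit is invoked with throw=True, so a true return value
        # aborts the commit and rolls back the transaction
        if parent2:
            ui.warn('merge commits are not allowed in this repository\n')
            return True
        return False
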
170 tag_disallowed = ':\r\n'
170 tag_disallowed = ':\r\n'
171
171
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
173 '''tag a revision with a symbolic name.
173 '''tag a revision with a symbolic name.
174
174
175 if local is True, the tag is stored in a per-repository file.
175 if local is True, the tag is stored in a per-repository file.
176 otherwise, it is stored in the .hgtags file, and a new
176 otherwise, it is stored in the .hgtags file, and a new
177 changeset is committed with the change.
177 changeset is committed with the change.
178
178
179 keyword arguments:
179 keyword arguments:
180
180
181 local: whether to store tag in non-version-controlled file
181 local: whether to store tag in non-version-controlled file
182 (default False)
182 (default False)
183
183
184 message: commit message to use if committing
184 message: commit message to use if committing
185
185
186 user: name of user to use if committing
186 user: name of user to use if committing
187
187
188 date: date tuple to use if committing'''
188 date: date tuple to use if committing'''
189
189
190 for c in self.tag_disallowed:
190 for c in self.tag_disallowed:
191 if c in name:
191 if c in name:
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
193
193
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
195
195
196 if local:
196 if local:
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
198 self.hook('tag', node=node, tag=name, local=local)
198 self.hook('tag', node=node, tag=name, local=local)
199 return
199 return
200
200
201 for x in self.changes():
201 for x in self.changes():
202 if '.hgtags' in x:
202 if '.hgtags' in x:
203 raise util.Abort(_('working copy of .hgtags is changed '
203 raise util.Abort(_('working copy of .hgtags is changed '
204 '(please commit .hgtags manually)'))
204 '(please commit .hgtags manually)'))
205
205
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
207 if self.dirstate.state('.hgtags') == '?':
207 if self.dirstate.state('.hgtags') == '?':
208 self.add(['.hgtags'])
208 self.add(['.hgtags'])
209
209
210 if not message:
210 if not message:
211 message = _('Added tag %s for changeset %s') % (name, node)
211 message = _('Added tag %s for changeset %s') % (name, node)
212
212
213 self.commit(['.hgtags'], message, user, date)
213 self.commit(['.hgtags'], message, user, date)
214 self.hook('tag', node=node, tag=name, local=local)
214 self.hook('tag', node=node, tag=name, local=local)
215
215
216 def tags(self):
216 def tags(self):
217 '''return a mapping of tag to node'''
217 '''return a mapping of tag to node'''
218 if not self.tagscache:
218 if not self.tagscache:
219 self.tagscache = {}
219 self.tagscache = {}
220
220
221 def parsetag(line, context):
221 def parsetag(line, context):
222 if not line:
222 if not line:
223 return
223 return
224 s = line.split(" ", 1)
224 s = line.split(" ", 1)
225 if len(s) != 2:
225 if len(s) != 2:
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 return
227 return
228 node, key = s
228 node, key = s
229 key = key.strip()
229 key = key.strip()
230 try:
230 try:
231 bin_n = bin(node)
231 bin_n = bin(node)
232 except TypeError:
232 except TypeError:
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 (context, node))
234 (context, node))
235 return
235 return
236 if bin_n not in self.changelog.nodemap:
236 if bin_n not in self.changelog.nodemap:
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 (context, key))
238 (context, key))
239 return
239 return
240 self.tagscache[key] = bin_n
240 self.tagscache[key] = bin_n
241
241
242 # read the tags file from each head, ending with the tip,
242 # read the tags file from each head, ending with the tip,
243 # and add each tag found to the map, with "newer" ones
243 # and add each tag found to the map, with "newer" ones
244 # taking precedence
244 # taking precedence
245 heads = self.heads()
245 heads = self.heads()
246 heads.reverse()
246 heads.reverse()
247 fl = self.file(".hgtags")
247 fl = self.file(".hgtags")
248 for node in heads:
248 for node in heads:
249 change = self.changelog.read(node)
249 change = self.changelog.read(node)
250 rev = self.changelog.rev(node)
250 rev = self.changelog.rev(node)
251 fn, ff = self.manifest.find(change[0], '.hgtags')
251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 if fn is None: continue
252 if fn is None: continue
253 count = 0
253 count = 0
254 for l in fl.read(fn).splitlines():
254 for l in fl.read(fn).splitlines():
255 count += 1
255 count += 1
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 (rev, short(node), count))
257 (rev, short(node), count))
258 try:
258 try:
259 f = self.opener("localtags")
259 f = self.opener("localtags")
260 count = 0
260 count = 0
261 for l in f:
261 for l in f:
262 count += 1
262 count += 1
263 parsetag(l, _("localtags, line %d") % count)
263 parsetag(l, _("localtags, line %d") % count)
264 except IOError:
264 except IOError:
265 pass
265 pass
266
266
267 self.tagscache['tip'] = self.changelog.tip()
267 self.tagscache['tip'] = self.changelog.tip()
268
268
269 return self.tagscache
269 return self.tagscache
270
270
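
.hgtags and .hg/localtags share the one-line-per-tag format that parsetag() consumes, and because the heads are read tip-last (with localtags last of all), later entries take precedence. A standalone sketch of that line format, with an illustrative parser name:

    from binascii import unhexlify

    def parse_tag_line(line):
        # "<40-hex-node> <tag name>", exactly what parsetag() splits apart
        line = line.rstrip('\n')
        if not line:
            return None
        node, key = line.split(' ', 1)
        return key.strip(), unhexlify(node)

    print(parse_tag_line('0123456789abcdef0123456789abcdef01234567 v1.0'))
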
271 def tagslist(self):
271 def tagslist(self):
272 '''return a list of tags ordered by revision'''
272 '''return a list of tags ordered by revision'''
273 l = []
273 l = []
274 for t, n in self.tags().items():
274 for t, n in self.tags().items():
275 try:
275 try:
276 r = self.changelog.rev(n)
276 r = self.changelog.rev(n)
277 except:
277 except:
278 r = -2 # sort to the beginning of the list if unknown
278 r = -2 # sort to the beginning of the list if unknown
279 l.append((r, t, n))
279 l.append((r, t, n))
280 l.sort()
280 l.sort()
281 return [(t, n) for r, t, n in l]
281 return [(t, n) for r, t, n in l]
282
282
283 def nodetags(self, node):
283 def nodetags(self, node):
284 '''return the tags associated with a node'''
284 '''return the tags associated with a node'''
285 if not self.nodetagscache:
285 if not self.nodetagscache:
286 self.nodetagscache = {}
286 self.nodetagscache = {}
287 for t, n in self.tags().items():
287 for t, n in self.tags().items():
288 self.nodetagscache.setdefault(n, []).append(t)
288 self.nodetagscache.setdefault(n, []).append(t)
289 return self.nodetagscache.get(node, [])
289 return self.nodetagscache.get(node, [])
290
290
291 def lookup(self, key):
291 def lookup(self, key):
292 try:
292 try:
293 return self.tags()[key]
293 return self.tags()[key]
294 except KeyError:
294 except KeyError:
295 if key == '.':
295 if key == '.':
296 key = self.dirstate.parents()[0]
296 key = self.dirstate.parents()[0]
297 if key == nullid:
297 if key == nullid:
298 raise repo.RepoError(_("no revision checked out"))
298 raise repo.RepoError(_("no revision checked out"))
299 try:
299 try:
300 return self.changelog.lookup(key)
300 return self.changelog.lookup(key)
301 except:
301 except:
302 raise repo.RepoError(_("unknown revision '%s'") % key)
302 raise repo.RepoError(_("unknown revision '%s'") % key)
303
303
304 def dev(self):
304 def dev(self):
305 return os.lstat(self.path).st_dev
305 return os.lstat(self.path).st_dev
306
306
307 def local(self):
307 def local(self):
308 return True
308 return True
309
309
310 def join(self, f):
310 def join(self, f):
311 return os.path.join(self.path, f)
311 return os.path.join(self.path, f)
312
312
313 def wjoin(self, f):
313 def wjoin(self, f):
314 return os.path.join(self.root, f)
314 return os.path.join(self.root, f)
315
315
316 def file(self, f):
316 def file(self, f):
317 if f[0] == '/':
317 if f[0] == '/':
318 f = f[1:]
318 f = f[1:]
319 return filelog.filelog(self.opener, f, self.revlogversion)
319 return filelog.filelog(self.opener, f, self.revlogversion)
320
320
321 def changectx(self, changeid):
321 def changectx(self, changeid):
322 return context.changectx(self, changeid)
322 return context.changectx(self, changeid)
323
323
324 def filectx(self, path, changeid=None, fileid=None):
324 def filectx(self, path, changeid=None, fileid=None):
325 """changeid can be a changeset revision, node, or tag.
325 """changeid can be a changeset revision, node, or tag.
326 fileid can be a file revision or node."""
326 fileid can be a file revision or node."""
327 return context.filectx(self, path, changeid, fileid)
327 return context.filectx(self, path, changeid, fileid)
328
328
329 def getcwd(self):
329 def getcwd(self):
330 return self.dirstate.getcwd()
330 return self.dirstate.getcwd()
331
331
332 def wfile(self, f, mode='r'):
332 def wfile(self, f, mode='r'):
333 return self.wopener(f, mode)
333 return self.wopener(f, mode)
334
334
335 def wread(self, filename):
335 def wread(self, filename):
336 if self.encodepats == None:
336 if self.encodepats == None:
337 l = []
337 l = []
338 for pat, cmd in self.ui.configitems("encode"):
338 for pat, cmd in self.ui.configitems("encode"):
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
340 l.append((mf, cmd))
340 l.append((mf, cmd))
341 self.encodepats = l
341 self.encodepats = l
342
342
343 data = self.wopener(filename, 'r').read()
343 data = self.wopener(filename, 'r').read()
344
344
345 for mf, cmd in self.encodepats:
345 for mf, cmd in self.encodepats:
346 if mf(filename):
346 if mf(filename):
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
348 data = util.filter(data, cmd)
348 data = util.filter(data, cmd)
349 break
349 break
350
350
351 return data
351 return data
352
352
353 def wwrite(self, filename, data, fd=None):
353 def wwrite(self, filename, data, fd=None):
354 if self.decodepats == None:
354 if self.decodepats == None:
355 l = []
355 l = []
356 for pat, cmd in self.ui.configitems("decode"):
356 for pat, cmd in self.ui.configitems("decode"):
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
358 l.append((mf, cmd))
358 l.append((mf, cmd))
359 self.decodepats = l
359 self.decodepats = l
360
360
361 for mf, cmd in self.decodepats:
361 for mf, cmd in self.decodepats:
362 if mf(filename):
362 if mf(filename):
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
364 data = util.filter(data, cmd)
364 data = util.filter(data, cmd)
365 break
365 break
366
366
367 if fd:
367 if fd:
368 return fd.write(data)
368 return fd.write(data)
369 return self.wopener(filename, 'w').write(data)
369 return self.wopener(filename, 'w').write(data)
370
370
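
wread() and wwrite() run working-directory contents through the first matching [encode] or [decode] filter, where each pattern maps to a shell command applied with util.filter(). The selection logic in isolation (the filter functions here stand in for the shell commands; all names are illustrative):

    def apply_filters(filename, data, filters):
        # first matching pattern wins, like the encode/decode loops above
        for match_fn, filter_fn in filters:
            if match_fn(filename):
                return filter_fn(data)
        return data

    # illustrative: strip carriage returns from .txt files on read
    filters = [(lambda fn: fn.endswith('.txt'), lambda d: d.replace('\r', ''))]
    print(apply_filters('notes.txt', 'a\r\nb\r\n', filters))
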
371 def transaction(self):
371 def transaction(self):
372 tr = self.transhandle
372 tr = self.transhandle
373 if tr != None and tr.running():
373 if tr != None and tr.running():
374 return tr.nest()
374 return tr.nest()
375
375
376 # save dirstate for rollback
376 # save dirstate for rollback
377 try:
377 try:
378 ds = self.opener("dirstate").read()
378 ds = self.opener("dirstate").read()
379 except IOError:
379 except IOError:
380 ds = ""
380 ds = ""
381 self.opener("journal.dirstate", "w").write(ds)
381 self.opener("journal.dirstate", "w").write(ds)
382
382
383 tr = transaction.transaction(self.ui.warn, self.opener,
383 tr = transaction.transaction(self.ui.warn, self.opener,
384 self.join("journal"),
384 self.join("journal"),
385 aftertrans(self.path))
385 aftertrans(self.path))
386 self.transhandle = tr
386 self.transhandle = tr
387 return tr
387 return tr
388
388
389 def recover(self):
389 def recover(self):
390 l = self.lock()
390 l = self.lock()
391 if os.path.exists(self.join("journal")):
391 if os.path.exists(self.join("journal")):
392 self.ui.status(_("rolling back interrupted transaction\n"))
392 self.ui.status(_("rolling back interrupted transaction\n"))
393 transaction.rollback(self.opener, self.join("journal"))
393 transaction.rollback(self.opener, self.join("journal"))
394 self.reload()
394 self.reload()
395 return True
395 return True
396 else:
396 else:
397 self.ui.warn(_("no interrupted transaction available\n"))
397 self.ui.warn(_("no interrupted transaction available\n"))
398 return False
398 return False
399
399
400 def rollback(self, wlock=None):
400 def rollback(self, wlock=None):
401 if not wlock:
401 if not wlock:
402 wlock = self.wlock()
402 wlock = self.wlock()
403 l = self.lock()
403 l = self.lock()
404 if os.path.exists(self.join("undo")):
404 if os.path.exists(self.join("undo")):
405 self.ui.status(_("rolling back last transaction\n"))
405 self.ui.status(_("rolling back last transaction\n"))
406 transaction.rollback(self.opener, self.join("undo"))
406 transaction.rollback(self.opener, self.join("undo"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
408 self.reload()
408 self.reload()
409 self.wreload()
409 self.wreload()
410 else:
410 else:
411 self.ui.warn(_("no rollback information available\n"))
411 self.ui.warn(_("no rollback information available\n"))
412
412
413 def wreload(self):
413 def wreload(self):
414 self.dirstate.read()
414 self.dirstate.read()
415
415
416 def reload(self):
416 def reload(self):
417 self.changelog.load()
417 self.changelog.load()
418 self.manifest.load()
418 self.manifest.load()
419 self.tagscache = None
419 self.tagscache = None
420 self.nodetagscache = None
420 self.nodetagscache = None
421
421
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
423 desc=None):
423 desc=None):
424 try:
424 try:
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
426 except lock.LockHeld, inst:
426 except lock.LockHeld, inst:
427 if not wait:
427 if not wait:
428 raise
428 raise
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
430 (desc, inst.args[0]))
430 (desc, inst.args[0]))
431 # default to 600 seconds timeout
431 # default to 600 seconds timeout
432 l = lock.lock(self.join(lockname),
432 l = lock.lock(self.join(lockname),
433 int(self.ui.config("ui", "timeout") or 600),
433 int(self.ui.config("ui", "timeout") or 600),
434 releasefn, desc=desc)
434 releasefn, desc=desc)
435 if acquirefn:
435 if acquirefn:
436 acquirefn()
436 acquirefn()
437 return l
437 return l
438
438
439 def lock(self, wait=1):
439 def lock(self, wait=1):
440 return self.do_lock("lock", wait, acquirefn=self.reload,
440 return self.do_lock("lock", wait, acquirefn=self.reload,
441 desc=_('repository %s') % self.origroot)
441 desc=_('repository %s') % self.origroot)
442
442
443 def wlock(self, wait=1):
443 def wlock(self, wait=1):
444 return self.do_lock("wlock", wait, self.dirstate.write,
444 return self.do_lock("wlock", wait, self.dirstate.write,
445 self.wreload,
445 self.wreload,
446 desc=_('working directory of %s') % self.origroot)
446 desc=_('working directory of %s') % self.origroot)
447
447
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
449 "determine whether a new filenode is needed"
449 "determine whether a new filenode is needed"
450 fp1 = manifest1.get(filename, nullid)
450 fp1 = manifest1.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
452
452
453 if fp2 != nullid:
453 if fp2 != nullid:
454 # is one parent an ancestor of the other?
454 # is one parent an ancestor of the other?
455 fpa = filelog.ancestor(fp1, fp2)
455 fpa = filelog.ancestor(fp1, fp2)
456 if fpa == fp1:
456 if fpa == fp1:
457 fp1, fp2 = fp2, nullid
457 fp1, fp2 = fp2, nullid
458 elif fpa == fp2:
458 elif fpa == fp2:
459 fp2 = nullid
459 fp2 = nullid
460
460
461 # is the file unmodified from the parent? report existing entry
461 # is the file unmodified from the parent? report existing entry
462 if fp2 == nullid and text == filelog.read(fp1):
462 if fp2 == nullid and text == filelog.read(fp1):
463 return (fp1, None, None)
463 return (fp1, None, None)
464
464
465 return (None, fp1, fp2)
465 return (None, fp1, fp2)
466
466
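
checkfilemerge() only records two file parents when neither is an ancestor of the other; otherwise the ancestor collapses away and, if the text is unchanged, the existing filenode is reused rather than a new one written. The parent-collapsing step as a pure function (the callable 'ancestor' stands in for filelog.ancestor; nullid is the 20-byte null node):

    def collapse_parents(fp1, fp2, ancestor, nullid='\0' * 20):
        if fp2 != nullid:
            fpa = ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid   # fp1 is an ancestor of fp2: keep fp2 only
            elif fpa == fp2:
                fp2 = nullid             # fp2 is an ancestor of fp1: keep fp1 only
        return fp1, fp2

If fp2 comes back null and the text equals filelog.read(fp1), checkfilemerge() returns fp1 as the entry to reuse.
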
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
468 orig_parent = self.dirstate.parents()[0] or nullid
468 orig_parent = self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
471 c1 = self.changelog.read(p1)
471 c1 = self.changelog.read(p1)
472 c2 = self.changelog.read(p2)
472 c2 = self.changelog.read(p2)
473 m1 = self.manifest.read(c1[0])
473 m1 = self.manifest.read(c1[0])
474 mf1 = self.manifest.readflags(c1[0])
474 mf1 = self.manifest.readflags(c1[0])
475 m2 = self.manifest.read(c2[0])
475 m2 = self.manifest.read(c2[0])
476 changed = []
476 changed = []
477
477
478 if orig_parent == p1:
478 if orig_parent == p1:
479 update_dirstate = 1
479 update_dirstate = 1
480 else:
480 else:
481 update_dirstate = 0
481 update_dirstate = 0
482
482
483 if not wlock:
483 if not wlock:
484 wlock = self.wlock()
484 wlock = self.wlock()
485 l = self.lock()
485 l = self.lock()
486 tr = self.transaction()
486 tr = self.transaction()
487 mm = m1.copy()
487 mm = m1.copy()
488 mfm = mf1.copy()
488 mfm = mf1.copy()
489 linkrev = self.changelog.count()
489 linkrev = self.changelog.count()
490 for f in files:
490 for f in files:
491 try:
491 try:
492 t = self.wread(f)
492 t = self.wread(f)
493 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
493 mfm.set(f, util.is_exec(self.wjoin(f), mfm.execf(f)))
494 r = self.file(f)
494 r = self.file(f)
495 mfm[f] = tm
496
495
497 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
496 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
498 if entry:
497 if entry:
499 mm[f] = entry
498 mm[f] = entry
500 continue
499 continue
501
500
502 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
501 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
503 changed.append(f)
502 changed.append(f)
504 if update_dirstate:
503 if update_dirstate:
505 self.dirstate.update([f], "n")
504 self.dirstate.update([f], "n")
506 except IOError:
505 except IOError:
507 try:
506 try:
508 del mm[f]
507 del mm[f]
509 del mfm[f]
508 del mfm[f]
510 if update_dirstate:
509 if update_dirstate:
511 self.dirstate.forget([f])
510 self.dirstate.forget([f])
512 except:
511 except:
513 # deleted from p2?
512 # deleted from p2?
514 pass
513 pass
515
514
516 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
515 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
517 user = user or self.ui.username()
516 user = user or self.ui.username()
518 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
517 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
519 tr.close()
518 tr.close()
520 if update_dirstate:
519 if update_dirstate:
521 self.dirstate.setparents(n, nullid)
520 self.dirstate.setparents(n, nullid)
522
521
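
The functional change in this hunk is the switch from indexing the flags mapping directly to the execf()/set() accessors, the manifest refactor named in the changeset title. The real class lives in manifest.py, outside this diff; a minimal stand-in consistent with how rawcommit() and commit() call it might look like this (an assumption, not the actual implementation):

    class manifestflags(dict):
        # hypothetical sketch: execf() reads the exec bit, set() records it
        def execf(self, f):
            return self.get(f, False)
        def set(self, f, is_exec):
            self[f] = bool(is_exec)
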
523 def commit(self, files=None, text="", user=None, date=None,
522 def commit(self, files=None, text="", user=None, date=None,
524 match=util.always, force=False, lock=None, wlock=None,
523 match=util.always, force=False, lock=None, wlock=None,
525 force_editor=False):
524 force_editor=False):
526 commit = []
525 commit = []
527 remove = []
526 remove = []
528 changed = []
527 changed = []
529
528
530 if files:
529 if files:
531 for f in files:
530 for f in files:
532 s = self.dirstate.state(f)
531 s = self.dirstate.state(f)
533 if s in 'nmai':
532 if s in 'nmai':
534 commit.append(f)
533 commit.append(f)
535 elif s == 'r':
534 elif s == 'r':
536 remove.append(f)
535 remove.append(f)
537 else:
536 else:
538 self.ui.warn(_("%s not tracked!\n") % f)
537 self.ui.warn(_("%s not tracked!\n") % f)
539 else:
538 else:
540 modified, added, removed, deleted, unknown = self.changes(match=match)
539 modified, added, removed, deleted, unknown = self.changes(match=match)
541 commit = modified + added
540 commit = modified + added
542 remove = removed
541 remove = removed
543
542
544 p1, p2 = self.dirstate.parents()
543 p1, p2 = self.dirstate.parents()
545 c1 = self.changelog.read(p1)
544 c1 = self.changelog.read(p1)
546 c2 = self.changelog.read(p2)
545 c2 = self.changelog.read(p2)
547 m1 = self.manifest.read(c1[0])
546 m1 = self.manifest.read(c1[0])
548 mf1 = self.manifest.readflags(c1[0])
547 mf1 = self.manifest.readflags(c1[0])
549 m2 = self.manifest.read(c2[0])
548 m2 = self.manifest.read(c2[0])
550
549
551 if not commit and not remove and not force and p2 == nullid:
550 if not commit and not remove and not force and p2 == nullid:
552 self.ui.status(_("nothing changed\n"))
551 self.ui.status(_("nothing changed\n"))
553 return None
552 return None
554
553
555 xp1 = hex(p1)
554 xp1 = hex(p1)
556 if p2 == nullid: xp2 = ''
555 if p2 == nullid: xp2 = ''
557 else: xp2 = hex(p2)
556 else: xp2 = hex(p2)
558
557
559 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
558 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
560
559
561 if not wlock:
560 if not wlock:
562 wlock = self.wlock()
561 wlock = self.wlock()
563 if not lock:
562 if not lock:
564 lock = self.lock()
563 lock = self.lock()
565 tr = self.transaction()
564 tr = self.transaction()
566
565
567 # check in files
566 # check in files
568 new = {}
567 new = {}
569 linkrev = self.changelog.count()
568 linkrev = self.changelog.count()
570 commit.sort()
569 commit.sort()
571 for f in commit:
570 for f in commit:
572 self.ui.note(f + "\n")
571 self.ui.note(f + "\n")
573 try:
572 try:
574 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
573 mf1.set(f, util.is_exec(self.wjoin(f), mf1.execf(f)))
575 t = self.wread(f)
574 t = self.wread(f)
576 except IOError:
575 except IOError:
577 self.ui.warn(_("trouble committing %s!\n") % f)
576 self.ui.warn(_("trouble committing %s!\n") % f)
578 raise
577 raise
579
578
580 r = self.file(f)
579 r = self.file(f)
581
580
582 meta = {}
581 meta = {}
583 cp = self.dirstate.copied(f)
582 cp = self.dirstate.copied(f)
584 if cp:
583 if cp:
585 meta["copy"] = cp
584 meta["copy"] = cp
586 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
585 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
587 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
586 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
588 fp1, fp2 = nullid, nullid
587 fp1, fp2 = nullid, nullid
589 else:
588 else:
590 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
589 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
591 if entry:
590 if entry:
592 new[f] = entry
591 new[f] = entry
593 continue
592 continue
594
593
595 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
594 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
596 # remember what we've added so that we can later calculate
595 # remember what we've added so that we can later calculate
597 # the files to pull from a set of changesets
596 # the files to pull from a set of changesets
598 changed.append(f)
597 changed.append(f)
599
598
600 # update manifest
599 # update manifest
601 m1 = m1.copy()
600 m1 = m1.copy()
602 m1.update(new)
601 m1.update(new)
603 for f in remove:
602 for f in remove:
604 if f in m1:
603 if f in m1:
605 del m1[f]
604 del m1[f]
606 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
605 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
607 (new, remove))
606 (new, remove))
608
607
609 # add changeset
608 # add changeset
610 new = new.keys()
609 new = new.keys()
611 new.sort()
610 new.sort()
612
611
613 user = user or self.ui.username()
612 user = user or self.ui.username()
614 if not text or force_editor:
613 if not text or force_editor:
615 edittext = []
614 edittext = []
616 if text:
615 if text:
617 edittext.append(text)
616 edittext.append(text)
618 edittext.append("")
617 edittext.append("")
619 if p2 != nullid:
618 if p2 != nullid:
620 edittext.append("HG: branch merge")
619 edittext.append("HG: branch merge")
621 edittext.extend(["HG: changed %s" % f for f in changed])
620 edittext.extend(["HG: changed %s" % f for f in changed])
622 edittext.extend(["HG: removed %s" % f for f in remove])
621 edittext.extend(["HG: removed %s" % f for f in remove])
623 if not changed and not remove:
622 if not changed and not remove:
624 edittext.append("HG: no files changed")
623 edittext.append("HG: no files changed")
625 edittext.append("")
624 edittext.append("")
626 # run editor in the repository root
625 # run editor in the repository root
627 olddir = os.getcwd()
626 olddir = os.getcwd()
628 os.chdir(self.root)
627 os.chdir(self.root)
629 text = self.ui.edit("\n".join(edittext), user)
628 text = self.ui.edit("\n".join(edittext), user)
630 os.chdir(olddir)
629 os.chdir(olddir)
631
630
632 lines = [line.rstrip() for line in text.rstrip().splitlines()]
631 lines = [line.rstrip() for line in text.rstrip().splitlines()]
633 while lines and not lines[0]:
632 while lines and not lines[0]:
634 del lines[0]
633 del lines[0]
635 if not lines:
634 if not lines:
636 return None
635 return None
637 text = '\n'.join(lines)
636 text = '\n'.join(lines)
638 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
637 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
639 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
638 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
640 parent2=xp2)
639 parent2=xp2)
641 tr.close()
640 tr.close()
642
641
643 self.dirstate.setparents(n)
642 self.dirstate.setparents(n)
644 self.dirstate.update(new, "n")
643 self.dirstate.update(new, "n")
645 self.dirstate.forget(remove)
644 self.dirstate.forget(remove)
646
645
647 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
646 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
648 return n
647 return n
649
648
650 def walk(self, node=None, files=[], match=util.always, badmatch=None):
649 def walk(self, node=None, files=[], match=util.always, badmatch=None):
651 if node:
650 if node:
652 fdict = dict.fromkeys(files)
651 fdict = dict.fromkeys(files)
653 for fn in self.manifest.read(self.changelog.read(node)[0]):
652 for fn in self.manifest.read(self.changelog.read(node)[0]):
654 fdict.pop(fn, None)
653 fdict.pop(fn, None)
655 if match(fn):
654 if match(fn):
656 yield 'm', fn
655 yield 'm', fn
657 for fn in fdict:
656 for fn in fdict:
658 if badmatch and badmatch(fn):
657 if badmatch and badmatch(fn):
659 if match(fn):
658 if match(fn):
660 yield 'b', fn
659 yield 'b', fn
661 else:
660 else:
662 self.ui.warn(_('%s: No such file in rev %s\n') % (
661 self.ui.warn(_('%s: No such file in rev %s\n') % (
663 util.pathto(self.getcwd(), fn), short(node)))
662 util.pathto(self.getcwd(), fn), short(node)))
664 else:
663 else:
665 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
664 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
666 yield src, fn
665 yield src, fn
667
666
668 def status(self, node1=None, node2=None, files=[], match=util.always,
667 def status(self, node1=None, node2=None, files=[], match=util.always,
669 wlock=None, list_ignored=False, list_clean=False):
668 wlock=None, list_ignored=False, list_clean=False):
670 """return status of files between two nodes or node and working directory
669 """return status of files between two nodes or node and working directory
671
670
672 If node1 is None, use the first dirstate parent instead.
671 If node1 is None, use the first dirstate parent instead.
673 If node2 is None, compare node1 with working directory.
672 If node2 is None, compare node1 with working directory.
674 """
673 """
675
674
676 def fcmp(fn, mf):
675 def fcmp(fn, mf):
677 t1 = self.wread(fn)
676 t1 = self.wread(fn)
678 t2 = self.file(fn).read(mf.get(fn, nullid))
677 t2 = self.file(fn).read(mf.get(fn, nullid))
679 return cmp(t1, t2)
678 return cmp(t1, t2)
680
679
681 def mfmatches(node):
680 def mfmatches(node):
682 change = self.changelog.read(node)
681 change = self.changelog.read(node)
683 mf = dict(self.manifest.read(change[0]))
682 mf = dict(self.manifest.read(change[0]))
684 for fn in mf.keys():
683 for fn in mf.keys():
685 if not match(fn):
684 if not match(fn):
686 del mf[fn]
685 del mf[fn]
687 return mf
686 return mf
688
687
689 modified, added, removed, deleted, unknown = [], [], [], [], []
688 modified, added, removed, deleted, unknown = [], [], [], [], []
690 ignored, clean = [], []
689 ignored, clean = [], []
691
690
692 compareworking = False
691 compareworking = False
693 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
692 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
694 compareworking = True
693 compareworking = True
695
694
696 if not compareworking:
695 if not compareworking:
697 # read the manifest from node1 before the manifest from node2,
696 # read the manifest from node1 before the manifest from node2,
698 # so that we'll hit the manifest cache if we're going through
697 # so that we'll hit the manifest cache if we're going through
699 # all the revisions in parent->child order.
698 # all the revisions in parent->child order.
700 mf1 = mfmatches(node1)
699 mf1 = mfmatches(node1)
701
700
702 # are we comparing the working directory?
701 # are we comparing the working directory?
703 if not node2:
702 if not node2:
704 if not wlock:
703 if not wlock:
705 try:
704 try:
706 wlock = self.wlock(wait=0)
705 wlock = self.wlock(wait=0)
707 except lock.LockException:
706 except lock.LockException:
708 wlock = None
707 wlock = None
709 (lookup, modified, added, removed, deleted, unknown,
708 (lookup, modified, added, removed, deleted, unknown,
710 ignored, clean) = self.dirstate.status(files, match,
709 ignored, clean) = self.dirstate.status(files, match,
711 list_ignored, list_clean)
710 list_ignored, list_clean)
712
711
713 # are we comparing working dir against its parent?
712 # are we comparing working dir against its parent?
714 if compareworking:
713 if compareworking:
715 if lookup:
714 if lookup:
716 # do a full compare of any files that might have changed
715 # do a full compare of any files that might have changed
717 mf2 = mfmatches(self.dirstate.parents()[0])
716 mf2 = mfmatches(self.dirstate.parents()[0])
718 for f in lookup:
717 for f in lookup:
719 if fcmp(f, mf2):
718 if fcmp(f, mf2):
720 modified.append(f)
719 modified.append(f)
721 elif wlock is not None:
720 elif wlock is not None:
722 self.dirstate.update([f], "n")
721 self.dirstate.update([f], "n")
723 else:
722 else:
724 # we are comparing working dir against non-parent
723 # we are comparing working dir against non-parent
725 # generate a pseudo-manifest for the working dir
724 # generate a pseudo-manifest for the working dir
726 mf2 = mfmatches(self.dirstate.parents()[0])
725 mf2 = mfmatches(self.dirstate.parents()[0])
727 for f in lookup + modified + added:
726 for f in lookup + modified + added:
728 mf2[f] = ""
727 mf2[f] = ""
729 for f in removed:
728 for f in removed:
730 if f in mf2:
729 if f in mf2:
731 del mf2[f]
730 del mf2[f]
732 else:
731 else:
733 # we are comparing two revisions
732 # we are comparing two revisions
734 mf2 = mfmatches(node2)
733 mf2 = mfmatches(node2)
735
734
736 if not compareworking:
735 if not compareworking:
737 # flush lists from dirstate before comparing manifests
736 # flush lists from dirstate before comparing manifests
738 modified, added, clean = [], [], []
737 modified, added, clean = [], [], []
739
738
740 # make sure to sort the files so we talk to the disk in a
739 # make sure to sort the files so we talk to the disk in a
741 # reasonable order
740 # reasonable order
742 mf2keys = mf2.keys()
741 mf2keys = mf2.keys()
743 mf2keys.sort()
742 mf2keys.sort()
744 for fn in mf2keys:
743 for fn in mf2keys:
745 if mf1.has_key(fn):
744 if mf1.has_key(fn):
746 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
745 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
747 modified.append(fn)
746 modified.append(fn)
748 elif list_clean:
747 elif list_clean:
749 clean.append(fn)
748 clean.append(fn)
750 del mf1[fn]
749 del mf1[fn]
751 else:
750 else:
752 added.append(fn)
751 added.append(fn)
753
752
754 removed = mf1.keys()
753 removed = mf1.keys()
755
754
756 # sort and return results:
755 # sort and return results:
757 for l in modified, added, removed, deleted, unknown, ignored, clean:
756 for l in modified, added, removed, deleted, unknown, ignored, clean:
758 l.sort()
757 l.sort()
759 return (modified, added, removed, deleted, unknown, ignored, clean)
758 return (modified, added, removed, deleted, unknown, ignored, clean)
760
759
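
When two manifests are compared, status() classifies every file in the second manifest against the first and treats whatever is left of the first as removed. A sketch of that walk with illustrative names, and with the working-directory special case (the empty pseudo-node that forces a content compare) folded into a callback:

    def diff_manifests(mf1, mf2, really_changed=lambda fn: True):
        mf1 = dict(mf1)          # copy: the leftovers become the removed list
        modified, added = [], []
        for fn in sorted(mf2):
            if fn in mf1:
                if mf1[fn] != mf2[fn] and really_changed(fn):
                    modified.append(fn)
                del mf1[fn]
            else:
                added.append(fn)
        return modified, added, sorted(mf1)
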
761 def changes(self, node1=None, node2=None, files=[], match=util.always,
760 def changes(self, node1=None, node2=None, files=[], match=util.always,
762 wlock=None, list_ignored=False, list_clean=False):
761 wlock=None, list_ignored=False, list_clean=False):
763 '''DEPRECATED - use status instead'''
762 '''DEPRECATED - use status instead'''
764 marduit = self.status(node1, node2, files, match, wlock,
763 marduit = self.status(node1, node2, files, match, wlock,
765 list_ignored, list_clean)
764 list_ignored, list_clean)
766 if list_ignored:
765 if list_ignored:
767 return marduit[:-1]
766 return marduit[:-1]
768 else:
767 else:
769 return marduit[:-2]
768 return marduit[:-2]
770
769
771 def add(self, list, wlock=None):
770 def add(self, list, wlock=None):
772 if not wlock:
771 if not wlock:
773 wlock = self.wlock()
772 wlock = self.wlock()
774 for f in list:
773 for f in list:
775 p = self.wjoin(f)
774 p = self.wjoin(f)
776 if not os.path.exists(p):
775 if not os.path.exists(p):
777 self.ui.warn(_("%s does not exist!\n") % f)
776 self.ui.warn(_("%s does not exist!\n") % f)
778 elif not os.path.isfile(p):
777 elif not os.path.isfile(p):
779 self.ui.warn(_("%s not added: only files supported currently\n")
778 self.ui.warn(_("%s not added: only files supported currently\n")
780 % f)
779 % f)
781 elif self.dirstate.state(f) in 'an':
780 elif self.dirstate.state(f) in 'an':
782 self.ui.warn(_("%s already tracked!\n") % f)
781 self.ui.warn(_("%s already tracked!\n") % f)
783 else:
782 else:
784 self.dirstate.update([f], "a")
783 self.dirstate.update([f], "a")
785
784
786 def forget(self, list, wlock=None):
785 def forget(self, list, wlock=None):
787 if not wlock:
786 if not wlock:
788 wlock = self.wlock()
787 wlock = self.wlock()
789 for f in list:
788 for f in list:
790 if self.dirstate.state(f) not in 'ai':
789 if self.dirstate.state(f) not in 'ai':
791 self.ui.warn(_("%s not added!\n") % f)
790 self.ui.warn(_("%s not added!\n") % f)
792 else:
791 else:
793 self.dirstate.forget([f])
792 self.dirstate.forget([f])
794
793
795 def remove(self, list, unlink=False, wlock=None):
794 def remove(self, list, unlink=False, wlock=None):
796 if unlink:
795 if unlink:
797 for f in list:
796 for f in list:
798 try:
797 try:
799 util.unlink(self.wjoin(f))
798 util.unlink(self.wjoin(f))
800 except OSError, inst:
799 except OSError, inst:
801 if inst.errno != errno.ENOENT:
800 if inst.errno != errno.ENOENT:
802 raise
801 raise
803 if not wlock:
802 if not wlock:
804 wlock = self.wlock()
803 wlock = self.wlock()
805 for f in list:
804 for f in list:
806 p = self.wjoin(f)
805 p = self.wjoin(f)
807 if os.path.exists(p):
806 if os.path.exists(p):
808 self.ui.warn(_("%s still exists!\n") % f)
807 self.ui.warn(_("%s still exists!\n") % f)
809 elif self.dirstate.state(f) == 'a':
808 elif self.dirstate.state(f) == 'a':
810 self.dirstate.forget([f])
809 self.dirstate.forget([f])
811 elif f not in self.dirstate:
810 elif f not in self.dirstate:
812 self.ui.warn(_("%s not tracked!\n") % f)
811 self.ui.warn(_("%s not tracked!\n") % f)
813 else:
812 else:
814 self.dirstate.update([f], "r")
813 self.dirstate.update([f], "r")
815
814
816 def undelete(self, list, wlock=None):
815 def undelete(self, list, wlock=None):
817 p = self.dirstate.parents()[0]
816 p = self.dirstate.parents()[0]
818 mn = self.changelog.read(p)[0]
817 mn = self.changelog.read(p)[0]
819 mf = self.manifest.readflags(mn)
818 mf = self.manifest.readflags(mn)
820 m = self.manifest.read(mn)
819 m = self.manifest.read(mn)
821 if not wlock:
820 if not wlock:
822 wlock = self.wlock()
821 wlock = self.wlock()
823 for f in list:
822 for f in list:
824 if self.dirstate.state(f) not in "r":
823 if self.dirstate.state(f) not in "r":
825 self.ui.warn("%s not removed!\n" % f)
824 self.ui.warn("%s not removed!\n" % f)
826 else:
825 else:
827 t = self.file(f).read(m[f])
826 t = self.file(f).read(m[f])
828 self.wwrite(f, t)
827 self.wwrite(f, t)
829 util.set_exec(self.wjoin(f), mf[f])
828 util.set_exec(self.wjoin(f), mf.execf(f))
830 self.dirstate.update([f], "n")
829 self.dirstate.update([f], "n")
831
830
832 def copy(self, source, dest, wlock=None):
831 def copy(self, source, dest, wlock=None):
833 p = self.wjoin(dest)
832 p = self.wjoin(dest)
834 if not os.path.exists(p):
833 if not os.path.exists(p):
835 self.ui.warn(_("%s does not exist!\n") % dest)
834 self.ui.warn(_("%s does not exist!\n") % dest)
836 elif not os.path.isfile(p):
835 elif not os.path.isfile(p):
837 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
836 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
838 else:
837 else:
839 if not wlock:
838 if not wlock:
840 wlock = self.wlock()
839 wlock = self.wlock()
841 if self.dirstate.state(dest) == '?':
840 if self.dirstate.state(dest) == '?':
842 self.dirstate.update([dest], "a")
841 self.dirstate.update([dest], "a")
843 self.dirstate.copy(source, dest)
842 self.dirstate.copy(source, dest)
844
843
845 def heads(self, start=None):
844 def heads(self, start=None):
846 heads = self.changelog.heads(start)
845 heads = self.changelog.heads(start)
847 # sort the output in rev descending order
846 # sort the output in rev descending order
848 heads = [(-self.changelog.rev(h), h) for h in heads]
847 heads = [(-self.changelog.rev(h), h) for h in heads]
849 heads.sort()
848 heads.sort()
850 return [n for (r, n) in heads]
849 return [n for (r, n) in heads]
851
850
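
heads() orders its output newest-first by sorting on the negated revision number. The same decorate-sort-undecorate step on its own (illustrative names):

    def heads_by_rev_desc(heads, rev):
        decorated = [(-rev(h), h) for h in heads]
        decorated.sort()
        return [h for (negrev, h) in decorated]

    print(heads_by_rev_desc(['n1', 'n2', 'n3'], {'n1': 5, 'n2': 12, 'n3': 9}.get))
    # ['n2', 'n3', 'n1']
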
852 # branchlookup returns a dict giving a list of branches for
851 # branchlookup returns a dict giving a list of branches for
853 # each head. A branch is defined as the tag of a node or
852 # each head. A branch is defined as the tag of a node or
854 # the branch of the node's parents. If a node has multiple
853 # the branch of the node's parents. If a node has multiple
855 # branch tags, tags are eliminated if they are visible from other
854 # branch tags, tags are eliminated if they are visible from other
856 # branch tags.
855 # branch tags.
857 #
856 #
858 # So, for this graph: a->b->c->d->e
857 # So, for this graph: a->b->c->d->e
859 # \ /
858 # \ /
860 # aa -----/
859 # aa -----/
861 # a has tag 2.6.12
860 # a has tag 2.6.12
862 # d has tag 2.6.13
861 # d has tag 2.6.13
863 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
862 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
864 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
863 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
865 # from the list.
864 # from the list.
866 #
865 #
867 # It is possible that more than one head will have the same branch tag.
866 # It is possible that more than one head will have the same branch tag.
868 # callers need to check the result for multiple heads under the same
867 # callers need to check the result for multiple heads under the same
869 # branch tag if that is a problem for them (ie checkout of a specific
868 # branch tag if that is a problem for them (ie checkout of a specific
870 # branch).
869 # branch).
871 #
870 #
872 # passing in a specific branch will limit the depth of the search
871 # passing in a specific branch will limit the depth of the search
873 # through the parents. It won't limit the branches returned in the
872 # through the parents. It won't limit the branches returned in the
874 # result though.
873 # result though.
875 def branchlookup(self, heads=None, branch=None):
874 def branchlookup(self, heads=None, branch=None):
876 if not heads:
875 if not heads:
877 heads = self.heads()
876 heads = self.heads()
878 headt = [ h for h in heads ]
877 headt = [ h for h in heads ]
879 chlog = self.changelog
878 chlog = self.changelog
880 branches = {}
879 branches = {}
881 merges = []
880 merges = []
882 seenmerge = {}
881 seenmerge = {}
883
882
884 # traverse the tree once for each head, recording in the branches
883 # traverse the tree once for each head, recording in the branches
885 # dict which tags are visible from this head. The branches
884 # dict which tags are visible from this head. The branches
886 # dict also records which tags are visible from each tag
885 # dict also records which tags are visible from each tag
887 # while we traverse.
886 # while we traverse.
888 while headt or merges:
887 while headt or merges:
889 if merges:
888 if merges:
890 n, found = merges.pop()
889 n, found = merges.pop()
891 visit = [n]
890 visit = [n]
892 else:
891 else:
893 h = headt.pop()
892 h = headt.pop()
894 visit = [h]
893 visit = [h]
895 found = [h]
894 found = [h]
896 seen = {}
895 seen = {}
897 while visit:
896 while visit:
898 n = visit.pop()
897 n = visit.pop()
899 if n in seen:
898 if n in seen:
900 continue
899 continue
901 pp = chlog.parents(n)
900 pp = chlog.parents(n)
902 tags = self.nodetags(n)
901 tags = self.nodetags(n)
903 if tags:
902 if tags:
904 for x in tags:
903 for x in tags:
905 if x == 'tip':
904 if x == 'tip':
906 continue
905 continue
907 for f in found:
906 for f in found:
908 branches.setdefault(f, {})[n] = 1
907 branches.setdefault(f, {})[n] = 1
909 branches.setdefault(n, {})[n] = 1
908 branches.setdefault(n, {})[n] = 1
910 break
909 break
911 if n not in found:
910 if n not in found:
912 found.append(n)
911 found.append(n)
913 if branch in tags:
912 if branch in tags:
914 continue
913 continue
915 seen[n] = 1
914 seen[n] = 1
916 if pp[1] != nullid and n not in seenmerge:
915 if pp[1] != nullid and n not in seenmerge:
917 merges.append((pp[1], [x for x in found]))
916 merges.append((pp[1], [x for x in found]))
918 seenmerge[n] = 1
917 seenmerge[n] = 1
919 if pp[0] != nullid:
918 if pp[0] != nullid:
920 visit.append(pp[0])
919 visit.append(pp[0])
921 # traverse the branches dict, eliminating branch tags from each
920 # traverse the branches dict, eliminating branch tags from each
922 # head that are visible from another branch tag for that head.
921 # head that are visible from another branch tag for that head.
923 out = {}
922 out = {}
924 viscache = {}
923 viscache = {}
925 for h in heads:
924 for h in heads:
926 def visible(node):
925 def visible(node):
927 if node in viscache:
926 if node in viscache:
928 return viscache[node]
927 return viscache[node]
929 ret = {}
928 ret = {}
930 visit = [node]
929 visit = [node]
931 while visit:
930 while visit:
932 x = visit.pop()
931 x = visit.pop()
933 if x in viscache:
932 if x in viscache:
934 ret.update(viscache[x])
933 ret.update(viscache[x])
935 elif x not in ret:
934 elif x not in ret:
936 ret[x] = 1
935 ret[x] = 1
937 if x in branches:
936 if x in branches:
938 visit[len(visit):] = branches[x].keys()
937 visit[len(visit):] = branches[x].keys()
939 viscache[node] = ret
938 viscache[node] = ret
940 return ret
939 return ret
941 if h not in branches:
940 if h not in branches:
942 continue
941 continue
943 # O(n^2), but somewhat limited. This only searches the
942 # O(n^2), but somewhat limited. This only searches the
944 # tags visible from a specific head, not all the tags in the
943 # tags visible from a specific head, not all the tags in the
945 # whole repo.
944 # whole repo.
946 for b in branches[h]:
945 for b in branches[h]:
947 vis = False
946 vis = False
948 for bb in branches[h].keys():
947 for bb in branches[h].keys():
949 if b != bb:
948 if b != bb:
950 if b in visible(bb):
949 if b in visible(bb):
951 vis = True
950 vis = True
952 break
951 break
953 if not vis:
952 if not vis:
954 l = out.setdefault(h, [])
953 l = out.setdefault(h, [])
955 l[len(l):] = self.nodetags(b)
954 l[len(l):] = self.nodetags(b)
956 return out
955 return out
957
956
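# Illustration (not part of the module): a minimal standalone sketch of the
# tag-elimination step described before branchlookup, run on the example
# graph a->b->c->d->e with aa merging back into c. The parent table and tag
# names below are hypothetical illustration data, not repository API.
def eliminate_visible_tags(tagged, parents):
    # keep only tagged nodes that are not ancestors of another tagged node
    def ancestors(n):
        seen, stack = set(), [n]
        while stack:
            for p in parents.get(stack.pop(), []):
                if p not in seen:
                    seen.add(p)
                    stack.append(p)
        return seen
    return [t for t in tagged
            if not any(t in ancestors(u) for u in tagged if u != t)]

parents = {'b': ['a'], 'aa': ['a'], 'c': ['b', 'aa'], 'd': ['c'], 'e': ['d']}
# head 'e' sees the tag nodes 'a' (2.6.12) and 'd' (2.6.13); since 'a' is an
# ancestor of 'd', only 'd' survives:
print(eliminate_visible_tags(['a', 'd'], parents))   # -> ['d']
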
958 def branches(self, nodes):
957 def branches(self, nodes):
959 if not nodes:
958 if not nodes:
960 nodes = [self.changelog.tip()]
959 nodes = [self.changelog.tip()]
961 b = []
960 b = []
962 for n in nodes:
961 for n in nodes:
963 t = n
962 t = n
964 while 1:
963 while 1:
965 p = self.changelog.parents(n)
964 p = self.changelog.parents(n)
966 if p[1] != nullid or p[0] == nullid:
965 if p[1] != nullid or p[0] == nullid:
967 b.append((t, n, p[0], p[1]))
966 b.append((t, n, p[0], p[1]))
968 break
967 break
969 n = p[0]
968 n = p[0]
970 return b
969 return b
971
970
972 def between(self, pairs):
971 def between(self, pairs):
973 r = []
972 r = []
974
973
975 for top, bottom in pairs:
974 for top, bottom in pairs:
976 n, l, i = top, [], 0
975 n, l, i = top, [], 0
977 f = 1
976 f = 1
978
977
979 while n != bottom:
978 while n != bottom:
980 p = self.changelog.parents(n)[0]
979 p = self.changelog.parents(n)[0]
981 if i == f:
980 if i == f:
982 l.append(n)
981 l.append(n)
983 f = f * 2
982 f = f * 2
984 n = p
983 n = p
985 i += 1
984 i += 1
986
985
987 r.append(l)
986 r.append(l)
988
987
989 return r
988 return r
990
989
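# Illustration (not part of the module): between() samples ancestors at
# exponentially growing distances along the first-parent chain, which is
# what makes the later binary search cheap. A minimal sketch on a purely
# linear history, with plain integers standing in for changelog nodes:
def sample_between(top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:           # same i == f / f *= 2 stepping as above
            l.append(n)
            f *= 2
        n -= 1               # "first parent" of rev n in a linear history
        i += 1
    return l

print(sample_between(20, 0))   # -> [19, 18, 16, 12, 4]
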
991 def findincoming(self, remote, base=None, heads=None, force=False):
990 def findincoming(self, remote, base=None, heads=None, force=False):
992 """Return list of roots of the subsets of missing nodes from remote
991 """Return list of roots of the subsets of missing nodes from remote
993
992
994 If base dict is specified, assume that these nodes and their parents
993 If base dict is specified, assume that these nodes and their parents
995 exist on the remote side and that no child of a node of base exists
994 exist on the remote side and that no child of a node of base exists
996 in both remote and self.
995 in both remote and self.
997 Furthermore base will be updated to include the nodes that exist
996 Furthermore base will be updated to include the nodes that exist
998 in self and remote but whose children do not exist in self and remote.
997 in self and remote but whose children do not exist in self and remote.
999 If a list of heads is specified, return only nodes which are heads
998 If a list of heads is specified, return only nodes which are heads
1000 or ancestors of these heads.
999 or ancestors of these heads.
1001
1000
1002 All the ancestors of base are in self and in remote.
1001 All the ancestors of base are in self and in remote.
1003 All the descendants of the list returned are missing in self.
1002 All the descendants of the list returned are missing in self.
1004 (and so we know that the rest of the nodes are missing in remote, see
1003 (and so we know that the rest of the nodes are missing in remote, see
1005 outgoing)
1004 outgoing)
1006 """
1005 """
1007 m = self.changelog.nodemap
1006 m = self.changelog.nodemap
1008 search = []
1007 search = []
1009 fetch = {}
1008 fetch = {}
1010 seen = {}
1009 seen = {}
1011 seenbranch = {}
1010 seenbranch = {}
1012 if base == None:
1011 if base == None:
1013 base = {}
1012 base = {}
1014
1013
1015 if not heads:
1014 if not heads:
1016 heads = remote.heads()
1015 heads = remote.heads()
1017
1016
1018 if self.changelog.tip() == nullid:
1017 if self.changelog.tip() == nullid:
1019 base[nullid] = 1
1018 base[nullid] = 1
1020 if heads != [nullid]:
1019 if heads != [nullid]:
1021 return [nullid]
1020 return [nullid]
1022 return []
1021 return []
1023
1022
1024 # assume we're closer to the tip than the root
1023 # assume we're closer to the tip than the root
1025 # and start by examining the heads
1024 # and start by examining the heads
1026 self.ui.status(_("searching for changes\n"))
1025 self.ui.status(_("searching for changes\n"))
1027
1026
1028 unknown = []
1027 unknown = []
1029 for h in heads:
1028 for h in heads:
1030 if h not in m:
1029 if h not in m:
1031 unknown.append(h)
1030 unknown.append(h)
1032 else:
1031 else:
1033 base[h] = 1
1032 base[h] = 1
1034
1033
1035 if not unknown:
1034 if not unknown:
1036 return []
1035 return []
1037
1036
1038 req = dict.fromkeys(unknown)
1037 req = dict.fromkeys(unknown)
1039 reqcnt = 0
1038 reqcnt = 0
1040
1039
1041 # search through remote branches
1040 # search through remote branches
1042 # a 'branch' here is a linear segment of history, with four parts:
1041 # a 'branch' here is a linear segment of history, with four parts:
1043 # head, root, first parent, second parent
1042 # head, root, first parent, second parent
1044 # (a branch always has two parents (or none) by definition)
1043 # (a branch always has two parents (or none) by definition)
1045 unknown = remote.branches(unknown)
1044 unknown = remote.branches(unknown)
1046 while unknown:
1045 while unknown:
1047 r = []
1046 r = []
1048 while unknown:
1047 while unknown:
1049 n = unknown.pop(0)
1048 n = unknown.pop(0)
1050 if n[0] in seen:
1049 if n[0] in seen:
1051 continue
1050 continue
1052
1051
1053 self.ui.debug(_("examining %s:%s\n")
1052 self.ui.debug(_("examining %s:%s\n")
1054 % (short(n[0]), short(n[1])))
1053 % (short(n[0]), short(n[1])))
1055 if n[0] == nullid: # found the end of the branch
1054 if n[0] == nullid: # found the end of the branch
1056 pass
1055 pass
1057 elif n in seenbranch:
1056 elif n in seenbranch:
1058 self.ui.debug(_("branch already found\n"))
1057 self.ui.debug(_("branch already found\n"))
1059 continue
1058 continue
1060 elif n[1] and n[1] in m: # do we know the base?
1059 elif n[1] and n[1] in m: # do we know the base?
1061 self.ui.debug(_("found incomplete branch %s:%s\n")
1060 self.ui.debug(_("found incomplete branch %s:%s\n")
1062 % (short(n[0]), short(n[1])))
1061 % (short(n[0]), short(n[1])))
1063 search.append(n) # schedule branch range for scanning
1062 search.append(n) # schedule branch range for scanning
1064 seenbranch[n] = 1
1063 seenbranch[n] = 1
1065 else:
1064 else:
1066 if n[1] not in seen and n[1] not in fetch:
1065 if n[1] not in seen and n[1] not in fetch:
1067 if n[2] in m and n[3] in m:
1066 if n[2] in m and n[3] in m:
1068 self.ui.debug(_("found new changeset %s\n") %
1067 self.ui.debug(_("found new changeset %s\n") %
1069 short(n[1]))
1068 short(n[1]))
1070 fetch[n[1]] = 1 # earliest unknown
1069 fetch[n[1]] = 1 # earliest unknown
1071 for p in n[2:4]:
1070 for p in n[2:4]:
1072 if p in m:
1071 if p in m:
1073 base[p] = 1 # latest known
1072 base[p] = 1 # latest known
1074
1073
1075 for p in n[2:4]:
1074 for p in n[2:4]:
1076 if p not in req and p not in m:
1075 if p not in req and p not in m:
1077 r.append(p)
1076 r.append(p)
1078 req[p] = 1
1077 req[p] = 1
1079 seen[n[0]] = 1
1078 seen[n[0]] = 1
1080
1079
1081 if r:
1080 if r:
1082 reqcnt += 1
1081 reqcnt += 1
1083 self.ui.debug(_("request %d: %s\n") %
1082 self.ui.debug(_("request %d: %s\n") %
1084 (reqcnt, " ".join(map(short, r))))
1083 (reqcnt, " ".join(map(short, r))))
1085 for p in range(0, len(r), 10):
1084 for p in range(0, len(r), 10):
1086 for b in remote.branches(r[p:p+10]):
1085 for b in remote.branches(r[p:p+10]):
1087 self.ui.debug(_("received %s:%s\n") %
1086 self.ui.debug(_("received %s:%s\n") %
1088 (short(b[0]), short(b[1])))
1087 (short(b[0]), short(b[1])))
1089 unknown.append(b)
1088 unknown.append(b)
1090
1089
1091 # do binary search on the branches we found
1090 # do binary search on the branches we found
1092 while search:
1091 while search:
1093 n = search.pop(0)
1092 n = search.pop(0)
1094 reqcnt += 1
1093 reqcnt += 1
1095 l = remote.between([(n[0], n[1])])[0]
1094 l = remote.between([(n[0], n[1])])[0]
1096 l.append(n[1])
1095 l.append(n[1])
1097 p = n[0]
1096 p = n[0]
1098 f = 1
1097 f = 1
1099 for i in l:
1098 for i in l:
1100 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1099 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1101 if i in m:
1100 if i in m:
1102 if f <= 2:
1101 if f <= 2:
1103 self.ui.debug(_("found new branch changeset %s\n") %
1102 self.ui.debug(_("found new branch changeset %s\n") %
1104 short(p))
1103 short(p))
1105 fetch[p] = 1
1104 fetch[p] = 1
1106 base[i] = 1
1105 base[i] = 1
1107 else:
1106 else:
1108 self.ui.debug(_("narrowed branch search to %s:%s\n")
1107 self.ui.debug(_("narrowed branch search to %s:%s\n")
1109 % (short(p), short(i)))
1108 % (short(p), short(i)))
1110 search.append((p, i))
1109 search.append((p, i))
1111 break
1110 break
1112 p, f = i, f * 2
1111 p, f = i, f * 2
1113
1112
1114 # sanity check our fetch list
1113 # sanity check our fetch list
1115 for f in fetch.keys():
1114 for f in fetch.keys():
1116 if f in m:
1115 if f in m:
1117 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1116 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1118
1117
1119 if base.keys() == [nullid]:
1118 if base.keys() == [nullid]:
1120 if force:
1119 if force:
1121 self.ui.warn(_("warning: repository is unrelated\n"))
1120 self.ui.warn(_("warning: repository is unrelated\n"))
1122 else:
1121 else:
1123 raise util.Abort(_("repository is unrelated"))
1122 raise util.Abort(_("repository is unrelated"))
1124
1123
1125 self.ui.note(_("found new changesets starting at ") +
1124 self.ui.note(_("found new changesets starting at ") +
1126 " ".join([short(f) for f in fetch]) + "\n")
1125 " ".join([short(f) for f in fetch]) + "\n")
1127
1126
1128 self.ui.debug(_("%d total queries\n") % reqcnt)
1127 self.ui.debug(_("%d total queries\n") % reqcnt)
1129
1128
1130 return fetch.keys()
1129 return fetch.keys()
1131
1130
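# Illustration (not part of the module): the first step of the discovery in
# findincoming is just classifying the remote heads against the local
# nodemap; known heads seed `base`, unknown ones feed the branch search.
# The short strings below are hypothetical stand-ins for node ids.
def split_heads(remote_heads, local_nodemap):
    base, unknown = {}, []
    for h in remote_heads:
        if h in local_nodemap:
            base[h] = 1           # already present locally: common with remote
        else:
            unknown.append(h)     # needs the branch/binary search above
    return base, unknown

print(split_heads(['bbb', 'ccc'], {'aaa': 0, 'bbb': 1}))
# -> ({'bbb': 1}, ['ccc'])
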
1132 def findoutgoing(self, remote, base=None, heads=None, force=False):
1131 def findoutgoing(self, remote, base=None, heads=None, force=False):
1133 """Return list of nodes that are roots of subsets not in remote
1132 """Return list of nodes that are roots of subsets not in remote
1134
1133
1135 If base dict is specified, assume that these nodes and their parents
1134 If base dict is specified, assume that these nodes and their parents
1136 exist on the remote side.
1135 exist on the remote side.
1137 If a list of heads is specified, return only nodes which are heads
1136 If a list of heads is specified, return only nodes which are heads
1138 or ancestors of these heads, and return a second element which
1137 or ancestors of these heads, and return a second element which
1139 contains all remote heads which get new children.
1138 contains all remote heads which get new children.
1140 """
1139 """
1141 if base == None:
1140 if base == None:
1142 base = {}
1141 base = {}
1143 self.findincoming(remote, base, heads, force=force)
1142 self.findincoming(remote, base, heads, force=force)
1144
1143
1145 self.ui.debug(_("common changesets up to ")
1144 self.ui.debug(_("common changesets up to ")
1146 + " ".join(map(short, base.keys())) + "\n")
1145 + " ".join(map(short, base.keys())) + "\n")
1147
1146
1148 remain = dict.fromkeys(self.changelog.nodemap)
1147 remain = dict.fromkeys(self.changelog.nodemap)
1149
1148
1150 # prune everything remote has from the tree
1149 # prune everything remote has from the tree
1151 del remain[nullid]
1150 del remain[nullid]
1152 remove = base.keys()
1151 remove = base.keys()
1153 while remove:
1152 while remove:
1154 n = remove.pop(0)
1153 n = remove.pop(0)
1155 if n in remain:
1154 if n in remain:
1156 del remain[n]
1155 del remain[n]
1157 for p in self.changelog.parents(n):
1156 for p in self.changelog.parents(n):
1158 remove.append(p)
1157 remove.append(p)
1159
1158
1160 # find every node whose parents have been pruned
1159 # find every node whose parents have been pruned
1161 subset = []
1160 subset = []
1162 # find every remote head that will get new children
1161 # find every remote head that will get new children
1163 updated_heads = {}
1162 updated_heads = {}
1164 for n in remain:
1163 for n in remain:
1165 p1, p2 = self.changelog.parents(n)
1164 p1, p2 = self.changelog.parents(n)
1166 if p1 not in remain and p2 not in remain:
1165 if p1 not in remain and p2 not in remain:
1167 subset.append(n)
1166 subset.append(n)
1168 if heads:
1167 if heads:
1169 if p1 in heads:
1168 if p1 in heads:
1170 updated_heads[p1] = True
1169 updated_heads[p1] = True
1171 if p2 in heads:
1170 if p2 in heads:
1172 updated_heads[p2] = True
1171 updated_heads[p2] = True
1173
1172
1174 # this is the set of all roots we have to push
1173 # this is the set of all roots we have to push
1175 if heads:
1174 if heads:
1176 return subset, updated_heads.keys()
1175 return subset, updated_heads.keys()
1177 else:
1176 else:
1178 return subset
1177 return subset
1179
1178
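# Illustration (not part of the module): findoutgoing prunes everything
# reachable from the common `base` nodes and keeps the nodes whose parents
# were all pruned - those are the roots to push. A standalone sketch with a
# hypothetical single-character node graph (None plays the role of nullid):
def outgoing_roots(nodes, parents, base):
    remain = set(nodes)
    remove = list(base)
    while remove:                         # prune base and all its ancestors
        n = remove.pop()
        if n in remain:
            remain.discard(n)
            remove.extend(p for p in parents[n] if p)
    return [n for n in remain
            if all(p not in remain for p in parents[n])]

parents = {'a': (None, None), 'b': ('a', None), 'c': ('b', None),
           'd': ('c', None), 'e': ('d', None)}
print(outgoing_roots('abcde', parents, base=['c']))   # -> ['d']
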
1180 def pull(self, remote, heads=None, force=False, lock=None):
1179 def pull(self, remote, heads=None, force=False, lock=None):
1181 mylock = False
1180 mylock = False
1182 if not lock:
1181 if not lock:
1183 lock = self.lock()
1182 lock = self.lock()
1184 mylock = True
1183 mylock = True
1185
1184
1186 try:
1185 try:
1187 fetch = self.findincoming(remote, force=force)
1186 fetch = self.findincoming(remote, force=force)
1188 if fetch == [nullid]:
1187 if fetch == [nullid]:
1189 self.ui.status(_("requesting all changes\n"))
1188 self.ui.status(_("requesting all changes\n"))
1190
1189
1191 if not fetch:
1190 if not fetch:
1192 self.ui.status(_("no changes found\n"))
1191 self.ui.status(_("no changes found\n"))
1193 return 0
1192 return 0
1194
1193
1195 if heads is None:
1194 if heads is None:
1196 cg = remote.changegroup(fetch, 'pull')
1195 cg = remote.changegroup(fetch, 'pull')
1197 else:
1196 else:
1198 cg = remote.changegroupsubset(fetch, heads, 'pull')
1197 cg = remote.changegroupsubset(fetch, heads, 'pull')
1199 return self.addchangegroup(cg, 'pull', remote.url())
1198 return self.addchangegroup(cg, 'pull', remote.url())
1200 finally:
1199 finally:
1201 if mylock:
1200 if mylock:
1202 lock.release()
1201 lock.release()
1203
1202
1204 def push(self, remote, force=False, revs=None):
1203 def push(self, remote, force=False, revs=None):
1205 # there are two ways to push to remote repo:
1204 # there are two ways to push to remote repo:
1206 #
1205 #
1207 # addchangegroup assumes local user can lock remote
1206 # addchangegroup assumes local user can lock remote
1208 # repo (local filesystem, old ssh servers).
1207 # repo (local filesystem, old ssh servers).
1209 #
1208 #
1210 # unbundle assumes local user cannot lock remote repo (new ssh
1209 # unbundle assumes local user cannot lock remote repo (new ssh
1211 # servers, http servers).
1210 # servers, http servers).
1212
1211
1213 if remote.capable('unbundle'):
1212 if remote.capable('unbundle'):
1214 return self.push_unbundle(remote, force, revs)
1213 return self.push_unbundle(remote, force, revs)
1215 return self.push_addchangegroup(remote, force, revs)
1214 return self.push_addchangegroup(remote, force, revs)
1216
1215
1217 def prepush(self, remote, force, revs):
1216 def prepush(self, remote, force, revs):
1218 base = {}
1217 base = {}
1219 remote_heads = remote.heads()
1218 remote_heads = remote.heads()
1220 inc = self.findincoming(remote, base, remote_heads, force=force)
1219 inc = self.findincoming(remote, base, remote_heads, force=force)
1221 if not force and inc:
1220 if not force and inc:
1222 self.ui.warn(_("abort: unsynced remote changes!\n"))
1221 self.ui.warn(_("abort: unsynced remote changes!\n"))
1223 self.ui.status(_("(did you forget to sync?"
1222 self.ui.status(_("(did you forget to sync?"
1224 " use push -f to force)\n"))
1223 " use push -f to force)\n"))
1225 return None, 1
1224 return None, 1
1226
1225
1227 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1226 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1228 if revs is not None:
1227 if revs is not None:
1229 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1228 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1230 else:
1229 else:
1231 bases, heads = update, self.changelog.heads()
1230 bases, heads = update, self.changelog.heads()
1232
1231
1233 if not bases:
1232 if not bases:
1234 self.ui.status(_("no changes found\n"))
1233 self.ui.status(_("no changes found\n"))
1235 return None, 1
1234 return None, 1
1236 elif not force:
1235 elif not force:
1237 # FIXME we don't properly detect creation of new heads
1236 # FIXME we don't properly detect creation of new heads
1238 # in the push -r case, assume the user knows what he's doing
1237 # in the push -r case, assume the user knows what he's doing
1239 if not revs and len(remote_heads) < len(heads) \
1238 if not revs and len(remote_heads) < len(heads) \
1240 and remote_heads != [nullid]:
1239 and remote_heads != [nullid]:
1241 self.ui.warn(_("abort: push creates new remote branches!\n"))
1240 self.ui.warn(_("abort: push creates new remote branches!\n"))
1242 self.ui.status(_("(did you forget to merge?"
1241 self.ui.status(_("(did you forget to merge?"
1243 " use push -f to force)\n"))
1242 " use push -f to force)\n"))
1244 return None, 1
1243 return None, 1
1245
1244
1246 if revs is None:
1245 if revs is None:
1247 cg = self.changegroup(update, 'push')
1246 cg = self.changegroup(update, 'push')
1248 else:
1247 else:
1249 cg = self.changegroupsubset(update, revs, 'push')
1248 cg = self.changegroupsubset(update, revs, 'push')
1250 return cg, remote_heads
1249 return cg, remote_heads
1251
1250
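# Illustration (not part of the module): the head check in prepush boils
# down to the rough heuristic flagged by the FIXME above - refuse the push
# when it would leave the remote with more heads than it already has. A
# minimal sketch; '<null>' stands in for nullid on an empty remote.
def creates_new_remote_heads(local_heads, remote_heads, force=False):
    if force or remote_heads == ['<null>']:   # empty remote: anything goes
        return False
    return len(remote_heads) < len(local_heads)

print(creates_new_remote_heads(['h1', 'h2'], ['h1']))   # -> True
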
1252 def push_addchangegroup(self, remote, force, revs):
1251 def push_addchangegroup(self, remote, force, revs):
1253 lock = remote.lock()
1252 lock = remote.lock()
1254
1253
1255 ret = self.prepush(remote, force, revs)
1254 ret = self.prepush(remote, force, revs)
1256 if ret[0] is not None:
1255 if ret[0] is not None:
1257 cg, remote_heads = ret
1256 cg, remote_heads = ret
1258 return remote.addchangegroup(cg, 'push', self.url())
1257 return remote.addchangegroup(cg, 'push', self.url())
1259 return ret[1]
1258 return ret[1]
1260
1259
1261 def push_unbundle(self, remote, force, revs):
1260 def push_unbundle(self, remote, force, revs):
1262 # local repo finds heads on server, finds out what revs it
1261 # local repo finds heads on server, finds out what revs it
1263 # must push. once revs transferred, if server finds it has
1262 # must push. once revs transferred, if server finds it has
1264 # different heads (someone else won commit/push race), server
1263 # different heads (someone else won commit/push race), server
1265 # aborts.
1264 # aborts.
1266
1265
1267 ret = self.prepush(remote, force, revs)
1266 ret = self.prepush(remote, force, revs)
1268 if ret[0] is not None:
1267 if ret[0] is not None:
1269 cg, remote_heads = ret
1268 cg, remote_heads = ret
1270 if force: remote_heads = ['force']
1269 if force: remote_heads = ['force']
1271 return remote.unbundle(cg, remote_heads, 'push')
1270 return remote.unbundle(cg, remote_heads, 'push')
1272 return ret[1]
1271 return ret[1]
1273
1272
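# Illustration (not part of the module): a hedged sketch of the race check
# the comment in push_unbundle describes, stated from the server's point of
# view - the client reports the remote heads it based the push on, and the
# push is rejected if the server's heads have changed since. Hypothetical
# helper, not the actual wire protocol.
def push_allowed(current_server_heads, heads_seen_by_client):
    if heads_seen_by_client == ['force']:
        return True                       # client explicitly forced the push
    return sorted(current_server_heads) == sorted(heads_seen_by_client)

print(push_allowed(['h1', 'h2'], ['h1']))   # -> False: someone pushed first
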
1274 def changegroupsubset(self, bases, heads, source):
1273 def changegroupsubset(self, bases, heads, source):
1275 """This function generates a changegroup consisting of all the nodes
1274 """This function generates a changegroup consisting of all the nodes
1276 that are descendants of any of the bases, and ancestors of any of
1275 that are descendants of any of the bases, and ancestors of any of
1277 the heads.
1276 the heads.
1278
1277
1279 It is fairly complex as determining which filenodes and which
1278 It is fairly complex as determining which filenodes and which
1280 manifest nodes need to be included for the changeset to be complete
1279 manifest nodes need to be included for the changeset to be complete
1281 is non-trivial.
1280 is non-trivial.
1282
1281
1283 Another wrinkle is doing the reverse, figuring out which changeset in
1282 Another wrinkle is doing the reverse, figuring out which changeset in
1284 the changegroup a particular filenode or manifestnode belongs to."""
1283 the changegroup a particular filenode or manifestnode belongs to."""
1285
1284
1286 self.hook('preoutgoing', throw=True, source=source)
1285 self.hook('preoutgoing', throw=True, source=source)
1287
1286
1288 # Set up some initial variables
1287 # Set up some initial variables
1289 # Make it easy to refer to self.changelog
1288 # Make it easy to refer to self.changelog
1290 cl = self.changelog
1289 cl = self.changelog
1291 # msng is short for missing - compute the list of changesets in this
1290 # msng is short for missing - compute the list of changesets in this
1292 # changegroup.
1291 # changegroup.
1293 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1292 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1294 # Some bases may turn out to be superfluous, and some heads may be
1293 # Some bases may turn out to be superfluous, and some heads may be
1295 # too. nodesbetween will return the minimal set of bases and heads
1294 # too. nodesbetween will return the minimal set of bases and heads
1296 # necessary to re-create the changegroup.
1295 # necessary to re-create the changegroup.
1297
1296
1298 # Known heads are the list of heads that it is assumed the recipient
1297 # Known heads are the list of heads that it is assumed the recipient
1299 # of this changegroup will know about.
1298 # of this changegroup will know about.
1300 knownheads = {}
1299 knownheads = {}
1301 # We assume that all parents of bases are known heads.
1300 # We assume that all parents of bases are known heads.
1302 for n in bases:
1301 for n in bases:
1303 for p in cl.parents(n):
1302 for p in cl.parents(n):
1304 if p != nullid:
1303 if p != nullid:
1305 knownheads[p] = 1
1304 knownheads[p] = 1
1306 knownheads = knownheads.keys()
1305 knownheads = knownheads.keys()
1307 if knownheads:
1306 if knownheads:
1308 # Now that we know what heads are known, we can compute which
1307 # Now that we know what heads are known, we can compute which
1309 # changesets are known. The recipient must know about all
1308 # changesets are known. The recipient must know about all
1310 # changesets required to reach the known heads from the null
1309 # changesets required to reach the known heads from the null
1311 # changeset.
1310 # changeset.
1312 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1311 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1313 junk = None
1312 junk = None
1314 # Transform the list into an ersatz set.
1313 # Transform the list into an ersatz set.
1315 has_cl_set = dict.fromkeys(has_cl_set)
1314 has_cl_set = dict.fromkeys(has_cl_set)
1316 else:
1315 else:
1317 # If there were no known heads, the recipient cannot be assumed to
1316 # If there were no known heads, the recipient cannot be assumed to
1318 # know about any changesets.
1317 # know about any changesets.
1319 has_cl_set = {}
1318 has_cl_set = {}
1320
1319
1321 # Make it easy to refer to self.manifest
1320 # Make it easy to refer to self.manifest
1322 mnfst = self.manifest
1321 mnfst = self.manifest
1323 # We don't know which manifests are missing yet
1322 # We don't know which manifests are missing yet
1324 msng_mnfst_set = {}
1323 msng_mnfst_set = {}
1325 # Nor do we know which filenodes are missing.
1324 # Nor do we know which filenodes are missing.
1326 msng_filenode_set = {}
1325 msng_filenode_set = {}
1327
1326
1328 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1327 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1329 junk = None
1328 junk = None
1330
1329
1331 # A changeset always belongs to itself, so the changenode lookup
1330 # A changeset always belongs to itself, so the changenode lookup
1332 # function for a changenode is identity.
1331 # function for a changenode is identity.
1333 def identity(x):
1332 def identity(x):
1334 return x
1333 return x
1335
1334
1336 # A function generating function. Sets up an environment for the
1335 # A function generating function. Sets up an environment for the
1337 # inner function.
1336 # inner function.
1338 def cmp_by_rev_func(revlog):
1337 def cmp_by_rev_func(revlog):
1339 # Compare two nodes by their revision number in the environment's
1338 # Compare two nodes by their revision number in the environment's
1340 # revision history. Since the revision number both represents the
1339 # revision history. Since the revision number both represents the
1341 # most efficient order to read the nodes in, and represents a
1340 # most efficient order to read the nodes in, and represents a
1342 # topological sorting of the nodes, this function is often useful.
1341 # topological sorting of the nodes, this function is often useful.
1343 def cmp_by_rev(a, b):
1342 def cmp_by_rev(a, b):
1344 return cmp(revlog.rev(a), revlog.rev(b))
1343 return cmp(revlog.rev(a), revlog.rev(b))
1345 return cmp_by_rev
1344 return cmp_by_rev
1346
1345
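# Illustration (not part of the module): revision numbers give both the
# cheapest order to read a revlog in and a topological order (a parent
# always has a lower rev than its children), which is why node lists are
# sorted this way below. A tiny example with a hypothetical node -> rev map:
revs = {'n3': 3, 'n1': 1, 'n7': 7}
nodes = ['n7', 'n1', 'n3']
nodes.sort(key=lambda n: revs[n])      # same ordering as sort(cmp_by_rev)
print(nodes)                           # -> ['n1', 'n3', 'n7']
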
1347 # If we determine that a particular file or manifest node must be a
1346 # If we determine that a particular file or manifest node must be a
1348 # node that the recipient of the changegroup will already have, we can
1347 # node that the recipient of the changegroup will already have, we can
1349 # also assume the recipient will have all the parents. This function
1348 # also assume the recipient will have all the parents. This function
1350 # prunes them from the set of missing nodes.
1349 # prunes them from the set of missing nodes.
1351 def prune_parents(revlog, hasset, msngset):
1350 def prune_parents(revlog, hasset, msngset):
1352 haslst = hasset.keys()
1351 haslst = hasset.keys()
1353 haslst.sort(cmp_by_rev_func(revlog))
1352 haslst.sort(cmp_by_rev_func(revlog))
1354 for node in haslst:
1353 for node in haslst:
1355 parentlst = [p for p in revlog.parents(node) if p != nullid]
1354 parentlst = [p for p in revlog.parents(node) if p != nullid]
1356 while parentlst:
1355 while parentlst:
1357 n = parentlst.pop()
1356 n = parentlst.pop()
1358 if n not in hasset:
1357 if n not in hasset:
1359 hasset[n] = 1
1358 hasset[n] = 1
1360 p = [p for p in revlog.parents(n) if p != nullid]
1359 p = [p for p in revlog.parents(n) if p != nullid]
1361 parentlst.extend(p)
1360 parentlst.extend(p)
1362 for n in hasset:
1361 for n in hasset:
1363 msngset.pop(n, None)
1362 msngset.pop(n, None)
1364
1363
1365 # This is a function generating function used to set up an environment
1364 # This is a function generating function used to set up an environment
1366 # for the inner function to execute in.
1365 # for the inner function to execute in.
1367 def manifest_and_file_collector(changedfileset):
1366 def manifest_and_file_collector(changedfileset):
1368 # This is an information gathering function that gathers
1367 # This is an information gathering function that gathers
1369 # information from each changeset node that goes out as part of
1368 # information from each changeset node that goes out as part of
1370 # the changegroup. The information gathered is a list of which
1369 # the changegroup. The information gathered is a list of which
1371 # manifest nodes are potentially required (the recipient may
1370 # manifest nodes are potentially required (the recipient may
1372 # already have them) and total list of all files which were
1371 # already have them) and total list of all files which were
1373 # changed in any changeset in the changegroup.
1372 # changed in any changeset in the changegroup.
1374 #
1373 #
1375 # We also remember the first changenode each manifest is
1374 # We also remember the first changenode each manifest is
1377 # referenced by, so we can later determine which changenode 'owns'
1376 # referenced by, so we can later determine which changenode 'owns'
1377 # the manifest.
1376 # the manifest.
1378 def collect_manifests_and_files(clnode):
1377 def collect_manifests_and_files(clnode):
1379 c = cl.read(clnode)
1378 c = cl.read(clnode)
1380 for f in c[3]:
1379 for f in c[3]:
1381 # This is to make sure we only have one instance of each
1380 # This is to make sure we only have one instance of each
1382 # filename string for each filename.
1381 # filename string for each filename.
1383 changedfileset.setdefault(f, f)
1382 changedfileset.setdefault(f, f)
1384 msng_mnfst_set.setdefault(c[0], clnode)
1383 msng_mnfst_set.setdefault(c[0], clnode)
1385 return collect_manifests_and_files
1384 return collect_manifests_and_files
1386
1385
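# Illustration (not part of the module): the collector above is a callback
# the changelog group generator invokes once per outgoing changeset. A
# standalone sketch of that pattern, with fake changelog entries reduced to
# (manifestnode, changed files) pairs:
def make_collector(changedfileset, msng_mnfst_set, read):
    def collect(clnode):
        mnode, fnames = read(clnode)
        for f in fnames:
            changedfileset.setdefault(f, f)        # one entry per file name
        msng_mnfst_set.setdefault(mnode, clnode)   # first referencing cset wins
    return collect

entries = {'c1': ('m1', ['a.txt']), 'c2': ('m1', ['a.txt', 'b.txt'])}
files, manifests = {}, {}
collect = make_collector(files, manifests, entries.get)
for clnode in ('c1', 'c2'):
    collect(clnode)
print(sorted(files), manifests)   # -> ['a.txt', 'b.txt'] {'m1': 'c1'}
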
1387 # Figure out which manifest nodes (of the ones we think might be part
1386 # Figure out which manifest nodes (of the ones we think might be part
1388 # of the changegroup) the recipient must know about and remove them
1387 # of the changegroup) the recipient must know about and remove them
1389 # from the changegroup.
1388 # from the changegroup.
1390 def prune_manifests():
1389 def prune_manifests():
1391 has_mnfst_set = {}
1390 has_mnfst_set = {}
1392 for n in msng_mnfst_set:
1391 for n in msng_mnfst_set:
1393 # If a 'missing' manifest thinks it belongs to a changenode
1392 # If a 'missing' manifest thinks it belongs to a changenode
1394 # the recipient is assumed to have, obviously the recipient
1393 # the recipient is assumed to have, obviously the recipient
1395 # must have that manifest.
1394 # must have that manifest.
1396 linknode = cl.node(mnfst.linkrev(n))
1395 linknode = cl.node(mnfst.linkrev(n))
1397 if linknode in has_cl_set:
1396 if linknode in has_cl_set:
1398 has_mnfst_set[n] = 1
1397 has_mnfst_set[n] = 1
1399 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1398 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1400
1399
1401 # Use the information collected in collect_manifests_and_files to say
1400 # Use the information collected in collect_manifests_and_files to say
1402 # which changenode any manifestnode belongs to.
1401 # which changenode any manifestnode belongs to.
1403 def lookup_manifest_link(mnfstnode):
1402 def lookup_manifest_link(mnfstnode):
1404 return msng_mnfst_set[mnfstnode]
1403 return msng_mnfst_set[mnfstnode]
1405
1404
1406 # A function generating function that sets up the initial environment
1405 # A function generating function that sets up the initial environment
1407 # for the inner function.
1406 # for the inner function.
1408 def filenode_collector(changedfiles):
1407 def filenode_collector(changedfiles):
1409 next_rev = [0]
1408 next_rev = [0]
1410 # This gathers information from each manifestnode included in the
1409 # This gathers information from each manifestnode included in the
1411 # changegroup about which filenodes the manifest node references
1410 # changegroup about which filenodes the manifest node references
1412 # so we can include those in the changegroup too.
1411 # so we can include those in the changegroup too.
1413 #
1412 #
1414 # It also remembers which changenode each filenode belongs to. It
1413 # It also remembers which changenode each filenode belongs to. It
1415 # does this by assuming that a filenode belongs to the changenode that
1414 # does this by assuming that a filenode belongs to the changenode that
1416 # the first manifest referencing it belongs to.
1415 # the first manifest referencing it belongs to.
1417 def collect_msng_filenodes(mnfstnode):
1416 def collect_msng_filenodes(mnfstnode):
1418 r = mnfst.rev(mnfstnode)
1417 r = mnfst.rev(mnfstnode)
1419 if r == next_rev[0]:
1418 if r == next_rev[0]:
1420 # If the last rev we looked at was the one just previous,
1419 # If the last rev we looked at was the one just previous,
1421 # we only need to see a diff.
1420 # we only need to see a diff.
1422 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1421 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1423 # For each line in the delta
1422 # For each line in the delta
1424 for dline in delta.splitlines():
1423 for dline in delta.splitlines():
1425 # get the filename and filenode for that line
1424 # get the filename and filenode for that line
1426 f, fnode = dline.split('\0')
1425 f, fnode = dline.split('\0')
1427 fnode = bin(fnode[:40])
1426 fnode = bin(fnode[:40])
1428 f = changedfiles.get(f, None)
1427 f = changedfiles.get(f, None)
1429 # And if the file is in the list of files we care
1428 # And if the file is in the list of files we care
1430 # about.
1429 # about.
1431 if f is not None:
1430 if f is not None:
1432 # Get the changenode this manifest belongs to
1431 # Get the changenode this manifest belongs to
1433 clnode = msng_mnfst_set[mnfstnode]
1432 clnode = msng_mnfst_set[mnfstnode]
1434 # Create the set of filenodes for the file if
1433 # Create the set of filenodes for the file if
1435 # there isn't one already.
1434 # there isn't one already.
1436 ndset = msng_filenode_set.setdefault(f, {})
1435 ndset = msng_filenode_set.setdefault(f, {})
1437 # And set the filenode's changelog node to the
1436 # And set the filenode's changelog node to the
1438 # manifest's if it hasn't been set already.
1437 # manifest's if it hasn't been set already.
1439 ndset.setdefault(fnode, clnode)
1438 ndset.setdefault(fnode, clnode)
1440 else:
1439 else:
1441 # Otherwise we need a full manifest.
1440 # Otherwise we need a full manifest.
1442 m = mnfst.read(mnfstnode)
1441 m = mnfst.read(mnfstnode)
1443 # For every file we care about.
1442 # For every file we care about.
1444 for f in changedfiles:
1443 for f in changedfiles:
1445 fnode = m.get(f, None)
1444 fnode = m.get(f, None)
1446 # If it's in the manifest
1445 # If it's in the manifest
1447 if fnode is not None:
1446 if fnode is not None:
1448 # See comments above.
1447 # See comments above.
1449 clnode = msng_mnfst_set[mnfstnode]
1448 clnode = msng_mnfst_set[mnfstnode]
1450 ndset = msng_filenode_set.setdefault(f, {})
1449 ndset = msng_filenode_set.setdefault(f, {})
1451 ndset.setdefault(fnode, clnode)
1450 ndset.setdefault(fnode, clnode)
1452 # Remember the revision we hope to see next.
1451 # Remember the revision we hope to see next.
1453 next_rev[0] = r + 1
1452 next_rev[0] = r + 1
1454 return collect_msng_filenodes
1453 return collect_msng_filenodes
1455
1454
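# Illustration (not part of the module): the delta parsing above relies on
# each manifest line looking like "<filename>\0<40 hex chars><flags>", so
# the filenode is the 40 hex characters after the NUL and anything after
# that (e.g. "x") is a flag. A small sketch with a made-up line:
import binascii

def parse_manifest_line(line):
    f, rest = line.rstrip('\n').split('\0')
    node = binascii.unhexlify(rest[:40])   # bin(fnode[:40]) above
    flags = rest[40:]
    return f, node, flags

line = 'bin/script\0' + '1f' * 20 + 'x\n'
print(parse_manifest_line(line))   # -> ('bin/script', <20-byte node>, 'x')
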
1456 # We have a list of filenodes we think we need for a file; let's remove
1455 # We have a list of filenodes we think we need for a file; let's remove
1458 # all those we know the recipient must have.
1457 # all those we know the recipient must have.
1458 def prune_filenodes(f, filerevlog):
1457 def prune_filenodes(f, filerevlog):
1459 msngset = msng_filenode_set[f]
1458 msngset = msng_filenode_set[f]
1460 hasset = {}
1459 hasset = {}
1461 # If a 'missing' filenode thinks it belongs to a changenode we
1460 # If a 'missing' filenode thinks it belongs to a changenode we
1462 # assume the recipient must have, then the recipient must have
1461 # assume the recipient must have, then the recipient must have
1463 # that filenode.
1462 # that filenode.
1464 for n in msngset:
1463 for n in msngset:
1465 clnode = cl.node(filerevlog.linkrev(n))
1464 clnode = cl.node(filerevlog.linkrev(n))
1466 if clnode in has_cl_set:
1465 if clnode in has_cl_set:
1467 hasset[n] = 1
1466 hasset[n] = 1
1468 prune_parents(filerevlog, hasset, msngset)
1467 prune_parents(filerevlog, hasset, msngset)
1469
1468
1470 # A function generating function that sets up a context for the
1469 # A function generating function that sets up a context for the
1471 # inner function.
1470 # inner function.
1472 def lookup_filenode_link_func(fname):
1471 def lookup_filenode_link_func(fname):
1473 msngset = msng_filenode_set[fname]
1472 msngset = msng_filenode_set[fname]
1474 # Lookup the changenode the filenode belongs to.
1473 # Lookup the changenode the filenode belongs to.
1475 def lookup_filenode_link(fnode):
1474 def lookup_filenode_link(fnode):
1476 return msngset[fnode]
1475 return msngset[fnode]
1477 return lookup_filenode_link
1476 return lookup_filenode_link
1478
1477
1479 # Now that we have all these utility functions to help out and
1478 # Now that we have all these utility functions to help out and
1480 # logically divide up the task, generate the group.
1479 # logically divide up the task, generate the group.
1481 def gengroup():
1480 def gengroup():
1482 # The set of changed files starts empty.
1481 # The set of changed files starts empty.
1483 changedfiles = {}
1482 changedfiles = {}
1484 # Create a changenode group generator that will call our functions
1483 # Create a changenode group generator that will call our functions
1485 # back to lookup the owning changenode and collect information.
1484 # back to lookup the owning changenode and collect information.
1486 group = cl.group(msng_cl_lst, identity,
1485 group = cl.group(msng_cl_lst, identity,
1487 manifest_and_file_collector(changedfiles))
1486 manifest_and_file_collector(changedfiles))
1488 for chnk in group:
1487 for chnk in group:
1489 yield chnk
1488 yield chnk
1490
1489
1491 # The list of manifests has been collected by the generator
1490 # The list of manifests has been collected by the generator
1492 # calling our functions back.
1491 # calling our functions back.
1493 prune_manifests()
1492 prune_manifests()
1494 msng_mnfst_lst = msng_mnfst_set.keys()
1493 msng_mnfst_lst = msng_mnfst_set.keys()
1495 # Sort the manifestnodes by revision number.
1494 # Sort the manifestnodes by revision number.
1496 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1495 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1497 # Create a generator for the manifestnodes that calls our lookup
1496 # Create a generator for the manifestnodes that calls our lookup
1498 # and data collection functions back.
1497 # and data collection functions back.
1499 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1498 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1500 filenode_collector(changedfiles))
1499 filenode_collector(changedfiles))
1501 for chnk in group:
1500 for chnk in group:
1502 yield chnk
1501 yield chnk
1503
1502
1504 # These are no longer needed, dereference and toss the memory for
1503 # These are no longer needed, dereference and toss the memory for
1505 # them.
1504 # them.
1506 msng_mnfst_lst = None
1505 msng_mnfst_lst = None
1507 msng_mnfst_set.clear()
1506 msng_mnfst_set.clear()
1508
1507
1509 changedfiles = changedfiles.keys()
1508 changedfiles = changedfiles.keys()
1510 changedfiles.sort()
1509 changedfiles.sort()
1511 # Go through all our files in order sorted by name.
1510 # Go through all our files in order sorted by name.
1512 for fname in changedfiles:
1511 for fname in changedfiles:
1513 filerevlog = self.file(fname)
1512 filerevlog = self.file(fname)
1514 # Toss out the filenodes that the recipient isn't really
1513 # Toss out the filenodes that the recipient isn't really
1515 # missing.
1514 # missing.
1516 if msng_filenode_set.has_key(fname):
1515 if msng_filenode_set.has_key(fname):
1517 prune_filenodes(fname, filerevlog)
1516 prune_filenodes(fname, filerevlog)
1518 msng_filenode_lst = msng_filenode_set[fname].keys()
1517 msng_filenode_lst = msng_filenode_set[fname].keys()
1519 else:
1518 else:
1520 msng_filenode_lst = []
1519 msng_filenode_lst = []
1521 # If any filenodes are left, generate the group for them,
1520 # If any filenodes are left, generate the group for them,
1522 # otherwise don't bother.
1521 # otherwise don't bother.
1523 if len(msng_filenode_lst) > 0:
1522 if len(msng_filenode_lst) > 0:
1524 yield changegroup.genchunk(fname)
1523 yield changegroup.genchunk(fname)
1525 # Sort the filenodes by their revision #
1524 # Sort the filenodes by their revision #
1526 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1525 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1527 # Create a group generator and only pass in a changenode
1526 # Create a group generator and only pass in a changenode
1528 # lookup function as we need to collect no information
1527 # lookup function as we need to collect no information
1529 # from filenodes.
1528 # from filenodes.
1530 group = filerevlog.group(msng_filenode_lst,
1529 group = filerevlog.group(msng_filenode_lst,
1531 lookup_filenode_link_func(fname))
1530 lookup_filenode_link_func(fname))
1532 for chnk in group:
1531 for chnk in group:
1533 yield chnk
1532 yield chnk
1534 if msng_filenode_set.has_key(fname):
1533 if msng_filenode_set.has_key(fname):
1535 # Don't need this anymore, toss it to free memory.
1534 # Don't need this anymore, toss it to free memory.
1536 del msng_filenode_set[fname]
1535 del msng_filenode_set[fname]
1537 # Signal that no more groups are left.
1536 # Signal that no more groups are left.
1538 yield changegroup.closechunk()
1537 yield changegroup.closechunk()
1539
1538
1540 if msng_cl_lst:
1539 if msng_cl_lst:
1541 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1540 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1542
1541
1543 return util.chunkbuffer(gengroup())
1542 return util.chunkbuffer(gengroup())
1544
1543
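# Illustration (not part of the module): gengroup() yields raw byte chunks
# and util.chunkbuffer turns them into a file-like object. A minimal sketch
# of that wrapper pattern (an assumption about the idea, not Mercurial's
# actual implementation):
class ChunkBuffer(object):
    def __init__(self, gen):
        self.gen = iter(gen)
        self.buf = b''
    def read(self, size):
        while len(self.buf) < size:      # pull chunks until we have enough
            try:
                self.buf += next(self.gen)
            except StopIteration:
                break
        data, self.buf = self.buf[:size], self.buf[size:]
        return data

cb = ChunkBuffer([b'abc', b'defg', b'h'])
print(cb.read(5), cb.read(5))   # -> b'abcde' b'fgh'
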
1545 def changegroup(self, basenodes, source):
1544 def changegroup(self, basenodes, source):
1546 """Generate a changegroup of all nodes that we have that a recipient
1545 """Generate a changegroup of all nodes that we have that a recipient
1547 doesn't.
1546 doesn't.
1548
1547
1549 This is much easier than the previous function as we can assume that
1548 This is much easier than the previous function as we can assume that
1550 the recipient has any changenode we aren't sending them."""
1549 the recipient has any changenode we aren't sending them."""
1551
1550
1552 self.hook('preoutgoing', throw=True, source=source)
1551 self.hook('preoutgoing', throw=True, source=source)
1553
1552
1554 cl = self.changelog
1553 cl = self.changelog
1555 nodes = cl.nodesbetween(basenodes, None)[0]
1554 nodes = cl.nodesbetween(basenodes, None)[0]
1556 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1555 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1557
1556
1558 def identity(x):
1557 def identity(x):
1559 return x
1558 return x
1560
1559
1561 def gennodelst(revlog):
1560 def gennodelst(revlog):
1562 for r in xrange(0, revlog.count()):
1561 for r in xrange(0, revlog.count()):
1563 n = revlog.node(r)
1562 n = revlog.node(r)
1564 if revlog.linkrev(n) in revset:
1563 if revlog.linkrev(n) in revset:
1565 yield n
1564 yield n
1566
1565
1567 def changed_file_collector(changedfileset):
1566 def changed_file_collector(changedfileset):
1568 def collect_changed_files(clnode):
1567 def collect_changed_files(clnode):
1569 c = cl.read(clnode)
1568 c = cl.read(clnode)
1570 for fname in c[3]:
1569 for fname in c[3]:
1571 changedfileset[fname] = 1
1570 changedfileset[fname] = 1
1572 return collect_changed_files
1571 return collect_changed_files
1573
1572
1574 def lookuprevlink_func(revlog):
1573 def lookuprevlink_func(revlog):
1575 def lookuprevlink(n):
1574 def lookuprevlink(n):
1576 return cl.node(revlog.linkrev(n))
1575 return cl.node(revlog.linkrev(n))
1577 return lookuprevlink
1576 return lookuprevlink
1578
1577
1579 def gengroup():
1578 def gengroup():
1580 # construct a list of all changed files
1579 # construct a list of all changed files
1581 changedfiles = {}
1580 changedfiles = {}
1582
1581
1583 for chnk in cl.group(nodes, identity,
1582 for chnk in cl.group(nodes, identity,
1584 changed_file_collector(changedfiles)):
1583 changed_file_collector(changedfiles)):
1585 yield chnk
1584 yield chnk
1586 changedfiles = changedfiles.keys()
1585 changedfiles = changedfiles.keys()
1587 changedfiles.sort()
1586 changedfiles.sort()
1588
1587
1589 mnfst = self.manifest
1588 mnfst = self.manifest
1590 nodeiter = gennodelst(mnfst)
1589 nodeiter = gennodelst(mnfst)
1591 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1590 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1592 yield chnk
1591 yield chnk
1593
1592
1594 for fname in changedfiles:
1593 for fname in changedfiles:
1595 filerevlog = self.file(fname)
1594 filerevlog = self.file(fname)
1596 nodeiter = gennodelst(filerevlog)
1595 nodeiter = gennodelst(filerevlog)
1597 nodeiter = list(nodeiter)
1596 nodeiter = list(nodeiter)
1598 if nodeiter:
1597 if nodeiter:
1599 yield changegroup.genchunk(fname)
1598 yield changegroup.genchunk(fname)
1600 lookup = lookuprevlink_func(filerevlog)
1599 lookup = lookuprevlink_func(filerevlog)
1601 for chnk in filerevlog.group(nodeiter, lookup):
1600 for chnk in filerevlog.group(nodeiter, lookup):
1602 yield chnk
1601 yield chnk
1603
1602
1604 yield changegroup.closechunk()
1603 yield changegroup.closechunk()
1605
1604
1606 if nodes:
1605 if nodes:
1607 self.hook('outgoing', node=hex(nodes[0]), source=source)
1606 self.hook('outgoing', node=hex(nodes[0]), source=source)
1608
1607
1609 return util.chunkbuffer(gengroup())
1608 return util.chunkbuffer(gengroup())
1610
1609
1611 def addchangegroup(self, source, srctype, url):
1610 def addchangegroup(self, source, srctype, url):
1612 """add changegroup to repo.
1611 """add changegroup to repo.
1613 returns number of heads modified or added + 1."""
1612 returns number of heads modified or added + 1."""
1614
1613
1615 def csmap(x):
1614 def csmap(x):
1616 self.ui.debug(_("add changeset %s\n") % short(x))
1615 self.ui.debug(_("add changeset %s\n") % short(x))
1617 return cl.count()
1616 return cl.count()
1618
1617
1619 def revmap(x):
1618 def revmap(x):
1620 return cl.rev(x)
1619 return cl.rev(x)
1621
1620
1622 if not source:
1621 if not source:
1623 return 0
1622 return 0
1624
1623
1625 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1624 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1626
1625
1627 changesets = files = revisions = 0
1626 changesets = files = revisions = 0
1628
1627
1629 tr = self.transaction()
1628 tr = self.transaction()
1630
1629
1631 # write changelog data to temp files so concurrent readers will not see
1630 # write changelog data to temp files so concurrent readers will not see
1632 # an inconsistent view
1631 # an inconsistent view
1633 cl = None
1632 cl = None
1634 try:
1633 try:
1635 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1634 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1636
1635
1637 oldheads = len(cl.heads())
1636 oldheads = len(cl.heads())
1638
1637
1639 # pull off the changeset group
1638 # pull off the changeset group
1640 self.ui.status(_("adding changesets\n"))
1639 self.ui.status(_("adding changesets\n"))
1641 cor = cl.count() - 1
1640 cor = cl.count() - 1
1642 chunkiter = changegroup.chunkiter(source)
1641 chunkiter = changegroup.chunkiter(source)
1643 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1642 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1644 raise util.Abort(_("received changelog group is empty"))
1643 raise util.Abort(_("received changelog group is empty"))
1645 cnr = cl.count() - 1
1644 cnr = cl.count() - 1
1646 changesets = cnr - cor
1645 changesets = cnr - cor
1647
1646
1648 # pull off the manifest group
1647 # pull off the manifest group
1649 self.ui.status(_("adding manifests\n"))
1648 self.ui.status(_("adding manifests\n"))
1650 chunkiter = changegroup.chunkiter(source)
1649 chunkiter = changegroup.chunkiter(source)
1651 # no need to check for empty manifest group here:
1650 # no need to check for empty manifest group here:
1652 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1651 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1653 # no new manifest will be created and the manifest group will
1652 # no new manifest will be created and the manifest group will
1654 # be empty during the pull
1653 # be empty during the pull
1655 self.manifest.addgroup(chunkiter, revmap, tr)
1654 self.manifest.addgroup(chunkiter, revmap, tr)
1656
1655
1657 # process the files
1656 # process the files
1658 self.ui.status(_("adding file changes\n"))
1657 self.ui.status(_("adding file changes\n"))
1659 while 1:
1658 while 1:
1660 f = changegroup.getchunk(source)
1659 f = changegroup.getchunk(source)
1661 if not f:
1660 if not f:
1662 break
1661 break
1663 self.ui.debug(_("adding %s revisions\n") % f)
1662 self.ui.debug(_("adding %s revisions\n") % f)
1664 fl = self.file(f)
1663 fl = self.file(f)
1665 o = fl.count()
1664 o = fl.count()
1666 chunkiter = changegroup.chunkiter(source)
1665 chunkiter = changegroup.chunkiter(source)
1667 if fl.addgroup(chunkiter, revmap, tr) is None:
1666 if fl.addgroup(chunkiter, revmap, tr) is None:
1668 raise util.Abort(_("received file revlog group is empty"))
1667 raise util.Abort(_("received file revlog group is empty"))
1669 revisions += fl.count() - o
1668 revisions += fl.count() - o
1670 files += 1
1669 files += 1
1671
1670
1672 cl.writedata()
1671 cl.writedata()
1673 finally:
1672 finally:
1674 if cl:
1673 if cl:
1675 cl.cleanup()
1674 cl.cleanup()
1676
1675
1677 # make changelog see real files again
1676 # make changelog see real files again
1678 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1677 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1679 self.changelog.checkinlinesize(tr)
1678 self.changelog.checkinlinesize(tr)
1680
1679
1681 newheads = len(self.changelog.heads())
1680 newheads = len(self.changelog.heads())
1682 heads = ""
1681 heads = ""
1683 if oldheads and newheads != oldheads:
1682 if oldheads and newheads != oldheads:
1684 heads = _(" (%+d heads)") % (newheads - oldheads)
1683 heads = _(" (%+d heads)") % (newheads - oldheads)
1685
1684
1686 self.ui.status(_("added %d changesets"
1685 self.ui.status(_("added %d changesets"
1687 " with %d changes to %d files%s\n")
1686 " with %d changes to %d files%s\n")
1688 % (changesets, revisions, files, heads))
1687 % (changesets, revisions, files, heads))
1689
1688
1690 if changesets > 0:
1689 if changesets > 0:
1691 self.hook('pretxnchangegroup', throw=True,
1690 self.hook('pretxnchangegroup', throw=True,
1692 node=hex(self.changelog.node(cor+1)), source=srctype,
1691 node=hex(self.changelog.node(cor+1)), source=srctype,
1693 url=url)
1692 url=url)
1694
1693
1695 tr.close()
1694 tr.close()
1696
1695
1697 if changesets > 0:
1696 if changesets > 0:
1698 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1697 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1699 source=srctype, url=url)
1698 source=srctype, url=url)
1700
1699
1701 for i in range(cor + 1, cnr + 1):
1700 for i in range(cor + 1, cnr + 1):
1702 self.hook("incoming", node=hex(self.changelog.node(i)),
1701 self.hook("incoming", node=hex(self.changelog.node(i)),
1703 source=srctype, url=url)
1702 source=srctype, url=url)
1704
1703
1705 return newheads - oldheads + 1
1704 return newheads - oldheads + 1
1706
1705
1707
1706
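# Illustration (not part of the module): addchangegroup consumes the stream
# through changegroup.getchunk/chunkiter. A hedged sketch of that framing as
# I read it from the callers here: each chunk is a 4-byte big-endian length
# (counting the length field itself) followed by the payload, and a length
# of zero closes the current group. The parser below is illustrative only.
import struct
from io import BytesIO

def read_group(fp):
    while True:
        l = struct.unpack('>l', fp.read(4))[0]
        if l <= 4:
            return                      # empty chunk: end of this group
        yield fp.read(l - 4)

stream = BytesIO(struct.pack('>l', 9) + b'hello' + struct.pack('>l', 0))
print(list(read_group(stream)))         # -> [b'hello']
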
1708 def stream_in(self, remote):
1707 def stream_in(self, remote):
1709 fp = remote.stream_out()
1708 fp = remote.stream_out()
1710 resp = int(fp.readline())
1709 resp = int(fp.readline())
1711 if resp != 0:
1710 if resp != 0:
1712 raise util.Abort(_('operation forbidden by server'))
1711 raise util.Abort(_('operation forbidden by server'))
1713 self.ui.status(_('streaming all changes\n'))
1712 self.ui.status(_('streaming all changes\n'))
1714 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1713 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1715 self.ui.status(_('%d files to transfer, %s of data\n') %
1714 self.ui.status(_('%d files to transfer, %s of data\n') %
1716 (total_files, util.bytecount(total_bytes)))
1715 (total_files, util.bytecount(total_bytes)))
1717 start = time.time()
1716 start = time.time()
1718 for i in xrange(total_files):
1717 for i in xrange(total_files):
1719 name, size = fp.readline().split('\0', 1)
1718 name, size = fp.readline().split('\0', 1)
1720 size = int(size)
1719 size = int(size)
1721 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1720 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1722 ofp = self.opener(name, 'w')
1721 ofp = self.opener(name, 'w')
1723 for chunk in util.filechunkiter(fp, limit=size):
1722 for chunk in util.filechunkiter(fp, limit=size):
1724 ofp.write(chunk)
1723 ofp.write(chunk)
1725 ofp.close()
1724 ofp.close()
1726 elapsed = time.time() - start
1725 elapsed = time.time() - start
1727 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1726 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1728 (util.bytecount(total_bytes), elapsed,
1727 (util.bytecount(total_bytes), elapsed,
1729 util.bytecount(total_bytes / elapsed)))
1728 util.bytecount(total_bytes / elapsed)))
1730 self.reload()
1729 self.reload()
1731 return len(self.heads()) + 1
1730 return len(self.heads()) + 1
1732
1731
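# Illustration (not part of the module): the stream format parsed by
# stream_in above is a response-code line, a "<files> <bytes>" line, then
# for each file a "<name>\0<size>" line followed by exactly <size> raw
# bytes. A compact parser sketch over an in-memory example stream:
from io import BytesIO

def parse_stream(fp):
    if int(fp.readline()) != 0:
        raise ValueError('operation forbidden by server')
    total_files, total_bytes = map(int, fp.readline().split(b' ', 1))
    for _ in range(total_files):        # total_bytes is only for reporting
        name, size = fp.readline().split(b'\0', 1)
        yield name, fp.read(int(size))

body = b'0\n1 5\ndata/x.i\x005\nhello'
print(list(parse_stream(BytesIO(body))))   # -> [(b'data/x.i', b'hello')]
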
1733 def clone(self, remote, heads=[], stream=False):
1732 def clone(self, remote, heads=[], stream=False):
1734 '''clone remote repository.
1733 '''clone remote repository.
1735
1734
1736 keyword arguments:
1735 keyword arguments:
1737 heads: list of revs to clone (forces use of pull)
1736 heads: list of revs to clone (forces use of pull)
1738 stream: use streaming clone if possible'''
1737 stream: use streaming clone if possible'''
1739
1738
1740 # now, all clients that can request uncompressed clones can
1739 # now, all clients that can request uncompressed clones can
1741 # read repo formats supported by all servers that can serve
1740 # read repo formats supported by all servers that can serve
1742 # them.
1741 # them.
1743
1742
1744 # if revlog format changes, client will have to check version
1743 # if revlog format changes, client will have to check version
1745 # and format flags on "stream" capability, and use
1744 # and format flags on "stream" capability, and use
1746 # uncompressed only if compatible.
1745 # uncompressed only if compatible.
1747
1746
1748 if stream and not heads and remote.capable('stream'):
1747 if stream and not heads and remote.capable('stream'):
1749 return self.stream_in(remote)
1748 return self.stream_in(remote)
1750 return self.pull(remote, heads)
1749 return self.pull(remote, heads)
1751
1750
1752 # used to avoid circular references so destructors work
1751 # used to avoid circular references so destructors work
1753 def aftertrans(base):
1752 def aftertrans(base):
1754 p = base
1753 p = base
1755 def a():
1754 def a():
1756 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1755 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1757 util.rename(os.path.join(p, "journal.dirstate"),
1756 util.rename(os.path.join(p, "journal.dirstate"),
1758 os.path.join(p, "undo.dirstate"))
1757 os.path.join(p, "undo.dirstate"))
1759 return a
1758 return a
1760
1759
1761 def instance(ui, path, create):
1760 def instance(ui, path, create):
1762 return localrepository(ui, util.drop_scheme('file', path), create)
1761 return localrepository(ui, util.drop_scheme('file', path), create)
1763
1762
1764 def islocal(path):
1763 def islocal(path):
1765 return True
1764 return True
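
For reference, the payload that stream_in() above consumes has a simple layout: a header line "<total_files> <total_bytes>\n", then for each file a "<name>\0<size>\n" line followed by exactly <size> bytes of raw store data. The helper below is a minimal sketch of a reader for that layout; parse_stream and the toy payload are illustrative, not part of Mercurial.

import io

def parse_stream(fp):
    # header: "<total_files> <total_bytes>\n"
    total_files, total_bytes = map(int, fp.readline().split(b' ', 1))
    for _ in range(total_files):
        # per-file header: "<name>\0<size>\n", then <size> raw bytes
        name, size = fp.readline().split(b'\0', 1)
        yield name.decode(), fp.read(int(size))

# toy payload: one file named 00changelog.i carrying five bytes of data
payload = io.BytesIO(b"1 5\n" + b"00changelog.i\0" + b"5\n" + b"hello")
for name, data in parse_stream(payload):
    print(name, len(data))   # prints: 00changelog.i 5
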
@@ -1,188 +1,214 b''
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "array bisect struct")
11 demandload(globals(), "array bisect struct")
12
12
13 class manifestdict(dict):
14 def __init__(self, mapping={}):
15 dict.__init__(self, mapping)
16 def __getitem__(self, f):
17 return self.node(f)
18 def get(self, f, default=None):
19 try:
20 return dict.__getitem__(self, f)[:20]
21 except KeyError:
22 return default
23 def __setitem__(self, f, node):
24 fl = self.flags(f)
25 dict.__setitem__(self, f, node + fl)
26 def node(self, f):
27 return dict.__getitem__(self, f)[:20]
28 def flags(self, f):
29 return dict.get(self, f, "")[20:]
30 def execf(self, f):
31 "test for executable in manifest flags"
32 return "x" in self.flags(f)
33 def linkf(self, f):
34 "test for symlink in manifest flags"
35 return "l" in self.flags(f)
36 def rawset(self, f, node, flags):
37 dict.__setitem__(self, f, node + flags)
38 def set(self, f, execf=False, linkf=False):
39 n = dict.get(self, f, nullid)[:20]
40 fl = ""
41 if execf: fl = "x"
42 if linkf: fl = "l"
43 dict.__setitem__(self, f, n + fl)
44 def copy(self):
45 return manifestdict(dict.copy(self))
46
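
The new manifestdict above keeps the plain-dict interface while packing each value as the 20-byte binary node followed by an optional one-character flag ("x" for executable, "l" for symlink), so node lookups and flag tests stay cheap slicing operations. A quick illustration of that layout follows; the literals are stand-ins, not real manifest data.

node = "\0" * 20                 # stand-in for a 20-byte binary sha1
entry = node + "x"               # value as stored by rawset()/set()

assert entry[:20] == node        # what node(f) and __getitem__ return
assert entry[20:] == "x"         # what flags(f) returns
assert "x" in entry[20:]         # so execf(f) is true
assert "l" not in entry[20:]     # and linkf(f) is false
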
13 class manifest(revlog):
47 class manifest(revlog):
14 def __init__(self, opener, defversion=REVLOGV0):
48 def __init__(self, opener, defversion=REVLOGV0):
15 self.mapcache = None
49 self.mapcache = None
16 self.listcache = None
50 self.listcache = None
17 revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
51 revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
18 defversion)
52 defversion)
19
53
20 def read(self, node):
54 def read(self, node):
21 if node == nullid: return {} # don't upset local cache
55 if node == nullid: return manifestdict() # don't upset local cache
22 if self.mapcache and self.mapcache[0] == node:
56 if self.mapcache and self.mapcache[0] == node:
23 return self.mapcache[1]
57 return self.mapcache[1]
24 text = self.revision(node)
58 text = self.revision(node)
25 map = {}
26 flag = {}
27 self.listcache = array.array('c', text)
59 self.listcache = array.array('c', text)
28 lines = text.splitlines(1)
60 lines = text.splitlines(1)
61 mapping = manifestdict()
29 for l in lines:
62 for l in lines:
30 (f, n) = l.split('\0')
63 (f, n) = l.split('\0')
31 map[f] = bin(n[:40])
64 mapping.rawset(f, bin(n[:40]), n[40:-1])
32 flag[f] = (n[40:-1] == "x")
65 self.mapcache = (node, mapping)
33 self.mapcache = (node, map, flag)
66 return mapping
34 return map
35
67
36 def readflags(self, node):
68 def readflags(self, node):
37 if node == nullid: return {} # don't upset local cache
69 return self.read(node)
38 if not self.mapcache or self.mapcache[0] != node:
39 self.read(node)
40 return self.mapcache[2]
41
70
42 def diff(self, a, b):
71 def diff(self, a, b):
43 return mdiff.textdiff(str(a), str(b))
72 return mdiff.textdiff(str(a), str(b))
44
73
45 def _search(self, m, s, lo=0, hi=None):
74 def _search(self, m, s, lo=0, hi=None):
46 '''return a tuple (start, end) that says where to find s within m.
75 '''return a tuple (start, end) that says where to find s within m.
47
76
48 If the string is found m[start:end] are the line containing
77 If the string is found m[start:end] are the line containing
49 that string. If start == end the string was not found and
78 that string. If start == end the string was not found and
50 they indicate the proper sorted insertion point. This was
79 they indicate the proper sorted insertion point. This was
51 taken from bisect_left, and modified to find line start/end as
80 taken from bisect_left, and modified to find line start/end as
52 it goes along.
81 it goes along.
53
82
54 m should be a buffer or a string
83 m should be a buffer or a string
55 s is a string'''
84 s is a string'''
56 def advance(i, c):
85 def advance(i, c):
57 while i < lenm and m[i] != c:
86 while i < lenm and m[i] != c:
58 i += 1
87 i += 1
59 return i
88 return i
60 lenm = len(m)
89 lenm = len(m)
61 if not hi:
90 if not hi:
62 hi = lenm
91 hi = lenm
63 while lo < hi:
92 while lo < hi:
64 mid = (lo + hi) // 2
93 mid = (lo + hi) // 2
65 start = mid
94 start = mid
66 while start > 0 and m[start-1] != '\n':
95 while start > 0 and m[start-1] != '\n':
67 start -= 1
96 start -= 1
68 end = advance(start, '\0')
97 end = advance(start, '\0')
69 if m[start:end] < s:
98 if m[start:end] < s:
70 # we know that after the null there are 40 bytes of sha1
99 # we know that after the null there are 40 bytes of sha1
71 # this translates to the bisect lo = mid + 1
100 # this translates to the bisect lo = mid + 1
72 lo = advance(end + 40, '\n') + 1
101 lo = advance(end + 40, '\n') + 1
73 else:
102 else:
74 # this translates to the bisect hi = mid
103 # this translates to the bisect hi = mid
75 hi = start
104 hi = start
76 end = advance(lo, '\0')
105 end = advance(lo, '\0')
77 found = m[lo:end]
106 found = m[lo:end]
78 if cmp(s, found) == 0:
107 if cmp(s, found) == 0:
79 # we know that after the null there are 40 bytes of sha1
108 # we know that after the null there are 40 bytes of sha1
80 end = advance(end + 40, '\n')
109 end = advance(end + 40, '\n')
81 return (lo, end+1)
110 return (lo, end+1)
82 else:
111 else:
83 return (lo, lo)
112 return (lo, lo)
84
113
85 def find(self, node, f):
114 def find(self, node, f):
86 '''look up entry for a single file efficiently.
115 '''look up entry for a single file efficiently.
87 return (node, flag) pair if found, (None, None) if not.'''
116 return (node, flag) pair if found, (None, None) if not.'''
88 if self.mapcache and node == self.mapcache[0]:
117 if self.mapcache and node == self.mapcache[0]:
89 return self.mapcache[1].get(f), self.mapcache[2].get(f)
118 return self.mapcache[1].get(f), self.mapcache[1].flags(f)
90 text = self.revision(node)
119 text = self.revision(node)
91 start, end = self._search(text, f)
120 start, end = self._search(text, f)
92 if start == end:
121 if start == end:
93 return None, None
122 return None, None
94 l = text[start:end]
123 l = text[start:end]
95 f, n = l.split('\0')
124 f, n = l.split('\0')
96 return bin(n[:40]), n[40:-1] == 'x'
125 return bin(n[:40]), n[40:-1] == 'x'
97
126
98 def add(self, map, flags, transaction, link, p1=None, p2=None,
127 def add(self, map, flags, transaction, link, p1=None, p2=None,
99 changed=None):
128 changed=None):
100 # apply the changes collected during the bisect loop to our addlist
129 # apply the changes collected during the bisect loop to our addlist
101 # return a delta suitable for addrevision
130 # return a delta suitable for addrevision
102 def addlistdelta(addlist, x):
131 def addlistdelta(addlist, x):
103 # start from the bottom up
132 # start from the bottom up
104 # so changes to the offsets don't mess things up.
133 # so changes to the offsets don't mess things up.
105 i = len(x)
134 i = len(x)
106 while i > 0:
135 while i > 0:
107 i -= 1
136 i -= 1
108 start = x[i][0]
137 start = x[i][0]
109 end = x[i][1]
138 end = x[i][1]
110 if x[i][2]:
139 if x[i][2]:
111 addlist[start:end] = array.array('c', x[i][2])
140 addlist[start:end] = array.array('c', x[i][2])
112 else:
141 else:
113 del addlist[start:end]
142 del addlist[start:end]
114 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
143 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
115 for d in x ])
144 for d in x ])
116
145
117 # if we're using the listcache, make sure it is valid and
146 # if we're using the listcache, make sure it is valid and
118 # parented by the same node we're diffing against
147 # parented by the same node we're diffing against
119 if not changed or not self.listcache or not p1 or \
148 if not changed or not self.listcache or not p1 or \
120 self.mapcache[0] != p1:
149 self.mapcache[0] != p1:
121 files = map.keys()
150 files = map.keys()
122 files.sort()
151 files.sort()
123
152
124 # if this is changed to support newlines in filenames,
153 # if this is changed to support newlines in filenames,
125 # be sure to check the templates/ dir again (especially *-raw.tmpl)
154 # be sure to check the templates/ dir again (especially *-raw.tmpl)
126 text = ["%s\000%s%s\n" %
155 text = ["%s\000%s%s\n" % (f, hex(map[f]), flags.flags(f)) for f in files]
127 (f, hex(map[f]), flags[f] and "x" or '')
128 for f in files]
129 self.listcache = array.array('c', "".join(text))
156 self.listcache = array.array('c', "".join(text))
130 cachedelta = None
157 cachedelta = None
131 else:
158 else:
132 addlist = self.listcache
159 addlist = self.listcache
133
160
134 # combine the changed lists into one list for sorting
161 # combine the changed lists into one list for sorting
135 work = [[x, 0] for x in changed[0]]
162 work = [[x, 0] for x in changed[0]]
136 work[len(work):] = [[x, 1] for x in changed[1]]
163 work[len(work):] = [[x, 1] for x in changed[1]]
137 work.sort()
164 work.sort()
138
165
139 delta = []
166 delta = []
140 dstart = None
167 dstart = None
141 dend = None
168 dend = None
142 dline = [""]
169 dline = [""]
143 start = 0
170 start = 0
144 # zero copy representation of addlist as a buffer
171 # zero copy representation of addlist as a buffer
145 addbuf = buffer(addlist)
172 addbuf = buffer(addlist)
146
173
147 # start with a readonly loop that finds the offset of
174 # start with a readonly loop that finds the offset of
148 # each line and creates the deltas
175 # each line and creates the deltas
149 for w in work:
176 for w in work:
150 f = w[0]
177 f = w[0]
151 # bs will either be the index of the item or the insert point
178 # bs will either be the index of the item or the insert point
152 start, end = self._search(addbuf, f, start)
179 start, end = self._search(addbuf, f, start)
153 if w[1] == 0:
180 if w[1] == 0:
154 l = "%s\000%s%s\n" % (f, hex(map[f]),
181 l = "%s\000%s%s\n" % (f, hex(map[f]), flags.flags(f))
155 flags[f] and "x" or '')
156 else:
182 else:
157 l = ""
183 l = ""
158 if start == end and w[1] == 1:
184 if start == end and w[1] == 1:
159 # item we want to delete was not found, error out
185 # item we want to delete was not found, error out
160 raise AssertionError(
186 raise AssertionError(
161 _("failed to remove %s from manifest\n") % f)
187 _("failed to remove %s from manifest\n") % f)
162 if dstart != None and dstart <= start and dend >= start:
188 if dstart != None and dstart <= start and dend >= start:
163 if dend < end:
189 if dend < end:
164 dend = end
190 dend = end
165 if l:
191 if l:
166 dline.append(l)
192 dline.append(l)
167 else:
193 else:
168 if dstart != None:
194 if dstart != None:
169 delta.append([dstart, dend, "".join(dline)])
195 delta.append([dstart, dend, "".join(dline)])
170 dstart = start
196 dstart = start
171 dend = end
197 dend = end
172 dline = [l]
198 dline = [l]
173
199
174 if dstart != None:
200 if dstart != None:
175 delta.append([dstart, dend, "".join(dline)])
201 delta.append([dstart, dend, "".join(dline)])
176 # apply the delta to the addlist, and get a delta for addrevision
202 # apply the delta to the addlist, and get a delta for addrevision
177 cachedelta = addlistdelta(addlist, delta)
203 cachedelta = addlistdelta(addlist, delta)
178
204
179 # the delta is only valid if we've been processing the tip revision
205 # the delta is only valid if we've been processing the tip revision
180 if self.mapcache[0] != self.tip():
206 if self.mapcache[0] != self.tip():
181 cachedelta = None
207 cachedelta = None
182 self.listcache = addlist
208 self.listcache = addlist
183
209
184 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
210 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
185 p2, cachedelta)
211 p2, cachedelta)
186 self.mapcache = (n, map, flags)
212 self.mapcache = (n, map)
187
213
188 return n
214 return n
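
The delta handed to addrevision() by addlistdelta() above is a simple hunk stream: each hunk is a 12-byte big-endian header packing (start, end, length of new data) followed by the new data itself, and applying a hunk means replacing base[start:end] with that data. The sketch below shows that encoding round-tripping on a toy manifest text; apply_delta is an illustrative helper written against that assumption, not a Mercurial function.

import struct

def apply_delta(base, delta):
    # replace base[start:end] with each hunk's data, left to right
    out, pos, last = [], 0, 0
    while pos < len(delta):
        start, end, length = struct.unpack(">lll", delta[pos:pos + 12])
        data = delta[pos + 12:pos + 12 + length]
        pos += 12 + length
        out.append(base[last:start])
        out.append(data)
        last = end
    out.append(base[last:])
    return b"".join(out)

# toy manifest text: one "file\0hash\n" entry per line (hashes shortened)
base = b"apple\0aaaa\nbanana\0bbbb\n"
replacement = b"apple\0cccc\n"
end_of_first_line = base.index(b"\n") + 1
delta = struct.pack(">lll", 0, end_of_first_line, len(replacement)) + replacement
assert apply_delta(base, delta) == b"apple\0cccc\nbanana\0bbbb\n"
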
@@ -1,337 +1,337 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "util os tempfile")
11 demandload(globals(), "util os tempfile")
12
12
13 def merge3(repo, fn, my, other, p1, p2):
13 def merge3(repo, fn, my, other, p1, p2):
14 """perform a 3-way merge in the working directory"""
14 """perform a 3-way merge in the working directory"""
15
15
16 def temp(prefix, node):
16 def temp(prefix, node):
17 pre = "%s~%s." % (os.path.basename(fn), prefix)
17 pre = "%s~%s." % (os.path.basename(fn), prefix)
18 (fd, name) = tempfile.mkstemp(prefix=pre)
18 (fd, name) = tempfile.mkstemp(prefix=pre)
19 f = os.fdopen(fd, "wb")
19 f = os.fdopen(fd, "wb")
20 repo.wwrite(fn, fl.read(node), f)
20 repo.wwrite(fn, fl.read(node), f)
21 f.close()
21 f.close()
22 return name
22 return name
23
23
24 fl = repo.file(fn)
24 fl = repo.file(fn)
25 base = fl.ancestor(my, other)
25 base = fl.ancestor(my, other)
26 a = repo.wjoin(fn)
26 a = repo.wjoin(fn)
27 b = temp("base", base)
27 b = temp("base", base)
28 c = temp("other", other)
28 c = temp("other", other)
29
29
30 repo.ui.note(_("resolving %s\n") % fn)
30 repo.ui.note(_("resolving %s\n") % fn)
31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
32 (fn, short(my), short(other), short(base)))
32 (fn, short(my), short(other), short(base)))
33
33
34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
35 or "hgmerge")
35 or "hgmerge")
36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
37 environ={'HG_FILE': fn,
37 environ={'HG_FILE': fn,
38 'HG_MY_NODE': p1,
38 'HG_MY_NODE': p1,
39 'HG_OTHER_NODE': p2,
39 'HG_OTHER_NODE': p2,
40 'HG_FILE_MY_NODE': hex(my),
40 'HG_FILE_MY_NODE': hex(my),
41 'HG_FILE_OTHER_NODE': hex(other),
41 'HG_FILE_OTHER_NODE': hex(other),
42 'HG_FILE_BASE_NODE': hex(base)})
42 'HG_FILE_BASE_NODE': hex(base)})
43 if r:
43 if r:
44 repo.ui.warn(_("merging %s failed!\n") % fn)
44 repo.ui.warn(_("merging %s failed!\n") % fn)
45
45
46 os.unlink(b)
46 os.unlink(b)
47 os.unlink(c)
47 os.unlink(c)
48 return r
48 return r
49
49
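
merge3() above hands conflicts to an external program: it runs $HGMERGE (or the ui.merge setting, falling back to "hgmerge") as <cmd> <local> <base> <other> with the HG_* variables in the environment, and treats any non-zero exit status as an unresolved merge. The script below is a toy merge driver written against that calling convention; it only resolves the trivial cases and is an illustration, not Mercurial's hgmerge script.

#!/usr/bin/env python
# toy merge driver: invoked as  <driver> <local> <base> <other>
# exit 0 = merged in place, non-zero = leave the file unresolved
import sys

def read(path):
    with open(path, 'rb') as fp:
        return fp.read()

def main(local, base, other):
    l, b, o = read(local), read(base), read(other)
    if l == b:                     # local side untouched: take the other side
        with open(local, 'wb') as fp:
            fp.write(o)
        return 0
    if o == b or l == o:           # other side untouched, or both sides agree
        return 0
    return 1                       # a real conflict: let hg report it

if __name__ == '__main__':
    sys.exit(main(*sys.argv[1:4]))

Pointing HGMERGE at such an executable script is enough for the loop above to pick it up; anything it cannot resolve simply ends up in the unresolved list and is reported at the end of the update.
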
50 def update(repo, node, branchmerge=False, force=False, partial=None,
50 def update(repo, node, branchmerge=False, force=False, partial=None,
51 wlock=None, show_stats=True, remind=True):
51 wlock=None, show_stats=True, remind=True):
52
52
53 overwrite = force and not branchmerge
53 overwrite = force and not branchmerge
54 forcemerge = force and branchmerge
54 forcemerge = force and branchmerge
55
55
56 if not wlock:
56 if not wlock:
57 wlock = repo.wlock()
57 wlock = repo.wlock()
58
58
59 ### check phase
59 ### check phase
60
60
61 pl = repo.dirstate.parents()
61 pl = repo.dirstate.parents()
62 if not overwrite and pl[1] != nullid:
62 if not overwrite and pl[1] != nullid:
63 raise util.Abort(_("outstanding uncommitted merges"))
63 raise util.Abort(_("outstanding uncommitted merges"))
64
64
65 p1, p2 = pl[0], node
65 p1, p2 = pl[0], node
66 pa = repo.changelog.ancestor(p1, p2)
66 pa = repo.changelog.ancestor(p1, p2)
67
67
68 # is there a linear path from p1 to p2?
68 # is there a linear path from p1 to p2?
69 linear_path = (pa == p1 or pa == p2)
69 linear_path = (pa == p1 or pa == p2)
70 if branchmerge and linear_path:
70 if branchmerge and linear_path:
71 raise util.Abort(_("there is nothing to merge, just use "
71 raise util.Abort(_("there is nothing to merge, just use "
72 "'hg update' or look at 'hg heads'"))
72 "'hg update' or look at 'hg heads'"))
73
73
74 if not overwrite and not linear_path and not branchmerge:
74 if not overwrite and not linear_path and not branchmerge:
75 raise util.Abort(_("update spans branches, use 'hg merge' "
75 raise util.Abort(_("update spans branches, use 'hg merge' "
76 "or 'hg update -C' to lose changes"))
76 "or 'hg update -C' to lose changes"))
77
77
78 modified, added, removed, deleted, unknown = repo.changes()
78 modified, added, removed, deleted, unknown = repo.changes()
79 if branchmerge and not forcemerge:
79 if branchmerge and not forcemerge:
80 if modified or added or removed:
80 if modified or added or removed:
81 raise util.Abort(_("outstanding uncommitted changes"))
81 raise util.Abort(_("outstanding uncommitted changes"))
82
82
83 m1n = repo.changelog.read(p1)[0]
83 m1n = repo.changelog.read(p1)[0]
84 m2n = repo.changelog.read(p2)[0]
84 m2n = repo.changelog.read(p2)[0]
85 man = repo.manifest.ancestor(m1n, m2n)
85 man = repo.manifest.ancestor(m1n, m2n)
86 m1 = repo.manifest.read(m1n)
86 m1 = repo.manifest.read(m1n)
87 mf1 = repo.manifest.readflags(m1n)
87 mf1 = repo.manifest.readflags(m1n)
88 m2 = repo.manifest.read(m2n).copy()
88 m2 = repo.manifest.read(m2n).copy()
89 mf2 = repo.manifest.readflags(m2n)
89 mf2 = repo.manifest.readflags(m2n)
90 ma = repo.manifest.read(man)
90 ma = repo.manifest.read(man)
91 mfa = repo.manifest.readflags(man)
91 mfa = repo.manifest.readflags(man)
92
92
93 if not forcemerge and not overwrite:
93 if not forcemerge and not overwrite:
94 for f in unknown:
94 for f in unknown:
95 if f in m2:
95 if f in m2:
96 t1 = repo.wread(f)
96 t1 = repo.wread(f)
97 t2 = repo.file(f).read(m2[f])
97 t2 = repo.file(f).read(m2[f])
98 if cmp(t1, t2) != 0:
98 if cmp(t1, t2) != 0:
99 raise util.Abort(_("'%s' already exists in the working"
99 raise util.Abort(_("'%s' already exists in the working"
100 " dir and differs from remote") % f)
100 " dir and differs from remote") % f)
101
101
102 # resolve the manifest to determine which files
102 # resolve the manifest to determine which files
103 # we care about merging
103 # we care about merging
104 repo.ui.note(_("resolving manifests\n"))
104 repo.ui.note(_("resolving manifests\n"))
105 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
105 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
106 (overwrite, branchmerge, partial and True or False, linear_path))
106 (overwrite, branchmerge, partial and True or False, linear_path))
107 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
107 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
108 (short(man), short(m1n), short(m2n)))
108 (short(man), short(m1n), short(m2n)))
109
109
110 merge = {}
110 merge = {}
111 get = {}
111 get = {}
112 remove = []
112 remove = []
113
113
114 # construct a working dir manifest
114 # construct a working dir manifest
115 mw = m1.copy()
115 mw = m1.copy()
116 mfw = mf1.copy()
116 mfw = mf1.copy()
117 umap = dict.fromkeys(unknown)
117 umap = dict.fromkeys(unknown)
118
118
119 for f in added + modified + unknown:
119 for f in added + modified + unknown:
120 mw[f] = ""
120 mw[f] = ""
121 mfw[f] = util.is_exec(repo.wjoin(f), mfw.get(f, False))
121 mfw.set(f, util.is_exec(repo.wjoin(f), mfw.execf(f)))
122
122
123 for f in deleted + removed:
123 for f in deleted + removed:
124 if f in mw:
124 if f in mw:
125 del mw[f]
125 del mw[f]
126
126
127 # If we're jumping between revisions (as opposed to merging),
127 # If we're jumping between revisions (as opposed to merging),
128 # and if neither the working directory nor the target rev has
128 # and if neither the working directory nor the target rev has
129 # the file, then we need to remove it from the dirstate, to
129 # the file, then we need to remove it from the dirstate, to
130 # prevent the dirstate from listing the file when it is no
130 # prevent the dirstate from listing the file when it is no
131 # longer in the manifest.
131 # longer in the manifest.
132 if not partial and linear_path and f not in m2:
132 if not partial and linear_path and f not in m2:
133 repo.dirstate.forget((f,))
133 repo.dirstate.forget((f,))
134
134
135 # Compare manifests
135 # Compare manifests
136 for f, n in mw.iteritems():
136 for f, n in mw.iteritems():
137 if partial and not partial(f):
137 if partial and not partial(f):
138 continue
138 continue
139 if f in m2:
139 if f in m2:
140 s = 0
140 s = 0
141
141
142 # is the wfile new since m1, and match m2?
142 # is the wfile new since m1, and match m2?
143 if f not in m1:
143 if f not in m1:
144 t1 = repo.wread(f)
144 t1 = repo.wread(f)
145 t2 = repo.file(f).read(m2[f])
145 t2 = repo.file(f).read(m2[f])
146 if cmp(t1, t2) == 0:
146 if cmp(t1, t2) == 0:
147 n = m2[f]
147 n = m2[f]
148 del t1, t2
148 del t1, t2
149
149
150 # are files different?
150 # are files different?
151 if n != m2[f]:
151 if n != m2[f]:
152 a = ma.get(f, nullid)
152 a = ma.get(f, nullid)
153 # are both different from the ancestor?
153 # are both different from the ancestor?
154 if n != a and m2[f] != a:
154 if n != a and m2[f] != a:
155 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
155 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
156 # merge executable bits
156 # merge executable bits
157 # "if we changed or they changed, change in merge"
157 # "if we changed or they changed, change in merge"
158 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
158 a, b, c = mfa.execf(f), mfw.execf(f), mf2.execf(f)
159 mode = ((a^b) | (a^c)) ^ a
159 mode = ((a^b) | (a^c)) ^ a
160 merge[f] = (m1.get(f, nullid), m2[f], mode)
160 merge[f] = (m1.get(f, nullid), m2[f], mode)
161 s = 1
161 s = 1
162 # are we clobbering?
162 # are we clobbering?
163 # is remote's version newer?
163 # is remote's version newer?
164 # or are we going back in time?
164 # or are we going back in time?
165 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
165 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
166 repo.ui.debug(_(" remote %s is newer, get\n") % f)
166 repo.ui.debug(_(" remote %s is newer, get\n") % f)
167 get[f] = m2[f]
167 get[f] = m2[f]
168 s = 1
168 s = 1
169 elif f in umap or f in added:
169 elif f in umap or f in added:
170 # this unknown file is the same as the checkout
170 # this unknown file is the same as the checkout
171 # we need to reset the dirstate if the file was added
171 # we need to reset the dirstate if the file was added
172 get[f] = m2[f]
172 get[f] = m2[f]
173
173
174 if not s and mfw[f] != mf2[f]:
174 if not s and mfw.execf(f) != mf2.execf(f):
175 if overwrite:
175 if overwrite:
176 repo.ui.debug(_(" updating permissions for %s\n") % f)
176 repo.ui.debug(_(" updating permissions for %s\n") % f)
177 util.set_exec(repo.wjoin(f), mf2[f])
177 util.set_exec(repo.wjoin(f), mf2.execf(f))
178 else:
178 else:
179 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
179 a, b, c = mfa.execf(f), mfw.execf(f), mf2.execf(f)
180 mode = ((a^b) | (a^c)) ^ a
180 mode = ((a^b) | (a^c)) ^ a
181 if mode != b:
181 if mode != b:
182 repo.ui.debug(_(" updating permissions for %s\n")
182 repo.ui.debug(_(" updating permissions for %s\n")
183 % f)
183 % f)
184 util.set_exec(repo.wjoin(f), mode)
184 util.set_exec(repo.wjoin(f), mode)
185 del m2[f]
185 del m2[f]
186 elif f in ma:
186 elif f in ma:
187 if n != ma[f]:
187 if n != ma[f]:
188 r = _("d")
188 r = _("d")
189 if not overwrite and (linear_path or branchmerge):
189 if not overwrite and (linear_path or branchmerge):
190 r = repo.ui.prompt(
190 r = repo.ui.prompt(
191 (_(" local changed %s which remote deleted\n") % f) +
191 (_(" local changed %s which remote deleted\n") % f) +
192 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
192 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
193 if r == _("d"):
193 if r == _("d"):
194 remove.append(f)
194 remove.append(f)
195 else:
195 else:
196 repo.ui.debug(_("other deleted %s\n") % f)
196 repo.ui.debug(_("other deleted %s\n") % f)
197 remove.append(f) # other deleted it
197 remove.append(f) # other deleted it
198 else:
198 else:
199 # file is created on branch or in working directory
199 # file is created on branch or in working directory
200 if overwrite and f not in umap:
200 if overwrite and f not in umap:
201 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
201 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
202 remove.append(f)
202 remove.append(f)
203 elif n == m1.get(f, nullid): # same as parent
203 elif n == m1.get(f, nullid): # same as parent
204 if p2 == pa: # going backwards?
204 if p2 == pa: # going backwards?
205 repo.ui.debug(_("remote deleted %s\n") % f)
205 repo.ui.debug(_("remote deleted %s\n") % f)
206 remove.append(f)
206 remove.append(f)
207 else:
207 else:
208 repo.ui.debug(_("local modified %s, keeping\n") % f)
208 repo.ui.debug(_("local modified %s, keeping\n") % f)
209 else:
209 else:
210 repo.ui.debug(_("working dir created %s, keeping\n") % f)
210 repo.ui.debug(_("working dir created %s, keeping\n") % f)
211
211
212 for f, n in m2.iteritems():
212 for f, n in m2.iteritems():
213 if partial and not partial(f):
213 if partial and not partial(f):
214 continue
214 continue
215 if f[0] == "/":
215 if f[0] == "/":
216 continue
216 continue
217 if f in ma and n != ma[f]:
217 if f in ma and n != ma[f]:
218 r = _("k")
218 r = _("k")
219 if not overwrite and (linear_path or branchmerge):
219 if not overwrite and (linear_path or branchmerge):
220 r = repo.ui.prompt(
220 r = repo.ui.prompt(
221 (_("remote changed %s which local deleted\n") % f) +
221 (_("remote changed %s which local deleted\n") % f) +
222 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
222 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
223 if r == _("k"):
223 if r == _("k"):
224 get[f] = n
224 get[f] = n
225 elif f not in ma:
225 elif f not in ma:
226 repo.ui.debug(_("remote created %s\n") % f)
226 repo.ui.debug(_("remote created %s\n") % f)
227 get[f] = n
227 get[f] = n
228 else:
228 else:
229 if overwrite or p2 == pa: # going backwards?
229 if overwrite or p2 == pa: # going backwards?
230 repo.ui.debug(_("local deleted %s, recreating\n") % f)
230 repo.ui.debug(_("local deleted %s, recreating\n") % f)
231 get[f] = n
231 get[f] = n
232 else:
232 else:
233 repo.ui.debug(_("local deleted %s\n") % f)
233 repo.ui.debug(_("local deleted %s\n") % f)
234
234
235 del mw, m1, m2, ma
235 del mw, m1, m2, ma
236
236
237 if overwrite:
237 if overwrite:
238 for f in merge:
238 for f in merge:
239 get[f] = merge[f][1]
239 get[f] = merge[f][1]
240 merge = {}
240 merge = {}
241
241
242 if linear_path or overwrite:
242 if linear_path or overwrite:
243 # we don't need to do any magic, just jump to the new rev
243 # we don't need to do any magic, just jump to the new rev
244 p1, p2 = p2, nullid
244 p1, p2 = p2, nullid
245
245
246 xp1 = hex(p1)
246 xp1 = hex(p1)
247 xp2 = hex(p2)
247 xp2 = hex(p2)
248 if p2 == nullid: xxp2 = ''
248 if p2 == nullid: xxp2 = ''
249 else: xxp2 = xp2
249 else: xxp2 = xp2
250
250
251 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
251 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
252
252
253 # get the files we don't need to change
253 # get the files we don't need to change
254 files = get.keys()
254 files = get.keys()
255 files.sort()
255 files.sort()
256 for f in files:
256 for f in files:
257 if f[0] == "/":
257 if f[0] == "/":
258 continue
258 continue
259 repo.ui.note(_("getting %s\n") % f)
259 repo.ui.note(_("getting %s\n") % f)
260 t = repo.file(f).read(get[f])
260 t = repo.file(f).read(get[f])
261 repo.wwrite(f, t)
261 repo.wwrite(f, t)
262 util.set_exec(repo.wjoin(f), mf2[f])
262 util.set_exec(repo.wjoin(f), mf2.execf(f))
263 if not partial:
263 if not partial:
264 if branchmerge:
264 if branchmerge:
265 repo.dirstate.update([f], 'n', st_mtime=-1)
265 repo.dirstate.update([f], 'n', st_mtime=-1)
266 else:
266 else:
267 repo.dirstate.update([f], 'n')
267 repo.dirstate.update([f], 'n')
268
268
269 # merge the tricky bits
269 # merge the tricky bits
270 unresolved = []
270 unresolved = []
271 files = merge.keys()
271 files = merge.keys()
272 files.sort()
272 files.sort()
273 for f in files:
273 for f in files:
274 repo.ui.status(_("merging %s\n") % f)
274 repo.ui.status(_("merging %s\n") % f)
275 my, other, flag = merge[f]
275 my, other, flag = merge[f]
276 ret = merge3(repo, f, my, other, xp1, xp2)
276 ret = merge3(repo, f, my, other, xp1, xp2)
277 if ret:
277 if ret:
278 unresolved.append(f)
278 unresolved.append(f)
279 util.set_exec(repo.wjoin(f), flag)
279 util.set_exec(repo.wjoin(f), flag)
280 if not partial:
280 if not partial:
281 if branchmerge:
281 if branchmerge:
282 # We've done a branch merge, mark this file as merged
282 # We've done a branch merge, mark this file as merged
283 # so that we properly record the merger later
283 # so that we properly record the merger later
284 repo.dirstate.update([f], 'm')
284 repo.dirstate.update([f], 'm')
285 else:
285 else:
286 # We've update-merged a locally modified file, so
286 # We've update-merged a locally modified file, so
287 # we set the dirstate to emulate a normal checkout
287 # we set the dirstate to emulate a normal checkout
288 # of that file some time in the past. Thus our
288 # of that file some time in the past. Thus our
289 # merge will appear as a normal local file
289 # merge will appear as a normal local file
290 # modification.
290 # modification.
291 f_len = len(repo.file(f).read(other))
291 f_len = len(repo.file(f).read(other))
292 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
292 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
293
293
294 remove.sort()
294 remove.sort()
295 for f in remove:
295 for f in remove:
296 repo.ui.note(_("removing %s\n") % f)
296 repo.ui.note(_("removing %s\n") % f)
297 util.audit_path(f)
297 util.audit_path(f)
298 try:
298 try:
299 util.unlink(repo.wjoin(f))
299 util.unlink(repo.wjoin(f))
300 except OSError, inst:
300 except OSError, inst:
301 if inst.errno != errno.ENOENT:
301 if inst.errno != errno.ENOENT:
302 repo.ui.warn(_("update failed to remove %s: %s!\n") %
302 repo.ui.warn(_("update failed to remove %s: %s!\n") %
303 (f, inst.strerror))
303 (f, inst.strerror))
304 if not partial:
304 if not partial:
305 if branchmerge:
305 if branchmerge:
306 repo.dirstate.update(remove, 'r')
306 repo.dirstate.update(remove, 'r')
307 else:
307 else:
308 repo.dirstate.forget(remove)
308 repo.dirstate.forget(remove)
309
309
310 if not partial:
310 if not partial:
311 repo.dirstate.setparents(p1, p2)
311 repo.dirstate.setparents(p1, p2)
312
312
313 if show_stats:
313 if show_stats:
314 stats = ((len(get), _("updated")),
314 stats = ((len(get), _("updated")),
315 (len(merge) - len(unresolved), _("merged")),
315 (len(merge) - len(unresolved), _("merged")),
316 (len(remove), _("removed")),
316 (len(remove), _("removed")),
317 (len(unresolved), _("unresolved")))
317 (len(unresolved), _("unresolved")))
318 note = ", ".join([_("%d files %s") % s for s in stats])
318 note = ", ".join([_("%d files %s") % s for s in stats])
319 repo.ui.status("%s\n" % note)
319 repo.ui.status("%s\n" % note)
320 if not partial:
320 if not partial:
321 if branchmerge:
321 if branchmerge:
322 if unresolved:
322 if unresolved:
323 repo.ui.status(_("There are unresolved merges,"
323 repo.ui.status(_("There are unresolved merges,"
324 " you can redo the full merge using:\n"
324 " you can redo the full merge using:\n"
325 " hg update -C %s\n"
325 " hg update -C %s\n"
326 " hg merge %s\n"
326 " hg merge %s\n"
327 % (repo.changelog.rev(p1),
327 % (repo.changelog.rev(p1),
328 repo.changelog.rev(p2))))
328 repo.changelog.rev(p2))))
329 elif remind:
329 elif remind:
330 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
330 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
331 elif unresolved:
331 elif unresolved:
332 repo.ui.status(_("There are unresolved merges with"
332 repo.ui.status(_("There are unresolved merges with"
333 " locally modified files.\n"))
333 " locally modified files.\n"))
334
334
335 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
335 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
336 return len(unresolved)
336 return len(unresolved)
337
337
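
The exec-bit handling in update() above relies on the identity mode = ((a ^ b) | (a ^ c)) ^ a, with a the ancestor's bit, b the working copy's and c the remote's. It encodes the comment's rule "if we changed or they changed, change in merge": whichever side differs from the ancestor supplies the result, and if both sides changed the bit it ends up flipped relative to the ancestor. A small exhaustive check of that rule, for illustration only:

for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            mode = ((a ^ b) | (a ^ c)) ^ a
            # the side that differs from the ancestor supplies the result;
            # if neither side changed the bit, it stays at the ancestor value
            expected = b if b != a else c
            assert mode == expected, (a, b, c, mode)
print("exec-bit merge rule holds for all 8 combinations")
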