remove localrepository.changes....
Vadim Gelfer
r2875:3d6efcbb default
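The change is mechanical across all four files below: each caller of the removed localrepository.changes() switches to status() and keeps only the first five fields, which match the old return tuple. A minimal sketch of the pattern (illustrative only; repo stands for any localrepository object, node1/node2 for any two revisions):

    # before: changes() returned (modified, added, removed, deleted, unknown)
    modified, added, removed, deleted, unknown = repo.changes(node1, node2)

    # after: status() covers at least those five fields, so callers slice them off
    modified, added, removed, deleted, unknown = repo.status(node1, node2)[:5]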
@@ -1,153 +1,153 @@
# extdiff.py - external diff program support for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
#
# allow to use external programs to compare revisions, or revision
# with working dir. program is called with two arguments: paths to
# directories containing snapshots of files to compare.
#
# to enable:
#
# [extensions]
# hgext.extdiff =
#
# also allows to configure new diff commands, so you do not need to
# type "hg extdiff -p kdiff3" always.
#
# [extdiff]
# # add new command called vdiff, runs kdiff3
# cmd.vdiff = kdiff3
# # add new command called meld, runs meld (no need to name twice)
# cmd.meld =
# # add new command called vimdiff, runs gvimdiff with DirDiff plugin
# #(see http://www.vim.org/scripts/script.php?script_id=102)
# cmd.vimdiff = LC_ALL=C gvim -f '+bdel 1 2' '+ execute "DirDiff ".argv(0)." ".argv(1)'
#
# you can use -I/-X and list of file or directory names like normal
# "hg diff" command. extdiff makes snapshots of only needed files, so
# compare program will be fast.

from mercurial.demandload import demandload
from mercurial.i18n import gettext as _
from mercurial.node import *
demandload(globals(), 'mercurial:commands,util os shutil tempfile')

def dodiff(ui, repo, diffcmd, pats, opts):
    def snapshot_node(files, node):
        '''snapshot files as of some revision'''
        changes = repo.changelog.read(node)
        mf = repo.manifest.read(changes[0])
        dirname = '%s.%s' % (os.path.basename(repo.root), short(node))
        base = os.path.join(tmproot, dirname)
        os.mkdir(base)
        if not ui.quiet:
            ui.write_err(_('making snapshot of %d files from rev %s\n') %
                         (len(files), short(node)))
        for fn in files:
            wfn = util.pconvert(fn)
            ui.note(' %s\n' % wfn)
            dest = os.path.join(base, wfn)
            destdir = os.path.dirname(dest)
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            repo.wwrite(wfn, repo.file(fn).read(mf[fn]), open(dest, 'w'))
        return dirname

    def snapshot_wdir(files):
        '''snapshot files from working directory.
        if not using snapshot, -I/-X does not work and recursive diff
        in tools like kdiff3 and meld displays too many files.'''
        dirname = os.path.basename(repo.root)
        base = os.path.join(tmproot, dirname)
        os.mkdir(base)
        if not ui.quiet:
            ui.write_err(_('making snapshot of %d files from working dir\n') %
                         (len(files)))
        for fn in files:
            wfn = util.pconvert(fn)
            ui.note(' %s\n' % wfn)
            dest = os.path.join(base, wfn)
            destdir = os.path.dirname(dest)
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            fp = open(dest, 'w')
            for chunk in util.filechunkiter(repo.wopener(wfn)):
                fp.write(chunk)
        return dirname

    node1, node2 = commands.revpair(ui, repo, opts['rev'])
    files, matchfn, anypats = commands.matchpats(repo, pats, opts)
-    modified, added, removed, deleted, unknown = repo.changes(
-        node1, node2, files, match=matchfn)
+    modified, added, removed, deleted, unknown = repo.status(
+        node1, node2, files, match=matchfn)[:5]
    if not (modified or added or removed):
        return 0

    tmproot = tempfile.mkdtemp(prefix='extdiff.')
    try:
        dir1 = snapshot_node(modified + removed, node1)
        if node2:
            dir2 = snapshot_node(modified + added, node2)
        else:
            dir2 = snapshot_wdir(modified + added)
        util.system('%s %s "%s" "%s"' %
                    (diffcmd, ' '.join(opts['option']), dir1, dir2),
                    cwd=tmproot)
        return 1
    finally:
        ui.note(_('cleaning up temp directory\n'))
        shutil.rmtree(tmproot)

def extdiff(ui, repo, *pats, **opts):
    '''use external program to diff repository (or selected files)

    Show differences between revisions for the specified files, using
    an external program. The default program used is "diff -Npru".
    To select a different program, use the -p option. The program
    will be passed the names of two directories to compare. To pass
    additional options to the program, use the -o option. These will
    be passed before the names of the directories to compare.

    When two revision arguments are given, then changes are
    shown between those revisions. If only one revision is
    specified then that revision is compared to the working
    directory, and, when no revisions are specified, the
    working directory files are compared to its parent.'''
    return dodiff(ui, repo, opts['program'] or 'diff -Npru', pats, opts)

cmdtable = {
    "extdiff":
    (extdiff,
     [('p', 'program', '', _('comparison program to run')),
      ('o', 'option', [], _('pass option to comparison program')),
      ('r', 'rev', [], _('revision')),
      ('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     _('hg extdiff [OPT]... [FILE]...')),
    }

def uisetup(ui):
    for cmd, path in ui.configitems('extdiff'):
        if not cmd.startswith('cmd.'): continue
        cmd = cmd[4:]
        if not path: path = cmd
        def save(cmd, path):
            '''use closure to save diff command to use'''
            def mydiff(ui, repo, *pats, **opts):
                return dodiff(ui, repo, path, pats, opts)
            mydiff.__doc__ = '''use %s to diff repository (or selected files)

            Show differences between revisions for the specified
            files, using the %s program.

            When two revision arguments are given, then changes are
            shown between those revisions. If only one revision is
            specified then that revision is compared to the working
            directory, and, when no revisions are specified, the
            working directory files are compared to its parent.''' % (cmd, cmd)
            return mydiff
        cmdtable[cmd] = (save(cmd, path),
                         cmdtable['extdiff'][1][1:],
                         _('hg %s [OPT]... [FILE]...') % cmd)
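With the sample [extdiff] section from the header comment, uisetup() registers each cmd.* entry as a command of its own; the following is only a sketch of the equivalent direct call (the kdiff3 program and the revision names are assumptions about the local setup):

    # roughly what a configured "hg vdiff -r 1.0 -r tip" boils down to
    opts = {'rev': ['1.0', 'tip'], 'option': [], 'include': [], 'exclude': []}
    dodiff(ui, repo, 'kdiff3', [], opts)  # snapshot both revisions, then run kdiff3 on the two dirs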
@@ -1,269 +1,269 @@
# GnuPG signing extension for Mercurial
#
# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os, tempfile, binascii
from mercurial import util
from mercurial import node as hgnode
from mercurial.i18n import gettext as _

class gpg:
    def __init__(self, path, key=None):
        self.path = path
        self.key = (key and " --local-user \"%s\"" % key) or ""

    def sign(self, data):
        gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
        return util.filter(data, gpgcmd)

    def verify(self, data, sig):
        """ returns of the good and bad signatures"""
        sigfile = datafile = None
        try:
            # create temporary files
            fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
            fp = os.fdopen(fd, 'wb')
            fp.write(sig)
            fp.close()
            fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
            fp = os.fdopen(fd, 'wb')
            fp.write(data)
            fp.close()
            gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
                      "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
            ret = util.filter("", gpgcmd)
        finally:
            for f in (sigfile, datafile):
                try:
                    if f: os.unlink(f)
                except: pass
        keys = []
        key, fingerprint = None, None
        err = ""
        for l in ret.splitlines():
            # see DETAILS in the gnupg documentation
            # filter the logger output
            if not l.startswith("[GNUPG:]"):
                continue
            l = l[9:]
            if l.startswith("ERRSIG"):
                err = _("error while verifying signature")
                break
            elif l.startswith("VALIDSIG"):
                # fingerprint of the primary key
                fingerprint = l.split()[10]
            elif (l.startswith("GOODSIG") or
                  l.startswith("EXPSIG") or
                  l.startswith("EXPKEYSIG") or
                  l.startswith("BADSIG")):
                if key is not None:
                    keys.append(key + [fingerprint])
                key = l.split(" ", 2)
                fingerprint = None
        if err:
            return err, []
        if key is not None:
            keys.append(key + [fingerprint])
        return err, keys

def newgpg(ui, **opts):
    """create a new gpg instance"""
    gpgpath = ui.config("gpg", "cmd", "gpg")
    gpgkey = opts.get('key')
    if not gpgkey:
        gpgkey = ui.config("gpg", "key", None)
    return gpg(gpgpath, gpgkey)

def sigwalk(repo):
    """
    walk over every sigs, yields a couple
    ((node, version, sig), (filename, linenumber))
    """
    def parsefile(fileiter, context):
        ln = 1
        for l in fileiter:
            if not l:
                continue
            yield (l.split(" ", 2), (context, ln))
            ln +=1

    fl = repo.file(".hgsigs")
    h = fl.heads()
    h.reverse()
    # read the heads
    for r in h:
        fn = ".hgsigs|%s" % hgnode.short(r)
        for item in parsefile(fl.read(r).splitlines(), fn):
            yield item
    try:
        # read local signatures
        fn = "localsigs"
        for item in parsefile(repo.opener(fn), fn):
            yield item
    except IOError:
        pass

def getkeys(ui, repo, mygpg, sigdata, context):
    """get the keys who signed a data"""
    fn, ln = context
    node, version, sig = sigdata
    prefix = "%s:%d" % (fn, ln)
    node = hgnode.bin(node)

    data = node2txt(repo, node, version)
    sig = binascii.a2b_base64(sig)
    err, keys = mygpg.verify(data, sig)
    if err:
        ui.warn("%s:%d %s\n" % (fn, ln , err))
        return None

    validkeys = []
    # warn for expired key and/or sigs
    for key in keys:
        if key[0] == "BADSIG":
            ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
            continue
        if key[0] == "EXPSIG":
            ui.write(_("%s Note: Signature has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        elif key[0] == "EXPKEYSIG":
            ui.write(_("%s Note: This key has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        validkeys.append((key[1], key[2], key[3]))
    return validkeys

def sigs(ui, repo):
    """list signed changesets"""
    mygpg = newgpg(ui)
    revs = {}

    for data, context in sigwalk(repo):
        node, version, sig = data
        fn, ln = context
        try:
            n = repo.lookup(node)
        except KeyError:
            ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
            continue
        r = repo.changelog.rev(n)
        keys = getkeys(ui, repo, mygpg, data, context)
        if not keys:
            continue
        revs.setdefault(r, [])
        revs[r].extend(keys)
    nodes = list(revs)
    nodes.reverse()
    for rev in nodes:
        for k in revs[rev]:
            r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
            ui.write("%-30s %s\n" % (keystr(ui, k), r))

def check(ui, repo, rev):
    """verify all the signatures there may be for a particular revision"""
    mygpg = newgpg(ui)
    rev = repo.lookup(rev)
    hexrev = hgnode.hex(rev)
    keys = []

    for data, context in sigwalk(repo):
        node, version, sig = data
        if node == hexrev:
            k = getkeys(ui, repo, mygpg, data, context)
            if k:
                keys.extend(k)

    if not keys:
        ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
        return

    # print summary
    ui.write("%s is signed by:\n" % hgnode.short(rev))
    for key in keys:
        ui.write(" %s\n" % keystr(ui, key))

def keystr(ui, key):
    """associate a string to a key (username, comment)"""
    keyid, user, fingerprint = key
    comment = ui.config("gpg", fingerprint, None)
    if comment:
        return "%s (%s)" % (user, comment)
    else:
        return user

def sign(ui, repo, *revs, **opts):
    """add a signature for the current tip or a given revision"""
    mygpg = newgpg(ui, **opts)
    sigver = "0"
    sigmessage = ""
    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
                                      hgnode.short(n)))
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise util.Abort(_("Error while signing"))
        sig = binascii.b2a_base64(sig)
        sig = sig.replace("\n", "")
        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts['local']:
        repo.opener("localsigs", "ab").write(sigmessage)
        return

-    for x in repo.changes():
+    for x in repo.status()[:5]:
        if ".hgsigs" in x and not opts["force"]:
            raise util.Abort(_("working copy of .hgsigs is changed "
                               "(please commit .hgsigs manually "
                               "or use --force)"))

    repo.wfile(".hgsigs", "ab").write(sigmessage)

    if repo.dirstate.state(".hgsigs") == '?':
        repo.add([".hgsigs"])

    if opts["no_commit"]:
        return

    message = opts['message']
    if not message:
        message = "\n".join([_("Added signature for changeset %s")
                             % hgnode.hex(n)
                             for n in nodes])
    try:
        repo.commit([".hgsigs"], message, opts['user'], opts['date'])
    except ValueError, inst:
        raise util.Abort(str(inst))

def node2txt(repo, node, ver):
    """map a manifest into some text"""
    if ver == "0":
        return "%s\n" % hgnode.hex(node)
    else:
        raise util.Abort(_("unknown signature version"))

cmdtable = {
    "sign":
    (sign,
     [('l', 'local', None, _("make the signature local")),
      ('f', 'force', None, _("sign even if the sigfile is modified")),
      ('', 'no-commit', None, _("do not commit the sigfile after signing")),
      ('m', 'message', "", _("commit message")),
      ('d', 'date', "", _("date code")),
      ('u', 'user', "", _("user")),
      ('k', 'key', "", _("the key id to sign with"))],
     _("hg sign [OPTION]... [REVISION]...")),
    "sigcheck": (check, [], _('hg sigcheck REVISION')),
    "sigs": (sigs, [], _('hg sigs')),
}
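The gpg wrapper class above can also be exercised on its own; a rough sketch only (the "gpg" binary path and the sample payload are placeholders, and verify() hands back an error string plus the key fields it parsed from the gpg status output):

    g = gpg("gpg")                        # wrap the system gpg binary, no --local-user key
    sig = g.sign("deadbeef\n")            # detached signature, produced through util.filter()
    err, keys = g.verify("deadbeef\n", sig)
    # err is "" on success; each entry of keys is [status, keyid, user id, fingerprint]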
@@ -1,299 +1,299 @@
# bisect extension for mercurial
#
# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
# Inspired by git bisect, extension skeleton taken from mq.py.
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from mercurial.i18n import gettext as _
from mercurial.demandload import demandload
demandload(globals(), "os sys sets mercurial:hg,util,commands")

versionstr = "0.0.3"

def lookup_rev(ui, repo, rev=None):
    """returns rev or the checked-out revision if rev is None"""
    if not rev is None:
        return repo.lookup(rev)
    parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
    if len(parents) != 1:
        raise util.Abort(_("unexpected number of parents, "
                           "please commit or revert"))
    return parents.pop()

def check_clean(ui, repo):
-    modified, added, removed, deleted, unknown = repo.changes()
+    modified, added, removed, deleted, unknown = repo.status()[:5]
    if modified or added or removed:
        ui.warn("Repository is not clean, please commit or revert\n")
        sys.exit(1)

class bisect(object):
    """dichotomic search in the DAG of changesets"""
    def __init__(self, ui, repo):
        self.repo = repo
        self.path = repo.join("bisect")
        self.opener = util.opener(self.path)
        self.ui = ui
        self.goodrevs = []
        self.badrev = None
        self.good_dirty = 0
        self.bad_dirty = 0
        self.good_path = "good"
        self.bad_path = "bad"

        if os.path.exists(os.path.join(self.path, self.good_path)):
            self.goodrevs = self.opener(self.good_path).read().splitlines()
            self.goodrevs = [hg.bin(x) for x in self.goodrevs]
        if os.path.exists(os.path.join(self.path, self.bad_path)):
            r = self.opener(self.bad_path).read().splitlines()
            if r:
                self.badrev = hg.bin(r.pop(0))

    def write(self):
        if not os.path.isdir(self.path):
            return
        f = self.opener(self.good_path, "w")
        f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
        if len(self.goodrevs) > 0:
            f.write("\n")
        f = self.opener(self.bad_path, "w")
        if self.badrev:
            f.write(hg.hex(self.badrev) + "\n")

    def init(self):
        """start a new bisection"""
        if os.path.isdir(self.path):
            raise util.Abort(_("bisect directory already exists\n"))
        os.mkdir(self.path)
        check_clean(self.ui, self.repo)
        return 0

    def reset(self):
        """finish a bisection"""
        if os.path.isdir(self.path):
            sl = [os.path.join(self.path, p)
                  for p in [self.bad_path, self.good_path]]
            for s in sl:
                if os.path.exists(s):
                    os.unlink(s)
            os.rmdir(self.path)
        # Not sure about this
        #self.ui.write("Going back to tip\n")
        #self.repo.update(self.repo.changelog.tip())
        return 1

    def num_ancestors(self, head=None, stop=None):
        """
        returns a dict with the mapping:
        node -> number of ancestors (self included)
        for all nodes who are ancestor of head and
        not in stop.
        """
        if head is None:
            head = self.badrev
        return self.__ancestors_and_nb_ancestors(head, stop)[1]

    def ancestors(self, head=None, stop=None):
        """
        returns the set of the ancestors of head (self included)
        who are not in stop.
        """
        if head is None:
            head = self.badrev
        return self.__ancestors_and_nb_ancestors(head, stop)[0]

    def __ancestors_and_nb_ancestors(self, head, stop=None):
        """
        if stop is None then ancestors of goodrevs are used as
        lower limit.

        returns (anc, n_child) where anc is the set of the ancestors of head
        and n_child is a dictionary with the following mapping:
        node -> number of ancestors (self included)
        """
        cl = self.repo.changelog
        if not stop:
            stop = sets.Set([])
            for i in xrange(len(self.goodrevs)-1, -1, -1):
                g = self.goodrevs[i]
                if g in stop:
                    continue
                stop.update(cl.reachable(g))
        def num_children(a):
            """
            returns a dictionnary with the following mapping
            node -> [number of children, empty set]
            """
            d = {a: [0, sets.Set([])]}
            for i in xrange(cl.rev(a)+1):
                n = cl.node(i)
                if not d.has_key(n):
                    d[n] = [0, sets.Set([])]
                parents = [p for p in cl.parents(n) if p != hg.nullid]
                for p in parents:
                    d[p][0] += 1
            return d

        if head in stop:
            raise util.Abort(_("Unconsistent state, %s:%s is good and bad")
                             % (cl.rev(head), hg.short(head)))
        n_child = num_children(head)
        for i in xrange(cl.rev(head)+1):
            n = cl.node(i)
            parents = [p for p in cl.parents(n) if p != hg.nullid]
            for p in parents:
                n_child[p][0] -= 1
                if not n in stop:
                    n_child[n][1].union_update(n_child[p][1])
                if n_child[p][0] == 0:
                    n_child[p] = len(n_child[p][1])
            if not n in stop:
                n_child[n][1].add(n)
                if n_child[n][0] == 0:
                    if n == head:
                        anc = n_child[n][1]
                    n_child[n] = len(n_child[n][1])
        return anc, n_child

    def next(self):
        if not self.badrev:
            raise util.Abort(_("You should give at least one bad revision"))
        if not self.goodrevs:
            self.ui.warn(_("No good revision given\n"))
            self.ui.warn(_("Marking the first revision as good\n"))
        ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(
            self.badrev)
        tot = len(ancestors)
        if tot == 1:
            if ancestors.pop() != self.badrev:
                raise util.Abort(_("Could not find the first bad revision"))
            self.ui.write(_("The first bad revision is:\n"))
            displayer = commands.show_changeset(self.ui, self.repo, {})
            displayer.show(changenode=self.badrev)
            return None
        best_rev = None
        best_len = -1
        for n in ancestors:
            l = num_ancestors[n]
            l = min(l, tot - l)
            if l > best_len:
                best_len = l
                best_rev = n
        assert best_rev is not None
        nb_tests = 0
        q, r = divmod(tot, 2)
        while q:
            nb_tests += 1
            q, r = divmod(q, 2)
        msg = _("Testing changeset %s:%s (%s changesets remaining, "
                "~%s tests)\n") % (self.repo.changelog.rev(best_rev),
                                   hg.short(best_rev), tot, nb_tests)
        self.ui.write(msg)
        return best_rev

    def autonext(self):
        """find and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = self.next()
        if rev is not None:
            return hg.clean(self.repo, rev)

    def good(self, rev):
        self.goodrevs.append(rev)

    def autogood(self, rev=None):
        """mark revision as good and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = lookup_rev(self.ui, self.repo, rev)
        self.good(rev)
        if self.badrev:
            return self.autonext()

    def bad(self, rev):
        self.badrev = rev

    def autobad(self, rev=None):
        """mark revision as bad and update to the next revision to test"""
        check_clean(self.ui, self.repo)
        rev = lookup_rev(self.ui, self.repo, rev)
        self.bad(rev)
        if self.goodrevs:
            self.autonext()

# should we put it in the class ?
def test(ui, repo, rev):
    """test the bisection code"""
    b = bisect(ui, repo)
    rev = repo.lookup(rev)
    ui.write("testing with rev %s\n" % hg.hex(rev))
    anc = b.ancestors()
    while len(anc) > 1:
        if not rev in anc:
            ui.warn("failure while bisecting\n")
            sys.exit(1)
        ui.write("it worked :)\n")
        new_rev = b.next()
        ui.write("choosing if good or bad\n")
        if rev in b.ancestors(head=new_rev):
            b.bad(new_rev)
            ui.write("it is bad\n")
        else:
            b.good(new_rev)
            ui.write("it is good\n")
        anc = b.ancestors()
        #repo.update(new_rev, force=True)
    for v in anc:
        if v != rev:
            ui.warn("fail to found cset! :(\n")
            return 1
    ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
    ui.write("Everything is ok :)\n")
    return 0

def bisect_run(ui, repo, cmd=None, *args):
    """bisect extension: dichotomic search in the DAG of changesets
    for subcommands see "hg bisect help\"
    """
    def help_(cmd=None, *args):
        """show help for a given bisect subcommand or all subcommands"""
        cmdtable = bisectcmdtable
        if cmd:
            doc = cmdtable[cmd][0].__doc__
            synopsis = cmdtable[cmd][2]
            ui.write(synopsis + "\n")
            ui.write("\n" + doc + "\n")
            return
        ui.write(_("list of subcommands for the bisect extension\n\n"))
        cmds = cmdtable.keys()
        cmds.sort()
        m = max([len(c) for c in cmds])
        for cmd in cmds:
            doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
            ui.write(" %-*s %s\n" % (m, cmd, doc))

    b = bisect(ui, repo)
    bisectcmdtable = {
        "init": (b.init, 0, _("hg bisect init")),
        "bad": (b.autobad, 1, _("hg bisect bad [<rev>]")),
        "good": (b.autogood, 1, _("hg bisect good [<rev>]")),
        "next": (b.autonext, 0, _("hg bisect next")),
        "reset": (b.reset, 0, _("hg bisect reset")),
        "help": (help_, 1, _("hg bisect help [<subcommand>]")),
    }

    if not bisectcmdtable.has_key(cmd):
        ui.warn(_("bisect: Unknown sub-command\n"))
        return help_()
    if len(args) > bisectcmdtable[cmd][1]:
        ui.warn(_("bisect: Too many arguments\n"))
        return help_()
    try:
        return bisectcmdtable[cmd][0](*args)
    finally:
        b.write()

cmdtable = {
    "bisect": (bisect_run, [], _("hg bisect [help|init|reset|next|good|bad]")),
    #"bisect-test": (test, [], "hg bisect-test rev"),
}
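The bisect class can also be driven without going through the hg bisect command table; a minimal sketch, assuming ui and repo are an existing ui object and local repository and that "1.0" names a known-good revision:

    b = bisect(ui, repo)
    b.init()                    # creates .hg/bisect and requires a clean working dir
    b.bad(repo.lookup("tip"))   # record the bad revision as a binary node
    b.autogood("1.0")           # record a good revision and update to the next one to test
    b.write()                   # persist the good/bad state under .hg/bisect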
@@ -1,336 +1,336 b''
1 # Minimal support for git commands on an hg repository
1 # Minimal support for git commands on an hg repository
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import time, sys, signal, os
8 import time, sys, signal, os
9 from mercurial import hg, mdiff, fancyopts, commands, ui, util
9 from mercurial import hg, mdiff, fancyopts, commands, ui, util
10
10
11 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
11 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
12 changes=None, text=False):
12 changes=None, text=False):
13 def date(c):
13 def date(c):
14 return time.asctime(time.gmtime(c[2][0]))
14 return time.asctime(time.gmtime(c[2][0]))
15
15
16 if not changes:
16 if not changes:
17 changes = repo.changes(node1, node2, files, match=match)
17 changes = repo.status(node1, node2, files, match=match)[:5]
18 modified, added, removed, deleted, unknown = changes
18 modified, added, removed, deleted, unknown = changes
19 if files:
19 if files:
20 modified, added, removed = map(lambda x: filterfiles(files, x),
20 modified, added, removed = map(lambda x: filterfiles(files, x),
21 (modified, added, removed))
21 (modified, added, removed))
22
22
23 if not modified and not added and not removed:
23 if not modified and not added and not removed:
24 return
24 return
25
25
26 if node2:
26 if node2:
27 change = repo.changelog.read(node2)
27 change = repo.changelog.read(node2)
28 mmap2 = repo.manifest.read(change[0])
28 mmap2 = repo.manifest.read(change[0])
29 date2 = date(change)
29 date2 = date(change)
30 def read(f):
30 def read(f):
31 return repo.file(f).read(mmap2[f])
31 return repo.file(f).read(mmap2[f])
32 else:
32 else:
33 date2 = time.asctime()
33 date2 = time.asctime()
34 if not node1:
34 if not node1:
35 node1 = repo.dirstate.parents()[0]
35 node1 = repo.dirstate.parents()[0]
36 def read(f):
36 def read(f):
37 return repo.wfile(f).read()
37 return repo.wfile(f).read()
38
38
39 change = repo.changelog.read(node1)
39 change = repo.changelog.read(node1)
40 mmap = repo.manifest.read(change[0])
40 mmap = repo.manifest.read(change[0])
41 date1 = date(change)
41 date1 = date(change)
42
42
43 for f in modified:
43 for f in modified:
44 to = None
44 to = None
45 if f in mmap:
45 if f in mmap:
46 to = repo.file(f).read(mmap[f])
46 to = repo.file(f).read(mmap[f])
47 tn = read(f)
47 tn = read(f)
48 fp.write("diff --git a/%s b/%s\n" % (f, f))
48 fp.write("diff --git a/%s b/%s\n" % (f, f))
49 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
49 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
50 for f in added:
50 for f in added:
51 to = None
51 to = None
52 tn = read(f)
52 tn = read(f)
53 fp.write("diff --git /dev/null b/%s\n" % (f))
53 fp.write("diff --git /dev/null b/%s\n" % (f))
54 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
54 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
55 for f in removed:
55 for f in removed:
56 to = repo.file(f).read(mmap[f])
56 to = repo.file(f).read(mmap[f])
57 tn = None
57 tn = None
58 fp.write("diff --git a/%s /dev/null\n" % (f))
58 fp.write("diff --git a/%s /dev/null\n" % (f))
59 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
59 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
60
60
61 def difftree(ui, repo, node1=None, node2=None, **opts):
61 def difftree(ui, repo, node1=None, node2=None, **opts):
62 """diff trees from two commits"""
62 """diff trees from two commits"""
63 def __difftree(repo, node1, node2):
63 def __difftree(repo, node1, node2):
64 def date(c):
64 def date(c):
65 return time.asctime(time.gmtime(c[2][0]))
65 return time.asctime(time.gmtime(c[2][0]))
66
66
67 if node2:
67 if node2:
68 change = repo.changelog.read(node2)
68 change = repo.changelog.read(node2)
69 mmap2 = repo.manifest.read(change[0])
69 mmap2 = repo.manifest.read(change[0])
70 modified, added, removed, deleted, unknown = repo.changes(node1, node2)
70 modified, added, removed, deleted, unknown = repo.status(node1, node2)[:5]
71 def read(f): return repo.file(f).read(mmap2[f])
71 def read(f): return repo.file(f).read(mmap2[f])
72 date2 = date(change)
72 date2 = date(change)
73 else:
73 else:
74 date2 = time.asctime()
74 date2 = time.asctime()
75 modified, added, removed, deleted, unknown = repo.changes(node1)
75 modified, added, removed, deleted, unknown = repo.status(node1)[:5]
76 if not node1:
76 if not node1:
77 node1 = repo.dirstate.parents()[0]
77 node1 = repo.dirstate.parents()[0]
78 def read(f): return file(os.path.join(repo.root, f)).read()
78 def read(f): return file(os.path.join(repo.root, f)).read()
79
79
80 change = repo.changelog.read(node1)
80 change = repo.changelog.read(node1)
81 mmap = repo.manifest.read(change[0])
81 mmap = repo.manifest.read(change[0])
82 date1 = date(change)
82 date1 = date(change)
83 empty = "0" * 40;
83 empty = "0" * 40;
84
84
85 for f in modified:
85 for f in modified:
86 # TODO get file permissions
86 # TODO get file permissions
87 print ":100664 100664 %s %s M\t%s\t%s" % (hg.hex(mmap[f]),
87 print ":100664 100664 %s %s M\t%s\t%s" % (hg.hex(mmap[f]),
88 hg.hex(mmap2[f]), f, f)
88 hg.hex(mmap2[f]), f, f)
89 for f in added:
89 for f in added:
90 print ":000000 100664 %s %s N\t%s\t%s" % (empty, hg.hex(mmap2[f]), f, f)
90 print ":000000 100664 %s %s N\t%s\t%s" % (empty, hg.hex(mmap2[f]), f, f)
91 for f in removed:
91 for f in removed:
92 print ":100664 000000 %s %s D\t%s\t%s" % (hg.hex(mmap[f]), empty, f, f)
92 print ":100664 000000 %s %s D\t%s\t%s" % (hg.hex(mmap[f]), empty, f, f)
93 ##
93 ##
94
94
95 while True:
95 while True:
96 if opts['stdin']:
96 if opts['stdin']:
97 try:
97 try:
98 line = raw_input().split(' ')
98 line = raw_input().split(' ')
99 node1 = line[0]
99 node1 = line[0]
100 if len(line) > 1:
100 if len(line) > 1:
101 node2 = line[1]
101 node2 = line[1]
102 else:
102 else:
103 node2 = None
103 node2 = None
104 except EOFError:
104 except EOFError:
105 break
105 break
106 node1 = repo.lookup(node1)
106 node1 = repo.lookup(node1)
107 if node2:
107 if node2:
108 node2 = repo.lookup(node2)
108 node2 = repo.lookup(node2)
109 else:
109 else:
110 node2 = node1
110 node2 = node1
111 node1 = repo.changelog.parents(node1)[0]
111 node1 = repo.changelog.parents(node1)[0]
112 if opts['patch']:
112 if opts['patch']:
113 if opts['pretty']:
113 if opts['pretty']:
114 catcommit(repo, node2, "")
114 catcommit(repo, node2, "")
115 dodiff(sys.stdout, ui, repo, node1, node2)
115 dodiff(sys.stdout, ui, repo, node1, node2)
116 else:
116 else:
117 __difftree(repo, node1, node2)
117 __difftree(repo, node1, node2)
118 if not opts['stdin']:
118 if not opts['stdin']:
119 break
119 break
120
120
121 def catcommit(repo, n, prefix, changes=None):
121 def catcommit(repo, n, prefix, changes=None):
122 nlprefix = '\n' + prefix;
122 nlprefix = '\n' + prefix;
123 (p1, p2) = repo.changelog.parents(n)
123 (p1, p2) = repo.changelog.parents(n)
124 (h, h1, h2) = map(hg.hex, (n, p1, p2))
124 (h, h1, h2) = map(hg.hex, (n, p1, p2))
125 (i1, i2) = map(repo.changelog.rev, (p1, p2))
125 (i1, i2) = map(repo.changelog.rev, (p1, p2))
126 if not changes:
126 if not changes:
127 changes = repo.changelog.read(n)
127 changes = repo.changelog.read(n)
128 print "tree %s" % (hg.hex(changes[0]))
128 print "tree %s" % (hg.hex(changes[0]))
129 if i1 != -1: print "parent %s" % (h1)
129 if i1 != -1: print "parent %s" % (h1)
130 if i2 != -1: print "parent %s" % (h2)
130 if i2 != -1: print "parent %s" % (h2)
131 date_ar = changes[2]
131 date_ar = changes[2]
132 date = int(float(date_ar[0]))
132 date = int(float(date_ar[0]))
133 lines = changes[4].splitlines()
133 lines = changes[4].splitlines()
134 if lines and lines[-1].startswith('committer:'):
134 if lines and lines[-1].startswith('committer:'):
135 committer = lines[-1].split(': ')[1].rstrip()
135 committer = lines[-1].split(': ')[1].rstrip()
136 else:
136 else:
137 committer = changes[1]
137 committer = changes[1]
138
138
139 print "author %s %s %s" % (changes[1], date, date_ar[1])
139 print "author %s %s %s" % (changes[1], date, date_ar[1])
140 print "committer %s %s %s" % (committer, date, date_ar[1])
140 print "committer %s %s %s" % (committer, date, date_ar[1])
141 print ""
141 print ""
142 if prefix != "":
142 if prefix != "":
143 print "%s%s" % (prefix, changes[4].replace('\n', nlprefix).strip())
143 print "%s%s" % (prefix, changes[4].replace('\n', nlprefix).strip())
144 else:
144 else:
145 print changes[4]
145 print changes[4]
146 if prefix:
146 if prefix:
147 sys.stdout.write('\0')
147 sys.stdout.write('\0')
148
148
149 def base(ui, repo, node1, node2):
149 def base(ui, repo, node1, node2):
150 """Output common ancestor information"""
150 """Output common ancestor information"""
151 node1 = repo.lookup(node1)
151 node1 = repo.lookup(node1)
152 node2 = repo.lookup(node2)
152 node2 = repo.lookup(node2)
153 n = repo.changelog.ancestor(node1, node2)
153 n = repo.changelog.ancestor(node1, node2)
154 print hg.hex(n)
154 print hg.hex(n)
155
155
156 def catfile(ui, repo, type=None, r=None, **opts):
156 def catfile(ui, repo, type=None, r=None, **opts):
157 """cat a specific revision"""
157 """cat a specific revision"""
158 # in stdin mode, every line except the commit is prefixed with two
158 # in stdin mode, every line except the commit is prefixed with two
159 # spaces. This way the our caller can find the commit without magic
159 # spaces. This way the our caller can find the commit without magic
160 # strings
160 # strings
161 #
161 #
162 prefix = ""
162 prefix = ""
163 if opts['stdin']:
163 if opts['stdin']:
164 try:
164 try:
165 (type, r) = raw_input().split(' ');
165 (type, r) = raw_input().split(' ');
166 prefix = " "
166 prefix = " "
167 except EOFError:
167 except EOFError:
168 return
168 return
169
169
170 else:
170 else:
171 if not type or not r:
171 if not type or not r:
172 ui.warn("cat-file: type or revision not supplied\n")
172 ui.warn("cat-file: type or revision not supplied\n")
173 commands.help_(ui, 'cat-file')
173 commands.help_(ui, 'cat-file')
174
174
175 while r:
175 while r:
176 if type != "commit":
176 if type != "commit":
177 sys.stderr.write("aborting hg cat-file only understands commits\n")
177 sys.stderr.write("aborting hg cat-file only understands commits\n")
178 sys.exit(1);
178 sys.exit(1);
179 n = repo.lookup(r)
179 n = repo.lookup(r)
180 catcommit(repo, n, prefix)
180 catcommit(repo, n, prefix)
181 if opts['stdin']:
181 if opts['stdin']:
182 try:
182 try:
183 (type, r) = raw_input().split(' ');
183 (type, r) = raw_input().split(' ');
184 except EOFError:
184 except EOFError:
185 break
185 break
186 else:
186 else:
187 break
187 break
188
188
189 # git rev-tree is a confusing thing. You can supply a number of
189 # git rev-tree is a confusing thing. You can supply a number of
190 # commit sha1s on the command line, and it walks the commit history
190 # commit sha1s on the command line, and it walks the commit history
191 # telling you which commits are reachable from the supplied ones via
191 # telling you which commits are reachable from the supplied ones via
192 # a bitmask based on arg position.
192 # a bitmask based on arg position.
193 # You can specify a commit to stop at by starting its sha1 with ^.
193 # You can specify a commit to stop at by starting its sha1 with ^.
194 def revtree(args, repo, full="tree", maxnr=0, parents=False):
194 def revtree(args, repo, full="tree", maxnr=0, parents=False):
195 def chlogwalk():
195 def chlogwalk():
196 ch = repo.changelog
196 ch = repo.changelog
197 count = ch.count()
197 count = ch.count()
198 i = count
198 i = count
199 l = [0] * 100
199 l = [0] * 100
200 chunk = 100
200 chunk = 100
201 while True:
201 while True:
202 if chunk > i:
202 if chunk > i:
203 chunk = i
203 chunk = i
204 i = 0
204 i = 0
205 else:
205 else:
206 i -= chunk
206 i -= chunk
207
207
208 for x in xrange(0, chunk):
208 for x in xrange(0, chunk):
209 if i + x >= count:
209 if i + x >= count:
210 l[chunk - x:] = [0] * (chunk - x)
210 l[chunk - x:] = [0] * (chunk - x)
211 break
211 break
212 if full != None:
212 if full != None:
213 l[x] = ch.read(ch.node(i + x))
213 l[x] = ch.read(ch.node(i + x))
214 else:
214 else:
215 l[x] = 1
215 l[x] = 1
216 for x in xrange(chunk-1, -1, -1):
216 for x in xrange(chunk-1, -1, -1):
217 if l[x] != 0:
217 if l[x] != 0:
218 yield (i + x, full != None and l[x] or None)
218 yield (i + x, full != None and l[x] or None)
219 if i == 0:
219 if i == 0:
220 break
220 break
221
221
222 # calculate and return the reachability bitmask for sha
222 # calculate and return the reachability bitmask for sha
223 def is_reachable(ar, reachable, sha):
223 def is_reachable(ar, reachable, sha):
224 if len(ar) == 0:
224 if len(ar) == 0:
225 return 1
225 return 1
226 mask = 0
226 mask = 0
227 for i in range(len(ar)):
227 for i in range(len(ar)):
228 if sha in reachable[i]:
228 if sha in reachable[i]:
229 mask |= 1 << i
229 mask |= 1 << i
230
230
231 return mask
231 return mask
232
232
233 reachable = []
233 reachable = []
234 stop_sha1 = []
234 stop_sha1 = []
235 want_sha1 = []
235 want_sha1 = []
236 count = 0
236 count = 0
237
237
238 # figure out which commits they are asking for and which ones they
238 # figure out which commits they are asking for and which ones they
239 # want us to stop on
239 # want us to stop on
240 for i in range(len(args)):
240 for i in range(len(args)):
241 if args[i].startswith('^'):
241 if args[i].startswith('^'):
242 s = repo.lookup(args[i][1:])
242 s = repo.lookup(args[i][1:])
243 stop_sha1.append(s)
243 stop_sha1.append(s)
244 want_sha1.append(s)
244 want_sha1.append(s)
245 elif args[i] != 'HEAD':
245 elif args[i] != 'HEAD':
246 want_sha1.append(repo.lookup(args[i]))
246 want_sha1.append(repo.lookup(args[i]))
247
247
248 # calculate the graph for the supplied commits
248 # calculate the graph for the supplied commits
249 for i in range(len(want_sha1)):
249 for i in range(len(want_sha1)):
250 reachable.append({});
250 reachable.append({});
251 n = want_sha1[i];
251 n = want_sha1[i];
252 visit = [n];
252 visit = [n];
253 reachable[i][n] = 1
253 reachable[i][n] = 1
254 while visit:
254 while visit:
255 n = visit.pop(0)
255 n = visit.pop(0)
256 if n in stop_sha1:
256 if n in stop_sha1:
257 continue
257 continue
258 for p in repo.changelog.parents(n):
258 for p in repo.changelog.parents(n):
259 if p not in reachable[i]:
259 if p not in reachable[i]:
260 reachable[i][p] = 1
260 reachable[i][p] = 1
261 visit.append(p)
261 visit.append(p)
262 if p in stop_sha1:
262 if p in stop_sha1:
263 continue
263 continue
264
264
265 # walk the repository looking for commits that are in our
265 # walk the repository looking for commits that are in our
266 # reachability graph
266 # reachability graph
267 #for i in range(repo.changelog.count()-1, -1, -1):
267 #for i in range(repo.changelog.count()-1, -1, -1):
268 for i, changes in chlogwalk():
268 for i, changes in chlogwalk():
269 n = repo.changelog.node(i)
269 n = repo.changelog.node(i)
270 mask = is_reachable(want_sha1, reachable, n)
270 mask = is_reachable(want_sha1, reachable, n)
271 if mask:
271 if mask:
272 parentstr = ""
272 parentstr = ""
273 if parents:
273 if parents:
274 pp = repo.changelog.parents(n)
274 pp = repo.changelog.parents(n)
275 if pp[0] != hg.nullid:
275 if pp[0] != hg.nullid:
276 parentstr += " " + hg.hex(pp[0])
276 parentstr += " " + hg.hex(pp[0])
277 if pp[1] != hg.nullid:
277 if pp[1] != hg.nullid:
278 parentstr += " " + hg.hex(pp[1])
278 parentstr += " " + hg.hex(pp[1])
279 if not full:
279 if not full:
280 print hg.hex(n) + parentstr
280 print hg.hex(n) + parentstr
281 elif full == "commit":
281 elif full == "commit":
282 print hg.hex(n) + parentstr
282 print hg.hex(n) + parentstr
283 catcommit(repo, n, ' ', changes)
283 catcommit(repo, n, ' ', changes)
284 else:
284 else:
285 (p1, p2) = repo.changelog.parents(n)
285 (p1, p2) = repo.changelog.parents(n)
286 (h, h1, h2) = map(hg.hex, (n, p1, p2))
286 (h, h1, h2) = map(hg.hex, (n, p1, p2))
287 (i1, i2) = map(repo.changelog.rev, (p1, p2))
287 (i1, i2) = map(repo.changelog.rev, (p1, p2))
288
288
289 date = changes[2][0]
289 date = changes[2][0]
290 print "%s %s:%s" % (date, h, mask),
290 print "%s %s:%s" % (date, h, mask),
291 mask = is_reachable(want_sha1, reachable, p1)
291 mask = is_reachable(want_sha1, reachable, p1)
292 if i1 != -1 and mask > 0:
292 if i1 != -1 and mask > 0:
293 print "%s:%s " % (h1, mask),
293 print "%s:%s " % (h1, mask),
294 mask = is_reachable(want_sha1, reachable, p2)
294 mask = is_reachable(want_sha1, reachable, p2)
295 if i2 != -1 and mask > 0:
295 if i2 != -1 and mask > 0:
296 print "%s:%s " % (h2, mask),
296 print "%s:%s " % (h2, mask),
297 print ""
297 print ""
298 if maxnr and count >= maxnr:
298 if maxnr and count >= maxnr:
299 break
299 break
300 count += 1
300 count += 1
301
301
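# A small, self-contained sketch of the reachability bitmask computed by
# is_reachable() above (toy data, not real changelog nodes): wanted commit
# number i contributes bit i, and a node's mask has that bit set when the
# node is an ancestor of (or equal to) wanted commit i.
def _bitmask_sketch():
    # ancestors of want[0] == 'c' and want[1] == 'b' in a chain a <- b <- c
    reachable = [{'c': 1, 'b': 1, 'a': 1}, {'b': 1, 'a': 1}]
    def mask(sha):
        m = 0
        for i in range(len(reachable)):
            if sha in reachable[i]:
                m |= 1 << i
        return m
    assert mask('a') == 3 and mask('b') == 3 and mask('c') == 1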
302 # git rev-list tries to order things by date, and has the ability to stop
302 # git rev-list tries to order things by date, and has the ability to stop
303 # at a given commit without walking the whole repo. TODO add the stop
303 # at a given commit without walking the whole repo. TODO add the stop
304 # parameter
304 # parameter
305 def revlist(ui, repo, *revs, **opts):
305 def revlist(ui, repo, *revs, **opts):
306 """print revisions"""
306 """print revisions"""
307 if opts['header']:
307 if opts['header']:
308 full = "commit"
308 full = "commit"
309 else:
309 else:
310 full = None
310 full = None
311 copy = [x for x in revs]
311 copy = [x for x in revs]
312 revtree(copy, repo, full, opts['max_count'], opts['parents'])
312 revtree(copy, repo, full, opts['max_count'], opts['parents'])
313
313
314 def view(ui, repo, *etc):
314 def view(ui, repo, *etc):
315 "start interactive history viewer"
315 "start interactive history viewer"
316 os.chdir(repo.root)
316 os.chdir(repo.root)
317 os.system(ui.config("hgk", "path", "hgk") + " " + " ".join(etc))
317 os.system(ui.config("hgk", "path", "hgk") + " " + " ".join(etc))
318
318
319 cmdtable = {
319 cmdtable = {
320 "view": (view, [], 'hg view'),
320 "view": (view, [], 'hg view'),
321 "debug-diff-tree": (difftree, [('p', 'patch', None, 'generate patch'),
321 "debug-diff-tree": (difftree, [('p', 'patch', None, 'generate patch'),
322 ('r', 'recursive', None, 'recursive'),
322 ('r', 'recursive', None, 'recursive'),
323 ('P', 'pretty', None, 'pretty'),
323 ('P', 'pretty', None, 'pretty'),
324 ('s', 'stdin', None, 'stdin'),
324 ('s', 'stdin', None, 'stdin'),
325 ('C', 'copy', None, 'detect copies'),
325 ('C', 'copy', None, 'detect copies'),
326 ('S', 'search', "", 'search')],
326 ('S', 'search', "", 'search')],
327 "hg git-diff-tree [options] node1 node2"),
327 "hg git-diff-tree [options] node1 node2"),
328 "debug-cat-file": (catfile, [('s', 'stdin', None, 'stdin')],
328 "debug-cat-file": (catfile, [('s', 'stdin', None, 'stdin')],
329 "hg debug-cat-file [options] type file"),
329 "hg debug-cat-file [options] type file"),
330 "debug-merge-base": (base, [], "hg debug-merge-base node node"),
330 "debug-merge-base": (base, [], "hg debug-merge-base node node"),
331 "debug-rev-list": (revlist, [('H', 'header', None, 'header'),
331 "debug-rev-list": (revlist, [('H', 'header', None, 'header'),
332 ('t', 'topo-order', None, 'topo-order'),
332 ('t', 'topo-order', None, 'topo-order'),
333 ('p', 'parents', None, 'parents'),
333 ('p', 'parents', None, 'parents'),
334 ('n', 'max-count', 0, 'max-count')],
334 ('n', 'max-count', 0, 'max-count')],
335 "hg debug-rev-list [options] revs"),
335 "hg debug-rev-list [options] revs"),
336 }
336 }
@@ -1,1999 +1,1998 b''
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.demandload import *
32 from mercurial.demandload import *
33 from mercurial.i18n import gettext as _
33 from mercurial.i18n import gettext as _
34 demandload(globals(), "os sys re struct traceback errno bz2")
34 demandload(globals(), "os sys re struct traceback errno bz2")
35 demandload(globals(), "mercurial:commands,hg,patch,revlog,ui,util")
35 demandload(globals(), "mercurial:commands,hg,patch,revlog,ui,util")
36
36
37 commands.norepo += " qclone qversion"
37 commands.norepo += " qclone qversion"
38
38
39 class statusentry:
39 class statusentry:
40 def __init__(self, rev, name=None):
40 def __init__(self, rev, name=None):
41 if not name:
41 if not name:
42 fields = rev.split(':')
42 fields = rev.split(':')
43 if len(fields) == 2:
43 if len(fields) == 2:
44 self.rev, self.name = fields
44 self.rev, self.name = fields
45 else:
45 else:
46 self.rev, self.name = None, None
46 self.rev, self.name = None, None
47 else:
47 else:
48 self.rev, self.name = rev, name
48 self.rev, self.name = rev, name
49
49
50 def __str__(self):
50 def __str__(self):
51 return self.rev + ':' + self.name
51 return self.rev + ':' + self.name
52
52
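# Quick illustration (made-up values): each line of the mq status file is
# "<changeset hash>:<patch name>", and str() reproduces that line exactly.
_entry = statusentry('0123456789abcdef:fix-encoding.patch')
assert (_entry.rev, _entry.name) == ('0123456789abcdef', 'fix-encoding.patch')
assert str(_entry) == '0123456789abcdef:fix-encoding.patch'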
53 class queue:
53 class queue:
54 def __init__(self, ui, path, patchdir=None):
54 def __init__(self, ui, path, patchdir=None):
55 self.basepath = path
55 self.basepath = path
56 self.path = patchdir or os.path.join(path, "patches")
56 self.path = patchdir or os.path.join(path, "patches")
57 self.opener = util.opener(self.path)
57 self.opener = util.opener(self.path)
58 self.ui = ui
58 self.ui = ui
59 self.applied = []
59 self.applied = []
60 self.full_series = []
60 self.full_series = []
61 self.applied_dirty = 0
61 self.applied_dirty = 0
62 self.series_dirty = 0
62 self.series_dirty = 0
63 self.series_path = "series"
63 self.series_path = "series"
64 self.status_path = "status"
64 self.status_path = "status"
65 self.guards_path = "guards"
65 self.guards_path = "guards"
66 self.active_guards = None
66 self.active_guards = None
67 self.guards_dirty = False
67 self.guards_dirty = False
68 self._diffopts = None
68 self._diffopts = None
69
69
70 if os.path.exists(self.join(self.series_path)):
70 if os.path.exists(self.join(self.series_path)):
71 self.full_series = self.opener(self.series_path).read().splitlines()
71 self.full_series = self.opener(self.series_path).read().splitlines()
72 self.parse_series()
72 self.parse_series()
73
73
74 if os.path.exists(self.join(self.status_path)):
74 if os.path.exists(self.join(self.status_path)):
75 lines = self.opener(self.status_path).read().splitlines()
75 lines = self.opener(self.status_path).read().splitlines()
76 self.applied = [statusentry(l) for l in lines]
76 self.applied = [statusentry(l) for l in lines]
77
77
78 def diffopts(self):
78 def diffopts(self):
79 if self._diffopts is None:
79 if self._diffopts is None:
80 self._diffopts = self.ui.diffopts()
80 self._diffopts = self.ui.diffopts()
81 return self._diffopts
81 return self._diffopts
82
82
83 def join(self, *p):
83 def join(self, *p):
84 return os.path.join(self.path, *p)
84 return os.path.join(self.path, *p)
85
85
86 def find_series(self, patch):
86 def find_series(self, patch):
87 pre = re.compile("(\s*)([^#]+)")
87 pre = re.compile("(\s*)([^#]+)")
88 index = 0
88 index = 0
89 for l in self.full_series:
89 for l in self.full_series:
90 m = pre.match(l)
90 m = pre.match(l)
91 if m:
91 if m:
92 s = m.group(2)
92 s = m.group(2)
93 s = s.rstrip()
93 s = s.rstrip()
94 if s == patch:
94 if s == patch:
95 return index
95 return index
96 index += 1
96 index += 1
97 return None
97 return None
98
98
99 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
99 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
100
100
101 def parse_series(self):
101 def parse_series(self):
102 self.series = []
102 self.series = []
103 self.series_guards = []
103 self.series_guards = []
104 for l in self.full_series:
104 for l in self.full_series:
105 h = l.find('#')
105 h = l.find('#')
106 if h == -1:
106 if h == -1:
107 patch = l
107 patch = l
108 comment = ''
108 comment = ''
109 elif h == 0:
109 elif h == 0:
110 continue
110 continue
111 else:
111 else:
112 patch = l[:h]
112 patch = l[:h]
113 comment = l[h:]
113 comment = l[h:]
114 patch = patch.strip()
114 patch = patch.strip()
115 if patch:
115 if patch:
116 self.series.append(patch)
116 self.series.append(patch)
117 self.series_guards.append(self.guard_re.findall(comment))
117 self.series_guards.append(self.guard_re.findall(comment))
118
118
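# How a guarded series line is split (illustrative file contents): the text
# before '#' is the patch name, and every "#+name" / "#-name" token in the
# trailing comment becomes one guard, exactly what guard_re above captures.
import re
_series_guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
_line = 'fix-encoding.patch #+stable #-experimental'
_h = _line.find('#')
assert _line[:_h].strip() == 'fix-encoding.patch'
assert _series_guard_re.findall(_line[_h:]) == ['+stable', '-experimental']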
119 def check_guard(self, guard):
119 def check_guard(self, guard):
120 bad_chars = '# \t\r\n\f'
120 bad_chars = '# \t\r\n\f'
121 first = guard[0]
121 first = guard[0]
122 for c in '-+':
122 for c in '-+':
123 if first == c:
123 if first == c:
124 return (_('guard %r starts with invalid character: %r') %
124 return (_('guard %r starts with invalid character: %r') %
125 (guard, c))
125 (guard, c))
126 for c in bad_chars:
126 for c in bad_chars:
127 if c in guard:
127 if c in guard:
128 return _('invalid character in guard %r: %r') % (guard, c)
128 return _('invalid character in guard %r: %r') % (guard, c)
129
129
130 def set_active(self, guards):
130 def set_active(self, guards):
131 for guard in guards:
131 for guard in guards:
132 bad = self.check_guard(guard)
132 bad = self.check_guard(guard)
133 if bad:
133 if bad:
134 raise util.Abort(bad)
134 raise util.Abort(bad)
135 guards = dict.fromkeys(guards).keys()
135 guards = dict.fromkeys(guards).keys()
136 guards.sort()
136 guards.sort()
137 self.ui.debug('active guards: %s\n' % ' '.join(guards))
137 self.ui.debug('active guards: %s\n' % ' '.join(guards))
138 self.active_guards = guards
138 self.active_guards = guards
139 self.guards_dirty = True
139 self.guards_dirty = True
140
140
141 def active(self):
141 def active(self):
142 if self.active_guards is None:
142 if self.active_guards is None:
143 self.active_guards = []
143 self.active_guards = []
144 try:
144 try:
145 guards = self.opener(self.guards_path).read().split()
145 guards = self.opener(self.guards_path).read().split()
146 except IOError, err:
146 except IOError, err:
147 if err.errno != errno.ENOENT: raise
147 if err.errno != errno.ENOENT: raise
148 guards = []
148 guards = []
149 for i, guard in enumerate(guards):
149 for i, guard in enumerate(guards):
150 bad = self.check_guard(guard)
150 bad = self.check_guard(guard)
151 if bad:
151 if bad:
152 self.ui.warn('%s:%d: %s\n' %
152 self.ui.warn('%s:%d: %s\n' %
153 (self.join(self.guards_path), i + 1, bad))
153 (self.join(self.guards_path), i + 1, bad))
154 else:
154 else:
155 self.active_guards.append(guard)
155 self.active_guards.append(guard)
156 return self.active_guards
156 return self.active_guards
157
157
158 def set_guards(self, idx, guards):
158 def set_guards(self, idx, guards):
159 for g in guards:
159 for g in guards:
160 if len(g) < 2:
160 if len(g) < 2:
161 raise util.Abort(_('guard %r too short') % g)
161 raise util.Abort(_('guard %r too short') % g)
162 if g[0] not in '-+':
162 if g[0] not in '-+':
163 raise util.Abort(_('guard %r starts with invalid char') % g)
163 raise util.Abort(_('guard %r starts with invalid char') % g)
164 bad = self.check_guard(g[1:])
164 bad = self.check_guard(g[1:])
165 if bad:
165 if bad:
166 raise util.Abort(bad)
166 raise util.Abort(bad)
167 drop = self.guard_re.sub('', self.full_series[idx])
167 drop = self.guard_re.sub('', self.full_series[idx])
168 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
168 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
169 self.parse_series()
169 self.parse_series()
170 self.series_dirty = True
170 self.series_dirty = True
171
171
172 def pushable(self, idx):
172 def pushable(self, idx):
173 if isinstance(idx, str):
173 if isinstance(idx, str):
174 idx = self.series.index(idx)
174 idx = self.series.index(idx)
175 patchguards = self.series_guards[idx]
175 patchguards = self.series_guards[idx]
176 if not patchguards:
176 if not patchguards:
177 return True, None
177 return True, None
178 default = False
178 default = False
179 guards = self.active()
179 guards = self.active()
180 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
180 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
181 if exactneg:
181 if exactneg:
182 return False, exactneg[0]
182 return False, exactneg[0]
183 pos = [g for g in patchguards if g[0] == '+']
183 pos = [g for g in patchguards if g[0] == '+']
184 exactpos = [g for g in pos if g[1:] in guards]
184 exactpos = [g for g in pos if g[1:] in guards]
185 if pos:
185 if pos:
186 if exactpos:
186 if exactpos:
187 return True, exactpos[0]
187 return True, exactpos[0]
188 return False, pos
188 return False, pos
189 return True, ''
189 return True, ''
190
190
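# The guard rules applied by pushable(), restated on plain lists
# (illustrative data): an active guard matching any "-guard" skips the
# patch; if the patch carries "+guards", at least one must be active.
def _pushable_sketch(patchguards, active):
    if not patchguards:
        return True
    if [g for g in patchguards if g[0] == '-' and g[1:] in active]:
        return False
    pos = [g for g in patchguards if g[0] == '+']
    if pos:
        return bool([g for g in pos if g[1:] in active])
    return True
assert _pushable_sketch(['+stable'], ['stable'])
assert not _pushable_sketch(['+stable'], [])
assert not _pushable_sketch(['-experimental'], ['experimental'])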
191 def explain_pushable(self, idx, all_patches=False):
191 def explain_pushable(self, idx, all_patches=False):
192 write = all_patches and self.ui.write or self.ui.warn
192 write = all_patches and self.ui.write or self.ui.warn
193 if all_patches or self.ui.verbose:
193 if all_patches or self.ui.verbose:
194 if isinstance(idx, str):
194 if isinstance(idx, str):
195 idx = self.series.index(idx)
195 idx = self.series.index(idx)
196 pushable, why = self.pushable(idx)
196 pushable, why = self.pushable(idx)
197 if all_patches and pushable:
197 if all_patches and pushable:
198 if why is None:
198 if why is None:
199 write(_('allowing %s - no guards in effect\n') %
199 write(_('allowing %s - no guards in effect\n') %
200 self.series[idx])
200 self.series[idx])
201 else:
201 else:
202 if not why:
202 if not why:
203 write(_('allowing %s - no matching negative guards\n') %
203 write(_('allowing %s - no matching negative guards\n') %
204 self.series[idx])
204 self.series[idx])
205 else:
205 else:
206 write(_('allowing %s - guarded by %r\n') %
206 write(_('allowing %s - guarded by %r\n') %
207 (self.series[idx], why))
207 (self.series[idx], why))
208 if not pushable:
208 if not pushable:
209 if why:
209 if why:
210 write(_('skipping %s - guarded by %r\n') %
210 write(_('skipping %s - guarded by %r\n') %
211 (self.series[idx], ' '.join(why)))
211 (self.series[idx], ' '.join(why)))
212 else:
212 else:
213 write(_('skipping %s - no matching guards\n') %
213 write(_('skipping %s - no matching guards\n') %
214 self.series[idx])
214 self.series[idx])
215
215
216 def save_dirty(self):
216 def save_dirty(self):
217 def write_list(items, path):
217 def write_list(items, path):
218 fp = self.opener(path, 'w')
218 fp = self.opener(path, 'w')
219 for i in items:
219 for i in items:
220 print >> fp, i
220 print >> fp, i
221 fp.close()
221 fp.close()
222 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
222 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
223 if self.series_dirty: write_list(self.full_series, self.series_path)
223 if self.series_dirty: write_list(self.full_series, self.series_path)
224 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
224 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
225
225
226 def readheaders(self, patch):
226 def readheaders(self, patch):
227 def eatdiff(lines):
227 def eatdiff(lines):
228 while lines:
228 while lines:
229 l = lines[-1]
229 l = lines[-1]
230 if (l.startswith("diff -") or
230 if (l.startswith("diff -") or
231 l.startswith("Index:") or
231 l.startswith("Index:") or
232 l.startswith("===========")):
232 l.startswith("===========")):
233 del lines[-1]
233 del lines[-1]
234 else:
234 else:
235 break
235 break
236 def eatempty(lines):
236 def eatempty(lines):
237 while lines:
237 while lines:
238 l = lines[-1]
238 l = lines[-1]
239 if re.match('\s*$', l):
239 if re.match('\s*$', l):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243
243
244 pf = self.join(patch)
244 pf = self.join(patch)
245 message = []
245 message = []
246 comments = []
246 comments = []
247 user = None
247 user = None
248 date = None
248 date = None
249 format = None
249 format = None
250 subject = None
250 subject = None
251 diffstart = 0
251 diffstart = 0
252
252
253 for line in file(pf):
253 for line in file(pf):
254 line = line.rstrip()
254 line = line.rstrip()
255 if diffstart:
255 if diffstart:
256 if line.startswith('+++ '):
256 if line.startswith('+++ '):
257 diffstart = 2
257 diffstart = 2
258 break
258 break
259 if line.startswith("--- "):
259 if line.startswith("--- "):
260 diffstart = 1
260 diffstart = 1
261 continue
261 continue
262 elif format == "hgpatch":
262 elif format == "hgpatch":
263 # parse values when importing the result of an hg export
263 # parse values when importing the result of an hg export
264 if line.startswith("# User "):
264 if line.startswith("# User "):
265 user = line[7:]
265 user = line[7:]
266 elif line.startswith("# Date "):
266 elif line.startswith("# Date "):
267 date = line[7:]
267 date = line[7:]
268 elif not line.startswith("# ") and line:
268 elif not line.startswith("# ") and line:
269 message.append(line)
269 message.append(line)
270 format = None
270 format = None
271 elif line == '# HG changeset patch':
271 elif line == '# HG changeset patch':
272 format = "hgpatch"
272 format = "hgpatch"
273 elif (format != "tagdone" and (line.startswith("Subject: ") or
273 elif (format != "tagdone" and (line.startswith("Subject: ") or
274 line.startswith("subject: "))):
274 line.startswith("subject: "))):
275 subject = line[9:]
275 subject = line[9:]
276 format = "tag"
276 format = "tag"
277 elif (format != "tagdone" and (line.startswith("From: ") or
277 elif (format != "tagdone" and (line.startswith("From: ") or
278 line.startswith("from: "))):
278 line.startswith("from: "))):
279 user = line[6:]
279 user = line[6:]
280 format = "tag"
280 format = "tag"
281 elif format == "tag" and line == "":
281 elif format == "tag" and line == "":
282 # when looking for tags (subject:, from:, etc.), they
282 # when looking for tags (subject:, from:, etc.), they
283 # end once you find a blank line in the source
283 # end once you find a blank line in the source
284 format = "tagdone"
284 format = "tagdone"
285 elif message or line:
285 elif message or line:
286 message.append(line)
286 message.append(line)
287 comments.append(line)
287 comments.append(line)
288
288
289 eatdiff(message)
289 eatdiff(message)
290 eatdiff(comments)
290 eatdiff(comments)
291 eatempty(message)
291 eatempty(message)
292 eatempty(comments)
292 eatempty(comments)
293
293
294 # make sure message isn't empty
294 # make sure message isn't empty
295 if format and format.startswith("tag") and subject:
295 if format and format.startswith("tag") and subject:
296 message.insert(0, "")
296 message.insert(0, "")
297 message.insert(0, subject)
297 message.insert(0, subject)
298 return (message, comments, user, date, diffstart > 1)
298 return (message, comments, user, date, diffstart > 1)
299
299
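# A tiny sample (invented contents) of the "hg export" style header that
# readheaders() recognizes: the "# HG changeset patch" marker switches to
# hgpatch mode, "# User" / "# Date" lines are captured, the description
# lines that follow become the message, and "--- " marks the diff start.
_sample_patch = (
    "# HG changeset patch\n"
    "# User A. Hacker <hacker@example.com>\n"
    "# Date 1156112963 25200\n"
    "fix encoding of log messages\n"
    "\n"
    "--- a/hgext/mq.py\n"
    "+++ b/hgext/mq.py\n")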
300 def printdiff(self, repo, node1, node2=None, files=None,
300 def printdiff(self, repo, node1, node2=None, files=None,
301 fp=None, changes=None, opts=None):
301 fp=None, changes=None, opts=None):
302 patch.diff(repo, node1, node2, files,
302 patch.diff(repo, node1, node2, files,
303 fp=fp, changes=changes, opts=self.diffopts())
303 fp=fp, changes=changes, opts=self.diffopts())
304
304
305 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
305 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
306 # first try just applying the patch
306 # first try just applying the patch
307 (err, n) = self.apply(repo, [ patch ], update_status=False,
307 (err, n) = self.apply(repo, [ patch ], update_status=False,
308 strict=True, merge=rev, wlock=wlock)
308 strict=True, merge=rev, wlock=wlock)
309
309
310 if err == 0:
310 if err == 0:
311 return (err, n)
311 return (err, n)
312
312
313 if n is None:
313 if n is None:
314 raise util.Abort(_("apply failed for patch %s") % patch)
314 raise util.Abort(_("apply failed for patch %s") % patch)
315
315
316 self.ui.warn("patch didn't work out, merging %s\n" % patch)
316 self.ui.warn("patch didn't work out, merging %s\n" % patch)
317
317
318 # apply failed, strip away that rev and merge.
318 # apply failed, strip away that rev and merge.
319 hg.clean(repo, head, wlock=wlock)
319 hg.clean(repo, head, wlock=wlock)
320 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
320 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
321
321
322 c = repo.changelog.read(rev)
322 c = repo.changelog.read(rev)
323 ret = hg.merge(repo, rev, wlock=wlock)
323 ret = hg.merge(repo, rev, wlock=wlock)
324 if ret:
324 if ret:
325 raise util.Abort(_("update returned %d") % ret)
325 raise util.Abort(_("update returned %d") % ret)
326 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
326 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
327 if n == None:
327 if n == None:
328 raise util.Abort(_("repo commit failed"))
328 raise util.Abort(_("repo commit failed"))
329 try:
329 try:
330 message, comments, user, date, patchfound = mergeq.readheaders(patch)
330 message, comments, user, date, patchfound = mergeq.readheaders(patch)
331 except:
331 except:
332 raise util.Abort(_("unable to read %s") % patch)
332 raise util.Abort(_("unable to read %s") % patch)
333
333
334 patchf = self.opener(patch, "w")
334 patchf = self.opener(patch, "w")
335 if comments:
335 if comments:
336 comments = "\n".join(comments) + '\n\n'
336 comments = "\n".join(comments) + '\n\n'
337 patchf.write(comments)
337 patchf.write(comments)
338 self.printdiff(repo, head, n, fp=patchf)
338 self.printdiff(repo, head, n, fp=patchf)
339 patchf.close()
339 patchf.close()
340 return (0, n)
340 return (0, n)
341
341
342 def qparents(self, repo, rev=None):
342 def qparents(self, repo, rev=None):
343 if rev is None:
343 if rev is None:
344 (p1, p2) = repo.dirstate.parents()
344 (p1, p2) = repo.dirstate.parents()
345 if p2 == revlog.nullid:
345 if p2 == revlog.nullid:
346 return p1
346 return p1
347 if len(self.applied) == 0:
347 if len(self.applied) == 0:
348 return None
348 return None
349 return revlog.bin(self.applied[-1].rev)
349 return revlog.bin(self.applied[-1].rev)
350 pp = repo.changelog.parents(rev)
350 pp = repo.changelog.parents(rev)
351 if pp[1] != revlog.nullid:
351 if pp[1] != revlog.nullid:
352 arevs = [ x.rev for x in self.applied ]
352 arevs = [ x.rev for x in self.applied ]
353 p0 = revlog.hex(pp[0])
353 p0 = revlog.hex(pp[0])
354 p1 = revlog.hex(pp[1])
354 p1 = revlog.hex(pp[1])
355 if p0 in arevs:
355 if p0 in arevs:
356 return pp[0]
356 return pp[0]
357 if p1 in arevs:
357 if p1 in arevs:
358 return pp[1]
358 return pp[1]
359 return pp[0]
359 return pp[0]
360
360
361 def mergepatch(self, repo, mergeq, series, wlock):
361 def mergepatch(self, repo, mergeq, series, wlock):
362 if len(self.applied) == 0:
362 if len(self.applied) == 0:
363 # each of the patches merged in will have two parents. This
363 # each of the patches merged in will have two parents. This
364 # can confuse the qrefresh, qdiff, and strip code because it
364 # can confuse the qrefresh, qdiff, and strip code because it
365 # needs to know which parent is actually in the patch queue.
365 # needs to know which parent is actually in the patch queue.
366 # so, we insert a merge marker with only one parent. This way
366 # so, we insert a merge marker with only one parent. This way
367 # the first patch in the queue is never a merge patch
367 # the first patch in the queue is never a merge patch
368 #
368 #
369 pname = ".hg.patches.merge.marker"
369 pname = ".hg.patches.merge.marker"
370 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
370 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
371 wlock=wlock)
371 wlock=wlock)
372 self.applied.append(statusentry(revlog.hex(n), pname))
372 self.applied.append(statusentry(revlog.hex(n), pname))
373 self.applied_dirty = 1
373 self.applied_dirty = 1
374
374
375 head = self.qparents(repo)
375 head = self.qparents(repo)
376
376
377 for patch in series:
377 for patch in series:
378 patch = mergeq.lookup(patch, strict=True)
378 patch = mergeq.lookup(patch, strict=True)
379 if not patch:
379 if not patch:
380 self.ui.warn("patch %s does not exist\n" % patch)
380 self.ui.warn("patch %s does not exist\n" % patch)
381 return (1, None)
381 return (1, None)
382 pushable, reason = self.pushable(patch)
382 pushable, reason = self.pushable(patch)
383 if not pushable:
383 if not pushable:
384 self.explain_pushable(patch, all_patches=True)
384 self.explain_pushable(patch, all_patches=True)
385 continue
385 continue
386 info = mergeq.isapplied(patch)
386 info = mergeq.isapplied(patch)
387 if not info:
387 if not info:
388 self.ui.warn("patch %s is not applied\n" % patch)
388 self.ui.warn("patch %s is not applied\n" % patch)
389 return (1, None)
389 return (1, None)
390 rev = revlog.bin(info[1])
390 rev = revlog.bin(info[1])
391 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
391 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
392 if head:
392 if head:
393 self.applied.append(statusentry(revlog.hex(head), patch))
393 self.applied.append(statusentry(revlog.hex(head), patch))
394 self.applied_dirty = 1
394 self.applied_dirty = 1
395 if err:
395 if err:
396 return (err, head)
396 return (err, head)
397 return (0, head)
397 return (0, head)
398
398
399 def patch(self, repo, patchfile):
399 def patch(self, repo, patchfile):
400 '''Apply patchfile to the working directory.
400 '''Apply patchfile to the working directory.
401 patchfile: file name of patch'''
401 patchfile: file name of patch'''
402 try:
402 try:
403 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
403 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
404 f = os.popen("%s -d %s -p1 --no-backup-if-mismatch < %s" %
404 f = os.popen("%s -d %s -p1 --no-backup-if-mismatch < %s" %
405 (pp, util.shellquote(repo.root), util.shellquote(patchfile)))
405 (pp, util.shellquote(repo.root), util.shellquote(patchfile)))
406 except:
406 except:
407 self.ui.warn("patch failed, unable to continue (try -v)\n")
407 self.ui.warn("patch failed, unable to continue (try -v)\n")
408 return (None, [], False)
408 return (None, [], False)
409 files = []
409 files = []
410 fuzz = False
410 fuzz = False
411 for l in f:
411 for l in f:
412 l = l.rstrip('\r\n');
412 l = l.rstrip('\r\n');
413 if self.ui.verbose:
413 if self.ui.verbose:
414 self.ui.warn(l + "\n")
414 self.ui.warn(l + "\n")
415 if l[:14] == 'patching file ':
415 if l[:14] == 'patching file ':
416 pf = os.path.normpath(util.parse_patch_output(l))
416 pf = os.path.normpath(util.parse_patch_output(l))
417 if pf not in files:
417 if pf not in files:
418 files.append(pf)
418 files.append(pf)
419 printed_file = False
419 printed_file = False
420 file_str = l
420 file_str = l
421 elif l.find('with fuzz') >= 0:
421 elif l.find('with fuzz') >= 0:
422 if not printed_file:
422 if not printed_file:
423 self.ui.warn(file_str + '\n')
423 self.ui.warn(file_str + '\n')
424 printed_file = True
424 printed_file = True
425 self.ui.warn(l + '\n')
425 self.ui.warn(l + '\n')
426 fuzz = True
426 fuzz = True
427 elif l.find('saving rejects to file') >= 0:
427 elif l.find('saving rejects to file') >= 0:
428 self.ui.warn(l + '\n')
428 self.ui.warn(l + '\n')
429 elif l.find('FAILED') >= 0:
429 elif l.find('FAILED') >= 0:
430 if not printed_file:
430 if not printed_file:
431 self.ui.warn(file_str + '\n')
431 self.ui.warn(file_str + '\n')
432 printed_file = True
432 printed_file = True
433 self.ui.warn(l + '\n')
433 self.ui.warn(l + '\n')
434
434
435 return (not f.close(), files, fuzz)
435 return (not f.close(), files, fuzz)
436
436
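# The kinds of GNU patch output the loop above classifies (illustrative
# strings, not captured output): touched files, fuzz warnings, and failed
# hunks with reject files.
_patch_output = [
    'patching file hgext/mq.py',
    'Hunk #1 succeeded at 120 with fuzz 2 (offset 3 lines).',
    '1 out of 2 hunks FAILED -- saving rejects to file hgext/mq.py.rej',
]
assert _patch_output[0][:14] == 'patching file '
assert _patch_output[1].find('with fuzz') >= 0
assert _patch_output[2].find('FAILED') >= 0
assert _patch_output[2].find('saving rejects to file') >= 0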
437 def apply(self, repo, series, list=False, update_status=True,
437 def apply(self, repo, series, list=False, update_status=True,
438 strict=False, patchdir=None, merge=None, wlock=None):
438 strict=False, patchdir=None, merge=None, wlock=None):
439 # TODO unify with commands.py
439 # TODO unify with commands.py
440 if not patchdir:
440 if not patchdir:
441 patchdir = self.path
441 patchdir = self.path
442 err = 0
442 err = 0
443 if not wlock:
443 if not wlock:
444 wlock = repo.wlock()
444 wlock = repo.wlock()
445 lock = repo.lock()
445 lock = repo.lock()
446 tr = repo.transaction()
446 tr = repo.transaction()
447 n = None
447 n = None
448 for patch in series:
448 for patch in series:
449 pushable, reason = self.pushable(patch)
449 pushable, reason = self.pushable(patch)
450 if not pushable:
450 if not pushable:
451 self.explain_pushable(patch, all_patches=True)
451 self.explain_pushable(patch, all_patches=True)
452 continue
452 continue
453 self.ui.warn("applying %s\n" % patch)
453 self.ui.warn("applying %s\n" % patch)
454 pf = os.path.join(patchdir, patch)
454 pf = os.path.join(patchdir, patch)
455
455
456 try:
456 try:
457 message, comments, user, date, patchfound = self.readheaders(patch)
457 message, comments, user, date, patchfound = self.readheaders(patch)
458 except:
458 except:
459 self.ui.warn("Unable to read %s\n" % pf)
459 self.ui.warn("Unable to read %s\n" % pf)
460 err = 1
460 err = 1
461 break
461 break
462
462
463 if not message:
463 if not message:
464 message = "imported patch %s\n" % patch
464 message = "imported patch %s\n" % patch
465 else:
465 else:
466 if list:
466 if list:
467 message.append("\nimported patch %s" % patch)
467 message.append("\nimported patch %s" % patch)
468 message = '\n'.join(message)
468 message = '\n'.join(message)
469
469
470 (patcherr, files, fuzz) = self.patch(repo, pf)
470 (patcherr, files, fuzz) = self.patch(repo, pf)
471 patcherr = not patcherr
471 patcherr = not patcherr
472
472
473 if merge and len(files) > 0:
473 if merge and len(files) > 0:
474 # Mark as merged and update dirstate parent info
474 # Mark as merged and update dirstate parent info
475 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
475 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
476 p1, p2 = repo.dirstate.parents()
476 p1, p2 = repo.dirstate.parents()
477 repo.dirstate.setparents(p1, merge)
477 repo.dirstate.setparents(p1, merge)
478 if len(files) > 0:
478 if len(files) > 0:
479 cwd = repo.getcwd()
479 cwd = repo.getcwd()
480 cfiles = files
480 cfiles = files
481 if cwd:
481 if cwd:
482 cfiles = [util.pathto(cwd, f) for f in files]
482 cfiles = [util.pathto(cwd, f) for f in files]
483 commands.addremove_lock(self.ui, repo, cfiles,
483 commands.addremove_lock(self.ui, repo, cfiles,
484 opts={}, wlock=wlock)
484 opts={}, wlock=wlock)
485 n = repo.commit(files, message, user, date, force=1, lock=lock,
485 n = repo.commit(files, message, user, date, force=1, lock=lock,
486 wlock=wlock)
486 wlock=wlock)
487
487
488 if n == None:
488 if n == None:
489 raise util.Abort(_("repo commit failed"))
489 raise util.Abort(_("repo commit failed"))
490
490
491 if update_status:
491 if update_status:
492 self.applied.append(statusentry(revlog.hex(n), patch))
492 self.applied.append(statusentry(revlog.hex(n), patch))
493
493
494 if patcherr:
494 if patcherr:
495 if not patchfound:
495 if not patchfound:
496 self.ui.warn("patch %s is empty\n" % patch)
496 self.ui.warn("patch %s is empty\n" % patch)
497 err = 0
497 err = 0
498 else:
498 else:
499 self.ui.warn("patch failed, rejects left in working dir\n")
499 self.ui.warn("patch failed, rejects left in working dir\n")
500 err = 1
500 err = 1
501 break
501 break
502
502
503 if fuzz and strict:
503 if fuzz and strict:
504 self.ui.warn("fuzz found when applying patch, stopping\n")
504 self.ui.warn("fuzz found when applying patch, stopping\n")
505 err = 1
505 err = 1
506 break
506 break
507 tr.close()
507 tr.close()
508 return (err, n)
508 return (err, n)
509
509
510 def delete(self, repo, patch, force=False):
510 def delete(self, repo, patch, force=False):
511 patch = self.lookup(patch, strict=True)
511 patch = self.lookup(patch, strict=True)
512 info = self.isapplied(patch)
512 info = self.isapplied(patch)
513 if info:
513 if info:
514 raise util.Abort(_("cannot delete applied patch %s") % patch)
514 raise util.Abort(_("cannot delete applied patch %s") % patch)
515 if patch not in self.series:
515 if patch not in self.series:
516 raise util.Abort(_("patch %s not in series file") % patch)
516 raise util.Abort(_("patch %s not in series file") % patch)
517 if force:
517 if force:
518 r = self.qrepo()
518 r = self.qrepo()
519 if r:
519 if r:
520 r.remove([patch], True)
520 r.remove([patch], True)
521 else:
521 else:
522 os.unlink(self.join(patch))
522 os.unlink(self.join(patch))
523 i = self.find_series(patch)
523 i = self.find_series(patch)
524 del self.full_series[i]
524 del self.full_series[i]
525 self.parse_series()
525 self.parse_series()
526 self.series_dirty = 1
526 self.series_dirty = 1
527
527
528 def check_toppatch(self, repo):
528 def check_toppatch(self, repo):
529 if len(self.applied) > 0:
529 if len(self.applied) > 0:
530 top = revlog.bin(self.applied[-1].rev)
530 top = revlog.bin(self.applied[-1].rev)
531 pp = repo.dirstate.parents()
531 pp = repo.dirstate.parents()
532 if top not in pp:
532 if top not in pp:
533 raise util.Abort(_("queue top not at same revision as working directory"))
533 raise util.Abort(_("queue top not at same revision as working directory"))
534 return top
534 return top
535 return None
535 return None
536 def check_localchanges(self, repo):
536 def check_localchanges(self, repo, force=False, refresh=True):
537 (c, a, r, d, u) = repo.changes(None, None)
537 m, a, r, d = repo.status()[:4]
538 if c or a or d or r:
538 if m or a or r or d:
539 raise util.Abort(_("local changes found, refresh first"))
539 if not force:
540 if refresh:
541 raise util.Abort(_("local changes found, refresh first"))
542 else:
543 raise util.Abort(_("local changes found"))
544 return m, a, r, d
540 def new(self, repo, patch, msg=None, force=None):
545 def new(self, repo, patch, msg=None, force=None):
541 if os.path.exists(self.join(patch)):
546 if os.path.exists(self.join(patch)):
542 raise util.Abort(_('patch "%s" already exists') % patch)
547 raise util.Abort(_('patch "%s" already exists') % patch)
543 commitfiles = []
548 m, a, r, d = self.check_localchanges(repo, force)
544 (c, a, r, d, u) = repo.changes(None, None)
549 commitfiles = m + a + r
545 if c or a or d or r:
546 if not force:
547 raise util.Abort(_("local changes found, refresh first"))
548 commitfiles = c + a + r
549 self.check_toppatch(repo)
550 self.check_toppatch(repo)
550 wlock = repo.wlock()
551 wlock = repo.wlock()
551 insert = self.full_series_end()
552 insert = self.full_series_end()
552 if msg:
553 if msg:
553 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
554 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
554 wlock=wlock)
555 wlock=wlock)
555 else:
556 else:
556 n = repo.commit(commitfiles,
557 n = repo.commit(commitfiles,
557 "New patch: %s" % patch, force=True, wlock=wlock)
558 "New patch: %s" % patch, force=True, wlock=wlock)
558 if n == None:
559 if n == None:
559 raise util.Abort(_("repo commit failed"))
560 raise util.Abort(_("repo commit failed"))
560 self.full_series[insert:insert] = [patch]
561 self.full_series[insert:insert] = [patch]
561 self.applied.append(statusentry(revlog.hex(n), patch))
562 self.applied.append(statusentry(revlog.hex(n), patch))
562 self.parse_series()
563 self.parse_series()
563 self.series_dirty = 1
564 self.series_dirty = 1
564 self.applied_dirty = 1
565 self.applied_dirty = 1
565 p = self.opener(patch, "w")
566 p = self.opener(patch, "w")
566 if msg:
567 if msg:
567 msg = msg + "\n"
568 msg = msg + "\n"
568 p.write(msg)
569 p.write(msg)
569 p.close()
570 p.close()
570 wlock = None
571 wlock = None
571 r = self.qrepo()
572 r = self.qrepo()
572 if r: r.add([patch])
573 if r: r.add([patch])
573 if commitfiles:
574 if commitfiles:
574 self.refresh(repo, short=True)
575 self.refresh(repo, short=True)
575
576
576 def strip(self, repo, rev, update=True, backup="all", wlock=None):
577 def strip(self, repo, rev, update=True, backup="all", wlock=None):
577 def limitheads(chlog, stop):
578 def limitheads(chlog, stop):
578 """return the list of all nodes that have no children"""
579 """return the list of all nodes that have no children"""
579 p = {}
580 p = {}
580 h = []
581 h = []
581 stoprev = 0
582 stoprev = 0
582 if stop in chlog.nodemap:
583 if stop in chlog.nodemap:
583 stoprev = chlog.rev(stop)
584 stoprev = chlog.rev(stop)
584
585
585 for r in range(chlog.count() - 1, -1, -1):
586 for r in range(chlog.count() - 1, -1, -1):
586 n = chlog.node(r)
587 n = chlog.node(r)
587 if n not in p:
588 if n not in p:
588 h.append(n)
589 h.append(n)
589 if n == stop:
590 if n == stop:
590 break
591 break
591 if r < stoprev:
592 if r < stoprev:
592 break
593 break
593 for pn in chlog.parents(n):
594 for pn in chlog.parents(n):
594 p[pn] = 1
595 p[pn] = 1
595 return h
596 return h
596
597
597 def bundle(cg):
598 def bundle(cg):
598 backupdir = repo.join("strip-backup")
599 backupdir = repo.join("strip-backup")
599 if not os.path.isdir(backupdir):
600 if not os.path.isdir(backupdir):
600 os.mkdir(backupdir)
601 os.mkdir(backupdir)
601 name = os.path.join(backupdir, "%s" % revlog.short(rev))
602 name = os.path.join(backupdir, "%s" % revlog.short(rev))
602 name = savename(name)
603 name = savename(name)
603 self.ui.warn("saving bundle to %s\n" % name)
604 self.ui.warn("saving bundle to %s\n" % name)
604 # TODO, exclusive open
605 # TODO, exclusive open
605 f = open(name, "wb")
606 f = open(name, "wb")
606 try:
607 try:
607 f.write("HG10")
608 f.write("HG10")
608 z = bz2.BZ2Compressor(9)
609 z = bz2.BZ2Compressor(9)
609 while 1:
610 while 1:
610 chunk = cg.read(4096)
611 chunk = cg.read(4096)
611 if not chunk:
612 if not chunk:
612 break
613 break
613 f.write(z.compress(chunk))
614 f.write(z.compress(chunk))
614 f.write(z.flush())
615 f.write(z.flush())
615 except:
616 except:
616 os.unlink(name)
617 os.unlink(name)
617 raise
618 raise
618 f.close()
619 f.close()
619 return name
620 return name
620
621
621 def stripall(rev, revnum):
622 def stripall(rev, revnum):
622 cl = repo.changelog
623 cl = repo.changelog
623 c = cl.read(rev)
624 c = cl.read(rev)
624 mm = repo.manifest.read(c[0])
625 mm = repo.manifest.read(c[0])
625 seen = {}
626 seen = {}
626
627
627 for x in xrange(revnum, cl.count()):
628 for x in xrange(revnum, cl.count()):
628 c = cl.read(cl.node(x))
629 c = cl.read(cl.node(x))
629 for f in c[3]:
630 for f in c[3]:
630 if f in seen:
631 if f in seen:
631 continue
632 continue
632 seen[f] = 1
633 seen[f] = 1
633 if f in mm:
634 if f in mm:
634 filerev = mm[f]
635 filerev = mm[f]
635 else:
636 else:
636 filerev = 0
637 filerev = 0
637 seen[f] = filerev
638 seen[f] = filerev
638 # we go in two steps here so the strip loop happens in a
639 # we go in two steps here so the strip loop happens in a
639 # sensible order. When stripping many files, this helps keep
640 # sensible order. When stripping many files, this helps keep
640 # our disk access patterns under control.
641 # our disk access patterns under control.
641 seen_list = seen.keys()
642 seen_list = seen.keys()
642 seen_list.sort()
643 seen_list.sort()
643 for f in seen_list:
644 for f in seen_list:
644 ff = repo.file(f)
645 ff = repo.file(f)
645 filerev = seen[f]
646 filerev = seen[f]
646 if filerev != 0:
647 if filerev != 0:
647 if filerev in ff.nodemap:
648 if filerev in ff.nodemap:
648 filerev = ff.rev(filerev)
649 filerev = ff.rev(filerev)
649 else:
650 else:
650 filerev = 0
651 filerev = 0
651 ff.strip(filerev, revnum)
652 ff.strip(filerev, revnum)
652
653
653 if not wlock:
654 if not wlock:
654 wlock = repo.wlock()
655 wlock = repo.wlock()
655 lock = repo.lock()
656 lock = repo.lock()
656 chlog = repo.changelog
657 chlog = repo.changelog
657 # TODO delete the undo files, and handle undo of merge sets
658 # TODO delete the undo files, and handle undo of merge sets
658 pp = chlog.parents(rev)
659 pp = chlog.parents(rev)
659 revnum = chlog.rev(rev)
660 revnum = chlog.rev(rev)
660
661
661 if update:
662 if update:
662 (c, a, r, d, u) = repo.changes(None, None)
663 self.check_localchanges(repo, refresh=False)
663 if c or a or d or r:
664 raise util.Abort(_("local changes found"))
665 urev = self.qparents(repo, rev)
664 urev = self.qparents(repo, rev)
666 hg.clean(repo, urev, wlock=wlock)
665 hg.clean(repo, urev, wlock=wlock)
667 repo.dirstate.write()
666 repo.dirstate.write()
668
667
669 # save is a list of all the branches we are truncating away
668 # save is a list of all the branches we are truncating away
670 # that we actually want to keep. changegroup will be used
669 # that we actually want to keep. changegroup will be used
671 # to preserve them and add them back after the truncate
670 # to preserve them and add them back after the truncate
672 saveheads = []
671 saveheads = []
673 savebases = {}
672 savebases = {}
674
673
675 heads = limitheads(chlog, rev)
674 heads = limitheads(chlog, rev)
676 seen = {}
675 seen = {}
677
676
678 # search through all the heads, finding those where the revision
677 # search through all the heads, finding those where the revision
679 # we want to strip away is an ancestor. Also look for merges
678 # we want to strip away is an ancestor. Also look for merges
680 # that might be turned into new heads by the strip.
679 # that might be turned into new heads by the strip.
681 while heads:
680 while heads:
682 h = heads.pop()
681 h = heads.pop()
683 n = h
682 n = h
684 while True:
683 while True:
685 seen[n] = 1
684 seen[n] = 1
686 pp = chlog.parents(n)
685 pp = chlog.parents(n)
687 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
686 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
688 if pp[1] not in seen:
687 if pp[1] not in seen:
689 heads.append(pp[1])
688 heads.append(pp[1])
690 if pp[0] == revlog.nullid:
689 if pp[0] == revlog.nullid:
691 break
690 break
692 if chlog.rev(pp[0]) < revnum:
691 if chlog.rev(pp[0]) < revnum:
693 break
692 break
694 n = pp[0]
693 n = pp[0]
695 if n == rev:
694 if n == rev:
696 break
695 break
697 r = chlog.reachable(h, rev)
696 r = chlog.reachable(h, rev)
698 if rev not in r:
697 if rev not in r:
699 saveheads.append(h)
698 saveheads.append(h)
700 for x in r:
699 for x in r:
701 if chlog.rev(x) > revnum:
700 if chlog.rev(x) > revnum:
702 savebases[x] = 1
701 savebases[x] = 1
703
702
704 # create a changegroup for all the branches we need to keep
703 # create a changegroup for all the branches we need to keep
705 if backup == "all":
704 if backup == "all":
706 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
705 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
707 bundle(backupch)
706 bundle(backupch)
708 if saveheads:
707 if saveheads:
709 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
708 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
710 chgrpfile = bundle(backupch)
709 chgrpfile = bundle(backupch)
711
710
712 stripall(rev, revnum)
711 stripall(rev, revnum)
713
712
714 change = chlog.read(rev)
713 change = chlog.read(rev)
715 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
714 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
716 chlog.strip(revnum, revnum)
715 chlog.strip(revnum, revnum)
717 if saveheads:
716 if saveheads:
718 self.ui.status("adding branch\n")
717 self.ui.status("adding branch\n")
719 commands.unbundle(self.ui, repo, chgrpfile, update=False)
718 commands.unbundle(self.ui, repo, chgrpfile, update=False)
720 if backup != "strip":
719 if backup != "strip":
721 os.unlink(chgrpfile)
720 os.unlink(chgrpfile)
722
721
723 def isapplied(self, patch):
722 def isapplied(self, patch):
724 """returns (index, rev, patch)"""
723 """returns (index, rev, patch)"""
725 for i in xrange(len(self.applied)):
724 for i in xrange(len(self.applied)):
726 a = self.applied[i]
725 a = self.applied[i]
727 if a.name == patch:
726 if a.name == patch:
728 return (i, a.rev, a.name)
727 return (i, a.rev, a.name)
729 return None
728 return None
730
729
731 # if the exact patch name does not exist, we try a few
730 # if the exact patch name does not exist, we try a few
732 # variations. If strict is passed, we try only #1
731 # variations. If strict is passed, we try only #1
733 #
732 #
734 # 1) a number to indicate an offset in the series file
733 # 1) a number to indicate an offset in the series file
735 # 2) a unique substring of an existing patch name
734 # 2) a unique substring of an existing patch name
736 # 3) patchname[-+]num to indicate an offset in the series file
735 # 3) patchname[-+]num to indicate an offset in the series file
737 def lookup(self, patch, strict=False):
736 def lookup(self, patch, strict=False):
738 patch = patch and str(patch)
737 patch = patch and str(patch)
739
738
740 def partial_name(s):
739 def partial_name(s):
741 if s in self.series:
740 if s in self.series:
742 return s
741 return s
743 matches = [x for x in self.series if s in x]
742 matches = [x for x in self.series if s in x]
744 if len(matches) > 1:
743 if len(matches) > 1:
745 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
744 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
746 for m in matches:
745 for m in matches:
747 self.ui.warn(' %s\n' % m)
746 self.ui.warn(' %s\n' % m)
748 return None
747 return None
749 if matches:
748 if matches:
750 return matches[0]
749 return matches[0]
751 if len(self.series) > 0 and len(self.applied) > 0:
750 if len(self.series) > 0 and len(self.applied) > 0:
752 if s == 'qtip':
751 if s == 'qtip':
753 return self.series[self.series_end()-1]
752 return self.series[self.series_end()-1]
754 if s == 'qbase':
753 if s == 'qbase':
755 return self.series[0]
754 return self.series[0]
756 return None
755 return None
757 if patch == None:
756 if patch == None:
758 return None
757 return None
759
758
760 # we don't want to return a partial match until we make
759 # we don't want to return a partial match until we make
761 # sure the file name passed in does not exist (checked below)
760 # sure the file name passed in does not exist (checked below)
762 res = partial_name(patch)
761 res = partial_name(patch)
763 if res and res == patch:
762 if res and res == patch:
764 return res
763 return res
765
764
766 if not os.path.isfile(self.join(patch)):
765 if not os.path.isfile(self.join(patch)):
767 try:
766 try:
768 sno = int(patch)
767 sno = int(patch)
769 except(ValueError, OverflowError):
768 except(ValueError, OverflowError):
770 pass
769 pass
771 else:
770 else:
772 if sno < len(self.series):
771 if sno < len(self.series):
773 return self.series[sno]
772 return self.series[sno]
774 if not strict:
773 if not strict:
775 # return any partial match made above
774 # return any partial match made above
776 if res:
775 if res:
777 return res
776 return res
778 minus = patch.rsplit('-', 1)
777 minus = patch.rsplit('-', 1)
779 if len(minus) > 1:
778 if len(minus) > 1:
780 res = partial_name(minus[0])
779 res = partial_name(minus[0])
781 if res:
780 if res:
782 i = self.series.index(res)
781 i = self.series.index(res)
783 try:
782 try:
784 off = int(minus[1] or 1)
783 off = int(minus[1] or 1)
785 except(ValueError, OverflowError):
784 except(ValueError, OverflowError):
786 pass
785 pass
787 else:
786 else:
788 if i - off >= 0:
787 if i - off >= 0:
789 return self.series[i - off]
788 return self.series[i - off]
790 plus = patch.rsplit('+', 1)
789 plus = patch.rsplit('+', 1)
791 if len(plus) > 1:
790 if len(plus) > 1:
792 res = partial_name(plus[0])
791 res = partial_name(plus[0])
793 if res:
792 if res:
794 i = self.series.index(res)
793 i = self.series.index(res)
795 try:
794 try:
796 off = int(plus[1] or 1)
795 off = int(plus[1] or 1)
797 except(ValueError, OverflowError):
796 except(ValueError, OverflowError):
798 pass
797 pass
799 else:
798 else:
800 if i + off < len(self.series):
799 if i + off < len(self.series):
801 return self.series[i + off]
800 return self.series[i + off]
802 raise util.Abort(_("patch %s not in series") % patch)
801 raise util.Abort(_("patch %s not in series") % patch)
803
802
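# The three lookup forms described above, restated on a plain list of patch
# names (illustrative only): a series index, a unique substring, or
# name+N / name-N for an offset relative to a named patch.
def _lookup_sketch(series, s):
    if s.isdigit():
        return series[int(s)]
    for sep in '+-':
        if sep in s:
            name, off = s.rsplit(sep, 1)
            base = [p for p in series if name in p][0]
            step = int(off or 1)
            i = series.index(base) + (step if sep == '+' else -step)
            return series[i]
    return [p for p in series if s in p][0]
_series = ['first.patch', 'second.patch', 'third.patch']
assert _lookup_sketch(_series, '2') == 'third.patch'
assert _lookup_sketch(_series, 'sec') == 'second.patch'
assert _lookup_sketch(_series, 'first+2') == 'third.patch'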
804 def push(self, repo, patch=None, force=False, list=False,
803 def push(self, repo, patch=None, force=False, list=False,
805 mergeq=None, wlock=None):
804 mergeq=None, wlock=None):
806 if not wlock:
805 if not wlock:
807 wlock = repo.wlock()
806 wlock = repo.wlock()
808 patch = self.lookup(patch)
807 patch = self.lookup(patch)
809 if patch and self.isapplied(patch):
808 if patch and self.isapplied(patch):
810 self.ui.warn(_("patch %s is already applied\n") % patch)
809 self.ui.warn(_("patch %s is already applied\n") % patch)
811 sys.exit(1)
810 sys.exit(1)
812 if self.series_end() == len(self.series):
811 if self.series_end() == len(self.series):
813 self.ui.warn(_("patch series fully applied\n"))
812 self.ui.warn(_("patch series fully applied\n"))
814 sys.exit(1)
813 sys.exit(1)
815 if not force:
814 if not force:
816 self.check_localchanges(repo)
815 self.check_localchanges(repo)
817
816
818 self.applied_dirty = 1;
817 self.applied_dirty = 1;
819 start = self.series_end()
818 start = self.series_end()
820 if start > 0:
819 if start > 0:
821 self.check_toppatch(repo)
820 self.check_toppatch(repo)
822 if not patch:
821 if not patch:
823 patch = self.series[start]
822 patch = self.series[start]
824 end = start + 1
823 end = start + 1
825 else:
824 else:
826 end = self.series.index(patch, start) + 1
825 end = self.series.index(patch, start) + 1
827 s = self.series[start:end]
826 s = self.series[start:end]
828 if mergeq:
827 if mergeq:
829 ret = self.mergepatch(repo, mergeq, s, wlock)
828 ret = self.mergepatch(repo, mergeq, s, wlock)
830 else:
829 else:
831 ret = self.apply(repo, s, list, wlock=wlock)
830 ret = self.apply(repo, s, list, wlock=wlock)
832 top = self.applied[-1].name
831 top = self.applied[-1].name
833 if ret[0]:
832 if ret[0]:
834 self.ui.write("Errors during apply, please fix and refresh %s\n" %
833 self.ui.write("Errors during apply, please fix and refresh %s\n" %
835 top)
834 top)
836 else:
835 else:
837 self.ui.write("Now at: %s\n" % top)
836 self.ui.write("Now at: %s\n" % top)
838 return ret[0]
837 return ret[0]
839
838
840 def pop(self, repo, patch=None, force=False, update=True, all=False,
839 def pop(self, repo, patch=None, force=False, update=True, all=False,
841 wlock=None):
840 wlock=None):
842 def getfile(f, rev):
841 def getfile(f, rev):
843 t = repo.file(f).read(rev)
842 t = repo.file(f).read(rev)
844 try:
843 try:
845 repo.wfile(f, "w").write(t)
844 repo.wfile(f, "w").write(t)
846 except IOError:
845 except IOError:
847 try:
846 try:
848 os.makedirs(os.path.dirname(repo.wjoin(f)))
847 os.makedirs(os.path.dirname(repo.wjoin(f)))
849 except OSError, err:
848 except OSError, err:
850 if err.errno != errno.EEXIST: raise
849 if err.errno != errno.EEXIST: raise
851 repo.wfile(f, "w").write(t)
850 repo.wfile(f, "w").write(t)
852
851
853 if not wlock:
852 if not wlock:
854 wlock = repo.wlock()
853 wlock = repo.wlock()
855 if patch:
854 if patch:
856 # index, rev, patch
855 # index, rev, patch
857 info = self.isapplied(patch)
856 info = self.isapplied(patch)
858 if not info:
857 if not info:
859 patch = self.lookup(patch)
858 patch = self.lookup(patch)
860 info = self.isapplied(patch)
859 info = self.isapplied(patch)
861 if not info:
860 if not info:
862 raise util.Abort(_("patch %s is not applied") % patch)
861 raise util.Abort(_("patch %s is not applied") % patch)
863 if len(self.applied) == 0:
862 if len(self.applied) == 0:
864 self.ui.warn(_("no patches applied\n"))
863 self.ui.warn(_("no patches applied\n"))
865 sys.exit(1)
864 sys.exit(1)
866
865
867 if not update:
866 if not update:
868 parents = repo.dirstate.parents()
867 parents = repo.dirstate.parents()
869 rr = [ revlog.bin(x.rev) for x in self.applied ]
868 rr = [ revlog.bin(x.rev) for x in self.applied ]
870 for p in parents:
869 for p in parents:
871 if p in rr:
870 if p in rr:
872 self.ui.warn("qpop: forcing dirstate update\n")
871 self.ui.warn("qpop: forcing dirstate update\n")
873 update = True
872 update = True
874
873
875 if not force and update:
874 if not force and update:
876 self.check_localchanges(repo)
875 self.check_localchanges(repo)
877
876
878 self.applied_dirty = 1;
877 self.applied_dirty = 1;
879 end = len(self.applied)
878 end = len(self.applied)
880 if not patch:
879 if not patch:
881 if all:
880 if all:
882 popi = 0
881 popi = 0
883 else:
882 else:
884 popi = len(self.applied) - 1
883 popi = len(self.applied) - 1
885 else:
884 else:
886 popi = info[0] + 1
885 popi = info[0] + 1
887 if popi >= end:
886 if popi >= end:
888 self.ui.warn("qpop: %s is already at the top\n" % patch)
887 self.ui.warn("qpop: %s is already at the top\n" % patch)
889 return
888 return
890 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
889 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
891
890
892 start = info[0]
891 start = info[0]
893 rev = revlog.bin(info[1])
892 rev = revlog.bin(info[1])
894
893
895 # we know there are no local changes, so we can make a simplified
894 # we know there are no local changes, so we can make a simplified
896 # form of hg.update.
895 # form of hg.update.
897 if update:
896 if update:
898 top = self.check_toppatch(repo)
897 top = self.check_toppatch(repo)
899 qp = self.qparents(repo, rev)
898 qp = self.qparents(repo, rev)
900 changes = repo.changelog.read(qp)
899 changes = repo.changelog.read(qp)
901 mmap = repo.manifest.read(changes[0])
900 mmap = repo.manifest.read(changes[0])
902 (c, a, r, d, u) = repo.changes(qp, top)
901 m, a, r, d, u = repo.status(qp, top)[:5]
903 if d:
902 if d:
904 raise util.Abort("deletions found between repo revs")
903 raise util.Abort("deletions found between repo revs")
905 for f in c:
904 for f in m:
906 getfile(f, mmap[f])
905 getfile(f, mmap[f])
907 for f in r:
906 for f in r:
908 getfile(f, mmap[f])
907 getfile(f, mmap[f])
909 util.set_exec(repo.wjoin(f), mmap.execf(f))
908 util.set_exec(repo.wjoin(f), mmap.execf(f))
910 repo.dirstate.update(c + r, 'n')
909 repo.dirstate.update(m + r, 'n')
911 for f in a:
910 for f in a:
912 try: os.unlink(repo.wjoin(f))
911 try: os.unlink(repo.wjoin(f))
913 except: raise
912 except: raise
914 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
913 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
915 except: pass
914 except: pass
916 if a:
915 if a:
917 repo.dirstate.forget(a)
916 repo.dirstate.forget(a)
918 repo.dirstate.setparents(qp, revlog.nullid)
917 repo.dirstate.setparents(qp, revlog.nullid)
919 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
918 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
920 del self.applied[start:end]
919 del self.applied[start:end]
921 if len(self.applied):
920 if len(self.applied):
922 self.ui.write("Now at: %s\n" % self.applied[-1].name)
921 self.ui.write("Now at: %s\n" % self.applied[-1].name)
923 else:
922 else:
924 self.ui.write("Patch queue now empty\n")
923 self.ui.write("Patch queue now empty\n")
925
924
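pop() above avoids a full hg.update by assuming the working directory is clean: everything the popped patches modified or removed is restored from the new parent's manifest, and everything they added is deleted. A toy sketch of that rule with plain dicts (not Mercurial objects; the names are assumptions for illustration):

    def simplified_update(workdir, parent_manifest, modified, removed, added):
        # restore old contents for files the patches changed or removed
        for f in modified + removed:
            workdir[f] = parent_manifest[f]
        # drop files the patches introduced
        for f in added:
            workdir.pop(f, None)
        return workdir

    print(simplified_update({'a': 'patched', 'new': 'x'},
                            {'a': 'original'},
                            modified=['a'], removed=[], added=['new']))
    # -> {'a': 'original'}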
926 def diff(self, repo, files):
925 def diff(self, repo, files):
927 top = self.check_toppatch(repo)
926 top = self.check_toppatch(repo)
928 if not top:
927 if not top:
929 self.ui.write("No patches applied\n")
928 self.ui.write("No patches applied\n")
930 return
929 return
931 qp = self.qparents(repo, top)
930 qp = self.qparents(repo, top)
932 self.printdiff(repo, qp, files=files)
931 self.printdiff(repo, qp, files=files)
933
932
934 def refresh(self, repo, msg='', short=False):
933 def refresh(self, repo, msg='', short=False):
935 if len(self.applied) == 0:
934 if len(self.applied) == 0:
936 self.ui.write("No patches applied\n")
935 self.ui.write("No patches applied\n")
937 return
936 return
938 wlock = repo.wlock()
937 wlock = repo.wlock()
939 self.check_toppatch(repo)
938 self.check_toppatch(repo)
940 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
939 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
941 top = revlog.bin(top)
940 top = revlog.bin(top)
942 cparents = repo.changelog.parents(top)
941 cparents = repo.changelog.parents(top)
943 patchparent = self.qparents(repo, top)
942 patchparent = self.qparents(repo, top)
944 message, comments, user, date, patchfound = self.readheaders(patch)
943 message, comments, user, date, patchfound = self.readheaders(patch)
945
944
946 patchf = self.opener(patch, "w")
945 patchf = self.opener(patch, "w")
947 msg = msg.rstrip()
946 msg = msg.rstrip()
948 if msg:
947 if msg:
949 if comments:
948 if comments:
950 # Remove existing message.
949 # Remove existing message.
951 ci = 0
950 ci = 0
952 for mi in range(len(message)):
951 for mi in range(len(message)):
953 while message[mi] != comments[ci]:
952 while message[mi] != comments[ci]:
954 ci += 1
953 ci += 1
955 del comments[ci]
954 del comments[ci]
956 comments.append(msg)
955 comments.append(msg)
957 if comments:
956 if comments:
958 comments = "\n".join(comments) + '\n\n'
957 comments = "\n".join(comments) + '\n\n'
959 patchf.write(comments)
958 patchf.write(comments)
960
959
961 tip = repo.changelog.tip()
960 tip = repo.changelog.tip()
962 if top == tip:
961 if top == tip:
963 # if the top of our patch queue is also the tip, there is an
962 # if the top of our patch queue is also the tip, there is an
964 # optimization here. We update the dirstate in place and strip
963 # optimization here. We update the dirstate in place and strip
965 # off the tip commit. Then just commit the current directory
964 # off the tip commit. Then just commit the current directory
966 # tree. We can also send repo.commit the list of files
965 # tree. We can also send repo.commit the list of files
967 # changed to speed up the diff
966 # changed to speed up the diff
968 #
967 #
969 # in short mode, we only diff the files included in the
968 # in short mode, we only diff the files included in the
970 # patch already
969 # patch already
971 #
970 #
972 # this should really read:
971 # this should really read:
973 #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent)
972 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
974 # but we do it backwards to take advantage of manifest/chlog
973 # but we do it backwards to take advantage of manifest/chlog
975 # caching against the next repo.changes call
974 # caching against the next repo.status call
976 #
975 #
977 (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip)
976 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
978 if short:
977 if short:
979 filelist = cc + aa + dd
978 filelist = mm + aa + dd
980 else:
979 else:
981 filelist = None
980 filelist = None
982 (c, a, r, d, u) = repo.changes(None, None, filelist)
981 m, a, r, d, u = repo.status(files=filelist)[:5]
983
982
984 # we might end up with files that were added between tip and
983 # we might end up with files that were added between tip and
985 # the dirstate parent, but then changed in the local dirstate.
984 # the dirstate parent, but then changed in the local dirstate.
986 # in this case, we want them to only show up in the added section
985 # in this case, we want them to only show up in the added section
987 for x in c:
986 for x in m:
988 if x not in aa:
987 if x not in aa:
989 cc.append(x)
988 mm.append(x)
990 # we might end up with files added by the local dirstate that
989 # we might end up with files added by the local dirstate that
991 # were deleted by the patch. In this case, they should only
990 # were deleted by the patch. In this case, they should only
992 # show up in the changed section.
991 # show up in the changed section.
993 for x in a:
992 for x in a:
994 if x in dd:
993 if x in dd:
995 del dd[dd.index(x)]
994 del dd[dd.index(x)]
996 cc.append(x)
995 mm.append(x)
997 else:
996 else:
998 aa.append(x)
997 aa.append(x)
999 # make sure any files deleted in the local dirstate
998 # make sure any files deleted in the local dirstate
1000 # are not in the add or change column of the patch
999 # are not in the add or change column of the patch
1001 forget = []
1000 forget = []
1002 for x in d + r:
1001 for x in d + r:
1003 if x in aa:
1002 if x in aa:
1004 del aa[aa.index(x)]
1003 del aa[aa.index(x)]
1005 forget.append(x)
1004 forget.append(x)
1006 continue
1005 continue
1007 elif x in cc:
1006 elif x in mm:
1008 del cc[cc.index(x)]
1007 del mm[mm.index(x)]
1009 dd.append(x)
1008 dd.append(x)
1010
1009
1011 c = list(util.unique(cc))
1010 m = list(util.unique(mm))
1012 r = list(util.unique(dd))
1011 r = list(util.unique(dd))
1013 a = list(util.unique(aa))
1012 a = list(util.unique(aa))
1014 filelist = list(util.unique(c + r + a ))
1013 filelist = list(util.unique(m + r + a))
1015 self.printdiff(repo, patchparent, files=filelist,
1014 self.printdiff(repo, patchparent, files=filelist,
1016 changes=(c, a, r, [], u), fp=patchf)
1015 changes=(m, a, r, [], u), fp=patchf)
1017 patchf.close()
1016 patchf.close()
1018
1017
1019 changes = repo.changelog.read(tip)
1018 changes = repo.changelog.read(tip)
1020 repo.dirstate.setparents(*cparents)
1019 repo.dirstate.setparents(*cparents)
1021 repo.dirstate.update(a, 'a')
1020 repo.dirstate.update(a, 'a')
1022 repo.dirstate.update(r, 'r')
1021 repo.dirstate.update(r, 'r')
1023 repo.dirstate.update(c, 'n')
1022 repo.dirstate.update(m, 'n')
1024 repo.dirstate.forget(forget)
1023 repo.dirstate.forget(forget)
1025
1024
1026 if not msg:
1025 if not msg:
1027 if not message:
1026 if not message:
1028 message = "patch queue: %s\n" % patch
1027 message = "patch queue: %s\n" % patch
1029 else:
1028 else:
1030 message = "\n".join(message)
1029 message = "\n".join(message)
1031 else:
1030 else:
1032 message = msg
1031 message = msg
1033
1032
1034 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1033 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1035 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1034 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1036 self.applied[-1] = statusentry(revlog.hex(n), patch)
1035 self.applied[-1] = statusentry(revlog.hex(n), patch)
1037 self.applied_dirty = 1
1036 self.applied_dirty = 1
1038 else:
1037 else:
1039 self.printdiff(repo, patchparent, fp=patchf)
1038 self.printdiff(repo, patchparent, fp=patchf)
1040 patchf.close()
1039 patchf.close()
1041 self.pop(repo, force=True, wlock=wlock)
1040 self.pop(repo, force=True, wlock=wlock)
1042 self.push(repo, force=True, wlock=wlock)
1041 self.push(repo, force=True, wlock=wlock)
1043
1042
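In the top == tip branch above, refresh() reconciles two file lists: what changed between patchparent and tip (the patch itself) and what changed in the working directory since tip. A simplified standalone sketch of that reconciliation (plain lists; parameter names are illustrative, not mq's API):

    def merge_status(mm, aa, dd, wd_modified, wd_added, wd_deleted):
        # locally modified files stay "modified" unless the patch added them
        for x in wd_modified:
            if x not in aa:
                mm.append(x)
        # locally added files that the patch had deleted become plain changes again
        for x in wd_added:
            if x in dd:
                dd.remove(x)
                mm.append(x)
            else:
                aa.append(x)
        # locally deleted files drop out of the patch's add/change columns
        forget = []
        for x in wd_deleted:
            if x in aa:
                aa.remove(x)
                forget.append(x)
            elif x in mm:
                mm.remove(x)
                dd.append(x)
        return sorted(set(mm)), sorted(set(aa)), sorted(set(dd)), forget

    print(merge_status(['f1'], ['f2'], [], ['f1', 'f2'], [], ['f2']))
    # -> (['f1'], [], [], ['f2'])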
1044 def init(self, repo, create=False):
1043 def init(self, repo, create=False):
1045 if os.path.isdir(self.path):
1044 if os.path.isdir(self.path):
1046 raise util.Abort(_("patch queue directory already exists"))
1045 raise util.Abort(_("patch queue directory already exists"))
1047 os.mkdir(self.path)
1046 os.mkdir(self.path)
1048 if create:
1047 if create:
1049 return self.qrepo(create=True)
1048 return self.qrepo(create=True)
1050
1049
1051 def unapplied(self, repo, patch=None):
1050 def unapplied(self, repo, patch=None):
1052 if patch and patch not in self.series:
1051 if patch and patch not in self.series:
1053 raise util.Abort(_("patch %s is not in series file") % patch)
1052 raise util.Abort(_("patch %s is not in series file") % patch)
1054 if not patch:
1053 if not patch:
1055 start = self.series_end()
1054 start = self.series_end()
1056 else:
1055 else:
1057 start = self.series.index(patch) + 1
1056 start = self.series.index(patch) + 1
1058 unapplied = []
1057 unapplied = []
1059 for i in xrange(start, len(self.series)):
1058 for i in xrange(start, len(self.series)):
1060 pushable, reason = self.pushable(i)
1059 pushable, reason = self.pushable(i)
1061 if pushable:
1060 if pushable:
1062 unapplied.append((i, self.series[i]))
1061 unapplied.append((i, self.series[i]))
1063 self.explain_pushable(i)
1062 self.explain_pushable(i)
1064 return unapplied
1063 return unapplied
1065
1064
1066 def qseries(self, repo, missing=None, summary=False):
1065 def qseries(self, repo, missing=None, summary=False):
1067 start = self.series_end(all_patches=True)
1066 start = self.series_end(all_patches=True)
1068 if not missing:
1067 if not missing:
1069 for i in range(len(self.series)):
1068 for i in range(len(self.series)):
1070 patch = self.series[i]
1069 patch = self.series[i]
1071 if self.ui.verbose:
1070 if self.ui.verbose:
1072 if i < start:
1071 if i < start:
1073 status = 'A'
1072 status = 'A'
1074 elif self.pushable(i)[0]:
1073 elif self.pushable(i)[0]:
1075 status = 'U'
1074 status = 'U'
1076 else:
1075 else:
1077 status = 'G'
1076 status = 'G'
1078 self.ui.write('%d %s ' % (i, status))
1077 self.ui.write('%d %s ' % (i, status))
1079 if summary:
1078 if summary:
1080 msg = self.readheaders(patch)[0]
1079 msg = self.readheaders(patch)[0]
1081 msg = msg and ': ' + msg[0] or ': '
1080 msg = msg and ': ' + msg[0] or ': '
1082 else:
1081 else:
1083 msg = ''
1082 msg = ''
1084 self.ui.write('%s%s\n' % (patch, msg))
1083 self.ui.write('%s%s\n' % (patch, msg))
1085 else:
1084 else:
1086 msng_list = []
1085 msng_list = []
1087 for root, dirs, files in os.walk(self.path):
1086 for root, dirs, files in os.walk(self.path):
1088 d = root[len(self.path) + 1:]
1087 d = root[len(self.path) + 1:]
1089 for f in files:
1088 for f in files:
1090 fl = os.path.join(d, f)
1089 fl = os.path.join(d, f)
1091 if (fl not in self.series and
1090 if (fl not in self.series and
1092 fl not in (self.status_path, self.series_path)
1091 fl not in (self.status_path, self.series_path)
1093 and not fl.startswith('.')):
1092 and not fl.startswith('.')):
1094 msng_list.append(fl)
1093 msng_list.append(fl)
1095 msng_list.sort()
1094 msng_list.sort()
1096 for x in msng_list:
1095 for x in msng_list:
1097 if self.ui.verbose:
1096 if self.ui.verbose:
1098 self.ui.write("D ")
1097 self.ui.write("D ")
1099 self.ui.write("%s\n" % x)
1098 self.ui.write("%s\n" % x)
1100
1099
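For reference, the one-letter flags qseries() prints in verbose mode (legend inferred from the code above, not from separate mq documentation):

    QSERIES_FLAGS = {
        'A': 'applied',
        'U': 'unapplied and pushable',
        'G': 'unapplied but blocked by a guard',
        'D': 'file in the patch directory but missing from the series file',
    }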
1101 def issaveline(self, l):
1100 def issaveline(self, l):
1102 if l.name == '.hg.patches.save.line':
1101 if l.name == '.hg.patches.save.line':
1103 return True
1102 return True
1104
1103
1105 def qrepo(self, create=False):
1104 def qrepo(self, create=False):
1106 if create or os.path.isdir(self.join(".hg")):
1105 if create or os.path.isdir(self.join(".hg")):
1107 return hg.repository(self.ui, path=self.path, create=create)
1106 return hg.repository(self.ui, path=self.path, create=create)
1108
1107
1109 def restore(self, repo, rev, delete=None, qupdate=None):
1108 def restore(self, repo, rev, delete=None, qupdate=None):
1110 c = repo.changelog.read(rev)
1109 c = repo.changelog.read(rev)
1111 desc = c[4].strip()
1110 desc = c[4].strip()
1112 lines = desc.splitlines()
1111 lines = desc.splitlines()
1113 i = 0
1112 i = 0
1114 datastart = None
1113 datastart = None
1115 series = []
1114 series = []
1116 applied = []
1115 applied = []
1117 qpp = None
1116 qpp = None
1118 for i in xrange(0, len(lines)):
1117 for i in xrange(0, len(lines)):
1119 if lines[i] == 'Patch Data:':
1118 if lines[i] == 'Patch Data:':
1120 datastart = i + 1
1119 datastart = i + 1
1121 elif lines[i].startswith('Dirstate:'):
1120 elif lines[i].startswith('Dirstate:'):
1122 l = lines[i].rstrip()
1121 l = lines[i].rstrip()
1123 l = l[10:].split(' ')
1122 l = l[10:].split(' ')
1124 qpp = [ hg.bin(x) for x in l ]
1123 qpp = [ hg.bin(x) for x in l ]
1125 elif datastart != None:
1124 elif datastart != None:
1126 l = lines[i].rstrip()
1125 l = lines[i].rstrip()
1127 se = statusentry(l)
1126 se = statusentry(l)
1128 file_ = se.name
1127 file_ = se.name
1129 if se.rev:
1128 if se.rev:
1130 applied.append(se)
1129 applied.append(se)
1131 series.append(file_)
1130 series.append(file_)
1132 if datastart == None:
1131 if datastart == None:
1133 self.ui.warn("No saved patch data found\n")
1132 self.ui.warn("No saved patch data found\n")
1134 return 1
1133 return 1
1135 self.ui.warn("restoring status: %s\n" % lines[0])
1134 self.ui.warn("restoring status: %s\n" % lines[0])
1136 self.full_series = series
1135 self.full_series = series
1137 self.applied = applied
1136 self.applied = applied
1138 self.parse_series()
1137 self.parse_series()
1139 self.series_dirty = 1
1138 self.series_dirty = 1
1140 self.applied_dirty = 1
1139 self.applied_dirty = 1
1141 heads = repo.changelog.heads()
1140 heads = repo.changelog.heads()
1142 if delete:
1141 if delete:
1143 if rev not in heads:
1142 if rev not in heads:
1144 self.ui.warn("save entry has children, leaving it alone\n")
1143 self.ui.warn("save entry has children, leaving it alone\n")
1145 else:
1144 else:
1146 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1145 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1147 pp = repo.dirstate.parents()
1146 pp = repo.dirstate.parents()
1148 if rev in pp:
1147 if rev in pp:
1149 update = True
1148 update = True
1150 else:
1149 else:
1151 update = False
1150 update = False
1152 self.strip(repo, rev, update=update, backup='strip')
1151 self.strip(repo, rev, update=update, backup='strip')
1153 if qpp:
1152 if qpp:
1154 self.ui.warn("saved queue repository parents: %s %s\n" %
1153 self.ui.warn("saved queue repository parents: %s %s\n" %
1155 (hg.short(qpp[0]), hg.short(qpp[1])))
1154 (hg.short(qpp[0]), hg.short(qpp[1])))
1156 if qupdate:
1155 if qupdate:
1157 print "queue directory updating"
1156 print "queue directory updating"
1158 r = self.qrepo()
1157 r = self.qrepo()
1159 if not r:
1158 if not r:
1160 self.ui.warn("Unable to load queue repository\n")
1159 self.ui.warn("Unable to load queue repository\n")
1161 return 1
1160 return 1
1162 hg.clean(r, qpp[0])
1161 hg.clean(r, qpp[0])
1163
1162
1164 def save(self, repo, msg=None):
1163 def save(self, repo, msg=None):
1165 if len(self.applied) == 0:
1164 if len(self.applied) == 0:
1166 self.ui.warn("save: no patches applied, exiting\n")
1165 self.ui.warn("save: no patches applied, exiting\n")
1167 return 1
1166 return 1
1168 if self.issaveline(self.applied[-1]):
1167 if self.issaveline(self.applied[-1]):
1169 self.ui.warn("status is already saved\n")
1168 self.ui.warn("status is already saved\n")
1170 return 1
1169 return 1
1171
1170
1172 ar = [ ':' + x for x in self.full_series ]
1171 ar = [ ':' + x for x in self.full_series ]
1173 if not msg:
1172 if not msg:
1174 msg = "hg patches saved state"
1173 msg = "hg patches saved state"
1175 else:
1174 else:
1176 msg = "hg patches: " + msg.rstrip('\r\n')
1175 msg = "hg patches: " + msg.rstrip('\r\n')
1177 r = self.qrepo()
1176 r = self.qrepo()
1178 if r:
1177 if r:
1179 pp = r.dirstate.parents()
1178 pp = r.dirstate.parents()
1180 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1179 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1181 msg += "\n\nPatch Data:\n"
1180 msg += "\n\nPatch Data:\n"
1182 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1181 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1183 "\n".join(ar) + '\n' or "")
1182 "\n".join(ar) + '\n' or "")
1184 n = repo.commit(None, text, user=None, force=1)
1183 n = repo.commit(None, text, user=None, force=1)
1185 if not n:
1184 if not n:
1186 self.ui.warn("repo commit failed\n")
1185 self.ui.warn("repo commit failed\n")
1187 return 1
1186 return 1
1188 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1187 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1189 self.applied_dirty = 1
1188 self.applied_dirty = 1
1190
1189
1191 def full_series_end(self):
1190 def full_series_end(self):
1192 if len(self.applied) > 0:
1191 if len(self.applied) > 0:
1193 p = self.applied[-1].name
1192 p = self.applied[-1].name
1194 end = self.find_series(p)
1193 end = self.find_series(p)
1195 if end == None:
1194 if end == None:
1196 return len(self.full_series)
1195 return len(self.full_series)
1197 return end + 1
1196 return end + 1
1198 return 0
1197 return 0
1199
1198
1200 def series_end(self, all_patches=False):
1199 def series_end(self, all_patches=False):
1201 end = 0
1200 end = 0
1202 def next(start):
1201 def next(start):
1203 if all_patches:
1202 if all_patches:
1204 return start
1203 return start
1205 i = start
1204 i = start
1206 while i < len(self.series):
1205 while i < len(self.series):
1207 p, reason = self.pushable(i)
1206 p, reason = self.pushable(i)
1208 if p:
1207 if p:
1209 break
1208 break
1210 self.explain_pushable(i)
1209 self.explain_pushable(i)
1211 i += 1
1210 i += 1
1212 return i
1211 return i
1213 if len(self.applied) > 0:
1212 if len(self.applied) > 0:
1214 p = self.applied[-1].name
1213 p = self.applied[-1].name
1215 try:
1214 try:
1216 end = self.series.index(p)
1215 end = self.series.index(p)
1217 except ValueError:
1216 except ValueError:
1218 return 0
1217 return 0
1219 return next(end + 1)
1218 return next(end + 1)
1220 return next(end)
1219 return next(end)
1221
1220
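series_end() above returns the index of the patch that would be pushed next, skipping over guarded entries unless all_patches is set. A toy sketch of that skip (standalone; the pushable callback stands in for self.pushable):

    def first_pushable(series, start, pushable):
        i = start
        while i < len(series) and not pushable(series[i]):
            i += 1
        return i

    # with 'b' guarded, pushing would continue at index 2 ('c')
    print(first_pushable(['a', 'b', 'c'], 1, lambda p: p != 'b'))  # 2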
1222 def qapplied(self, repo, patch=None):
1221 def qapplied(self, repo, patch=None):
1223 if patch and patch not in self.series:
1222 if patch and patch not in self.series:
1224 raise util.Abort(_("patch %s is not in series file") % patch)
1223 raise util.Abort(_("patch %s is not in series file") % patch)
1225 if not patch:
1224 if not patch:
1226 end = len(self.applied)
1225 end = len(self.applied)
1227 else:
1226 else:
1228 end = self.series.index(patch) + 1
1227 end = self.series.index(patch) + 1
1229 for x in xrange(end):
1228 for x in xrange(end):
1230 p = self.appliedname(x)
1229 p = self.appliedname(x)
1231 self.ui.write("%s\n" % p)
1230 self.ui.write("%s\n" % p)
1232
1231
1233 def appliedname(self, index):
1232 def appliedname(self, index):
1234 pname = self.applied[index].name
1233 pname = self.applied[index].name
1235 if not self.ui.verbose:
1234 if not self.ui.verbose:
1236 p = pname
1235 p = pname
1237 else:
1236 else:
1238 p = str(self.series.index(pname)) + " " + pname
1237 p = str(self.series.index(pname)) + " " + pname
1239 return p
1238 return p
1240
1239
1241 def top(self, repo):
1240 def top(self, repo):
1242 if len(self.applied):
1241 if len(self.applied):
1243 p = self.appliedname(-1)
1242 p = self.appliedname(-1)
1244 self.ui.write(p + '\n')
1243 self.ui.write(p + '\n')
1245 else:
1244 else:
1246 self.ui.write("No patches applied\n")
1245 self.ui.write("No patches applied\n")
1247
1246
1248 def next(self, repo):
1247 def next(self, repo):
1249 end = self.series_end()
1248 end = self.series_end()
1250 if end == len(self.series):
1249 if end == len(self.series):
1251 self.ui.write("All patches applied\n")
1250 self.ui.write("All patches applied\n")
1252 else:
1251 else:
1253 p = self.series[end]
1252 p = self.series[end]
1254 if self.ui.verbose:
1253 if self.ui.verbose:
1255 self.ui.write("%d " % self.series.index(p))
1254 self.ui.write("%d " % self.series.index(p))
1256 self.ui.write(p + '\n')
1255 self.ui.write(p + '\n')
1257
1256
1258 def prev(self, repo):
1257 def prev(self, repo):
1259 if len(self.applied) > 1:
1258 if len(self.applied) > 1:
1260 p = self.appliedname(-2)
1259 p = self.appliedname(-2)
1261 self.ui.write(p + '\n')
1260 self.ui.write(p + '\n')
1262 elif len(self.applied) == 1:
1261 elif len(self.applied) == 1:
1263 self.ui.write("Only one patch applied\n")
1262 self.ui.write("Only one patch applied\n")
1264 else:
1263 else:
1265 self.ui.write("No patches applied\n")
1264 self.ui.write("No patches applied\n")
1266
1265
1267 def qimport(self, repo, files, patch=None, existing=None, force=None):
1266 def qimport(self, repo, files, patch=None, existing=None, force=None):
1268 if len(files) > 1 and patch:
1267 if len(files) > 1 and patch:
1269 raise util.Abort(_('option "-n" not valid when importing multiple '
1268 raise util.Abort(_('option "-n" not valid when importing multiple '
1270 'files'))
1269 'files'))
1271 i = 0
1270 i = 0
1272 added = []
1271 added = []
1273 for filename in files:
1272 for filename in files:
1274 if existing:
1273 if existing:
1275 if not patch:
1274 if not patch:
1276 patch = filename
1275 patch = filename
1277 if not os.path.isfile(self.join(patch)):
1276 if not os.path.isfile(self.join(patch)):
1278 raise util.Abort(_("patch %s does not exist") % patch)
1277 raise util.Abort(_("patch %s does not exist") % patch)
1279 else:
1278 else:
1280 try:
1279 try:
1281 text = file(filename).read()
1280 text = file(filename).read()
1282 except IOError:
1281 except IOError:
1283 raise util.Abort(_("unable to read %s") % filename)
1282 raise util.Abort(_("unable to read %s") % filename)
1284 if not patch:
1283 if not patch:
1285 patch = os.path.split(filename)[1]
1284 patch = os.path.split(filename)[1]
1286 if not force and os.path.exists(self.join(patch)):
1285 if not force and os.path.exists(self.join(patch)):
1287 raise util.Abort(_('patch "%s" already exists') % patch)
1286 raise util.Abort(_('patch "%s" already exists') % patch)
1288 patchf = self.opener(patch, "w")
1287 patchf = self.opener(patch, "w")
1289 patchf.write(text)
1288 patchf.write(text)
1290 if patch in self.series:
1289 if patch in self.series:
1291 raise util.Abort(_('patch %s is already in the series file')
1290 raise util.Abort(_('patch %s is already in the series file')
1292 % patch)
1291 % patch)
1293 index = self.full_series_end() + i
1292 index = self.full_series_end() + i
1294 self.full_series[index:index] = [patch]
1293 self.full_series[index:index] = [patch]
1295 self.parse_series()
1294 self.parse_series()
1296 self.ui.warn("adding %s to series file\n" % patch)
1295 self.ui.warn("adding %s to series file\n" % patch)
1297 i += 1
1296 i += 1
1298 added.append(patch)
1297 added.append(patch)
1299 patch = None
1298 patch = None
1300 self.series_dirty = 1
1299 self.series_dirty = 1
1301 qrepo = self.qrepo()
1300 qrepo = self.qrepo()
1302 if qrepo:
1301 if qrepo:
1303 qrepo.add(added)
1302 qrepo.add(added)
1304
1303
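qimport() above splices each imported patch into the series immediately after the last applied entry (as reported by full_series_end()), so it becomes the next patch to push. A small illustration of that list splice (names assumed):

    series = ['applied-1.patch', 'unapplied-1.patch']
    index = 1                      # first unapplied slot, as full_series_end() would report
    series[index:index] = ['new.patch']
    print(series)                  # ['applied-1.patch', 'new.patch', 'unapplied-1.patch']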
1305 def delete(ui, repo, patch, **opts):
1304 def delete(ui, repo, patch, **opts):
1306 """remove a patch from the series file
1305 """remove a patch from the series file
1307
1306
1308 The patch must not be applied.
1307 The patch must not be applied.
1309 With -f, deletes the patch file as well as the series entry."""
1308 With -f, deletes the patch file as well as the series entry."""
1310 q = repo.mq
1309 q = repo.mq
1311 q.delete(repo, patch, force=opts.get('force'))
1310 q.delete(repo, patch, force=opts.get('force'))
1312 q.save_dirty()
1311 q.save_dirty()
1313 return 0
1312 return 0
1314
1313
1315 def applied(ui, repo, patch=None, **opts):
1314 def applied(ui, repo, patch=None, **opts):
1316 """print the patches already applied"""
1315 """print the patches already applied"""
1317 repo.mq.qapplied(repo, patch)
1316 repo.mq.qapplied(repo, patch)
1318 return 0
1317 return 0
1319
1318
1320 def unapplied(ui, repo, patch=None, **opts):
1319 def unapplied(ui, repo, patch=None, **opts):
1321 """print the patches not yet applied"""
1320 """print the patches not yet applied"""
1322 for i, p in repo.mq.unapplied(repo, patch):
1321 for i, p in repo.mq.unapplied(repo, patch):
1323 if ui.verbose:
1322 if ui.verbose:
1324 ui.write("%d " % i)
1323 ui.write("%d " % i)
1325 ui.write("%s\n" % p)
1324 ui.write("%s\n" % p)
1326
1325
1327 def qimport(ui, repo, *filename, **opts):
1326 def qimport(ui, repo, *filename, **opts):
1328 """import a patch"""
1327 """import a patch"""
1329 q = repo.mq
1328 q = repo.mq
1330 q.qimport(repo, filename, patch=opts['name'],
1329 q.qimport(repo, filename, patch=opts['name'],
1331 existing=opts['existing'], force=opts['force'])
1330 existing=opts['existing'], force=opts['force'])
1332 q.save_dirty()
1331 q.save_dirty()
1333 return 0
1332 return 0
1334
1333
1335 def init(ui, repo, **opts):
1334 def init(ui, repo, **opts):
1336 """init a new queue repository
1335 """init a new queue repository
1337
1336
1338 The queue repository is unversioned by default. If -c is
1337 The queue repository is unversioned by default. If -c is
1339 specified, qinit will create a separate nested repository
1338 specified, qinit will create a separate nested repository
1340 for patches. Use qcommit to commit changes to this queue
1339 for patches. Use qcommit to commit changes to this queue
1341 repository."""
1340 repository."""
1342 q = repo.mq
1341 q = repo.mq
1343 r = q.init(repo, create=opts['create_repo'])
1342 r = q.init(repo, create=opts['create_repo'])
1344 q.save_dirty()
1343 q.save_dirty()
1345 if r:
1344 if r:
1346 fp = r.wopener('.hgignore', 'w')
1345 fp = r.wopener('.hgignore', 'w')
1347 print >> fp, 'syntax: glob'
1346 print >> fp, 'syntax: glob'
1348 print >> fp, 'status'
1347 print >> fp, 'status'
1349 fp.close()
1348 fp.close()
1350 r.wopener('series', 'w').close()
1349 r.wopener('series', 'w').close()
1351 r.add(['.hgignore', 'series'])
1350 r.add(['.hgignore', 'series'])
1352 return 0
1351 return 0
1353
1352
1354 def clone(ui, source, dest=None, **opts):
1353 def clone(ui, source, dest=None, **opts):
1355 '''clone main and patch repository at same time
1354 '''clone main and patch repository at same time
1356
1355
1357 If source is local, destination will have no patches applied. If
1356 If source is local, destination will have no patches applied. If
1358 source is remote, this command cannot check whether patches are
1357 source is remote, this command cannot check whether patches are
1359 applied in source, so it cannot guarantee that patches are not
1358 applied in source, so it cannot guarantee that patches are not
1360 applied in destination. If you clone a remote repository, make sure
1359 applied in destination. If you clone a remote repository, make sure
1361 beforehand that it has no patches applied.
1360 beforehand that it has no patches applied.
1362
1361
1363 Source patch repository is looked for in <src>/.hg/patches by
1362 Source patch repository is looked for in <src>/.hg/patches by
1364 default. Use -p <url> to change.
1363 default. Use -p <url> to change.
1365 '''
1364 '''
1366 commands.setremoteconfig(ui, opts)
1365 commands.setremoteconfig(ui, opts)
1367 if dest is None:
1366 if dest is None:
1368 dest = hg.defaultdest(source)
1367 dest = hg.defaultdest(source)
1369 sr = hg.repository(ui, ui.expandpath(source))
1368 sr = hg.repository(ui, ui.expandpath(source))
1370 qbase, destrev = None, None
1369 qbase, destrev = None, None
1371 if sr.local():
1370 if sr.local():
1372 reposetup(ui, sr)
1371 reposetup(ui, sr)
1373 if sr.mq.applied:
1372 if sr.mq.applied:
1374 qbase = revlog.bin(sr.mq.applied[0].rev)
1373 qbase = revlog.bin(sr.mq.applied[0].rev)
1375 if not hg.islocal(dest):
1374 if not hg.islocal(dest):
1376 destrev = sr.parents(qbase)[0]
1375 destrev = sr.parents(qbase)[0]
1377 ui.note(_('cloning main repo\n'))
1376 ui.note(_('cloning main repo\n'))
1378 sr, dr = hg.clone(ui, sr, dest,
1377 sr, dr = hg.clone(ui, sr, dest,
1379 pull=opts['pull'],
1378 pull=opts['pull'],
1380 rev=destrev,
1379 rev=destrev,
1381 update=False,
1380 update=False,
1382 stream=opts['uncompressed'])
1381 stream=opts['uncompressed'])
1383 ui.note(_('cloning patch repo\n'))
1382 ui.note(_('cloning patch repo\n'))
1384 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1383 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1385 dr.url() + '/.hg/patches',
1384 dr.url() + '/.hg/patches',
1386 pull=opts['pull'],
1385 pull=opts['pull'],
1387 update=not opts['noupdate'],
1386 update=not opts['noupdate'],
1388 stream=opts['uncompressed'])
1387 stream=opts['uncompressed'])
1389 if dr.local():
1388 if dr.local():
1390 if qbase:
1389 if qbase:
1391 ui.note(_('stripping applied patches from destination repo\n'))
1390 ui.note(_('stripping applied patches from destination repo\n'))
1392 reposetup(ui, dr)
1391 reposetup(ui, dr)
1393 dr.mq.strip(dr, qbase, update=False, backup=None)
1392 dr.mq.strip(dr, qbase, update=False, backup=None)
1394 if not opts['noupdate']:
1393 if not opts['noupdate']:
1395 ui.note(_('updating destination repo\n'))
1394 ui.note(_('updating destination repo\n'))
1396 hg.update(dr, dr.changelog.tip())
1395 hg.update(dr, dr.changelog.tip())
1397
1396
1398 def commit(ui, repo, *pats, **opts):
1397 def commit(ui, repo, *pats, **opts):
1399 """commit changes in the queue repository"""
1398 """commit changes in the queue repository"""
1400 q = repo.mq
1399 q = repo.mq
1401 r = q.qrepo()
1400 r = q.qrepo()
1402 if not r: raise util.Abort('no queue repository')
1401 if not r: raise util.Abort('no queue repository')
1403 commands.commit(r.ui, r, *pats, **opts)
1402 commands.commit(r.ui, r, *pats, **opts)
1404
1403
1405 def series(ui, repo, **opts):
1404 def series(ui, repo, **opts):
1406 """print the entire series file"""
1405 """print the entire series file"""
1407 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1406 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1408 return 0
1407 return 0
1409
1408
1410 def top(ui, repo, **opts):
1409 def top(ui, repo, **opts):
1411 """print the name of the current patch"""
1410 """print the name of the current patch"""
1412 repo.mq.top(repo)
1411 repo.mq.top(repo)
1413 return 0
1412 return 0
1414
1413
1415 def next(ui, repo, **opts):
1414 def next(ui, repo, **opts):
1416 """print the name of the next patch"""
1415 """print the name of the next patch"""
1417 repo.mq.next(repo)
1416 repo.mq.next(repo)
1418 return 0
1417 return 0
1419
1418
1420 def prev(ui, repo, **opts):
1419 def prev(ui, repo, **opts):
1421 """print the name of the previous patch"""
1420 """print the name of the previous patch"""
1422 repo.mq.prev(repo)
1421 repo.mq.prev(repo)
1423 return 0
1422 return 0
1424
1423
1425 def new(ui, repo, patch, **opts):
1424 def new(ui, repo, patch, **opts):
1426 """create a new patch
1425 """create a new patch
1427
1426
1428 qnew creates a new patch on top of the currently-applied patch
1427 qnew creates a new patch on top of the currently-applied patch
1429 (if any). It will refuse to run if there are any outstanding
1428 (if any). It will refuse to run if there are any outstanding
1430 changes unless -f is specified, in which case the patch will
1429 changes unless -f is specified, in which case the patch will
1431 be initialised with them.
1430 be initialised with them.
1432
1431
1433 -m or -l set the patch header as well as the commit message.
1432 -m or -l set the patch header as well as the commit message.
1434 If neither is specified, the patch header is empty and the
1433 If neither is specified, the patch header is empty and the
1435 commit message is 'New patch: PATCH'"""
1434 commit message is 'New patch: PATCH'"""
1436 q = repo.mq
1435 q = repo.mq
1437 message = commands.logmessage(opts)
1436 message = commands.logmessage(opts)
1438 q.new(repo, patch, msg=message, force=opts['force'])
1437 q.new(repo, patch, msg=message, force=opts['force'])
1439 q.save_dirty()
1438 q.save_dirty()
1440 return 0
1439 return 0
1441
1440
1442 def refresh(ui, repo, **opts):
1441 def refresh(ui, repo, **opts):
1443 """update the current patch"""
1442 """update the current patch"""
1444 q = repo.mq
1443 q = repo.mq
1445 message = commands.logmessage(opts)
1444 message = commands.logmessage(opts)
1446 if opts['edit']:
1445 if opts['edit']:
1447 if message:
1446 if message:
1448 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1447 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1449 patch = q.applied[-1].name
1448 patch = q.applied[-1].name
1450 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1449 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1451 message = ui.edit('\n'.join(message), user or ui.username())
1450 message = ui.edit('\n'.join(message), user or ui.username())
1452 q.refresh(repo, msg=message, short=opts['short'])
1451 q.refresh(repo, msg=message, short=opts['short'])
1453 q.save_dirty()
1452 q.save_dirty()
1454 return 0
1453 return 0
1455
1454
1456 def diff(ui, repo, *files, **opts):
1455 def diff(ui, repo, *files, **opts):
1457 """diff of the current patch"""
1456 """diff of the current patch"""
1458 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1457 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1459 repo.mq.diff(repo, list(files))
1458 repo.mq.diff(repo, list(files))
1460 return 0
1459 return 0
1461
1460
1462 def fold(ui, repo, *files, **opts):
1461 def fold(ui, repo, *files, **opts):
1463 """fold the named patches into the current patch
1462 """fold the named patches into the current patch
1464
1463
1465 Patches must not yet be applied. Each patch will be successively
1464 Patches must not yet be applied. Each patch will be successively
1466 applied to the current patch in the order given. If all the
1465 applied to the current patch in the order given. If all the
1467 patches apply successfully, the current patch will be refreshed
1466 patches apply successfully, the current patch will be refreshed
1468 with the new cumulative patch, and the folded patches will
1467 with the new cumulative patch, and the folded patches will
1469 be deleted. With -f/--force, the folded patch files will
1468 be deleted. With -f/--force, the folded patch files will
1470 be removed afterwards.
1469 be removed afterwards.
1471
1470
1472 The header for each folded patch will be concatenated with
1471 The header for each folded patch will be concatenated with
1473 the current patch header, separated by a line of '* * *'."""
1472 the current patch header, separated by a line of '* * *'."""
1474
1473
1475 q = repo.mq
1474 q = repo.mq
1476
1475
1477 if not files:
1476 if not files:
1478 raise util.Abort(_('qfold requires at least one patch name'))
1477 raise util.Abort(_('qfold requires at least one patch name'))
1479 if not q.check_toppatch(repo):
1478 if not q.check_toppatch(repo):
1480 raise util.Abort(_('No patches applied\n'))
1479 raise util.Abort(_('No patches applied\n'))
1481
1480
1482 message = commands.logmessage(opts)
1481 message = commands.logmessage(opts)
1483 if opts['edit']:
1482 if opts['edit']:
1484 if message:
1483 if message:
1485 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1484 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1486
1485
1487 parent = q.lookup('qtip')
1486 parent = q.lookup('qtip')
1488 patches = []
1487 patches = []
1489 messages = []
1488 messages = []
1490 for f in files:
1489 for f in files:
1491 patch = q.lookup(f)
1490 patch = q.lookup(f)
1492 if patch in patches or patch == parent:
1491 if patch in patches or patch == parent:
1493 ui.warn(_('Skipping already folded patch %s\n') % patch)
1492 ui.warn(_('Skipping already folded patch %s\n') % patch)
1494 if q.isapplied(patch):
1493 if q.isapplied(patch):
1495 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1494 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1496 patches.append(patch)
1495 patches.append(patch)
1497
1496
1498 for patch in patches:
1497 for patch in patches:
1499 if not message:
1498 if not message:
1500 messages.append(q.readheaders(patch)[0])
1499 messages.append(q.readheaders(patch)[0])
1501 pf = q.join(patch)
1500 pf = q.join(patch)
1502 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1501 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1503 if not patchsuccess:
1502 if not patchsuccess:
1504 raise util.Abort(_('Error folding patch %s') % patch)
1503 raise util.Abort(_('Error folding patch %s') % patch)
1505
1504
1506 if not message:
1505 if not message:
1507 message, comments, user = q.readheaders(parent)[0:3]
1506 message, comments, user = q.readheaders(parent)[0:3]
1508 for msg in messages:
1507 for msg in messages:
1509 message.append('* * *')
1508 message.append('* * *')
1510 message.extend(msg)
1509 message.extend(msg)
1511 message = '\n'.join(message)
1510 message = '\n'.join(message)
1512
1511
1513 if opts['edit']:
1512 if opts['edit']:
1514 message = ui.edit(message, user or ui.username())
1513 message = ui.edit(message, user or ui.username())
1515
1514
1516 q.refresh(repo, msg=message)
1515 q.refresh(repo, msg=message)
1517
1516
1518 for patch in patches:
1517 for patch in patches:
1519 q.delete(repo, patch, force=opts['force'])
1518 q.delete(repo, patch, force=opts['force'])
1520
1519
1521 q.save_dirty()
1520 q.save_dirty()
1522
1521
1523 def guard(ui, repo, *args, **opts):
1522 def guard(ui, repo, *args, **opts):
1524 '''set or print guards for a patch
1523 '''set or print guards for a patch
1525
1524
1526 guards control whether a patch can be pushed. a patch with no
1525 guards control whether a patch can be pushed. a patch with no
1527 guards is aways pushed. a patch with posative guard ("+foo") is
1526 guards is aways pushed. a patch with posative guard ("+foo") is
1528 pushed only if qselect command enables guard "foo". a patch with
1527 pushed only if qselect command enables guard "foo". a patch with
1529 nagative guard ("-foo") is never pushed if qselect command enables
1528 nagative guard ("-foo") is never pushed if qselect command enables
1530 guard "foo".
1529 guard "foo".
1531
1530
1532 with no arguments, default is to print current active guards.
1531 with no arguments, default is to print current active guards.
1533 with arguments, set active guards for patch.
1532 with arguments, set active guards for patch.
1534
1533
1535 to set negative guard "-foo" on topmost patch ("--" is needed so
1534 to set negative guard "-foo" on topmost patch ("--" is needed so
1536 hg will not interpret "-foo" as argument):
1535 hg will not interpret "-foo" as argument):
1537 hg qguard -- -foo
1536 hg qguard -- -foo
1538
1537
1539 to set guards on another patch:
1538 to set guards on another patch:
1540 hg qguard other.patch +2.6.17 -stable
1539 hg qguard other.patch +2.6.17 -stable
1541 '''
1540 '''
1542 def status(idx):
1541 def status(idx):
1543 guards = q.series_guards[idx] or ['unguarded']
1542 guards = q.series_guards[idx] or ['unguarded']
1544 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1543 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1545 q = repo.mq
1544 q = repo.mq
1546 patch = None
1545 patch = None
1547 args = list(args)
1546 args = list(args)
1548 if opts['list']:
1547 if opts['list']:
1549 if args or opts['none']:
1548 if args or opts['none']:
1550 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1549 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1551 for i in xrange(len(q.series)):
1550 for i in xrange(len(q.series)):
1552 status(i)
1551 status(i)
1553 return
1552 return
1554 if not args or args[0][0:1] in '-+':
1553 if not args or args[0][0:1] in '-+':
1555 if not q.applied:
1554 if not q.applied:
1556 raise util.Abort(_('no patches applied'))
1555 raise util.Abort(_('no patches applied'))
1557 patch = q.applied[-1].name
1556 patch = q.applied[-1].name
1558 if patch is None and args[0][0:1] not in '-+':
1557 if patch is None and args[0][0:1] not in '-+':
1559 patch = args.pop(0)
1558 patch = args.pop(0)
1560 if patch is None:
1559 if patch is None:
1561 raise util.Abort(_('no patch to work with'))
1560 raise util.Abort(_('no patch to work with'))
1562 if args or opts['none']:
1561 if args or opts['none']:
1563 q.set_guards(q.find_series(patch), args)
1562 q.set_guards(q.find_series(patch), args)
1564 q.save_dirty()
1563 q.save_dirty()
1565 else:
1564 else:
1566 status(q.series.index(q.lookup(patch)))
1565 status(q.series.index(q.lookup(patch)))
1567
1566
1568 def header(ui, repo, patch=None):
1567 def header(ui, repo, patch=None):
1569 """Print the header of the topmost or specified patch"""
1568 """Print the header of the topmost or specified patch"""
1570 q = repo.mq
1569 q = repo.mq
1571
1570
1572 if patch:
1571 if patch:
1573 patch = q.lookup(patch)
1572 patch = q.lookup(patch)
1574 else:
1573 else:
1575 if not q.applied:
1574 if not q.applied:
1576 ui.write('No patches applied\n')
1575 ui.write('No patches applied\n')
1577 return
1576 return
1578 patch = q.lookup('qtip')
1577 patch = q.lookup('qtip')
1579 message = repo.mq.readheaders(patch)[0]
1578 message = repo.mq.readheaders(patch)[0]
1580
1579
1581 ui.write('\n'.join(message) + '\n')
1580 ui.write('\n'.join(message) + '\n')
1582
1581
1583 def lastsavename(path):
1582 def lastsavename(path):
1584 (directory, base) = os.path.split(path)
1583 (directory, base) = os.path.split(path)
1585 names = os.listdir(directory)
1584 names = os.listdir(directory)
1586 namere = re.compile("%s.([0-9]+)" % base)
1585 namere = re.compile("%s.([0-9]+)" % base)
1587 maxindex = None
1586 maxindex = None
1588 maxname = None
1587 maxname = None
1589 for f in names:
1588 for f in names:
1590 m = namere.match(f)
1589 m = namere.match(f)
1591 if m:
1590 if m:
1592 index = int(m.group(1))
1591 index = int(m.group(1))
1593 if maxindex == None or index > maxindex:
1592 if maxindex == None or index > maxindex:
1594 maxindex = index
1593 maxindex = index
1595 maxname = f
1594 maxname = f
1596 if maxname:
1595 if maxname:
1597 return (os.path.join(directory, maxname), maxindex)
1596 return (os.path.join(directory, maxname), maxindex)
1598 return (None, None)
1597 return (None, None)
1599
1598
1600 def savename(path):
1599 def savename(path):
1601 (last, index) = lastsavename(path)
1600 (last, index) = lastsavename(path)
1602 if last is None:
1601 if last is None:
1603 index = 0
1602 index = 0
1604 newpath = path + ".%d" % (index + 1)
1603 newpath = path + ".%d" % (index + 1)
1605 return newpath
1604 return newpath
1606
1605
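lastsavename() and savename() above implement the naming scheme used when a queue is saved or merged: snapshots live beside the queue directory as <base>.<n>, and the next save takes the highest existing index plus one. A standalone sketch of that convention (not mq's own helper):

    import re

    def next_savename(base, existing_names):
        pat = re.compile(re.escape(base) + r'\.([0-9]+)$')
        indexes = [int(m.group(1)) for f in existing_names
                   for m in [pat.match(f)] if m]
        return '%s.%d' % (base, (max(indexes) if indexes else 0) + 1)

    print(next_savename('patches', ['patches.1', 'patches.3', 'series']))  # patches.4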
1607 def push(ui, repo, patch=None, **opts):
1606 def push(ui, repo, patch=None, **opts):
1608 """push the next patch onto the stack"""
1607 """push the next patch onto the stack"""
1609 q = repo.mq
1608 q = repo.mq
1610 mergeq = None
1609 mergeq = None
1611
1610
1612 if opts['all']:
1611 if opts['all']:
1613 patch = q.series[-1]
1612 patch = q.series[-1]
1614 if opts['merge']:
1613 if opts['merge']:
1615 if opts['name']:
1614 if opts['name']:
1616 newpath = opts['name']
1615 newpath = opts['name']
1617 else:
1616 else:
1618 newpath, i = lastsavename(q.path)
1617 newpath, i = lastsavename(q.path)
1619 if not newpath:
1618 if not newpath:
1620 ui.warn("no saved queues found, please use -n\n")
1619 ui.warn("no saved queues found, please use -n\n")
1621 return 1
1620 return 1
1622 mergeq = queue(ui, repo.join(""), newpath)
1621 mergeq = queue(ui, repo.join(""), newpath)
1623 ui.warn("merging with queue at: %s\n" % mergeq.path)
1622 ui.warn("merging with queue at: %s\n" % mergeq.path)
1624 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1623 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1625 mergeq=mergeq)
1624 mergeq=mergeq)
1626 q.save_dirty()
1625 q.save_dirty()
1627 return ret
1626 return ret
1628
1627
1629 def pop(ui, repo, patch=None, **opts):
1628 def pop(ui, repo, patch=None, **opts):
1630 """pop the current patch off the stack"""
1629 """pop the current patch off the stack"""
1631 localupdate = True
1630 localupdate = True
1632 if opts['name']:
1631 if opts['name']:
1633 q = queue(ui, repo.join(""), repo.join(opts['name']))
1632 q = queue(ui, repo.join(""), repo.join(opts['name']))
1634 ui.warn('using patch queue: %s\n' % q.path)
1633 ui.warn('using patch queue: %s\n' % q.path)
1635 localupdate = False
1634 localupdate = False
1636 else:
1635 else:
1637 q = repo.mq
1636 q = repo.mq
1638 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1637 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1639 q.save_dirty()
1638 q.save_dirty()
1640 return 0
1639 return 0
1641
1640
1642 def rename(ui, repo, patch, name=None, **opts):
1641 def rename(ui, repo, patch, name=None, **opts):
1643 """rename a patch
1642 """rename a patch
1644
1643
1645 With one argument, renames the current patch to PATCH1.
1644 With one argument, renames the current patch to PATCH1.
1646 With two arguments, renames PATCH1 to PATCH2."""
1645 With two arguments, renames PATCH1 to PATCH2."""
1647
1646
1648 q = repo.mq
1647 q = repo.mq
1649
1648
1650 if not name:
1649 if not name:
1651 name = patch
1650 name = patch
1652 patch = None
1651 patch = None
1653
1652
1654 if name in q.series:
1653 if name in q.series:
1655 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1654 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1656
1655
1657 absdest = q.join(name)
1656 absdest = q.join(name)
1658 if os.path.exists(absdest):
1657 if os.path.exists(absdest):
1659 raise util.Abort(_('%s already exists') % absdest)
1658 raise util.Abort(_('%s already exists') % absdest)
1660
1659
1661 if patch:
1660 if patch:
1662 patch = q.lookup(patch)
1661 patch = q.lookup(patch)
1663 else:
1662 else:
1664 if not q.applied:
1663 if not q.applied:
1665 ui.write(_('No patches applied\n'))
1664 ui.write(_('No patches applied\n'))
1666 return
1665 return
1667 patch = q.lookup('qtip')
1666 patch = q.lookup('qtip')
1668
1667
1669 if ui.verbose:
1668 if ui.verbose:
1670 ui.write('Renaming %s to %s\n' % (patch, name))
1669 ui.write('Renaming %s to %s\n' % (patch, name))
1671 i = q.find_series(patch)
1670 i = q.find_series(patch)
1672 q.full_series[i] = name
1671 q.full_series[i] = name
1673 q.parse_series()
1672 q.parse_series()
1674 q.series_dirty = 1
1673 q.series_dirty = 1
1675
1674
1676 info = q.isapplied(patch)
1675 info = q.isapplied(patch)
1677 if info:
1676 if info:
1678 q.applied[info[0]] = statusentry(info[1], name)
1677 q.applied[info[0]] = statusentry(info[1], name)
1679 q.applied_dirty = 1
1678 q.applied_dirty = 1
1680
1679
1681 util.rename(q.join(patch), absdest)
1680 util.rename(q.join(patch), absdest)
1682 r = q.qrepo()
1681 r = q.qrepo()
1683 if r:
1682 if r:
1684 wlock = r.wlock()
1683 wlock = r.wlock()
1685 if r.dirstate.state(name) == 'r':
1684 if r.dirstate.state(name) == 'r':
1686 r.undelete([name], wlock)
1685 r.undelete([name], wlock)
1687 r.copy(patch, name, wlock)
1686 r.copy(patch, name, wlock)
1688 r.remove([patch], False, wlock)
1687 r.remove([patch], False, wlock)
1689
1688
1690 q.save_dirty()
1689 q.save_dirty()
1691
1690
1692 def restore(ui, repo, rev, **opts):
1691 def restore(ui, repo, rev, **opts):
1693 """restore the queue state saved by a rev"""
1692 """restore the queue state saved by a rev"""
1694 rev = repo.lookup(rev)
1693 rev = repo.lookup(rev)
1695 q = repo.mq
1694 q = repo.mq
1696 q.restore(repo, rev, delete=opts['delete'],
1695 q.restore(repo, rev, delete=opts['delete'],
1697 qupdate=opts['update'])
1696 qupdate=opts['update'])
1698 q.save_dirty()
1697 q.save_dirty()
1699 return 0
1698 return 0
1700
1699
1701 def save(ui, repo, **opts):
1700 def save(ui, repo, **opts):
1702 """save current queue state"""
1701 """save current queue state"""
1703 q = repo.mq
1702 q = repo.mq
1704 message = commands.logmessage(opts)
1703 message = commands.logmessage(opts)
1705 ret = q.save(repo, msg=message)
1704 ret = q.save(repo, msg=message)
1706 if ret:
1705 if ret:
1707 return ret
1706 return ret
1708 q.save_dirty()
1707 q.save_dirty()
1709 if opts['copy']:
1708 if opts['copy']:
1710 path = q.path
1709 path = q.path
1711 if opts['name']:
1710 if opts['name']:
1712 newpath = os.path.join(q.basepath, opts['name'])
1711 newpath = os.path.join(q.basepath, opts['name'])
1713 if os.path.exists(newpath):
1712 if os.path.exists(newpath):
1714 if not os.path.isdir(newpath):
1713 if not os.path.isdir(newpath):
1715 raise util.Abort(_('destination %s exists and is not '
1714 raise util.Abort(_('destination %s exists and is not '
1716 'a directory') % newpath)
1715 'a directory') % newpath)
1717 if not opts['force']:
1716 if not opts['force']:
1718 raise util.Abort(_('destination %s exists, '
1717 raise util.Abort(_('destination %s exists, '
1719 'use -f to force') % newpath)
1718 'use -f to force') % newpath)
1720 else:
1719 else:
1721 newpath = savename(path)
1720 newpath = savename(path)
1722 ui.warn("copy %s to %s\n" % (path, newpath))
1721 ui.warn("copy %s to %s\n" % (path, newpath))
1723 util.copyfiles(path, newpath)
1722 util.copyfiles(path, newpath)
1724 if opts['empty']:
1723 if opts['empty']:
1725 try:
1724 try:
1726 os.unlink(q.join(q.status_path))
1725 os.unlink(q.join(q.status_path))
1727 except:
1726 except:
1728 pass
1727 pass
1729 return 0
1728 return 0
1730
1729
1731 def strip(ui, repo, rev, **opts):
1730 def strip(ui, repo, rev, **opts):
1732 """strip a revision and all later revs on the same branch"""
1731 """strip a revision and all later revs on the same branch"""
1733 rev = repo.lookup(rev)
1732 rev = repo.lookup(rev)
1734 backup = 'all'
1733 backup = 'all'
1735 if opts['backup']:
1734 if opts['backup']:
1736 backup = 'strip'
1735 backup = 'strip'
1737 elif opts['nobackup']:
1736 elif opts['nobackup']:
1738 backup = 'none'
1737 backup = 'none'
1739 repo.mq.strip(repo, rev, backup=backup)
1738 repo.mq.strip(repo, rev, backup=backup)
1740 return 0
1739 return 0
1741
1740
1742 def select(ui, repo, *args, **opts):
1741 def select(ui, repo, *args, **opts):
1743 '''set or print guarded patches to push
1742 '''set or print guarded patches to push
1744
1743
1745 use qguard command to set or print guards on patch. then use
1744 use qguard command to set or print guards on patch. then use
1746 qselect to tell mq which guards to use. example:
1745 qselect to tell mq which guards to use. example:
1747
1746
1748 qguard foo.patch -stable (negative guard)
1747 qguard foo.patch -stable (negative guard)
1749 qguard bar.patch +stable (positive guard)
1748 qguard bar.patch +stable (positive guard)
1750 qselect stable
1749 qselect stable
1751
1750
1752 this sets "stable" guard. mq will skip foo.patch (because it has
1751 this sets "stable" guard. mq will skip foo.patch (because it has
1753 negative match) but push bar.patch (because it has positive
1752 negative match) but push bar.patch (because it has positive
1754 match). patch is pushed if any positive guards match and no
1753 match). patch is pushed if any positive guards match and no
1755 negative guards match.
1754 negative guards match.
1756
1755
1757 with no arguments, default is to print current active guards.
1756 with no arguments, default is to print current active guards.
1758 with arguments, set active guards as given.
1757 with arguments, set active guards as given.
1759
1758
1760 use -n/--none to deactivate guards (no other arguments needed).
1759 use -n/--none to deactivate guards (no other arguments needed).
1761 when no guards are active, patches with positive guards are skipped,
1760 when no guards are active, patches with positive guards are skipped,
1762 patches with negative guards are pushed.
1761 patches with negative guards are pushed.
1763
1762
1764 qselect can change guards of applied patches. it does not pop
1763 qselect can change guards of applied patches. it does not pop
1765 guarded patches by default. use --pop to pop back to last applied
1764 guarded patches by default. use --pop to pop back to last applied
1766 patch that is not guarded. use --reapply (implies --pop) to push
1765 patch that is not guarded. use --reapply (implies --pop) to push
1767 back to current patch afterwards, but skip guarded patches.
1766 back to current patch afterwards, but skip guarded patches.
1768
1767
1769 use -s/--series to print list of all guards in series file (no
1768 use -s/--series to print list of all guards in series file (no
1770 other arguments needed). use -v for more information.'''
1769 other arguments needed). use -v for more information.'''
1771
1770
1772 q = repo.mq
1771 q = repo.mq
1773 guards = q.active()
1772 guards = q.active()
1774 if args or opts['none']:
1773 if args or opts['none']:
1775 old_unapplied = q.unapplied(repo)
1774 old_unapplied = q.unapplied(repo)
1776 old_guarded = [i for i in xrange(len(q.applied)) if
1775 old_guarded = [i for i in xrange(len(q.applied)) if
1777 not q.pushable(i)[0]]
1776 not q.pushable(i)[0]]
1778 q.set_active(args)
1777 q.set_active(args)
1779 q.save_dirty()
1778 q.save_dirty()
1780 if not args:
1779 if not args:
1781 ui.status(_('guards deactivated\n'))
1780 ui.status(_('guards deactivated\n'))
1782 if not opts['pop'] and not opts['reapply']:
1781 if not opts['pop'] and not opts['reapply']:
1783 unapplied = q.unapplied(repo)
1782 unapplied = q.unapplied(repo)
1784 guarded = [i for i in xrange(len(q.applied))
1783 guarded = [i for i in xrange(len(q.applied))
1785 if not q.pushable(i)[0]]
1784 if not q.pushable(i)[0]]
1786 if len(unapplied) != len(old_unapplied):
1785 if len(unapplied) != len(old_unapplied):
1787 ui.status(_('number of unguarded, unapplied patches has '
1786 ui.status(_('number of unguarded, unapplied patches has '
1788 'changed from %d to %d\n') %
1787 'changed from %d to %d\n') %
1789 (len(old_unapplied), len(unapplied)))
1788 (len(old_unapplied), len(unapplied)))
1790 if len(guarded) != len(old_guarded):
1789 if len(guarded) != len(old_guarded):
1791 ui.status(_('number of guarded, applied patches has changed '
1790 ui.status(_('number of guarded, applied patches has changed '
1792 'from %d to %d\n') %
1791 'from %d to %d\n') %
1793 (len(old_guarded), len(guarded)))
1792 (len(old_guarded), len(guarded)))
1794 elif opts['series']:
1793 elif opts['series']:
1795 guards = {}
1794 guards = {}
1796 noguards = 0
1795 noguards = 0
1797 for gs in q.series_guards:
1796 for gs in q.series_guards:
1798 if not gs:
1797 if not gs:
1799 noguards += 1
1798 noguards += 1
1800 for g in gs:
1799 for g in gs:
1801 guards.setdefault(g, 0)
1800 guards.setdefault(g, 0)
1802 guards[g] += 1
1801 guards[g] += 1
1803 if ui.verbose:
1802 if ui.verbose:
1804 guards['NONE'] = noguards
1803 guards['NONE'] = noguards
1805 guards = guards.items()
1804 guards = guards.items()
1806 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1805 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1807 if guards:
1806 if guards:
1808 ui.note(_('guards in series file:\n'))
1807 ui.note(_('guards in series file:\n'))
1809 for guard, count in guards:
1808 for guard, count in guards:
1810 ui.note('%2d ' % count)
1809 ui.note('%2d ' % count)
1811 ui.write(guard, '\n')
1810 ui.write(guard, '\n')
1812 else:
1811 else:
1813 ui.note(_('no guards in series file\n'))
1812 ui.note(_('no guards in series file\n'))
1814 else:
1813 else:
1815 if guards:
1814 if guards:
1816 ui.note(_('active guards:\n'))
1815 ui.note(_('active guards:\n'))
1817 for g in guards:
1816 for g in guards:
1818 ui.write(g, '\n')
1817 ui.write(g, '\n')
1819 else:
1818 else:
1820 ui.write(_('no active guards\n'))
1819 ui.write(_('no active guards\n'))
1821 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1820 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1822 popped = False
1821 popped = False
1823 if opts['pop'] or opts['reapply']:
1822 if opts['pop'] or opts['reapply']:
1824 for i in xrange(len(q.applied)):
1823 for i in xrange(len(q.applied)):
1825 pushable, reason = q.pushable(i)
1824 pushable, reason = q.pushable(i)
1826 if not pushable:
1825 if not pushable:
1827 ui.status(_('popping guarded patches\n'))
1826 ui.status(_('popping guarded patches\n'))
1828 popped = True
1827 popped = True
1829 if i == 0:
1828 if i == 0:
1830 q.pop(repo, all=True)
1829 q.pop(repo, all=True)
1831 else:
1830 else:
1832 q.pop(repo, i-1)
1831 q.pop(repo, i-1)
1833 break
1832 break
1834 if popped:
1833 if popped:
1835 try:
1834 try:
1836 if reapply:
1835 if reapply:
1837 ui.status(_('reapplying unguarded patches\n'))
1836 ui.status(_('reapplying unguarded patches\n'))
1838 q.push(repo, reapply)
1837 q.push(repo, reapply)
1839 finally:
1838 finally:
1840 q.save_dirty()
1839 q.save_dirty()
1841
1840
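The qselect help above states the rule informally: a patch is pushed when at least one of its positive guards is in the active set and none of its negative guards are; with no guards selected, a patch carrying positive guards is skipped and everything else is pushed. A minimal Python sketch of that rule, using hypothetical names (mq's real decision lives in queue.pushable()):

def guard_allows_push(patch_guards, active):
    """Decide whether a patch may be pushed under the active guards.

    patch_guards: guards attached to the patch, e.g. ['+stable', '-broken']
    active: currently selected guard names, e.g. ['stable']
    """
    positives = [g[1:] for g in patch_guards if g.startswith('+')]
    negatives = [g[1:] for g in patch_guards if g.startswith('-')]
    if not active:
        # no guards selected: only positively guarded patches are skipped
        return not positives
    if [g for g in negatives if g in active]:
        return False                     # a negative guard matched
    if positives:
        return bool([g for g in positives if g in active])
    return True                          # unguarded patches always push

# mirrors the example above, after "qselect stable":
assert guard_allows_push(['+stable'], ['stable'])      # bar.patch is pushed
assert not guard_allows_push(['-stable'], ['stable'])  # foo.patch is skipped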
1842 def reposetup(ui, repo):
1841 def reposetup(ui, repo):
1843 class mqrepo(repo.__class__):
1842 class mqrepo(repo.__class__):
1844 def abort_if_wdir_patched(self, errmsg, force=False):
1843 def abort_if_wdir_patched(self, errmsg, force=False):
1845 if self.mq.applied and not force:
1844 if self.mq.applied and not force:
1846 parent = revlog.hex(self.dirstate.parents()[0])
1845 parent = revlog.hex(self.dirstate.parents()[0])
1847 if parent in [s.rev for s in self.mq.applied]:
1846 if parent in [s.rev for s in self.mq.applied]:
1848 raise util.Abort(errmsg)
1847 raise util.Abort(errmsg)
1849
1848
1850 def commit(self, *args, **opts):
1849 def commit(self, *args, **opts):
1851 if len(args) >= 6:
1850 if len(args) >= 6:
1852 force = args[5]
1851 force = args[5]
1853 else:
1852 else:
1854 force = opts.get('force')
1853 force = opts.get('force')
1855 self.abort_if_wdir_patched(
1854 self.abort_if_wdir_patched(
1856 _('cannot commit over an applied mq patch'),
1855 _('cannot commit over an applied mq patch'),
1857 force)
1856 force)
1858
1857
1859 return super(mqrepo, self).commit(*args, **opts)
1858 return super(mqrepo, self).commit(*args, **opts)
1860
1859
1861 def push(self, remote, force=False, revs=None):
1860 def push(self, remote, force=False, revs=None):
1862 if self.mq.applied and not force:
1861 if self.mq.applied and not force:
1863 raise util.Abort(_('source has mq patches applied'))
1862 raise util.Abort(_('source has mq patches applied'))
1864 return super(mqrepo, self).push(remote, force, revs)
1863 return super(mqrepo, self).push(remote, force, revs)
1865
1864
1866 def tags(self):
1865 def tags(self):
1867 if self.tagscache:
1866 if self.tagscache:
1868 return self.tagscache
1867 return self.tagscache
1869
1868
1870 tagscache = super(mqrepo, self).tags()
1869 tagscache = super(mqrepo, self).tags()
1871
1870
1872 q = self.mq
1871 q = self.mq
1873 if not q.applied:
1872 if not q.applied:
1874 return tagscache
1873 return tagscache
1875
1874
1876 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1875 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1877 mqtags.append((mqtags[-1][0], 'qtip'))
1876 mqtags.append((mqtags[-1][0], 'qtip'))
1878 mqtags.append((mqtags[0][0], 'qbase'))
1877 mqtags.append((mqtags[0][0], 'qbase'))
1879 for patch in mqtags:
1878 for patch in mqtags:
1880 if patch[1] in tagscache:
1879 if patch[1] in tagscache:
1881 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1880 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1882 else:
1881 else:
1883 tagscache[patch[1]] = revlog.bin(patch[0])
1882 tagscache[patch[1]] = revlog.bin(patch[0])
1884
1883
1885 return tagscache
1884 return tagscache
1886
1885
1887 if repo.local():
1886 if repo.local():
1888 repo.__class__ = mqrepo
1887 repo.__class__ = mqrepo
1889 repo.mq = queue(ui, repo.join(""))
1888 repo.mq = queue(ui, repo.join(""))
1890
1889
1891 cmdtable = {
1890 cmdtable = {
1892 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1891 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1893 "qclone": (clone,
1892 "qclone": (clone,
1894 [('', 'pull', None, _('use pull protocol to copy metadata')),
1893 [('', 'pull', None, _('use pull protocol to copy metadata')),
1895 ('U', 'noupdate', None, _('do not update the new working directories')),
1894 ('U', 'noupdate', None, _('do not update the new working directories')),
1896 ('', 'uncompressed', None,
1895 ('', 'uncompressed', None,
1897 _('use uncompressed transfer (fast over LAN)')),
1896 _('use uncompressed transfer (fast over LAN)')),
1898 ('e', 'ssh', '', _('specify ssh command to use')),
1897 ('e', 'ssh', '', _('specify ssh command to use')),
1899 ('p', 'patches', '', _('location of source patch repo')),
1898 ('p', 'patches', '', _('location of source patch repo')),
1900 ('', 'remotecmd', '',
1899 ('', 'remotecmd', '',
1901 _('specify hg command to run on the remote side'))],
1900 _('specify hg command to run on the remote side'))],
1902 'hg qclone [OPTION]... SOURCE [DEST]'),
1901 'hg qclone [OPTION]... SOURCE [DEST]'),
1903 "qcommit|qci":
1902 "qcommit|qci":
1904 (commit,
1903 (commit,
1905 commands.table["^commit|ci"][1],
1904 commands.table["^commit|ci"][1],
1906 'hg qcommit [OPTION]... [FILE]...'),
1905 'hg qcommit [OPTION]... [FILE]...'),
1907 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1906 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1908 "qdelete":
1907 "qdelete":
1909 (delete,
1908 (delete,
1910 [('f', 'force', None, _('delete patch file'))],
1909 [('f', 'force', None, _('delete patch file'))],
1911 'hg qdelete [-f] PATCH'),
1910 'hg qdelete [-f] PATCH'),
1912 'qfold':
1911 'qfold':
1913 (fold,
1912 (fold,
1914 [('e', 'edit', None, _('edit patch header')),
1913 [('e', 'edit', None, _('edit patch header')),
1915 ('f', 'force', None, _('delete folded patch files')),
1914 ('f', 'force', None, _('delete folded patch files')),
1916 ('m', 'message', '', _('set patch header to <text>')),
1915 ('m', 'message', '', _('set patch header to <text>')),
1917 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1916 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1918 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1917 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1919 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1918 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1920 ('n', 'none', None, _('drop all guards'))],
1919 ('n', 'none', None, _('drop all guards'))],
1921 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1920 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1922 'qheader': (header, [],
1921 'qheader': (header, [],
1923 _('hg qheader [PATCH]')),
1922 _('hg qheader [PATCH]')),
1924 "^qimport":
1923 "^qimport":
1925 (qimport,
1924 (qimport,
1926 [('e', 'existing', None, 'import file in patch dir'),
1925 [('e', 'existing', None, 'import file in patch dir'),
1927 ('n', 'name', '', 'patch file name'),
1926 ('n', 'name', '', 'patch file name'),
1928 ('f', 'force', None, 'overwrite existing files')],
1927 ('f', 'force', None, 'overwrite existing files')],
1929 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1928 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1930 "^qinit":
1929 "^qinit":
1931 (init,
1930 (init,
1932 [('c', 'create-repo', None, 'create queue repository')],
1931 [('c', 'create-repo', None, 'create queue repository')],
1933 'hg qinit [-c]'),
1932 'hg qinit [-c]'),
1934 "qnew":
1933 "qnew":
1935 (new,
1934 (new,
1936 [('m', 'message', '', _('use <text> as commit message')),
1935 [('m', 'message', '', _('use <text> as commit message')),
1937 ('l', 'logfile', '', _('read the commit message from <file>')),
1936 ('l', 'logfile', '', _('read the commit message from <file>')),
1938 ('f', 'force', None, _('import uncommitted changes into patch'))],
1937 ('f', 'force', None, _('import uncommitted changes into patch'))],
1939 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1938 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1940 "qnext": (next, [], 'hg qnext'),
1939 "qnext": (next, [], 'hg qnext'),
1941 "qprev": (prev, [], 'hg qprev'),
1940 "qprev": (prev, [], 'hg qprev'),
1942 "^qpop":
1941 "^qpop":
1943 (pop,
1942 (pop,
1944 [('a', 'all', None, 'pop all patches'),
1943 [('a', 'all', None, 'pop all patches'),
1945 ('n', 'name', '', 'queue name to pop'),
1944 ('n', 'name', '', 'queue name to pop'),
1946 ('f', 'force', None, 'forget any local changes')],
1945 ('f', 'force', None, 'forget any local changes')],
1947 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1946 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1948 "^qpush":
1947 "^qpush":
1949 (push,
1948 (push,
1950 [('f', 'force', None, 'apply if the patch has rejects'),
1949 [('f', 'force', None, 'apply if the patch has rejects'),
1951 ('l', 'list', None, 'list patch name in commit text'),
1950 ('l', 'list', None, 'list patch name in commit text'),
1952 ('a', 'all', None, 'apply all patches'),
1951 ('a', 'all', None, 'apply all patches'),
1953 ('m', 'merge', None, 'merge from another queue'),
1952 ('m', 'merge', None, 'merge from another queue'),
1954 ('n', 'name', '', 'merge queue name')],
1953 ('n', 'name', '', 'merge queue name')],
1955 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1954 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1956 "^qrefresh":
1955 "^qrefresh":
1957 (refresh,
1956 (refresh,
1958 [('e', 'edit', None, _('edit commit message')),
1957 [('e', 'edit', None, _('edit commit message')),
1959 ('m', 'message', '', _('change commit message with <text>')),
1958 ('m', 'message', '', _('change commit message with <text>')),
1960 ('l', 'logfile', '', _('change commit message with <file> content')),
1959 ('l', 'logfile', '', _('change commit message with <file> content')),
1961 ('s', 'short', None, 'short refresh')],
1960 ('s', 'short', None, 'short refresh')],
1962 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1961 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1963 'qrename|qmv':
1962 'qrename|qmv':
1964 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1963 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1965 "qrestore":
1964 "qrestore":
1966 (restore,
1965 (restore,
1967 [('d', 'delete', None, 'delete save entry'),
1966 [('d', 'delete', None, 'delete save entry'),
1968 ('u', 'update', None, 'update queue working dir')],
1967 ('u', 'update', None, 'update queue working dir')],
1969 'hg qrestore [-d] [-u] REV'),
1968 'hg qrestore [-d] [-u] REV'),
1970 "qsave":
1969 "qsave":
1971 (save,
1970 (save,
1972 [('m', 'message', '', _('use <text> as commit message')),
1971 [('m', 'message', '', _('use <text> as commit message')),
1973 ('l', 'logfile', '', _('read the commit message from <file>')),
1972 ('l', 'logfile', '', _('read the commit message from <file>')),
1974 ('c', 'copy', None, 'copy patch directory'),
1973 ('c', 'copy', None, 'copy patch directory'),
1975 ('n', 'name', '', 'copy directory name'),
1974 ('n', 'name', '', 'copy directory name'),
1976 ('e', 'empty', None, 'clear queue status file'),
1975 ('e', 'empty', None, 'clear queue status file'),
1977 ('f', 'force', None, 'force copy')],
1976 ('f', 'force', None, 'force copy')],
1978 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1977 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1979 "qselect": (select,
1978 "qselect": (select,
1980 [('n', 'none', None, _('disable all guards')),
1979 [('n', 'none', None, _('disable all guards')),
1981 ('s', 'series', None, _('list all guards in series file')),
1980 ('s', 'series', None, _('list all guards in series file')),
1982 ('', 'pop', None,
1981 ('', 'pop', None,
1983 _('pop to before first guarded applied patch')),
1982 _('pop to before first guarded applied patch')),
1984 ('', 'reapply', None, _('pop, then reapply patches'))],
1983 ('', 'reapply', None, _('pop, then reapply patches'))],
1985 'hg qselect [OPTION...] [GUARD...]'),
1984 'hg qselect [OPTION...] [GUARD...]'),
1986 "qseries":
1985 "qseries":
1987 (series,
1986 (series,
1988 [('m', 'missing', None, 'print patches not in series'),
1987 [('m', 'missing', None, 'print patches not in series'),
1989 ('s', 'summary', None, _('print first line of patch header'))],
1988 ('s', 'summary', None, _('print first line of patch header'))],
1990 'hg qseries [-m]'),
1989 'hg qseries [-m]'),
1991 "^strip":
1990 "^strip":
1992 (strip,
1991 (strip,
1993 [('f', 'force', None, 'force multi-head removal'),
1992 [('f', 'force', None, 'force multi-head removal'),
1994 ('b', 'backup', None, 'bundle unrelated changesets'),
1993 ('b', 'backup', None, 'bundle unrelated changesets'),
1995 ('n', 'nobackup', None, 'no backups')],
1994 ('n', 'nobackup', None, 'no backups')],
1996 'hg strip [-f] [-b] [-n] REV'),
1995 'hg strip [-f] [-b] [-n] REV'),
1997 "qtop": (top, [], 'hg qtop'),
1996 "qtop": (top, [], 'hg qtop'),
1998 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1997 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1999 }
1998 }
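Each cmdtable entry above maps a command name (optionally with "|"-separated aliases and a leading "^" that puts it on the basic help list) to a (function, options, synopsis) triple; every option is a (short flag, long flag, default, help text) tuple. A stripped-down sketch of the same shape for a hypothetical extension command, not part of mq:

def hello(ui, repo, **opts):
    """print a greeting from the repository root (illustrative only)"""
    name = opts.get('name') or 'world'
    ui.write('hello, %s from %s\n' % (name, repo.root))

cmdtable = {
    # "^" lists the command in short help, "hello|hi" adds an alias
    "^hello|hi":
        (hello,
         [('n', 'name', '', 'name to greet')],
         'hg hello [-n NAME]'),
}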
@@ -1,3525 +1,3524 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup")
15 demandload(globals(), "archival cStringIO changegroup")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted, unknown = repo.changes()
24 modified, added, removed, deleted = repo.status()[:4]
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
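This is the pattern the whole changeset applies: the removed localrepository.changes() handed back (modified, added, removed, deleted, unknown), and callers now slice the leading fields they need out of repo.status(), as the rewritten line in bail_if_changed does with [:4] (changeset_printer.show below uses [:3] for the same reason). A small sketch of the new idiom, with a hypothetical helper name:

def outstanding_changes(repo):
    """Report whether the working directory has uncommitted changes."""
    # the first four fields of repo.status() are the modified, added,
    # removed and deleted file lists; slicing ignores any trailing fields
    modified, added, removed, deleted = repo.status()[:4]
    return bool(modified or added or removed or deleted)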
28 def relpath(repo, args):
28 def relpath(repo, args):
29 cwd = repo.getcwd()
29 cwd = repo.getcwd()
30 if cwd:
30 if cwd:
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 return args
32 return args
33
33
34 def logmessage(opts):
34 def logmessage(opts):
35 """ get the log message according to -m and -l option """
35 """ get the log message according to -m and -l option """
36 message = opts['message']
36 message = opts['message']
37 logfile = opts['logfile']
37 logfile = opts['logfile']
38
38
39 if message and logfile:
39 if message and logfile:
40 raise util.Abort(_('options --message and --logfile are mutually '
40 raise util.Abort(_('options --message and --logfile are mutually '
41 'exclusive'))
41 'exclusive'))
42 if not message and logfile:
42 if not message and logfile:
43 try:
43 try:
44 if logfile == '-':
44 if logfile == '-':
45 message = sys.stdin.read()
45 message = sys.stdin.read()
46 else:
46 else:
47 message = open(logfile).read()
47 message = open(logfile).read()
48 except IOError, inst:
48 except IOError, inst:
49 raise util.Abort(_("can't read commit message '%s': %s") %
49 raise util.Abort(_("can't read commit message '%s': %s") %
50 (logfile, inst.strerror))
50 (logfile, inst.strerror))
51 return message
51 return message
52
52
53 def matchpats(repo, pats=[], opts={}, head=''):
53 def matchpats(repo, pats=[], opts={}, head=''):
54 cwd = repo.getcwd()
54 cwd = repo.getcwd()
55 if not pats and cwd:
55 if not pats and cwd:
56 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
56 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
57 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
57 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
58 cwd = ''
58 cwd = ''
59 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
59 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
60 opts.get('exclude'), head)
60 opts.get('exclude'), head)
61
61
62 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
62 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
63 files, matchfn, anypats = matchpats(repo, pats, opts, head)
63 files, matchfn, anypats = matchpats(repo, pats, opts, head)
64 exact = dict(zip(files, files))
64 exact = dict(zip(files, files))
65 def walk():
65 def walk():
66 for src, fn in repo.walk(node=node, files=files, match=matchfn,
66 for src, fn in repo.walk(node=node, files=files, match=matchfn,
67 badmatch=badmatch):
67 badmatch=badmatch):
68 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
68 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
69 return files, matchfn, walk()
69 return files, matchfn, walk()
70
70
71 def walk(repo, pats, opts, node=None, head='', badmatch=None):
71 def walk(repo, pats, opts, node=None, head='', badmatch=None):
72 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
72 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
73 for r in results:
73 for r in results:
74 yield r
74 yield r
75
75
76 def walkchangerevs(ui, repo, pats, opts):
76 def walkchangerevs(ui, repo, pats, opts):
77 '''Iterate over files and the revs they changed in.
77 '''Iterate over files and the revs they changed in.
78
78
79 Callers most commonly need to iterate backwards over the history
79 Callers most commonly need to iterate backwards over the history
80 they are interested in. Doing so has awful (quadratic-looking)
80 they are interested in. Doing so has awful (quadratic-looking)
81 performance, so we use iterators in a "windowed" way.
81 performance, so we use iterators in a "windowed" way.
82
82
83 We walk a window of revisions in the desired order. Within the
83 We walk a window of revisions in the desired order. Within the
84 window, we first walk forwards to gather data, then in the desired
84 window, we first walk forwards to gather data, then in the desired
85 order (usually backwards) to display it.
85 order (usually backwards) to display it.
86
86
87 This function returns an (iterator, getchange, matchfn) tuple. The
87 This function returns an (iterator, getchange, matchfn) tuple. The
88 getchange function returns the changelog entry for a numeric
88 getchange function returns the changelog entry for a numeric
89 revision. The iterator yields 3-tuples. They will be of one of
89 revision. The iterator yields 3-tuples. They will be of one of
90 the following forms:
90 the following forms:
91
91
92 "window", incrementing, lastrev: stepping through a window,
92 "window", incrementing, lastrev: stepping through a window,
93 positive if walking forwards through revs, last rev in the
93 positive if walking forwards through revs, last rev in the
94 sequence iterated over - use to reset state for the current window
94 sequence iterated over - use to reset state for the current window
95
95
96 "add", rev, fns: out-of-order traversal of the given file names
96 "add", rev, fns: out-of-order traversal of the given file names
97 fns, which changed during revision rev - use to gather data for
97 fns, which changed during revision rev - use to gather data for
98 possible display
98 possible display
99
99
100 "iter", rev, None: in-order traversal of the revs earlier iterated
100 "iter", rev, None: in-order traversal of the revs earlier iterated
101 over with "add" - use to display data'''
101 over with "add" - use to display data'''
102
102
103 def increasing_windows(start, end, windowsize=8, sizelimit=512):
103 def increasing_windows(start, end, windowsize=8, sizelimit=512):
104 if start < end:
104 if start < end:
105 while start < end:
105 while start < end:
106 yield start, min(windowsize, end-start)
106 yield start, min(windowsize, end-start)
107 start += windowsize
107 start += windowsize
108 if windowsize < sizelimit:
108 if windowsize < sizelimit:
109 windowsize *= 2
109 windowsize *= 2
110 else:
110 else:
111 while start > end:
111 while start > end:
112 yield start, min(windowsize, start-end-1)
112 yield start, min(windowsize, start-end-1)
113 start -= windowsize
113 start -= windowsize
114 if windowsize < sizelimit:
114 if windowsize < sizelimit:
115 windowsize *= 2
115 windowsize *= 2
116
116
117
117
118 files, matchfn, anypats = matchpats(repo, pats, opts)
118 files, matchfn, anypats = matchpats(repo, pats, opts)
119 follow = opts.get('follow') or opts.get('follow_first')
119 follow = opts.get('follow') or opts.get('follow_first')
120
120
121 if repo.changelog.count() == 0:
121 if repo.changelog.count() == 0:
122 return [], False, matchfn
122 return [], False, matchfn
123
123
124 if follow:
124 if follow:
125 p = repo.dirstate.parents()[0]
125 p = repo.dirstate.parents()[0]
126 if p == nullid:
126 if p == nullid:
127 ui.warn(_('No working directory revision; defaulting to tip\n'))
127 ui.warn(_('No working directory revision; defaulting to tip\n'))
128 start = 'tip'
128 start = 'tip'
129 else:
129 else:
130 start = repo.changelog.rev(p)
130 start = repo.changelog.rev(p)
131 defrange = '%s:0' % start
131 defrange = '%s:0' % start
132 else:
132 else:
133 defrange = 'tip:0'
133 defrange = 'tip:0'
134 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
134 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
135 wanted = {}
135 wanted = {}
136 slowpath = anypats
136 slowpath = anypats
137 fncache = {}
137 fncache = {}
138
138
139 chcache = {}
139 chcache = {}
140 def getchange(rev):
140 def getchange(rev):
141 ch = chcache.get(rev)
141 ch = chcache.get(rev)
142 if ch is None:
142 if ch is None:
143 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
143 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
144 return ch
144 return ch
145
145
146 if not slowpath and not files:
146 if not slowpath and not files:
147 # No files, no patterns. Display all revs.
147 # No files, no patterns. Display all revs.
148 wanted = dict(zip(revs, revs))
148 wanted = dict(zip(revs, revs))
149 copies = []
149 copies = []
150 if not slowpath:
150 if not slowpath:
151 # Only files, no patterns. Check the history of each file.
151 # Only files, no patterns. Check the history of each file.
152 def filerevgen(filelog, node):
152 def filerevgen(filelog, node):
153 cl_count = repo.changelog.count()
153 cl_count = repo.changelog.count()
154 if node is None:
154 if node is None:
155 last = filelog.count() - 1
155 last = filelog.count() - 1
156 else:
156 else:
157 last = filelog.rev(node)
157 last = filelog.rev(node)
158 for i, window in increasing_windows(last, -1):
158 for i, window in increasing_windows(last, -1):
159 revs = []
159 revs = []
160 for j in xrange(i - window, i + 1):
160 for j in xrange(i - window, i + 1):
161 n = filelog.node(j)
161 n = filelog.node(j)
162 revs.append((filelog.linkrev(n),
162 revs.append((filelog.linkrev(n),
163 follow and filelog.renamed(n)))
163 follow and filelog.renamed(n)))
164 revs.reverse()
164 revs.reverse()
165 for rev in revs:
165 for rev in revs:
166 # only yield rev for which we have the changelog, it can
166 # only yield rev for which we have the changelog, it can
167 # happen while doing "hg log" during a pull or commit
167 # happen while doing "hg log" during a pull or commit
168 if rev[0] < cl_count:
168 if rev[0] < cl_count:
169 yield rev
169 yield rev
170 def iterfiles():
170 def iterfiles():
171 for filename in files:
171 for filename in files:
172 yield filename, None
172 yield filename, None
173 for filename_node in copies:
173 for filename_node in copies:
174 yield filename_node
174 yield filename_node
175 minrev, maxrev = min(revs), max(revs)
175 minrev, maxrev = min(revs), max(revs)
176 for file_, node in iterfiles():
176 for file_, node in iterfiles():
177 filelog = repo.file(file_)
177 filelog = repo.file(file_)
178 # A zero count may be a directory or deleted file, so
178 # A zero count may be a directory or deleted file, so
179 # try to find matching entries on the slow path.
179 # try to find matching entries on the slow path.
180 if filelog.count() == 0:
180 if filelog.count() == 0:
181 slowpath = True
181 slowpath = True
182 break
182 break
183 for rev, copied in filerevgen(filelog, node):
183 for rev, copied in filerevgen(filelog, node):
184 if rev <= maxrev:
184 if rev <= maxrev:
185 if rev < minrev:
185 if rev < minrev:
186 break
186 break
187 fncache.setdefault(rev, [])
187 fncache.setdefault(rev, [])
188 fncache[rev].append(file_)
188 fncache[rev].append(file_)
189 wanted[rev] = 1
189 wanted[rev] = 1
190 if follow and copied:
190 if follow and copied:
191 copies.append(copied)
191 copies.append(copied)
192 if slowpath:
192 if slowpath:
193 if follow:
193 if follow:
194 raise util.Abort(_('can only follow copies/renames for explicit '
194 raise util.Abort(_('can only follow copies/renames for explicit '
195 'file names'))
195 'file names'))
196
196
197 # The slow path checks files modified in every changeset.
197 # The slow path checks files modified in every changeset.
198 def changerevgen():
198 def changerevgen():
199 for i, window in increasing_windows(repo.changelog.count()-1, -1):
199 for i, window in increasing_windows(repo.changelog.count()-1, -1):
200 for j in xrange(i - window, i + 1):
200 for j in xrange(i - window, i + 1):
201 yield j, getchange(j)[3]
201 yield j, getchange(j)[3]
202
202
203 for rev, changefiles in changerevgen():
203 for rev, changefiles in changerevgen():
204 matches = filter(matchfn, changefiles)
204 matches = filter(matchfn, changefiles)
205 if matches:
205 if matches:
206 fncache[rev] = matches
206 fncache[rev] = matches
207 wanted[rev] = 1
207 wanted[rev] = 1
208
208
209 def iterate():
209 def iterate():
210 class followfilter:
210 class followfilter:
211 def __init__(self, onlyfirst=False):
211 def __init__(self, onlyfirst=False):
212 self.startrev = -1
212 self.startrev = -1
213 self.roots = []
213 self.roots = []
214 self.onlyfirst = onlyfirst
214 self.onlyfirst = onlyfirst
215
215
216 def match(self, rev):
216 def match(self, rev):
217 def realparents(rev):
217 def realparents(rev):
218 if self.onlyfirst:
218 if self.onlyfirst:
219 return repo.changelog.parentrevs(rev)[0:1]
219 return repo.changelog.parentrevs(rev)[0:1]
220 else:
220 else:
221 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
221 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
222
222
223 if self.startrev == -1:
223 if self.startrev == -1:
224 self.startrev = rev
224 self.startrev = rev
225 return True
225 return True
226
226
227 if rev > self.startrev:
227 if rev > self.startrev:
228 # forward: all descendants
228 # forward: all descendants
229 if not self.roots:
229 if not self.roots:
230 self.roots.append(self.startrev)
230 self.roots.append(self.startrev)
231 for parent in realparents(rev):
231 for parent in realparents(rev):
232 if parent in self.roots:
232 if parent in self.roots:
233 self.roots.append(rev)
233 self.roots.append(rev)
234 return True
234 return True
235 else:
235 else:
236 # backwards: all parents
236 # backwards: all parents
237 if not self.roots:
237 if not self.roots:
238 self.roots.extend(realparents(self.startrev))
238 self.roots.extend(realparents(self.startrev))
239 if rev in self.roots:
239 if rev in self.roots:
240 self.roots.remove(rev)
240 self.roots.remove(rev)
241 self.roots.extend(realparents(rev))
241 self.roots.extend(realparents(rev))
242 return True
242 return True
243
243
244 return False
244 return False
245
245
246 if follow and not files:
246 if follow and not files:
247 ff = followfilter(onlyfirst=opts.get('follow_first'))
247 ff = followfilter(onlyfirst=opts.get('follow_first'))
248 def want(rev):
248 def want(rev):
249 if rev not in wanted:
249 if rev not in wanted:
250 return False
250 return False
251 return ff.match(rev)
251 return ff.match(rev)
252 else:
252 else:
253 def want(rev):
253 def want(rev):
254 return rev in wanted
254 return rev in wanted
255
255
256 for i, window in increasing_windows(0, len(revs)):
256 for i, window in increasing_windows(0, len(revs)):
257 yield 'window', revs[0] < revs[-1], revs[-1]
257 yield 'window', revs[0] < revs[-1], revs[-1]
258 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
258 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
259 srevs = list(nrevs)
259 srevs = list(nrevs)
260 srevs.sort()
260 srevs.sort()
261 for rev in srevs:
261 for rev in srevs:
262 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
262 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
263 yield 'add', rev, fns
263 yield 'add', rev, fns
264 for rev in nrevs:
264 for rev in nrevs:
265 yield 'iter', rev, None
265 yield 'iter', rev, None
266 return iterate(), getchange, matchfn
266 return iterate(), getchange, matchfn
267
267
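The docstring above defines a small event protocol: 'window' marks a new window (its other fields are the direction flag and the window's last rev), 'add' delivers the changed file names for a rev out of display order so data can be gathered cheaply, and 'iter' replays the revs in display order. A hedged sketch of a consumer loop in the style of a log command (the output line is illustrative, not the real hg log code):

changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
gathered = {}
for st, rev, fns in changeiter:
    if st == 'window':
        # rev is the direction flag here, fns the last rev of the window
        gathered.clear()
    elif st == 'add':
        # out-of-order pass: remember which files changed in rev
        gathered[rev] = fns
    elif st == 'iter':
        # in-order pass: display using the data gathered above
        changes = getchange(rev)
        ui.write('%d %s: %s\n' % (rev, changes[1],
                                  ' '.join(gathered.get(rev, []))))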
268 revrangesep = ':'
268 revrangesep = ':'
269
269
270 def revfix(repo, val, defval):
270 def revfix(repo, val, defval):
271 '''turn user-level id of changeset into rev number.
271 '''turn user-level id of changeset into rev number.
272 user-level id can be tag, changeset, rev number, or negative rev
272 user-level id can be tag, changeset, rev number, or negative rev
273 number relative to number of revs (-1 is tip, etc).'''
273 number relative to number of revs (-1 is tip, etc).'''
274 if not val:
274 if not val:
275 return defval
275 return defval
276 try:
276 try:
277 num = int(val)
277 num = int(val)
278 if str(num) != val:
278 if str(num) != val:
279 raise ValueError
279 raise ValueError
280 if num < 0:
280 if num < 0:
281 num += repo.changelog.count()
281 num += repo.changelog.count()
282 if num < 0:
282 if num < 0:
283 num = 0
283 num = 0
284 elif num >= repo.changelog.count():
284 elif num >= repo.changelog.count():
285 raise ValueError
285 raise ValueError
286 except ValueError:
286 except ValueError:
287 try:
287 try:
288 num = repo.changelog.rev(repo.lookup(val))
288 num = repo.changelog.rev(repo.lookup(val))
289 except KeyError:
289 except KeyError:
290 raise util.Abort(_('invalid revision identifier %s'), val)
290 raise util.Abort(_('invalid revision identifier %s'), val)
291 return num
291 return num
292
292
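revfix() accepts a plain revision number, a negative number counted back from the total number of revisions, an empty value (which yields the supplied default), or anything repo.lookup() resolves, such as a tag or changeset hash. A few worked cases, assuming a repository with 100 changesets (revs 0 through 99):

revfix(repo, '7', 0)     # -> 7
revfix(repo, '-1', 0)    # -> 99: negative revs count back from the end (tip)
revfix(repo, '', 42)     # -> 42: empty value falls back to the default
revfix(repo, 'tip', 0)   # -> 99: resolved through repo.lookup()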
293 def revpair(ui, repo, revs):
293 def revpair(ui, repo, revs):
294 '''return pair of nodes, given list of revisions. second item can
294 '''return pair of nodes, given list of revisions. second item can
295 be None, meaning use working dir.'''
295 be None, meaning use working dir.'''
296 if not revs:
296 if not revs:
297 return repo.dirstate.parents()[0], None
297 return repo.dirstate.parents()[0], None
298 end = None
298 end = None
299 if len(revs) == 1:
299 if len(revs) == 1:
300 start = revs[0]
300 start = revs[0]
301 if revrangesep in start:
301 if revrangesep in start:
302 start, end = start.split(revrangesep, 1)
302 start, end = start.split(revrangesep, 1)
303 start = revfix(repo, start, 0)
303 start = revfix(repo, start, 0)
304 end = revfix(repo, end, repo.changelog.count() - 1)
304 end = revfix(repo, end, repo.changelog.count() - 1)
305 else:
305 else:
306 start = revfix(repo, start, None)
306 start = revfix(repo, start, None)
307 elif len(revs) == 2:
307 elif len(revs) == 2:
308 if revrangesep in revs[0] or revrangesep in revs[1]:
308 if revrangesep in revs[0] or revrangesep in revs[1]:
309 raise util.Abort(_('too many revisions specified'))
309 raise util.Abort(_('too many revisions specified'))
310 start = revfix(repo, revs[0], None)
310 start = revfix(repo, revs[0], None)
311 end = revfix(repo, revs[1], None)
311 end = revfix(repo, revs[1], None)
312 else:
312 else:
313 raise util.Abort(_('too many revisions specified'))
313 raise util.Abort(_('too many revisions specified'))
314 if end is not None: end = repo.lookup(str(end))
314 if end is not None: end = repo.lookup(str(end))
315 return repo.lookup(str(start)), end
315 return repo.lookup(str(start)), end
316
316
317 def revrange(ui, repo, revs):
317 def revrange(ui, repo, revs):
318 """Yield revision as strings from a list of revision specifications."""
318 """Yield revision as strings from a list of revision specifications."""
319 seen = {}
319 seen = {}
320 for spec in revs:
320 for spec in revs:
321 if revrangesep in spec:
321 if revrangesep in spec:
322 start, end = spec.split(revrangesep, 1)
322 start, end = spec.split(revrangesep, 1)
323 start = revfix(repo, start, 0)
323 start = revfix(repo, start, 0)
324 end = revfix(repo, end, repo.changelog.count() - 1)
324 end = revfix(repo, end, repo.changelog.count() - 1)
325 step = start > end and -1 or 1
325 step = start > end and -1 or 1
326 for rev in xrange(start, end+step, step):
326 for rev in xrange(start, end+step, step):
327 if rev in seen:
327 if rev in seen:
328 continue
328 continue
329 seen[rev] = 1
329 seen[rev] = 1
330 yield str(rev)
330 yield str(rev)
331 else:
331 else:
332 rev = revfix(repo, spec, None)
332 rev = revfix(repo, spec, None)
333 if rev in seen:
333 if rev in seen:
334 continue
334 continue
335 seen[rev] = 1
335 seen[rev] = 1
336 yield str(rev)
336 yield str(rev)
337
337
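revrange() expands each specification into individual revision numbers yielded as strings, walking ranges in the direction written and skipping revisions it has already produced. An illustration of the expected expansion, assuming the repository has at least five changesets:

# ['2:4']       yields '2', '3', '4'
# ['4:2']       yields '4', '3', '2'   (descending range)
# ['1:3', '2']  yields '1', '2', '3'   ('2' was already seen, so not repeated)
for r in revrange(ui, repo, ['1:3', '2']):
    ui.write(r + '\n')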
338 def write_bundle(cg, filename=None, compress=True):
338 def write_bundle(cg, filename=None, compress=True):
339 """Write a bundle file and return its filename.
339 """Write a bundle file and return its filename.
340
340
341 Existing files will not be overwritten.
341 Existing files will not be overwritten.
342 If no filename is specified, a temporary file is created.
342 If no filename is specified, a temporary file is created.
343 bz2 compression can be turned off.
343 bz2 compression can be turned off.
344 The bundle file will be deleted in case of errors.
344 The bundle file will be deleted in case of errors.
345 """
345 """
346 class nocompress(object):
346 class nocompress(object):
347 def compress(self, x):
347 def compress(self, x):
348 return x
348 return x
349 def flush(self):
349 def flush(self):
350 return ""
350 return ""
351
351
352 fh = None
352 fh = None
353 cleanup = None
353 cleanup = None
354 try:
354 try:
355 if filename:
355 if filename:
356 if os.path.exists(filename):
356 if os.path.exists(filename):
357 raise util.Abort(_("file '%s' already exists"), filename)
357 raise util.Abort(_("file '%s' already exists"), filename)
358 fh = open(filename, "wb")
358 fh = open(filename, "wb")
359 else:
359 else:
360 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
360 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
361 fh = os.fdopen(fd, "wb")
361 fh = os.fdopen(fd, "wb")
362 cleanup = filename
362 cleanup = filename
363
363
364 if compress:
364 if compress:
365 fh.write("HG10")
365 fh.write("HG10")
366 z = bz2.BZ2Compressor(9)
366 z = bz2.BZ2Compressor(9)
367 else:
367 else:
368 fh.write("HG10UN")
368 fh.write("HG10UN")
369 z = nocompress()
369 z = nocompress()
370 # parse the changegroup data, otherwise we will block
370 # parse the changegroup data, otherwise we will block
371 # in case of sshrepo because we don't know the end of the stream
371 # in case of sshrepo because we don't know the end of the stream
372
372
373 # an empty chunkiter is the end of the changegroup
373 # an empty chunkiter is the end of the changegroup
374 empty = False
374 empty = False
375 while not empty:
375 while not empty:
376 empty = True
376 empty = True
377 for chunk in changegroup.chunkiter(cg):
377 for chunk in changegroup.chunkiter(cg):
378 empty = False
378 empty = False
379 fh.write(z.compress(changegroup.genchunk(chunk)))
379 fh.write(z.compress(changegroup.genchunk(chunk)))
380 fh.write(z.compress(changegroup.closechunk()))
380 fh.write(z.compress(changegroup.closechunk()))
381 fh.write(z.flush())
381 fh.write(z.flush())
382 cleanup = None
382 cleanup = None
383 return filename
383 return filename
384 finally:
384 finally:
385 if fh is not None:
385 if fh is not None:
386 fh.close()
386 fh.close()
387 if cleanup is not None:
387 if cleanup is not None:
388 os.unlink(cleanup)
388 os.unlink(cleanup)
389
389
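write_bundle() stamps the output with a short magic string: "HG10" immediately followed by bz2-compressed chunk data, or "HG10UN" for the uncompressed variant. A sketch of telling the two apart when reading a bundle back, using a hypothetical helper that is not part of this module:

def bundle_compression(filename):
    """Return 'bz2' or 'none' for a HG10 bundle, else raise ValueError."""
    fh = open(filename, 'rb')
    try:
        header = fh.read(6)
    finally:
        fh.close()
    if header == 'HG10UN':
        return 'none'
    if header.startswith('HG10'):
        # a bare "HG10" header is followed directly by bz2 data ("BZh9...")
        return 'bz2'
    raise ValueError('not a HG10 bundle: %r' % header)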
390 def trimuser(ui, name, rev, revcache):
390 def trimuser(ui, name, rev, revcache):
391 """trim the name of the user who committed a change"""
391 """trim the name of the user who committed a change"""
392 user = revcache.get(rev)
392 user = revcache.get(rev)
393 if user is None:
393 if user is None:
394 user = revcache[rev] = ui.shortuser(name)
394 user = revcache[rev] = ui.shortuser(name)
395 return user
395 return user
396
396
397 class changeset_printer(object):
397 class changeset_printer(object):
398 '''show changeset information when templating not requested.'''
398 '''show changeset information when templating not requested.'''
399
399
400 def __init__(self, ui, repo):
400 def __init__(self, ui, repo):
401 self.ui = ui
401 self.ui = ui
402 self.repo = repo
402 self.repo = repo
403
403
404 def show(self, rev=0, changenode=None, brinfo=None):
404 def show(self, rev=0, changenode=None, brinfo=None):
405 '''show a single changeset or file revision'''
405 '''show a single changeset or file revision'''
406 log = self.repo.changelog
406 log = self.repo.changelog
407 if changenode is None:
407 if changenode is None:
408 changenode = log.node(rev)
408 changenode = log.node(rev)
409 elif not rev:
409 elif not rev:
410 rev = log.rev(changenode)
410 rev = log.rev(changenode)
411
411
412 if self.ui.quiet:
412 if self.ui.quiet:
413 self.ui.write("%d:%s\n" % (rev, short(changenode)))
413 self.ui.write("%d:%s\n" % (rev, short(changenode)))
414 return
414 return
415
415
416 changes = log.read(changenode)
416 changes = log.read(changenode)
417 date = util.datestr(changes[2])
417 date = util.datestr(changes[2])
418
418
419 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
419 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
420 for p in log.parents(changenode)
420 for p in log.parents(changenode)
421 if self.ui.debugflag or p != nullid]
421 if self.ui.debugflag or p != nullid]
422 if (not self.ui.debugflag and len(parents) == 1 and
422 if (not self.ui.debugflag and len(parents) == 1 and
423 parents[0][0] == rev-1):
423 parents[0][0] == rev-1):
424 parents = []
424 parents = []
425
425
426 if self.ui.verbose:
426 if self.ui.verbose:
427 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
427 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
428 else:
428 else:
429 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
429 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
430
430
431 for tag in self.repo.nodetags(changenode):
431 for tag in self.repo.nodetags(changenode):
432 self.ui.status(_("tag: %s\n") % tag)
432 self.ui.status(_("tag: %s\n") % tag)
433 for parent in parents:
433 for parent in parents:
434 self.ui.write(_("parent: %d:%s\n") % parent)
434 self.ui.write(_("parent: %d:%s\n") % parent)
435
435
436 if brinfo and changenode in brinfo:
436 if brinfo and changenode in brinfo:
437 br = brinfo[changenode]
437 br = brinfo[changenode]
438 self.ui.write(_("branch: %s\n") % " ".join(br))
438 self.ui.write(_("branch: %s\n") % " ".join(br))
439
439
440 self.ui.debug(_("manifest: %d:%s\n") %
440 self.ui.debug(_("manifest: %d:%s\n") %
441 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
441 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
442 self.ui.status(_("user: %s\n") % changes[1])
442 self.ui.status(_("user: %s\n") % changes[1])
443 self.ui.status(_("date: %s\n") % date)
443 self.ui.status(_("date: %s\n") % date)
444
444
445 if self.ui.debugflag:
445 if self.ui.debugflag:
446 files = self.repo.changes(log.parents(changenode)[0], changenode)
446 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
447 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
447 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
448 files):
448 files):
449 if value:
449 if value:
450 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
450 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
451 else:
451 else:
452 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
452 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
453
453
454 description = changes[4].strip()
454 description = changes[4].strip()
455 if description:
455 if description:
456 if self.ui.verbose:
456 if self.ui.verbose:
457 self.ui.status(_("description:\n"))
457 self.ui.status(_("description:\n"))
458 self.ui.status(description)
458 self.ui.status(description)
459 self.ui.status("\n\n")
459 self.ui.status("\n\n")
460 else:
460 else:
461 self.ui.status(_("summary: %s\n") %
461 self.ui.status(_("summary: %s\n") %
462 description.splitlines()[0])
462 description.splitlines()[0])
463 self.ui.status("\n")
463 self.ui.status("\n")
464
464
465 def show_changeset(ui, repo, opts):
465 def show_changeset(ui, repo, opts):
466 '''show one changeset. uses template or regular display. caller
466 '''show one changeset. uses template or regular display. caller
467 can pass in 'style' and 'template' options in opts.'''
467 can pass in 'style' and 'template' options in opts.'''
468
468
469 tmpl = opts.get('template')
469 tmpl = opts.get('template')
470 if tmpl:
470 if tmpl:
471 tmpl = templater.parsestring(tmpl, quoted=False)
471 tmpl = templater.parsestring(tmpl, quoted=False)
472 else:
472 else:
473 tmpl = ui.config('ui', 'logtemplate')
473 tmpl = ui.config('ui', 'logtemplate')
474 if tmpl: tmpl = templater.parsestring(tmpl)
474 if tmpl: tmpl = templater.parsestring(tmpl)
475 mapfile = opts.get('style') or ui.config('ui', 'style')
475 mapfile = opts.get('style') or ui.config('ui', 'style')
476 if tmpl or mapfile:
476 if tmpl or mapfile:
477 if mapfile:
477 if mapfile:
478 if not os.path.isfile(mapfile):
478 if not os.path.isfile(mapfile):
479 mapname = templater.templatepath('map-cmdline.' + mapfile)
479 mapname = templater.templatepath('map-cmdline.' + mapfile)
480 if not mapname: mapname = templater.templatepath(mapfile)
480 if not mapname: mapname = templater.templatepath(mapfile)
481 if mapname: mapfile = mapname
481 if mapname: mapfile = mapname
482 try:
482 try:
483 t = templater.changeset_templater(ui, repo, mapfile)
483 t = templater.changeset_templater(ui, repo, mapfile)
484 except SyntaxError, inst:
484 except SyntaxError, inst:
485 raise util.Abort(inst.args[0])
485 raise util.Abort(inst.args[0])
486 if tmpl: t.use_template(tmpl)
486 if tmpl: t.use_template(tmpl)
487 return t
487 return t
488 return changeset_printer(ui, repo)
488 return changeset_printer(ui, repo)
489
489
490 def setremoteconfig(ui, opts):
490 def setremoteconfig(ui, opts):
491 "copy remote options to ui tree"
491 "copy remote options to ui tree"
492 if opts.get('ssh'):
492 if opts.get('ssh'):
493 ui.setconfig("ui", "ssh", opts['ssh'])
493 ui.setconfig("ui", "ssh", opts['ssh'])
494 if opts.get('remotecmd'):
494 if opts.get('remotecmd'):
495 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
495 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
496
496
497 def show_version(ui):
497 def show_version(ui):
498 """output version and copyright information"""
498 """output version and copyright information"""
499 ui.write(_("Mercurial Distributed SCM (version %s)\n")
499 ui.write(_("Mercurial Distributed SCM (version %s)\n")
500 % version.get_version())
500 % version.get_version())
501 ui.status(_(
501 ui.status(_(
502 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
502 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
503 "This is free software; see the source for copying conditions. "
503 "This is free software; see the source for copying conditions. "
504 "There is NO\nwarranty; "
504 "There is NO\nwarranty; "
505 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
505 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
506 ))
506 ))
507
507
508 def help_(ui, name=None, with_version=False):
508 def help_(ui, name=None, with_version=False):
509 """show help for a command, extension, or list of commands
509 """show help for a command, extension, or list of commands
510
510
511 With no arguments, print a list of commands and short help.
511 With no arguments, print a list of commands and short help.
512
512
513 Given a command name, print help for that command.
513 Given a command name, print help for that command.
514
514
515 Given an extension name, print help for that extension, and the
515 Given an extension name, print help for that extension, and the
516 commands it provides."""
516 commands it provides."""
517 option_lists = []
517 option_lists = []
518
518
519 def helpcmd(name):
519 def helpcmd(name):
520 if with_version:
520 if with_version:
521 show_version(ui)
521 show_version(ui)
522 ui.write('\n')
522 ui.write('\n')
523 aliases, i = findcmd(name)
523 aliases, i = findcmd(name)
524 # synopsis
524 # synopsis
525 ui.write("%s\n\n" % i[2])
525 ui.write("%s\n\n" % i[2])
526
526
527 # description
527 # description
528 doc = i[0].__doc__
528 doc = i[0].__doc__
529 if not doc:
529 if not doc:
530 doc = _("(No help text available)")
530 doc = _("(No help text available)")
531 if ui.quiet:
531 if ui.quiet:
532 doc = doc.splitlines(0)[0]
532 doc = doc.splitlines(0)[0]
533 ui.write("%s\n" % doc.rstrip())
533 ui.write("%s\n" % doc.rstrip())
534
534
535 if not ui.quiet:
535 if not ui.quiet:
536 # aliases
536 # aliases
537 if len(aliases) > 1:
537 if len(aliases) > 1:
538 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
538 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
539
539
540 # options
540 # options
541 if i[1]:
541 if i[1]:
542 option_lists.append(("options", i[1]))
542 option_lists.append(("options", i[1]))
543
543
544 def helplist(select=None):
544 def helplist(select=None):
545 h = {}
545 h = {}
546 cmds = {}
546 cmds = {}
547 for c, e in table.items():
547 for c, e in table.items():
548 f = c.split("|", 1)[0]
548 f = c.split("|", 1)[0]
549 if select and not select(f):
549 if select and not select(f):
550 continue
550 continue
551 if name == "shortlist" and not f.startswith("^"):
551 if name == "shortlist" and not f.startswith("^"):
552 continue
552 continue
553 f = f.lstrip("^")
553 f = f.lstrip("^")
554 if not ui.debugflag and f.startswith("debug"):
554 if not ui.debugflag and f.startswith("debug"):
555 continue
555 continue
556 doc = e[0].__doc__
556 doc = e[0].__doc__
557 if not doc:
557 if not doc:
558 doc = _("(No help text available)")
558 doc = _("(No help text available)")
559 h[f] = doc.splitlines(0)[0].rstrip()
559 h[f] = doc.splitlines(0)[0].rstrip()
560 cmds[f] = c.lstrip("^")
560 cmds[f] = c.lstrip("^")
561
561
562 fns = h.keys()
562 fns = h.keys()
563 fns.sort()
563 fns.sort()
564 m = max(map(len, fns))
564 m = max(map(len, fns))
565 for f in fns:
565 for f in fns:
566 if ui.verbose:
566 if ui.verbose:
567 commands = cmds[f].replace("|",", ")
567 commands = cmds[f].replace("|",", ")
568 ui.write(" %s:\n %s\n"%(commands, h[f]))
568 ui.write(" %s:\n %s\n"%(commands, h[f]))
569 else:
569 else:
570 ui.write(' %-*s %s\n' % (m, f, h[f]))
570 ui.write(' %-*s %s\n' % (m, f, h[f]))
571
571
572 def helpext(name):
572 def helpext(name):
573 try:
573 try:
574 mod = findext(name)
574 mod = findext(name)
575 except KeyError:
575 except KeyError:
576 raise UnknownCommand(name)
576 raise UnknownCommand(name)
577
577
578 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
578 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
579 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
579 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
580 for d in doc[1:]:
580 for d in doc[1:]:
581 ui.write(d, '\n')
581 ui.write(d, '\n')
582
582
583 ui.status('\n')
583 ui.status('\n')
584 if ui.verbose:
584 if ui.verbose:
585 ui.status(_('list of commands:\n\n'))
585 ui.status(_('list of commands:\n\n'))
586 else:
586 else:
587 ui.status(_('list of commands (use "hg help -v %s" '
587 ui.status(_('list of commands (use "hg help -v %s" '
588 'to show aliases and global options):\n\n') % name)
588 'to show aliases and global options):\n\n') % name)
589
589
590 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
590 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
591 helplist(modcmds.has_key)
591 helplist(modcmds.has_key)
592
592
593 if name and name != 'shortlist':
593 if name and name != 'shortlist':
594 try:
594 try:
595 helpcmd(name)
595 helpcmd(name)
596 except UnknownCommand:
596 except UnknownCommand:
597 helpext(name)
597 helpext(name)
598
598
599 else:
599 else:
600 # program name
600 # program name
601 if ui.verbose or with_version:
601 if ui.verbose or with_version:
602 show_version(ui)
602 show_version(ui)
603 else:
603 else:
604 ui.status(_("Mercurial Distributed SCM\n"))
604 ui.status(_("Mercurial Distributed SCM\n"))
605 ui.status('\n')
605 ui.status('\n')
606
606
607 # list of commands
607 # list of commands
608 if name == "shortlist":
608 if name == "shortlist":
609 ui.status(_('basic commands (use "hg help" '
609 ui.status(_('basic commands (use "hg help" '
610 'for the full list or option "-v" for details):\n\n'))
610 'for the full list or option "-v" for details):\n\n'))
611 elif ui.verbose:
611 elif ui.verbose:
612 ui.status(_('list of commands:\n\n'))
612 ui.status(_('list of commands:\n\n'))
613 else:
613 else:
614 ui.status(_('list of commands (use "hg help -v" '
614 ui.status(_('list of commands (use "hg help -v" '
615 'to show aliases and global options):\n\n'))
615 'to show aliases and global options):\n\n'))
616
616
617 helplist()
617 helplist()
618
618
619 # global options
619 # global options
620 if ui.verbose:
620 if ui.verbose:
621 option_lists.append(("global options", globalopts))
621 option_lists.append(("global options", globalopts))
622
622
623 # list all option lists
623 # list all option lists
624 opt_output = []
624 opt_output = []
625 for title, options in option_lists:
625 for title, options in option_lists:
626 opt_output.append(("\n%s:\n" % title, None))
626 opt_output.append(("\n%s:\n" % title, None))
627 for shortopt, longopt, default, desc in options:
627 for shortopt, longopt, default, desc in options:
628 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
628 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
629 longopt and " --%s" % longopt),
629 longopt and " --%s" % longopt),
630 "%s%s" % (desc,
630 "%s%s" % (desc,
631 default
631 default
632 and _(" (default: %s)") % default
632 and _(" (default: %s)") % default
633 or "")))
633 or "")))
634
634
635 if opt_output:
635 if opt_output:
636 opts_len = max([len(line[0]) for line in opt_output if line[1]])
636 opts_len = max([len(line[0]) for line in opt_output if line[1]])
637 for first, second in opt_output:
637 for first, second in opt_output:
638 if second:
638 if second:
639 ui.write(" %-*s %s\n" % (opts_len, first, second))
639 ui.write(" %-*s %s\n" % (opts_len, first, second))
640 else:
640 else:
641 ui.write("%s\n" % first)
641 ui.write("%s\n" % first)
642
642
643 # Commands start here, listed alphabetically
643 # Commands start here, listed alphabetically
644
644
645 def add(ui, repo, *pats, **opts):
645 def add(ui, repo, *pats, **opts):
646 """add the specified files on the next commit
646 """add the specified files on the next commit
647
647
648 Schedule files to be version controlled and added to the repository.
648 Schedule files to be version controlled and added to the repository.
649
649
650 The files will be added to the repository at the next commit.
650 The files will be added to the repository at the next commit.
651
651
652 If no names are given, add all files in the repository.
652 If no names are given, add all files in the repository.
653 """
653 """
654
654
655 names = []
655 names = []
656 for src, abs, rel, exact in walk(repo, pats, opts):
656 for src, abs, rel, exact in walk(repo, pats, opts):
657 if exact:
657 if exact:
658 if ui.verbose:
658 if ui.verbose:
659 ui.status(_('adding %s\n') % rel)
659 ui.status(_('adding %s\n') % rel)
660 names.append(abs)
660 names.append(abs)
661 elif repo.dirstate.state(abs) == '?':
661 elif repo.dirstate.state(abs) == '?':
662 ui.status(_('adding %s\n') % rel)
662 ui.status(_('adding %s\n') % rel)
663 names.append(abs)
663 names.append(abs)
664 if not opts.get('dry_run'):
664 if not opts.get('dry_run'):
665 repo.add(names)
665 repo.add(names)
666
666
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files (DEPRECATED)

    (DEPRECATED)
    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    This command is now deprecated and will be removed in a future
    release. Please use add and remove --after instead.
    """
    ui.warn(_('(the addremove command is deprecated; use add and remove '
              '--after instead)\n'))
    return addremove_lock(ui, repo, pats, opts)

def addremove_lock(ui, repo, pats, opts, wlock=None):
    add, remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            add.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            remove.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not opts.get('dry_run'):
        repo.add(add, wlock=wlock)
        repo.remove(remove, wlock=wlock)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        try:
            return ucache[rev]
        except:
            u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
            ucache[rev] = u
            return u

    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])

    for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate()
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

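# Illustrative usage of annotate (a sketch, not part of the original source;
# the flags correspond to the opts keys consulted above: 'user', 'number',
# 'changeset', 'date', 'rev', 'text'):
#
#   hg annotate -u -n commands.py        # show user and revision per line
#   hg annotate -r 42 -c commands.py     # annotate as of revision 42
#
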
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        node, p2 = repo.dirstate.parents()
        if p2 != nullid:
            raise util.Abort(_('uncommitted merge - please provide a '
                               'specific revision'))

    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)

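# Illustrative usage of archive (a sketch; destination paths and the "1.0"
# tag are made-up examples, the types mirror the docstring above):
#
#   hg archive ../snapshot                   # "files": plain directory tree
#   hg archive -t tgz -r 1.0 ../proj.tgz     # gzipped tarball of revision 1.0
#   hg archive -t zip -p proj-%h ../proj.zip # zip, members prefixed proj-<hash>
#
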
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s' %
                               (short(p), short(node))))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            n = _lookup(repo, hex(op1))
            hg.merge(repo, n)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

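# Illustrative usage of backout (a sketch; revision 123 is a made-up example,
# the revision is the positional argument taken by the function above):
#
#   hg backout 123            # commit a changeset that reverses revision 123
#   hg backout --merge 123    # same, then merge the backout head back into
#                             # the previous working directory parent
#
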
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    cg = repo.changegroup(o, 'bundle')
    write_bundle(cg, fname)

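# Illustrative usage of bundle (a sketch; file and path names are made up,
# the comparison target defaults to the 'default-push'/'default' path as
# resolved above):
#
#   hg bundle changes.hg                  # changesets missing from default
#   hg bundle changes.hg ../other-clone   # changesets missing from a local path
#
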
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'] or "-1")
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())

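# Illustrative usage of cat (a sketch; setup.py is a made-up file name and
# the -o format string combines the %s/%R expansions described above):
#
#   hg cat -r 5 setup.py                 # print setup.py as of revision 5
#   hg cat -o '%s.r%R' -r 5 setup.py     # write it to setup.py.r5 instead
#
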
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

      $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc will be created on the remote side. Look at the help text
    for the pull command for important details about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    hg.clone(ui, ui.expandpath(source), dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])

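# Illustrative usage of clone (a sketch; the URL, paths and the "1.0" revision
# are made-up examples):
#
#   hg clone http://example.com/hg/project project   # network clone
#   hg clone --pull local-repo local-copy             # force pull, no hardlinks
#   hg clone -r 1.0 big-repo partial-copy             # history up to rev 1.0 only
#
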
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = logmessage(opts)

    if opts['addremove']:
        addremove_lock(ui, repo, pats, opts)
    fns, match, anypats = matchpats(repo, pats, opts)
    if pats:
        modified, added, removed = repo.status(files=fns, match=match)[:3]
        files = modified + added + removed
    else:
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))

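# Illustrative usage of commit (a sketch; file names and messages are made up,
# the repo.status() call above supplies the modified/added/removed lists when
# explicit files are named):
#
#   hg commit -m 'fix overflow in parser'     # commit all outstanding changes
#   hg commit -m 'tweak docs' README          # commit only README
#
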
def docopy(ui, repo, pats, opts, wlock):
    # called with the repo lock held
    cwd = repo.getcwd()
    errors = 0
    copied = []
    targets = {}

    def okaytocopy(abs, rel, exact):
        reasons = {'?': _('is not managed'),
                   'a': _('has been marked for add'),
                   'r': _('has been marked for remove')}
        state = repo.dirstate.state(abs)
        reason = reasons.get(state)
        if reason:
            if state == 'a':
                origsrc = repo.dirstate.copied(abs)
                if origsrc is not None:
                    return origsrc
            if exact:
                ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
        else:
            return abs

    def copy(origsrc, abssrc, relsrc, target, exact):
        abstarget = util.canonpath(repo.root, cwd, target)
        reltarget = util.pathto(cwd, abstarget)
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, abssrc, prevsrc))
            return
        if (not opts['after'] and os.path.exists(reltarget) or
            opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return
            if not opts['after'] and not opts.get('dry_run'):
                os.unlink(reltarget)
        if opts['after']:
            if not os.path.exists(reltarget):
                return
        else:
            targetdir = os.path.dirname(reltarget) or '.'
            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                os.makedirs(targetdir)
        try:
            restore = repo.dirstate.state(abstarget) == 'r'
            if restore and not opts.get('dry_run'):
                repo.undelete([abstarget], wlock)
            try:
                if not opts.get('dry_run'):
                    shutil.copyfile(relsrc, reltarget)
                    shutil.copymode(relsrc, reltarget)
                restore = False
            finally:
                if restore:
                    repo.remove([abstarget], wlock)
        except shutil.Error, inst:
            raise util.Abort(str(inst))
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                ui.warn(_('%s: deleted in working copy\n') % relsrc)
            else:
                ui.warn(_('%s: cannot copy - %s\n') %
                        (relsrc, inst.strerror))
            errors += 1
            return
        if ui.verbose or not exact:
            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
        targets[abstarget] = abssrc
        if abstarget != origsrc and not opts.get('dry_run'):
            repo.copy(origsrc, abstarget, wlock)
        copied.append((abssrc, relsrc, exact))

    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, p[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest, os.path.basename(p))
        else:
            res = lambda p: dest
        return res

    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest, os.path.basename(p))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, s[0][striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest, p[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest, os.path.basename(p))
                else:
                    res = lambda p: dest
        return res


    pats = list(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
        raise util.Abort(_('with multiple sources, destination must be an '
                           'existing directory'))
    if opts['after']:
        tfn = targetpathafterfn
    else:
        tfn = targetpathfn
    copylist = []
    for pat in pats:
        srcs = []
        for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
            origsrc = okaytocopy(abssrc, relsrc, exact)
            if origsrc:
                srcs.append((origsrc, abssrc, relsrc, exact))
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    for targetpath, srcs in copylist:
        for origsrc, abssrc, relsrc, exact in srcs:
            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)

    if errors:
        ui.warn(_('(consider using --after)\n'))
    return errors, copied

def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    return errs

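# Illustrative usage of copy (a sketch; file names are made up, docopy above
# does the actual work and records the copy in the dirstate):
#
#   hg copy util.py util2.py          # copy the file and record the copy
#   hg copy --after old.c new.c       # record a copy already made on disk
#
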
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
    a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))

def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = findcmd(cmd)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    clist = findpossible(cmd).keys()
    clist.sort()
    ui.write("%s\n" % "\n".join(clist))

def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    if not rev:
        rev = repo.changelog.tip()
    else:
        rev = repo.lookup(rev)
    change = repo.changelog.read(rev)
    n = change[0]
    files = repo.manifest.read(n)
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files)

def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)

def debugconfig(ui, repo, *values):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    if values:
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig():
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    ui.write(value, '\n')
        else:
            ui.write('%s=%s\n' % (sectname, value))

def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    if not rev2:
        rev2 = hex(nullid)

    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))

def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    for file_ in keys:
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    time.strftime("%x %X",
                                  time.localtime(dc[file_][3])), file_))
    for f in repo.dirstate.copies:
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))

def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s'), rev)

def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in range(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
            short(node), short(pp[0]), short(pp[1])))

def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("digraph G {\n")
    for i in range(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

def debugrename(ui, repo, file, rev=None):
    """dump rename information"""
    r = repo.file(relpath(repo, [file])[0])
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            m = repo.manifest.read(change[0])
            n = m[relpath(repo, [file])[0]]
        except (hg.RepoError, KeyError):
            n = r.lookup(rev)
    else:
        n = r.tip()
    m = r.renamed(n)
    if m:
        ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
    else:
        ui.write(_("not renamed\n"))

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(walk(repo, pats, opts))
    if not items:
        return
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    node1, node2 = revpair(ui, repo, opts['rev'])

    fns, matchfn, anypats = matchpats(repo, pats, opts)

    patch.diff(repo, node1, node2, fns, match=matchfn,
               opts=ui.diffopts(opts))

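# Illustrative usage of diff (a sketch; revision numbers and the path are
# made-up examples, revpair above resolves the -r arguments):
#
#   hg diff                        # working directory vs. its parent
#   hg diff -r 10 -r 20 hgext      # changes under hgext/ between revs 10 and 20
#
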
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent and commit comment.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = list(revrange(ui, repo, changesets))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, map(repo.lookup, revs), template=opts['output'],
                 switch_parent=opts['switch_parent'], opts=ui.diffopts(opts))

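# Illustrative usage of export (a sketch; revision numbers are made up, the
# -o format characters are those documented in the docstring above):
#
#   hg export 2500                        # patch for revision 2500 to stdout
#   hg export -o '%R-%h.patch' 2500 2501  # one file per revision, e.g.
#                                         # 2500-<shorthash>.patch
#
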
def forget(ui, repo, *pats, **opts):
    """don't add the specified files on the next commit (DEPRECATED)

    (DEPRECATED)
    Undo an 'hg add' scheduled for the next commit.

    This command is now deprecated and will be removed in a future
    release. Please use revert instead.
    """
    ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
    forget = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if repo.dirstate.state(abs) == 'a':
            forget.append(abs)
            if ui.verbose or not exact:
                ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
    repo.forget(forget)

def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
1450 reflags = 0
1449 reflags = 0
1451 if opts['ignore_case']:
1450 if opts['ignore_case']:
1452 reflags |= re.I
1451 reflags |= re.I
1453 regexp = re.compile(pattern, reflags)
1452 regexp = re.compile(pattern, reflags)
1454 sep, eol = ':', '\n'
1453 sep, eol = ':', '\n'
1455 if opts['print0']:
1454 if opts['print0']:
1456 sep = eol = '\0'
1455 sep = eol = '\0'
1457
1456
1458 fcache = {}
1457 fcache = {}
1459 def getfile(fn):
1458 def getfile(fn):
1460 if fn not in fcache:
1459 if fn not in fcache:
1461 fcache[fn] = repo.file(fn)
1460 fcache[fn] = repo.file(fn)
1462 return fcache[fn]
1461 return fcache[fn]
1463
1462
1464 def matchlines(body):
1463 def matchlines(body):
1465 begin = 0
1464 begin = 0
1466 linenum = 0
1465 linenum = 0
1467 while True:
1466 while True:
1468 match = regexp.search(body, begin)
1467 match = regexp.search(body, begin)
1469 if not match:
1468 if not match:
1470 break
1469 break
1471 mstart, mend = match.span()
1470 mstart, mend = match.span()
1472 linenum += body.count('\n', begin, mstart) + 1
1471 linenum += body.count('\n', begin, mstart) + 1
1473 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1472 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1474 lend = body.find('\n', mend)
1473 lend = body.find('\n', mend)
1475 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1474 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1476 begin = lend + 1
1475 begin = lend + 1
1477
1476
1478 class linestate(object):
1477 class linestate(object):
1479 def __init__(self, line, linenum, colstart, colend):
1478 def __init__(self, line, linenum, colstart, colend):
1480 self.line = line
1479 self.line = line
1481 self.linenum = linenum
1480 self.linenum = linenum
1482 self.colstart = colstart
1481 self.colstart = colstart
1483 self.colend = colend
1482 self.colend = colend
1484
1483
1485 def __eq__(self, other):
1484 def __eq__(self, other):
1486 return self.line == other.line
1485 return self.line == other.line
1487
1486
1488 matches = {}
1487 matches = {}
1489 copies = {}
1488 copies = {}
1490 def grepbody(fn, rev, body):
1489 def grepbody(fn, rev, body):
1491 matches[rev].setdefault(fn, [])
1490 matches[rev].setdefault(fn, [])
1492 m = matches[rev][fn]
1491 m = matches[rev][fn]
1493 for lnum, cstart, cend, line in matchlines(body):
1492 for lnum, cstart, cend, line in matchlines(body):
1494 s = linestate(line, lnum, cstart, cend)
1493 s = linestate(line, lnum, cstart, cend)
1495 m.append(s)
1494 m.append(s)
1496
1495
1497 def difflinestates(a, b):
1496 def difflinestates(a, b):
1498 sm = difflib.SequenceMatcher(None, a, b)
1497 sm = difflib.SequenceMatcher(None, a, b)
1499 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1498 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1500 if tag == 'insert':
1499 if tag == 'insert':
1501 for i in range(blo, bhi):
1500 for i in range(blo, bhi):
1502 yield ('+', b[i])
1501 yield ('+', b[i])
1503 elif tag == 'delete':
1502 elif tag == 'delete':
1504 for i in range(alo, ahi):
1503 for i in range(alo, ahi):
1505 yield ('-', a[i])
1504 yield ('-', a[i])
1506 elif tag == 'replace':
1505 elif tag == 'replace':
1507 for i in range(alo, ahi):
1506 for i in range(alo, ahi):
1508 yield ('-', a[i])
1507 yield ('-', a[i])
1509 for i in range(blo, bhi):
1508 for i in range(blo, bhi):
1510 yield ('+', b[i])
1509 yield ('+', b[i])
1511
1510
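# A self-contained sketch of the opcode walk above, run over plain strings
# (the real code compares linestate objects, whose __eq__ only looks at the
# line text); the input lists are invented for illustration.
import difflib

def difflinestates_sketch(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('replace', 'delete'):
            for i in range(alo, ahi):
                yield ('-', a[i])
        if tag in ('replace', 'insert'):
            for i in range(blo, bhi):
                yield ('+', b[i])

old = ['foo = 1', 'bar = 2']
new = ['foo = 1', 'bar = 3', 'baz = 4']
print(list(difflinestates_sketch(old, new)))
# [('-', 'bar = 2'), ('+', 'bar = 3'), ('+', 'baz = 4')]
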
1512 prev = {}
1511 prev = {}
1513 ucache = {}
1512 ucache = {}
1514 def display(fn, rev, states, prevstates):
1513 def display(fn, rev, states, prevstates):
1515 counts = {'-': 0, '+': 0}
1514 counts = {'-': 0, '+': 0}
1516 filerevmatches = {}
1515 filerevmatches = {}
1517 if incrementing or not opts['all']:
1516 if incrementing or not opts['all']:
1518 a, b = prevstates, states
1517 a, b = prevstates, states
1519 else:
1518 else:
1520 a, b = states, prevstates
1519 a, b = states, prevstates
1521 for change, l in difflinestates(a, b):
1520 for change, l in difflinestates(a, b):
1522 if incrementing or not opts['all']:
1521 if incrementing or not opts['all']:
1523 r = rev
1522 r = rev
1524 else:
1523 else:
1525 r = prev[fn]
1524 r = prev[fn]
1526 cols = [fn, str(r)]
1525 cols = [fn, str(r)]
1527 if opts['line_number']:
1526 if opts['line_number']:
1528 cols.append(str(l.linenum))
1527 cols.append(str(l.linenum))
1529 if opts['all']:
1528 if opts['all']:
1530 cols.append(change)
1529 cols.append(change)
1531 if opts['user']:
1530 if opts['user']:
1532 cols.append(trimuser(ui, getchange(r)[1], rev,
1531 cols.append(trimuser(ui, getchange(r)[1], rev,
1533 ucache))
1532 ucache))
1534 if opts['files_with_matches']:
1533 if opts['files_with_matches']:
1535 c = (fn, rev)
1534 c = (fn, rev)
1536 if c in filerevmatches:
1535 if c in filerevmatches:
1537 continue
1536 continue
1538 filerevmatches[c] = 1
1537 filerevmatches[c] = 1
1539 else:
1538 else:
1540 cols.append(l.line)
1539 cols.append(l.line)
1541 ui.write(sep.join(cols), eol)
1540 ui.write(sep.join(cols), eol)
1542 counts[change] += 1
1541 counts[change] += 1
1543 return counts['+'], counts['-']
1542 return counts['+'], counts['-']
1544
1543
1545 fstate = {}
1544 fstate = {}
1546 skip = {}
1545 skip = {}
1547 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1546 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1548 count = 0
1547 count = 0
1549 incrementing = False
1548 incrementing = False
1550 follow = opts.get('follow')
1549 follow = opts.get('follow')
1551 for st, rev, fns in changeiter:
1550 for st, rev, fns in changeiter:
1552 if st == 'window':
1551 if st == 'window':
1553 incrementing = rev
1552 incrementing = rev
1554 matches.clear()
1553 matches.clear()
1555 copies.clear()
1554 copies.clear()
1556 elif st == 'add':
1555 elif st == 'add':
1557 change = repo.changelog.read(repo.lookup(str(rev)))
1556 change = repo.changelog.read(repo.lookup(str(rev)))
1558 mf = repo.manifest.read(change[0])
1557 mf = repo.manifest.read(change[0])
1559 matches[rev] = {}
1558 matches[rev] = {}
1560 for fn in fns:
1559 for fn in fns:
1561 if fn in skip:
1560 if fn in skip:
1562 continue
1561 continue
1563 fstate.setdefault(fn, {})
1562 fstate.setdefault(fn, {})
1564 copies.setdefault(rev, {})
1563 copies.setdefault(rev, {})
1565 try:
1564 try:
1566 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1565 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1567 if follow:
1566 if follow:
1568 copied = getfile(fn).renamed(mf[fn])
1567 copied = getfile(fn).renamed(mf[fn])
1569 if copied:
1568 if copied:
1570 copies[rev][fn] = copied[0]
1569 copies[rev][fn] = copied[0]
1571 except KeyError:
1570 except KeyError:
1572 pass
1571 pass
1573 elif st == 'iter':
1572 elif st == 'iter':
1574 states = matches[rev].items()
1573 states = matches[rev].items()
1575 states.sort()
1574 states.sort()
1576 for fn, m in states:
1575 for fn, m in states:
1577 copy = copies[rev].get(fn)
1576 copy = copies[rev].get(fn)
1578 if fn in skip:
1577 if fn in skip:
1579 if copy:
1578 if copy:
1580 skip[copy] = True
1579 skip[copy] = True
1581 continue
1580 continue
1582 if incrementing or not opts['all'] or fstate[fn]:
1581 if incrementing or not opts['all'] or fstate[fn]:
1583 pos, neg = display(fn, rev, m, fstate[fn])
1582 pos, neg = display(fn, rev, m, fstate[fn])
1584 count += pos + neg
1583 count += pos + neg
1585 if pos and not opts['all']:
1584 if pos and not opts['all']:
1586 skip[fn] = True
1585 skip[fn] = True
1587 if copy:
1586 if copy:
1588 skip[copy] = True
1587 skip[copy] = True
1589 fstate[fn] = m
1588 fstate[fn] = m
1590 if copy:
1589 if copy:
1591 fstate[copy] = m
1590 fstate[copy] = m
1592 prev[fn] = rev
1591 prev[fn] = rev
1593
1592
1594 if not incrementing:
1593 if not incrementing:
1595 fstate = fstate.items()
1594 fstate = fstate.items()
1596 fstate.sort()
1595 fstate.sort()
1597 for fn, state in fstate:
1596 for fn, state in fstate:
1598 if fn in skip:
1597 if fn in skip:
1599 continue
1598 continue
1600 if fn not in copies[prev[fn]]:
1599 if fn not in copies[prev[fn]]:
1601 display(fn, rev, {}, state)
1600 display(fn, rev, {}, state)
1602 return (count == 0 and 1) or 0
1601 return (count == 0 and 1) or 0
1603
1602
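# grep's return value mimics Unix grep exit codes: 0 when something matched,
# 1 when nothing did.  `(count == 0 and 1) or 0` is the pre-ternary spelling
# of that rule (a tiny illustration, not part of the changeset):
for count in (0, 3):
    print("%d -> %d" % (count, (count == 0 and 1) or 0))    # 0 -> 1, 3 -> 0
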
1604 def heads(ui, repo, **opts):
1603 def heads(ui, repo, **opts):
1605 """show current repository heads
1604 """show current repository heads
1606
1605
1607 Show all repository head changesets.
1606 Show all repository head changesets.
1608
1607
1609 Repository "heads" are changesets that don't have children
1608 Repository "heads" are changesets that don't have children
1610 changesets. They are where development generally takes place and
1609 changesets. They are where development generally takes place and
1611 are the usual targets for update and merge operations.
1610 are the usual targets for update and merge operations.
1612 """
1611 """
1613 if opts['rev']:
1612 if opts['rev']:
1614 heads = repo.heads(repo.lookup(opts['rev']))
1613 heads = repo.heads(repo.lookup(opts['rev']))
1615 else:
1614 else:
1616 heads = repo.heads()
1615 heads = repo.heads()
1617 br = None
1616 br = None
1618 if opts['branches']:
1617 if opts['branches']:
1619 br = repo.branchlookup(heads)
1618 br = repo.branchlookup(heads)
1620 displayer = show_changeset(ui, repo, opts)
1619 displayer = show_changeset(ui, repo, opts)
1621 for n in heads:
1620 for n in heads:
1622 displayer.show(changenode=n, brinfo=br)
1621 displayer.show(changenode=n, brinfo=br)
1623
1622
1624 def identify(ui, repo):
1623 def identify(ui, repo):
1625 """print information about the working copy
1624 """print information about the working copy
1626
1625
1627 Print a short summary of the current state of the repo.
1626 Print a short summary of the current state of the repo.
1628
1627
1629 This summary identifies the repository state using one or two parent
1628 This summary identifies the repository state using one or two parent
1630 hash identifiers, followed by a "+" if there are uncommitted changes
1629 hash identifiers, followed by a "+" if there are uncommitted changes
1631 in the working directory, followed by a list of tags for this revision.
1630 in the working directory, followed by a list of tags for this revision.
1632 """
1631 """
1633 parents = [p for p in repo.dirstate.parents() if p != nullid]
1632 parents = [p for p in repo.dirstate.parents() if p != nullid]
1634 if not parents:
1633 if not parents:
1635 ui.write(_("unknown\n"))
1634 ui.write(_("unknown\n"))
1636 return
1635 return
1637
1636
1638 hexfunc = ui.verbose and hex or short
1637 hexfunc = ui.verbose and hex or short
1639 modified, added, removed, deleted, unknown = repo.changes()
1638 modified, added, removed, deleted = repo.status()[:4]
1640 output = ["%s%s" %
1639 output = ["%s%s" %
1641 ('+'.join([hexfunc(parent) for parent in parents]),
1640 ('+'.join([hexfunc(parent) for parent in parents]),
1642 (modified or added or removed or deleted) and "+" or "")]
1641 (modified or added or removed or deleted) and "+" or "")]
1643
1642
1644 if not ui.quiet:
1643 if not ui.quiet:
1645 # multiple tags for a single parent separated by '/'
1644 # multiple tags for a single parent separated by '/'
1646 parenttags = ['/'.join(tags)
1645 parenttags = ['/'.join(tags)
1647 for tags in map(repo.nodetags, parents) if tags]
1646 for tags in map(repo.nodetags, parents) if tags]
1648 # tags for multiple parents separated by ' + '
1647 # tags for multiple parents separated by ' + '
1649 if parenttags:
1648 if parenttags:
1650 output.append(' + '.join(parenttags))
1649 output.append(' + '.join(parenttags))
1651
1650
1652 ui.write("%s\n" % ' '.join(output))
1651 ui.write("%s\n" % ' '.join(output))
1653
1652
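# A pure-Python sketch of the one-line summary identify() builds: parent
# hashes joined with '+', a trailing '+' when the working dir is dirty, and
# the parents' tags (all values below are invented):
parents = ['a1b2c3d4e5f6', '9f8e7d6c5b4a']
dirty = True
tags = ['tip']
output = ['+'.join(parents) + (dirty and '+' or '')]
if tags:
    output.append('/'.join(tags))
print(' '.join(output))    # a1b2c3d4e5f6+9f8e7d6c5b4a+ tip
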
1654 def import_(ui, repo, patch1, *patches, **opts):
1653 def import_(ui, repo, patch1, *patches, **opts):
1655 """import an ordered set of patches
1654 """import an ordered set of patches
1656
1655
1657 Import a list of patches and commit them individually.
1656 Import a list of patches and commit them individually.
1658
1657
1659 If there are outstanding changes in the working directory, import
1658 If there are outstanding changes in the working directory, import
1660 will abort unless given the -f flag.
1659 will abort unless given the -f flag.
1661
1660
1662 You can import a patch straight from a mail message. Even patches
1661 You can import a patch straight from a mail message. Even patches
1663 as attachments work (body part must be type text/plain or
1662 as attachments work (body part must be type text/plain or
1664 text/x-patch to be used). From and Subject headers of email
1663 text/x-patch to be used). From and Subject headers of email
1665 message are used as default committer and commit message. All
1664 message are used as default committer and commit message. All
1666 text/plain body parts before first diff are added to commit
1665 text/plain body parts before first diff are added to commit
1667 message.
1666 message.
1668
1667
1669 If the imported patch was generated by hg export, user and description
1668 If the imported patch was generated by hg export, user and description
1670 from the patch override values from message headers and body. Values
1669 from the patch override values from message headers and body. Values
1671 given on command line with -m and -u override these.
1670 given on command line with -m and -u override these.
1672
1671
1673 To read a patch from standard input, use patch name "-".
1672 To read a patch from standard input, use patch name "-".
1674 """
1673 """
1675 patches = (patch1,) + patches
1674 patches = (patch1,) + patches
1676
1675
1677 if not opts['force']:
1676 if not opts['force']:
1678 bail_if_changed(repo)
1677 bail_if_changed(repo)
1679
1678
1680 d = opts["base"]
1679 d = opts["base"]
1681 strip = opts["strip"]
1680 strip = opts["strip"]
1682
1681
1683 wlock = repo.wlock()
1682 wlock = repo.wlock()
1684 lock = repo.lock()
1683 lock = repo.lock()
1685
1684
1686 for p in patches:
1685 for p in patches:
1687 pf = os.path.join(d, p)
1686 pf = os.path.join(d, p)
1688
1687
1689 if pf == '-':
1688 if pf == '-':
1690 ui.status(_("applying patch from stdin\n"))
1689 ui.status(_("applying patch from stdin\n"))
1691 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1690 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1692 else:
1691 else:
1693 ui.status(_("applying %s\n") % p)
1692 ui.status(_("applying %s\n") % p)
1694 tmpname, message, user, date = patch.extract(ui, file(pf))
1693 tmpname, message, user, date = patch.extract(ui, file(pf))
1695
1694
1696 if tmpname is None:
1695 if tmpname is None:
1697 raise util.Abort(_('no diffs found'))
1696 raise util.Abort(_('no diffs found'))
1698
1697
1699 try:
1698 try:
1700 if opts['message']:
1699 if opts['message']:
1701 # pickup the cmdline msg
1700 # pickup the cmdline msg
1702 message = opts['message']
1701 message = opts['message']
1703 elif message:
1702 elif message:
1704 # pickup the patch msg
1703 # pickup the patch msg
1705 message = message.strip()
1704 message = message.strip()
1706 else:
1705 else:
1707 # launch the editor
1706 # launch the editor
1708 message = None
1707 message = None
1709 ui.debug(_('message:\n%s\n') % message)
1708 ui.debug(_('message:\n%s\n') % message)
1710
1709
1711 files = patch.patch(strip, tmpname, ui, cwd=repo.root)
1710 files = patch.patch(strip, tmpname, ui, cwd=repo.root)
1712 removes = []
1711 removes = []
1713 if len(files) > 0:
1712 if len(files) > 0:
1714 cfiles = files.keys()
1713 cfiles = files.keys()
1715 copies = []
1714 copies = []
1716 copts = {'after': False, 'force': False}
1715 copts = {'after': False, 'force': False}
1717 cwd = repo.getcwd()
1716 cwd = repo.getcwd()
1718 if cwd:
1717 if cwd:
1719 cfiles = [util.pathto(cwd, f) for f in files.keys()]
1718 cfiles = [util.pathto(cwd, f) for f in files.keys()]
1720 for f in files:
1719 for f in files:
1721 ctype, gp = files[f]
1720 ctype, gp = files[f]
1722 if ctype == 'RENAME':
1721 if ctype == 'RENAME':
1723 copies.append((gp.oldpath, gp.path, gp.copymod))
1722 copies.append((gp.oldpath, gp.path, gp.copymod))
1724 removes.append(gp.oldpath)
1723 removes.append(gp.oldpath)
1725 elif ctype == 'COPY':
1724 elif ctype == 'COPY':
1726 copies.append((gp.oldpath, gp.path, gp.copymod))
1725 copies.append((gp.oldpath, gp.path, gp.copymod))
1727 elif ctype == 'DELETE':
1726 elif ctype == 'DELETE':
1728 removes.append(gp.path)
1727 removes.append(gp.path)
1729 for src, dst, after in copies:
1728 for src, dst, after in copies:
1730 absdst = os.path.join(repo.root, dst)
1729 absdst = os.path.join(repo.root, dst)
1731 if not after and os.path.exists(absdst):
1730 if not after and os.path.exists(absdst):
1732 raise util.Abort(_('patch creates existing file %s') % dst)
1731 raise util.Abort(_('patch creates existing file %s') % dst)
1733 if cwd:
1732 if cwd:
1734 src, dst = [util.pathto(cwd, f) for f in (src, dst)]
1733 src, dst = [util.pathto(cwd, f) for f in (src, dst)]
1735 copts['after'] = after
1734 copts['after'] = after
1736 errs, copied = docopy(ui, repo, (src, dst), copts, wlock=wlock)
1735 errs, copied = docopy(ui, repo, (src, dst), copts, wlock=wlock)
1737 if errs:
1736 if errs:
1738 raise util.Abort(errs)
1737 raise util.Abort(errs)
1739 if removes:
1738 if removes:
1740 repo.remove(removes, True, wlock=wlock)
1739 repo.remove(removes, True, wlock=wlock)
1741 for f in files:
1740 for f in files:
1742 ctype, gp = files[f]
1741 ctype, gp = files[f]
1743 if gp and gp.mode:
1742 if gp and gp.mode:
1744 x = gp.mode & 0100 != 0
1743 x = gp.mode & 0100 != 0
1745 dst = os.path.join(repo.root, gp.path)
1744 dst = os.path.join(repo.root, gp.path)
1746 util.set_exec(dst, x)
1745 util.set_exec(dst, x)
1747 addremove_lock(ui, repo, cfiles, {}, wlock=wlock)
1746 addremove_lock(ui, repo, cfiles, {}, wlock=wlock)
1748 files = files.keys()
1747 files = files.keys()
1749 files.extend([r for r in removes if r not in files])
1748 files.extend([r for r in removes if r not in files])
1750 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1749 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1751 finally:
1750 finally:
1752 os.unlink(tmpname)
1751 os.unlink(tmpname)
1753
1752
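# The commit-message precedence used by import_, restated as a tiny helper
# (pick_message is a hypothetical name, not in commands.py): -m wins, then
# the message extracted from the patch/mail, and None means "open the editor".
def pick_message(cmdline_msg, patch_msg):
    if cmdline_msg:
        return cmdline_msg
    if patch_msg:
        return patch_msg.strip()
    return None

print(pick_message('', 'fix exit code\n\n'))    # 'fix exit code'
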
1754 def incoming(ui, repo, source="default", **opts):
1753 def incoming(ui, repo, source="default", **opts):
1755 """show new changesets found in source
1754 """show new changesets found in source
1756
1755
1757 Show new changesets found in the specified path/URL or the default
1756 Show new changesets found in the specified path/URL or the default
1758 pull location. These are the changesets that would be pulled if a pull
1757 pull location. These are the changesets that would be pulled if a pull
1759 was requested.
1758 was requested.
1760
1759
1761 For remote repositories, using --bundle avoids downloading the changesets
1760 For remote repositories, using --bundle avoids downloading the changesets
1762 twice if the incoming is followed by a pull.
1761 twice if the incoming is followed by a pull.
1763
1762
1764 See pull for valid source format details.
1763 See pull for valid source format details.
1765 """
1764 """
1766 source = ui.expandpath(source)
1765 source = ui.expandpath(source)
1767 setremoteconfig(ui, opts)
1766 setremoteconfig(ui, opts)
1768
1767
1769 other = hg.repository(ui, source)
1768 other = hg.repository(ui, source)
1770 incoming = repo.findincoming(other, force=opts["force"])
1769 incoming = repo.findincoming(other, force=opts["force"])
1771 if not incoming:
1770 if not incoming:
1772 ui.status(_("no changes found\n"))
1771 ui.status(_("no changes found\n"))
1773 return
1772 return
1774
1773
1775 cleanup = None
1774 cleanup = None
1776 try:
1775 try:
1777 fname = opts["bundle"]
1776 fname = opts["bundle"]
1778 if fname or not other.local():
1777 if fname or not other.local():
1779 # create a bundle (uncompressed if other repo is not local)
1778 # create a bundle (uncompressed if other repo is not local)
1780 cg = other.changegroup(incoming, "incoming")
1779 cg = other.changegroup(incoming, "incoming")
1781 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1780 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1782 # keep written bundle?
1781 # keep written bundle?
1783 if opts["bundle"]:
1782 if opts["bundle"]:
1784 cleanup = None
1783 cleanup = None
1785 if not other.local():
1784 if not other.local():
1786 # use the created uncompressed bundlerepo
1785 # use the created uncompressed bundlerepo
1787 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1786 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1788
1787
1789 revs = None
1788 revs = None
1790 if opts['rev']:
1789 if opts['rev']:
1791 revs = [other.lookup(rev) for rev in opts['rev']]
1790 revs = [other.lookup(rev) for rev in opts['rev']]
1792 o = other.changelog.nodesbetween(incoming, revs)[0]
1791 o = other.changelog.nodesbetween(incoming, revs)[0]
1793 if opts['newest_first']:
1792 if opts['newest_first']:
1794 o.reverse()
1793 o.reverse()
1795 displayer = show_changeset(ui, other, opts)
1794 displayer = show_changeset(ui, other, opts)
1796 for n in o:
1795 for n in o:
1797 parents = [p for p in other.changelog.parents(n) if p != nullid]
1796 parents = [p for p in other.changelog.parents(n) if p != nullid]
1798 if opts['no_merges'] and len(parents) == 2:
1797 if opts['no_merges'] and len(parents) == 2:
1799 continue
1798 continue
1800 displayer.show(changenode=n)
1799 displayer.show(changenode=n)
1801 if opts['patch']:
1800 if opts['patch']:
1802 prev = (parents and parents[0]) or nullid
1801 prev = (parents and parents[0]) or nullid
1803 patch.diff(repo, other, prev, n)
1802 patch.diff(repo, other, prev, n)
1804 ui.write("\n")
1803 ui.write("\n")
1805 finally:
1804 finally:
1806 if hasattr(other, 'close'):
1805 if hasattr(other, 'close'):
1807 other.close()
1806 other.close()
1808 if cleanup:
1807 if cleanup:
1809 os.unlink(cleanup)
1808 os.unlink(cleanup)
1810
1809
1811 def init(ui, dest=".", **opts):
1810 def init(ui, dest=".", **opts):
1812 """create a new repository in the given directory
1811 """create a new repository in the given directory
1813
1812
1814 Initialize a new repository in the given directory. If the given
1813 Initialize a new repository in the given directory. If the given
1815 directory does not exist, it is created.
1814 directory does not exist, it is created.
1816
1815
1817 If no directory is given, the current directory is used.
1816 If no directory is given, the current directory is used.
1818
1817
1819 It is possible to specify an ssh:// URL as the destination.
1818 It is possible to specify an ssh:// URL as the destination.
1820 Look at the help text for the pull command for important details
1819 Look at the help text for the pull command for important details
1821 about ssh:// URLs.
1820 about ssh:// URLs.
1822 """
1821 """
1823 setremoteconfig(ui, opts)
1822 setremoteconfig(ui, opts)
1824 hg.repository(ui, dest, create=1)
1823 hg.repository(ui, dest, create=1)
1825
1824
1826 def locate(ui, repo, *pats, **opts):
1825 def locate(ui, repo, *pats, **opts):
1827 """locate files matching specific patterns
1826 """locate files matching specific patterns
1828
1827
1829 Print all files under Mercurial control whose names match the
1828 Print all files under Mercurial control whose names match the
1830 given patterns.
1829 given patterns.
1831
1830
1832 This command searches the current directory and its
1831 This command searches the current directory and its
1833 subdirectories. To search an entire repository, move to the root
1832 subdirectories. To search an entire repository, move to the root
1834 of the repository.
1833 of the repository.
1835
1834
1836 If no patterns are given to match, this command prints all file
1835 If no patterns are given to match, this command prints all file
1837 names.
1836 names.
1838
1837
1839 If you want to feed the output of this command into the "xargs"
1838 If you want to feed the output of this command into the "xargs"
1840 command, use the "-0" option to both this command and "xargs".
1839 command, use the "-0" option to both this command and "xargs".
1841 This will avoid the problem of "xargs" treating single filenames
1840 This will avoid the problem of "xargs" treating single filenames
1842 that contain white space as multiple filenames.
1841 that contain white space as multiple filenames.
1843 """
1842 """
1844 end = opts['print0'] and '\0' or '\n'
1843 end = opts['print0'] and '\0' or '\n'
1845 rev = opts['rev']
1844 rev = opts['rev']
1846 if rev:
1845 if rev:
1847 node = repo.lookup(rev)
1846 node = repo.lookup(rev)
1848 else:
1847 else:
1849 node = None
1848 node = None
1850
1849
1851 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1850 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1852 head='(?:.*/|)'):
1851 head='(?:.*/|)'):
1853 if not node and repo.dirstate.state(abs) == '?':
1852 if not node and repo.dirstate.state(abs) == '?':
1854 continue
1853 continue
1855 if opts['fullpath']:
1854 if opts['fullpath']:
1856 ui.write(os.path.join(repo.root, abs), end)
1855 ui.write(os.path.join(repo.root, abs), end)
1857 else:
1856 else:
1858 ui.write(((pats and rel) or abs), end)
1857 ui.write(((pats and rel) or abs), end)
1859
1858
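# Why "locate -0" pairs with "xargs -0": newline- or space-separated lists
# cannot represent file names containing whitespace, NUL-separated lists can
# (invented file names, plain stdlib only):
names = ['my file.txt', 'plain.txt']
print(('\n'.join(names)).split())        # ['my', 'file.txt', 'plain.txt'] -- wrong
print(('\0'.join(names)).split('\0'))    # ['my file.txt', 'plain.txt']    -- right
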
1860 def log(ui, repo, *pats, **opts):
1859 def log(ui, repo, *pats, **opts):
1861 """show revision history of entire repository or files
1860 """show revision history of entire repository or files
1862
1861
1863 Print the revision history of the specified files or the entire
1862 Print the revision history of the specified files or the entire
1864 project.
1863 project.
1865
1864
1866 File history is shown without following rename or copy history of
1865 File history is shown without following rename or copy history of
1867 files. Use -f/--follow with a file name to follow history across
1866 files. Use -f/--follow with a file name to follow history across
1868 renames and copies. --follow without a file name will only show
1867 renames and copies. --follow without a file name will only show
1869 ancestors or descendants of the starting revision. --follow-first
1868 ancestors or descendants of the starting revision. --follow-first
1870 only follows the first parent of merge revisions.
1869 only follows the first parent of merge revisions.
1871
1870
1872 If no revision range is specified, the default is tip:0 unless
1871 If no revision range is specified, the default is tip:0 unless
1873 --follow is set, in which case the working directory parent is
1872 --follow is set, in which case the working directory parent is
1874 used as the starting revision.
1873 used as the starting revision.
1875
1874
1876 By default this command outputs: changeset id and hash, tags,
1875 By default this command outputs: changeset id and hash, tags,
1877 non-trivial parents, user, date and time, and a summary for each
1876 non-trivial parents, user, date and time, and a summary for each
1878 commit. When the -v/--verbose switch is used, the list of changed
1877 commit. When the -v/--verbose switch is used, the list of changed
1879 files and full commit message is shown.
1878 files and full commit message is shown.
1880 """
1879 """
1881 class dui(object):
1880 class dui(object):
1882 # Implement and delegate some ui protocol. Save hunks of
1881 # Implement and delegate some ui protocol. Save hunks of
1883 # output for later display in the desired order.
1882 # output for later display in the desired order.
1884 def __init__(self, ui):
1883 def __init__(self, ui):
1885 self.ui = ui
1884 self.ui = ui
1886 self.hunk = {}
1885 self.hunk = {}
1887 self.header = {}
1886 self.header = {}
1888 def bump(self, rev):
1887 def bump(self, rev):
1889 self.rev = rev
1888 self.rev = rev
1890 self.hunk[rev] = []
1889 self.hunk[rev] = []
1891 self.header[rev] = []
1890 self.header[rev] = []
1892 def note(self, *args):
1891 def note(self, *args):
1893 if self.verbose:
1892 if self.verbose:
1894 self.write(*args)
1893 self.write(*args)
1895 def status(self, *args):
1894 def status(self, *args):
1896 if not self.quiet:
1895 if not self.quiet:
1897 self.write(*args)
1896 self.write(*args)
1898 def write(self, *args):
1897 def write(self, *args):
1899 self.hunk[self.rev].append(args)
1898 self.hunk[self.rev].append(args)
1900 def write_header(self, *args):
1899 def write_header(self, *args):
1901 self.header[self.rev].append(args)
1900 self.header[self.rev].append(args)
1902 def debug(self, *args):
1901 def debug(self, *args):
1903 if self.debugflag:
1902 if self.debugflag:
1904 self.write(*args)
1903 self.write(*args)
1905 def __getattr__(self, key):
1904 def __getattr__(self, key):
1906 return getattr(self.ui, key)
1905 return getattr(self.ui, key)
1907
1906
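# A minimal sketch of the buffering idea behind dui: writes are captured per
# revision and replayed later so hunks come out in display order (the class
# name and revision numbers here are illustrative only).
import sys

class bufferedui(object):
    def __init__(self):
        self.hunk = {}
        self.rev = None
    def bump(self, rev):
        self.rev = rev
        self.hunk[rev] = []
    def write(self, *args):
        self.hunk[self.rev].append(args)

du = bufferedui()
du.bump(5); du.write('changeset 5\n')
du.bump(4); du.write('changeset 4\n')
for rev in sorted(du.hunk.keys(), reverse=True):    # replay newest first
    for args in du.hunk[rev]:
        sys.stdout.write(''.join(args))
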
1908 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1907 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1909
1908
1910 if opts['limit']:
1909 if opts['limit']:
1911 try:
1910 try:
1912 limit = int(opts['limit'])
1911 limit = int(opts['limit'])
1913 except ValueError:
1912 except ValueError:
1914 raise util.Abort(_('limit must be a positive integer'))
1913 raise util.Abort(_('limit must be a positive integer'))
1915 if limit <= 0: raise util.Abort(_('limit must be positive'))
1914 if limit <= 0: raise util.Abort(_('limit must be positive'))
1916 else:
1915 else:
1917 limit = sys.maxint
1916 limit = sys.maxint
1918 count = 0
1917 count = 0
1919
1918
1920 displayer = show_changeset(ui, repo, opts)
1919 displayer = show_changeset(ui, repo, opts)
1921 for st, rev, fns in changeiter:
1920 for st, rev, fns in changeiter:
1922 if st == 'window':
1921 if st == 'window':
1923 du = dui(ui)
1922 du = dui(ui)
1924 displayer.ui = du
1923 displayer.ui = du
1925 elif st == 'add':
1924 elif st == 'add':
1926 du.bump(rev)
1925 du.bump(rev)
1927 changenode = repo.changelog.node(rev)
1926 changenode = repo.changelog.node(rev)
1928 parents = [p for p in repo.changelog.parents(changenode)
1927 parents = [p for p in repo.changelog.parents(changenode)
1929 if p != nullid]
1928 if p != nullid]
1930 if opts['no_merges'] and len(parents) == 2:
1929 if opts['no_merges'] and len(parents) == 2:
1931 continue
1930 continue
1932 if opts['only_merges'] and len(parents) != 2:
1931 if opts['only_merges'] and len(parents) != 2:
1933 continue
1932 continue
1934
1933
1935 if opts['keyword']:
1934 if opts['keyword']:
1936 changes = getchange(rev)
1935 changes = getchange(rev)
1937 miss = 0
1936 miss = 0
1938 for k in [kw.lower() for kw in opts['keyword']]:
1937 for k in [kw.lower() for kw in opts['keyword']]:
1939 if not (k in changes[1].lower() or
1938 if not (k in changes[1].lower() or
1940 k in changes[4].lower() or
1939 k in changes[4].lower() or
1941 k in " ".join(changes[3][:20]).lower()):
1940 k in " ".join(changes[3][:20]).lower()):
1942 miss = 1
1941 miss = 1
1943 break
1942 break
1944 if miss:
1943 if miss:
1945 continue
1944 continue
1946
1945
1947 br = None
1946 br = None
1948 if opts['branches']:
1947 if opts['branches']:
1949 br = repo.branchlookup([repo.changelog.node(rev)])
1948 br = repo.branchlookup([repo.changelog.node(rev)])
1950
1949
1951 displayer.show(rev, brinfo=br)
1950 displayer.show(rev, brinfo=br)
1952 if opts['patch']:
1951 if opts['patch']:
1953 prev = (parents and parents[0]) or nullid
1952 prev = (parents and parents[0]) or nullid
1954 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1953 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1955 du.write("\n\n")
1954 du.write("\n\n")
1956 elif st == 'iter':
1955 elif st == 'iter':
1957 if count == limit: break
1956 if count == limit: break
1958 if du.header[rev]:
1957 if du.header[rev]:
1959 for args in du.header[rev]:
1958 for args in du.header[rev]:
1960 ui.write_header(*args)
1959 ui.write_header(*args)
1961 if du.hunk[rev]:
1960 if du.hunk[rev]:
1962 count += 1
1961 count += 1
1963 for args in du.hunk[rev]:
1962 for args in du.hunk[rev]:
1964 ui.write(*args)
1963 ui.write(*args)
1965
1964
1966 def manifest(ui, repo, rev=None):
1965 def manifest(ui, repo, rev=None):
1967 """output the latest or given revision of the project manifest
1966 """output the latest or given revision of the project manifest
1968
1967
1969 Print a list of version controlled files for the given revision.
1968 Print a list of version controlled files for the given revision.
1970
1969
1971 The manifest is the list of files being version controlled. If no revision
1970 The manifest is the list of files being version controlled. If no revision
1972 is given then the tip is used.
1971 is given then the tip is used.
1973 """
1972 """
1974 if rev:
1973 if rev:
1975 try:
1974 try:
1976 # assume all revision numbers are for changesets
1975 # assume all revision numbers are for changesets
1977 n = repo.lookup(rev)
1976 n = repo.lookup(rev)
1978 change = repo.changelog.read(n)
1977 change = repo.changelog.read(n)
1979 n = change[0]
1978 n = change[0]
1980 except hg.RepoError:
1979 except hg.RepoError:
1981 n = repo.manifest.lookup(rev)
1980 n = repo.manifest.lookup(rev)
1982 else:
1981 else:
1983 n = repo.manifest.tip()
1982 n = repo.manifest.tip()
1984 m = repo.manifest.read(n)
1983 m = repo.manifest.read(n)
1985 files = m.keys()
1984 files = m.keys()
1986 files.sort()
1985 files.sort()
1987
1986
1988 for f in files:
1987 for f in files:
1989 ui.write("%40s %3s %s\n" % (hex(m[f]),
1988 ui.write("%40s %3s %s\n" % (hex(m[f]),
1990 m.execf(f) and "755" or "644", f))
1989 m.execf(f) and "755" or "644", f))
1991
1990
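# The manifest listing format above: 40-char hex node, a mode column derived
# from the exec bit, then the file name (the two entries are made up):
entries = [('a' * 40, True, 'bin/hg'), ('b' * 40, False, 'README')]
for node_hex, is_exec, name in entries:
    print("%40s %3s %s" % (node_hex, is_exec and "755" or "644", name))
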
1992 def merge(ui, repo, node=None, force=None, branch=None):
1991 def merge(ui, repo, node=None, force=None, branch=None):
1993 """Merge working directory with another revision
1992 """Merge working directory with another revision
1994
1993
1995 Merge the contents of the current working directory and the
1994 Merge the contents of the current working directory and the
1996 requested revision. Files that changed with respect to either parent
1995 requested revision. Files that changed with respect to either parent
1997 are marked as changed for the next commit, and a commit must be
1996 are marked as changed for the next commit, and a commit must be
1998 performed before any further updates are allowed.
1997 performed before any further updates are allowed.
1999 """
1998 """
2000
1999
2001 node = _lookup(repo, node, branch)
2000 node = _lookup(repo, node, branch)
2002 return hg.merge(repo, node, force=force)
2001 return hg.merge(repo, node, force=force)
2003
2002
2004 def outgoing(ui, repo, dest=None, **opts):
2003 def outgoing(ui, repo, dest=None, **opts):
2005 """show changesets not found in destination
2004 """show changesets not found in destination
2006
2005
2007 Show changesets not found in the specified destination repository or
2006 Show changesets not found in the specified destination repository or
2008 the default push location. These are the changesets that would be pushed
2007 the default push location. These are the changesets that would be pushed
2009 if a push was requested.
2008 if a push was requested.
2010
2009
2011 See pull for valid destination format details.
2010 See pull for valid destination format details.
2012 """
2011 """
2013 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2012 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2014 setremoteconfig(ui, opts)
2013 setremoteconfig(ui, opts)
2015 revs = None
2014 revs = None
2016 if opts['rev']:
2015 if opts['rev']:
2017 revs = [repo.lookup(rev) for rev in opts['rev']]
2016 revs = [repo.lookup(rev) for rev in opts['rev']]
2018
2017
2019 other = hg.repository(ui, dest)
2018 other = hg.repository(ui, dest)
2020 o = repo.findoutgoing(other, force=opts['force'])
2019 o = repo.findoutgoing(other, force=opts['force'])
2021 if not o:
2020 if not o:
2022 ui.status(_("no changes found\n"))
2021 ui.status(_("no changes found\n"))
2023 return
2022 return
2024 o = repo.changelog.nodesbetween(o, revs)[0]
2023 o = repo.changelog.nodesbetween(o, revs)[0]
2025 if opts['newest_first']:
2024 if opts['newest_first']:
2026 o.reverse()
2025 o.reverse()
2027 displayer = show_changeset(ui, repo, opts)
2026 displayer = show_changeset(ui, repo, opts)
2028 for n in o:
2027 for n in o:
2029 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2028 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2030 if opts['no_merges'] and len(parents) == 2:
2029 if opts['no_merges'] and len(parents) == 2:
2031 continue
2030 continue
2032 displayer.show(changenode=n)
2031 displayer.show(changenode=n)
2033 if opts['patch']:
2032 if opts['patch']:
2034 prev = (parents and parents[0]) or nullid
2033 prev = (parents and parents[0]) or nullid
2035 patch.diff(repo, prev, n)
2034 patch.diff(repo, prev, n)
2036 ui.write("\n")
2035 ui.write("\n")
2037
2036
2038 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2037 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2039 """show the parents of the working dir or revision
2038 """show the parents of the working dir or revision
2040
2039
2041 Print the working directory's parent revisions.
2040 Print the working directory's parent revisions.
2042 """
2041 """
2043 # legacy
2042 # legacy
2044 if file_ and not rev:
2043 if file_ and not rev:
2045 try:
2044 try:
2046 rev = repo.lookup(file_)
2045 rev = repo.lookup(file_)
2047 file_ = None
2046 file_ = None
2048 except hg.RepoError:
2047 except hg.RepoError:
2049 pass
2048 pass
2050 else:
2049 else:
2051 ui.warn(_("'hg parent REV' is deprecated, "
2050 ui.warn(_("'hg parent REV' is deprecated, "
2052 "please use 'hg parents -r REV instead\n"))
2051 "please use 'hg parents -r REV instead\n"))
2053
2052
2054 if rev:
2053 if rev:
2055 if file_:
2054 if file_:
2056 ctx = repo.filectx(file_, changeid=rev)
2055 ctx = repo.filectx(file_, changeid=rev)
2057 else:
2056 else:
2058 ctx = repo.changectx(rev)
2057 ctx = repo.changectx(rev)
2059 p = [cp.node() for cp in ctx.parents()]
2058 p = [cp.node() for cp in ctx.parents()]
2060 else:
2059 else:
2061 p = repo.dirstate.parents()
2060 p = repo.dirstate.parents()
2062
2061
2063 br = None
2062 br = None
2064 if branches is not None:
2063 if branches is not None:
2065 br = repo.branchlookup(p)
2064 br = repo.branchlookup(p)
2066 displayer = show_changeset(ui, repo, opts)
2065 displayer = show_changeset(ui, repo, opts)
2067 for n in p:
2066 for n in p:
2068 if n != nullid:
2067 if n != nullid:
2069 displayer.show(changenode=n, brinfo=br)
2068 displayer.show(changenode=n, brinfo=br)
2070
2069
2071 def paths(ui, repo, search=None):
2070 def paths(ui, repo, search=None):
2072 """show definition of symbolic path names
2071 """show definition of symbolic path names
2073
2072
2074 Show definition of symbolic path name NAME. If no name is given, show
2073 Show definition of symbolic path name NAME. If no name is given, show
2075 definition of available names.
2074 definition of available names.
2076
2075
2077 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2076 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2078 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2077 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2079 """
2078 """
2080 if search:
2079 if search:
2081 for name, path in ui.configitems("paths"):
2080 for name, path in ui.configitems("paths"):
2082 if name == search:
2081 if name == search:
2083 ui.write("%s\n" % path)
2082 ui.write("%s\n" % path)
2084 return
2083 return
2085 ui.warn(_("not found!\n"))
2084 ui.warn(_("not found!\n"))
2086 return 1
2085 return 1
2087 else:
2086 else:
2088 for name, path in ui.configitems("paths"):
2087 for name, path in ui.configitems("paths"):
2089 ui.write("%s = %s\n" % (name, path))
2088 ui.write("%s = %s\n" % (name, path))
2090
2089
2091 def postincoming(ui, repo, modheads, optupdate):
2090 def postincoming(ui, repo, modheads, optupdate):
2092 if modheads == 0:
2091 if modheads == 0:
2093 return
2092 return
2094 if optupdate:
2093 if optupdate:
2095 if modheads == 1:
2094 if modheads == 1:
2096 return hg.update(repo, repo.changelog.tip()) # update
2095 return hg.update(repo, repo.changelog.tip()) # update
2097 else:
2096 else:
2098 ui.status(_("not updating, since new heads added\n"))
2097 ui.status(_("not updating, since new heads added\n"))
2099 if modheads > 1:
2098 if modheads > 1:
2100 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2099 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2101 else:
2100 else:
2102 ui.status(_("(run 'hg update' to get a working copy)\n"))
2101 ui.status(_("(run 'hg update' to get a working copy)\n"))
2103
2102
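# postincoming's behaviour, restated as a lookup (advice() is a hypothetical
# helper, not in commands.py): nothing for 0 new heads, an update for exactly
# one new head when --update was requested, otherwise a hint to merge or
# update by hand.
def advice(modheads, optupdate):
    if modheads == 0:
        return None
    if optupdate and modheads == 1:
        return 'update to new tip'
    if modheads > 1:
        return "run 'hg heads' to see heads, 'hg merge' to merge"
    return "run 'hg update' to get a working copy"

print(advice(2, True))    # run 'hg heads' to see heads, 'hg merge' to merge
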
2104 def pull(ui, repo, source="default", **opts):
2103 def pull(ui, repo, source="default", **opts):
2105 """pull changes from the specified source
2104 """pull changes from the specified source
2106
2105
2107 Pull changes from a remote repository to a local one.
2106 Pull changes from a remote repository to a local one.
2108
2107
2109 This finds all changes from the repository at the specified path
2108 This finds all changes from the repository at the specified path
2110 or URL and adds them to the local repository. By default, this
2109 or URL and adds them to the local repository. By default, this
2111 does not update the copy of the project in the working directory.
2110 does not update the copy of the project in the working directory.
2112
2111
2113 Valid URLs are of the form:
2112 Valid URLs are of the form:
2114
2113
2115 local/filesystem/path
2114 local/filesystem/path
2116 http://[user@]host[:port]/[path]
2115 http://[user@]host[:port]/[path]
2117 https://[user@]host[:port]/[path]
2116 https://[user@]host[:port]/[path]
2118 ssh://[user@]host[:port]/[path]
2117 ssh://[user@]host[:port]/[path]
2119
2118
2120 Some notes about using SSH with Mercurial:
2119 Some notes about using SSH with Mercurial:
2121 - SSH requires an accessible shell account on the destination machine
2120 - SSH requires an accessible shell account on the destination machine
2122 and a copy of hg in the remote path, or specified with the remotecmd option.
2121 and a copy of hg in the remote path, or specified with the remotecmd option.
2123 - path is relative to the remote user's home directory by default.
2122 - path is relative to the remote user's home directory by default.
2124 Use an extra slash at the start of a path to specify an absolute path:
2123 Use an extra slash at the start of a path to specify an absolute path:
2125 ssh://example.com//tmp/repository
2124 ssh://example.com//tmp/repository
2126 - Mercurial doesn't use its own compression via SSH; the right thing
2125 - Mercurial doesn't use its own compression via SSH; the right thing
2127 to do is to configure it in your ~/.ssh/config, e.g.:
2126 to do is to configure it in your ~/.ssh/config, e.g.:
2128 Host *.mylocalnetwork.example.com
2127 Host *.mylocalnetwork.example.com
2129 Compression off
2128 Compression off
2130 Host *
2129 Host *
2131 Compression on
2130 Compression on
2132 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2131 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2133 with the --ssh command line option.
2132 with the --ssh command line option.
2134 """
2133 """
2135 source = ui.expandpath(source)
2134 source = ui.expandpath(source)
2136 setremoteconfig(ui, opts)
2135 setremoteconfig(ui, opts)
2137
2136
2138 other = hg.repository(ui, source)
2137 other = hg.repository(ui, source)
2139 ui.status(_('pulling from %s\n') % (source))
2138 ui.status(_('pulling from %s\n') % (source))
2140 revs = None
2139 revs = None
2141 if opts['rev'] and not other.local():
2140 if opts['rev'] and not other.local():
2142 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2141 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2143 elif opts['rev']:
2142 elif opts['rev']:
2144 revs = [other.lookup(rev) for rev in opts['rev']]
2143 revs = [other.lookup(rev) for rev in opts['rev']]
2145 modheads = repo.pull(other, heads=revs, force=opts['force'])
2144 modheads = repo.pull(other, heads=revs, force=opts['force'])
2146 return postincoming(ui, repo, modheads, opts['update'])
2145 return postincoming(ui, repo, modheads, opts['update'])
2147
2146
2148 def push(ui, repo, dest=None, **opts):
2147 def push(ui, repo, dest=None, **opts):
2149 """push changes to the specified destination
2148 """push changes to the specified destination
2150
2149
2151 Push changes from the local repository to the given destination.
2150 Push changes from the local repository to the given destination.
2152
2151
2153 This is the symmetrical operation for pull. It helps to move
2152 This is the symmetrical operation for pull. It helps to move
2154 changes from the current repository to a different one. If the
2153 changes from the current repository to a different one. If the
2155 destination is local this is identical to a pull in that directory
2154 destination is local this is identical to a pull in that directory
2156 from the current one.
2155 from the current one.
2157
2156
2158 By default, push will refuse to run if it detects the result would
2157 By default, push will refuse to run if it detects the result would
2159 increase the number of remote heads. This generally indicates that
2158 increase the number of remote heads. This generally indicates that
2160 the client has forgotten to sync and merge before pushing.
2159 the client has forgotten to sync and merge before pushing.
2161
2160
2162 Valid URLs are of the form:
2161 Valid URLs are of the form:
2163
2162
2164 local/filesystem/path
2163 local/filesystem/path
2165 ssh://[user@]host[:port]/[path]
2164 ssh://[user@]host[:port]/[path]
2166
2165
2167 Look at the help text for the pull command for important details
2166 Look at the help text for the pull command for important details
2168 about ssh:// URLs.
2167 about ssh:// URLs.
2169
2168
2170 Pushing to http:// and https:// URLs is possible, too, if this
2169 Pushing to http:// and https:// URLs is possible, too, if this
2171 feature is enabled on the remote Mercurial server.
2170 feature is enabled on the remote Mercurial server.
2172 """
2171 """
2173 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2172 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2174 setremoteconfig(ui, opts)
2173 setremoteconfig(ui, opts)
2175
2174
2176 other = hg.repository(ui, dest)
2175 other = hg.repository(ui, dest)
2177 ui.status('pushing to %s\n' % (dest))
2176 ui.status('pushing to %s\n' % (dest))
2178 revs = None
2177 revs = None
2179 if opts['rev']:
2178 if opts['rev']:
2180 revs = [repo.lookup(rev) for rev in opts['rev']]
2179 revs = [repo.lookup(rev) for rev in opts['rev']]
2181 r = repo.push(other, opts['force'], revs=revs)
2180 r = repo.push(other, opts['force'], revs=revs)
2182 return r == 0
2181 return r == 0
2183
2182
2184 def rawcommit(ui, repo, *flist, **rc):
2183 def rawcommit(ui, repo, *flist, **rc):
2185 """raw commit interface (DEPRECATED)
2184 """raw commit interface (DEPRECATED)
2186
2185
2187 (DEPRECATED)
2186 (DEPRECATED)
2188 Lowlevel commit, for use in helper scripts.
2187 Lowlevel commit, for use in helper scripts.
2189
2188
2190 This command is not intended to be used by normal users, as it is
2189 This command is not intended to be used by normal users, as it is
2191 primarily useful for importing from other SCMs.
2190 primarily useful for importing from other SCMs.
2192
2191
2193 This command is now deprecated and will be removed in a future
2192 This command is now deprecated and will be removed in a future
2194 release; please use debugsetparents and commit instead.
2193 release; please use debugsetparents and commit instead.
2195 """
2194 """
2196
2195
2197 ui.warn(_("(the rawcommit command is deprecated)\n"))
2196 ui.warn(_("(the rawcommit command is deprecated)\n"))
2198
2197
2199 message = rc['message']
2198 message = rc['message']
2200 if not message and rc['logfile']:
2199 if not message and rc['logfile']:
2201 try:
2200 try:
2202 message = open(rc['logfile']).read()
2201 message = open(rc['logfile']).read()
2203 except IOError:
2202 except IOError:
2204 pass
2203 pass
2205 if not message and not rc['logfile']:
2204 if not message and not rc['logfile']:
2206 raise util.Abort(_("missing commit message"))
2205 raise util.Abort(_("missing commit message"))
2207
2206
2208 files = relpath(repo, list(flist))
2207 files = relpath(repo, list(flist))
2209 if rc['files']:
2208 if rc['files']:
2210 files += open(rc['files']).read().splitlines()
2209 files += open(rc['files']).read().splitlines()
2211
2210
2212 rc['parent'] = map(repo.lookup, rc['parent'])
2211 rc['parent'] = map(repo.lookup, rc['parent'])
2213
2212
2214 try:
2213 try:
2215 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2214 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2216 except ValueError, inst:
2215 except ValueError, inst:
2217 raise util.Abort(str(inst))
2216 raise util.Abort(str(inst))
2218
2217
2219 def recover(ui, repo):
2218 def recover(ui, repo):
2220 """roll back an interrupted transaction
2219 """roll back an interrupted transaction
2221
2220
2222 Recover from an interrupted commit or pull.
2221 Recover from an interrupted commit or pull.
2223
2222
2224 This command tries to fix the repository status after an interrupted
2223 This command tries to fix the repository status after an interrupted
2225 operation. It should only be necessary when Mercurial suggests it.
2224 operation. It should only be necessary when Mercurial suggests it.
2226 """
2225 """
2227 if repo.recover():
2226 if repo.recover():
2228 return hg.verify(repo)
2227 return hg.verify(repo)
2229 return 1
2228 return 1
2230
2229
2231 def remove(ui, repo, *pats, **opts):
2230 def remove(ui, repo, *pats, **opts):
2232 """remove the specified files on the next commit
2231 """remove the specified files on the next commit
2233
2232
2234 Schedule the indicated files for removal from the repository.
2233 Schedule the indicated files for removal from the repository.
2235
2234
2236 This command schedules the files to be removed at the next commit.
2235 This command schedules the files to be removed at the next commit.
2237 This only removes files from the current branch, not from the
2236 This only removes files from the current branch, not from the
2238 entire project history. If the files still exist in the working
2237 entire project history. If the files still exist in the working
2239 directory, they will be deleted from it. If invoked with --after,
2238 directory, they will be deleted from it. If invoked with --after,
2240 files that have been manually deleted are marked as removed.
2239 files that have been manually deleted are marked as removed.
2241
2240
2242 Modified files and added files are not removed by default. To
2241 Modified files and added files are not removed by default. To
2243 remove them, use the -f/--force option.
2242 remove them, use the -f/--force option.
2244 """
2243 """
2245 names = []
2244 names = []
2246 if not opts['after'] and not pats:
2245 if not opts['after'] and not pats:
2247 raise util.Abort(_('no files specified'))
2246 raise util.Abort(_('no files specified'))
2248 files, matchfn, anypats = matchpats(repo, pats, opts)
2247 files, matchfn, anypats = matchpats(repo, pats, opts)
2249 exact = dict.fromkeys(files)
2248 exact = dict.fromkeys(files)
2250 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2249 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2251 modified, added, removed, deleted, unknown = mardu
2250 modified, added, removed, deleted, unknown = mardu
2252 remove, forget = [], []
2251 remove, forget = [], []
2253 for src, abs, rel, exact in walk(repo, pats, opts):
2252 for src, abs, rel, exact in walk(repo, pats, opts):
2254 reason = None
2253 reason = None
2255 if abs not in deleted and opts['after']:
2254 if abs not in deleted and opts['after']:
2256 reason = _('is still present')
2255 reason = _('is still present')
2257 elif abs in modified and not opts['force']:
2256 elif abs in modified and not opts['force']:
2258 reason = _('is modified (use -f to force removal)')
2257 reason = _('is modified (use -f to force removal)')
2259 elif abs in added:
2258 elif abs in added:
2260 if opts['force']:
2259 if opts['force']:
2261 forget.append(abs)
2260 forget.append(abs)
2262 continue
2261 continue
2263 reason = _('has been marked for add (use -f to force removal)')
2262 reason = _('has been marked for add (use -f to force removal)')
2264 elif abs in unknown:
2263 elif abs in unknown:
2265 reason = _('is not managed')
2264 reason = _('is not managed')
2266 elif abs in removed:
2265 elif abs in removed:
2267 continue
2266 continue
2268 if reason:
2267 if reason:
2269 if exact:
2268 if exact:
2270 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2269 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2271 else:
2270 else:
2272 if ui.verbose or not exact:
2271 if ui.verbose or not exact:
2273 ui.status(_('removing %s\n') % rel)
2272 ui.status(_('removing %s\n') % rel)
2274 remove.append(abs)
2273 remove.append(abs)
2275 repo.forget(forget)
2274 repo.forget(forget)
2276 repo.remove(remove, unlink=not opts['after'])
2275 repo.remove(remove, unlink=not opts['after'])
2277
2276
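# remove's per-file decision, restated as a standalone function (decide() is
# a hypothetical name; the states mirror the status buckets used above):
def decide(state, after, force):
    if after and state != 'deleted':
        return 'refuse: is still present'
    if state == 'modified' and not force:
        return 'refuse: is modified (use -f to force removal)'
    if state == 'added':
        return force and 'forget' or 'refuse: has been marked for add'
    if state == 'unknown':
        return 'refuse: is not managed'
    if state == 'removed':
        return 'skip: already removed'
    return 'remove'

print(decide('modified', after=False, force=True))    # remove
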
2278 def rename(ui, repo, *pats, **opts):
2277 def rename(ui, repo, *pats, **opts):
2279 """rename files; equivalent of copy + remove
2278 """rename files; equivalent of copy + remove
2280
2279
2281 Mark dest as copies of sources; mark sources for deletion. If
2280 Mark dest as copies of sources; mark sources for deletion. If
2282 dest is a directory, copies are put in that directory. If dest is
2281 dest is a directory, copies are put in that directory. If dest is
2283 a file, there can only be one source.
2282 a file, there can only be one source.
2284
2283
2285 By default, this command copies the contents of files as they
2284 By default, this command copies the contents of files as they
2286 stand in the working directory. If invoked with --after, the
2285 stand in the working directory. If invoked with --after, the
2287 operation is recorded, but no copying is performed.
2286 operation is recorded, but no copying is performed.
2288
2287
2289 This command takes effect in the next commit.
2288 This command takes effect in the next commit.
2290
2289
2291 NOTE: This command should be treated as experimental. While it
2290 NOTE: This command should be treated as experimental. While it
2292 should properly record renamed files, this information is not yet
2291 should properly record renamed files, this information is not yet
2293 fully used by merge, nor fully reported by log.
2292 fully used by merge, nor fully reported by log.
2294 """
2293 """
2295 wlock = repo.wlock(0)
2294 wlock = repo.wlock(0)
2296 errs, copied = docopy(ui, repo, pats, opts, wlock)
2295 errs, copied = docopy(ui, repo, pats, opts, wlock)
2297 names = []
2296 names = []
2298 for abs, rel, exact in copied:
2297 for abs, rel, exact in copied:
2299 if ui.verbose or not exact:
2298 if ui.verbose or not exact:
2300 ui.status(_('removing %s\n') % rel)
2299 ui.status(_('removing %s\n') % rel)
2301 names.append(abs)
2300 names.append(abs)
2302 if not opts.get('dry_run'):
2301 if not opts.get('dry_run'):
2303 repo.remove(names, True, wlock)
2302 repo.remove(names, True, wlock)
2304 return errs
2303 return errs
2305
2304
2306 def revert(ui, repo, *pats, **opts):
2305 def revert(ui, repo, *pats, **opts):
2307 """revert files or dirs to their states as of some revision
2306 """revert files or dirs to their states as of some revision
2308
2307
2309 With no revision specified, revert the named files or directories
2308 With no revision specified, revert the named files or directories
2310 to the contents they had in the parent of the working directory.
2309 to the contents they had in the parent of the working directory.
2311 This restores the contents of the affected files to an unmodified
2310 This restores the contents of the affected files to an unmodified
2312 state. If the working directory has two parents, you must
2311 state. If the working directory has two parents, you must
2313 explicitly specify the revision to revert to.
2312 explicitly specify the revision to revert to.
2314
2313
2315 Modified files are saved with a .orig suffix before reverting.
2314 Modified files are saved with a .orig suffix before reverting.
2316 To disable these backups, use --no-backup.
2315 To disable these backups, use --no-backup.
2317
2316
2318 Using the -r option, revert the given files or directories to
2317 Using the -r option, revert the given files or directories to
2319 their contents as of a specific revision. This can be helpful to "roll
2318 their contents as of a specific revision. This can be helpful to "roll
2320 back" some or all of a change that should not have been committed.
2319 back" some or all of a change that should not have been committed.
2321
2320
2322 Revert modifies the working directory. It does not commit any
2321 Revert modifies the working directory. It does not commit any
2323 changes, or change the parent of the working directory. If you
2322 changes, or change the parent of the working directory. If you
2324 revert to a revision other than the parent of the working
2323 revert to a revision other than the parent of the working
2325 directory, the reverted files will thus appear modified
2324 directory, the reverted files will thus appear modified
2326 afterwards.
2325 afterwards.
2327
2326
2328 If a file has been deleted, it is recreated. If the executable
2327 If a file has been deleted, it is recreated. If the executable
2329 mode of a file was changed, it is reset.
2328 mode of a file was changed, it is reset.
2330
2329
2331 If names are given, all files matching the names are reverted.
2330 If names are given, all files matching the names are reverted.
2332
2331
2333 If no arguments are given, all files in the repository are reverted.
2332 If no arguments are given, all files in the repository are reverted.
2334 """
2333 """
2335 parent, p2 = repo.dirstate.parents()
2334 parent, p2 = repo.dirstate.parents()
2336 if opts['rev']:
2335 if opts['rev']:
2337 node = repo.lookup(opts['rev'])
2336 node = repo.lookup(opts['rev'])
2338 elif p2 != nullid:
2337 elif p2 != nullid:
2339 raise util.Abort(_('working dir has two parents; '
2338 raise util.Abort(_('working dir has two parents; '
2340 'you must specify the revision to revert to'))
2339 'you must specify the revision to revert to'))
2341 else:
2340 else:
2342 node = parent
2341 node = parent
2343 mf = repo.manifest.read(repo.changelog.read(node)[0])
2342 mf = repo.manifest.read(repo.changelog.read(node)[0])
2344 if node == parent:
2343 if node == parent:
2345 pmf = mf
2344 pmf = mf
2346 else:
2345 else:
2347 pmf = None
2346 pmf = None
2348
2347
2349 wlock = repo.wlock()
2348 wlock = repo.wlock()
2350
2349
2351 # need all matching names in dirstate and manifest of target rev,
2350 # need all matching names in dirstate and manifest of target rev,
2352 # so have to walk both. do not print errors if files exist in one
2351 # so have to walk both. do not print errors if files exist in one
2353 # but not other.
2352 # but not other.
2354
2353
2355 names = {}
2354 names = {}
2356 target_only = {}
2355 target_only = {}
2357
2356
2358 # walk dirstate.
2357 # walk dirstate.
2359
2358
2360 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2359 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2361 names[abs] = (rel, exact)
2360 names[abs] = (rel, exact)
2362 if src == 'b':
2361 if src == 'b':
2363 target_only[abs] = True
2362 target_only[abs] = True
2364
2363
2365 # walk target manifest.
2364 # walk target manifest.
2366
2365
2367 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2366 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2368 badmatch=names.has_key):
2367 badmatch=names.has_key):
2369 if abs in names: continue
2368 if abs in names: continue
2370 names[abs] = (rel, exact)
2369 names[abs] = (rel, exact)
2371 target_only[abs] = True
2370 target_only[abs] = True
2372
2371
2373 changes = repo.changes(match=names.has_key, wlock=wlock)
2372 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2374 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2373 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2375
2374
2376 revert = ([], _('reverting %s\n'))
2375 revert = ([], _('reverting %s\n'))
2377 add = ([], _('adding %s\n'))
2376 add = ([], _('adding %s\n'))
2378 remove = ([], _('removing %s\n'))
2377 remove = ([], _('removing %s\n'))
2379 forget = ([], _('forgetting %s\n'))
2378 forget = ([], _('forgetting %s\n'))
2380 undelete = ([], _('undeleting %s\n'))
2379 undelete = ([], _('undeleting %s\n'))
2381 update = {}
2380 update = {}
2382
2381
2383 disptable = (
2382 disptable = (
2384 # dispatch table:
2383 # dispatch table:
2385 # file state
2384 # file state
2386 # action if in target manifest
2385 # action if in target manifest
2387 # action if not in target manifest
2386 # action if not in target manifest
2388 # make backup if in target manifest
2387 # make backup if in target manifest
2389 # make backup if not in target manifest
2388 # make backup if not in target manifest
2390 (modified, revert, remove, True, True),
2389 (modified, revert, remove, True, True),
2391 (added, revert, forget, True, False),
2390 (added, revert, forget, True, False),
2392 (removed, undelete, None, False, False),
2391 (removed, undelete, None, False, False),
2393 (deleted, revert, remove, False, False),
2392 (deleted, revert, remove, False, False),
2394 (unknown, add, None, True, False),
2393 (unknown, add, None, True, False),
2395 (target_only, add, None, False, False),
2394 (target_only, add, None, False, False),
2396 )
2395 )
2397
2396
2398 entries = names.items()
2397 entries = names.items()
2399 entries.sort()
2398 entries.sort()
2400
2399
2401 for abs, (rel, exact) in entries:
2400 for abs, (rel, exact) in entries:
2402 mfentry = mf.get(abs)
2401 mfentry = mf.get(abs)
2403 def handle(xlist, dobackup):
2402 def handle(xlist, dobackup):
2404 xlist[0].append(abs)
2403 xlist[0].append(abs)
2405 update[abs] = 1
2404 update[abs] = 1
2406 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2405 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2407 bakname = "%s.orig" % rel
2406 bakname = "%s.orig" % rel
2408 ui.note(_('saving current version of %s as %s\n') %
2407 ui.note(_('saving current version of %s as %s\n') %
2409 (rel, bakname))
2408 (rel, bakname))
2410 if not opts.get('dry_run'):
2409 if not opts.get('dry_run'):
2411 shutil.copyfile(rel, bakname)
2410 shutil.copyfile(rel, bakname)
2412 shutil.copymode(rel, bakname)
2411 shutil.copymode(rel, bakname)
2413 if ui.verbose or not exact:
2412 if ui.verbose or not exact:
2414 ui.status(xlist[1] % rel)
2413 ui.status(xlist[1] % rel)
2415 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2414 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2416 if abs not in table: continue
2415 if abs not in table: continue
2417 # file has changed in dirstate
2416 # file has changed in dirstate
2418 if mfentry:
2417 if mfentry:
2419 handle(hitlist, backuphit)
2418 handle(hitlist, backuphit)
2420 elif misslist is not None:
2419 elif misslist is not None:
2421 handle(misslist, backupmiss)
2420 handle(misslist, backupmiss)
2422 else:
2421 else:
2423 if exact: ui.warn(_('file not managed: %s\n' % rel))
2422 if exact: ui.warn(_('file not managed: %s\n' % rel))
2424 break
2423 break
2425 else:
2424 else:
2426 # file has not changed in dirstate
2425 # file has not changed in dirstate
2427 if node == parent:
2426 if node == parent:
2428 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2427 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2429 continue
2428 continue
2430 if pmf is None:
2429 if pmf is None:
2431 # only need parent manifest in this unlikely case,
2430 # only need parent manifest in this unlikely case,
2432 # so do not read by default
2431 # so do not read by default
2433 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2432 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2434 if abs in pmf:
2433 if abs in pmf:
2435 if mfentry:
2434 if mfentry:
2436 # if version of file is same in parent and target
2435 # if version of file is same in parent and target
2437 # manifests, do nothing
2436 # manifests, do nothing
2438 if pmf[abs] != mfentry:
2437 if pmf[abs] != mfentry:
2439 handle(revert, False)
2438 handle(revert, False)
2440 else:
2439 else:
2441 handle(remove, False)
2440 handle(remove, False)
2442
2441
2443 if not opts.get('dry_run'):
2442 if not opts.get('dry_run'):
2444 repo.dirstate.forget(forget[0])
2443 repo.dirstate.forget(forget[0])
2445 r = hg.revert(repo, node, update.has_key, wlock)
2444 r = hg.revert(repo, node, update.has_key, wlock)
2446 repo.dirstate.update(add[0], 'a')
2445 repo.dirstate.update(add[0], 'a')
2447 repo.dirstate.update(undelete[0], 'n')
2446 repo.dirstate.update(undelete[0], 'n')
2448 repo.dirstate.update(remove[0], 'r')
2447 repo.dirstate.update(remove[0], 'r')
2449 return r
2448 return r
2450
2449
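# Editor's note: the revert logic above is driven by 'disptable', whose rows
# pair a file-state set with the action to take when the file is (or is not)
# in the target manifest, plus backup flags; the for/else ensures the
# "unchanged in dirstate" branch only runs when no row matched.  The sketch
# below is a hypothetical, self-contained illustration of that pattern; the
# names (_example_dispatch and its string states) are not part of commands.py.
def _example_dispatch(state, in_target):
    disptable = (
        # (file state, action if in target, action if not in target)
        ('modified', 'revert', 'remove'),
        ('added',    'revert', 'forget'),
        ('removed',  'undelete', None),
    )
    for st, hit, miss in disptable:
        if state != st:
            continue
        action = hit if in_target else miss
        break
    else:
        # no row matched: the file is unchanged relative to the dirstate
        action = None
    return action

# e.g. _example_dispatch('added', in_target=False) returns 'forget'
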
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

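# Editor's note: a minimal, hypothetical sketch of journal-style rollback, to
# show why only one level of rollback exists: before a transaction appends to
# a file, its current length is recorded; rolling back truncates each file to
# the recorded length and then discards the journal.  This is a conceptual
# illustration only, not Mercurial's actual transaction code.
import os

def _example_record(journal, path):
    size = os.path.getsize(path) if os.path.exists(path) else 0
    journal.append((path, size))

def _example_rollback(journal):
    for path, size in journal:
        f = open(path, 'ab')
        f.truncate(size)
        f.close()
    del journal[:]   # journal consumed: a second rollback has nothing to undo
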
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_('no repo found'))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_('no repo found'))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = hgweb.server.create_server(ui, repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

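# Editor's note: the --daemon path above uses a small pipe handshake: the
# parent creates a pipe, re-spawns itself with --daemon-pipefds, blocks on a
# one-byte read, and exits once the child writes 'y' after it has set up its
# listening socket.  A hypothetical, POSIX-only sketch of that handshake
# (using fork() instead of os.spawnvp purely for brevity; the helper name is
# not part of commands.py):
import os, sys

def _example_daemonize(run_server):
    rfd, wfd = os.pipe()
    pid = os.fork()
    if pid:                      # parent: wait for the child to become ready
        os.close(wfd)
        os.read(rfd, 1)          # blocks until the child signals readiness
        os._exit(0)
    os.close(rfd)                # child: signal the parent, then serve
    os.write(wfd, b'y')
    os.close(wfd)
    sys.stdout.flush()
    sys.stderr.flush()
    run_server()
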
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored are
    not listed unless -c (clean), -i (ignored) or -A is given.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']

    files, matchfn, anypats = matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.status(files=files, match=matchfn,
                             list_ignored=all or opts['ignored'],
                             list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % f)
            if ((all or opts.get('copies')) and not opts.get('no_status')
                and opt == 'added' and repo.dirstate.copies.has_key(f)):
                ui.write(' %s%s' % (repo.dirstate.copies[f], end))

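# Editor's note: a small, standalone illustration of how the status loop above
# builds its output line: the terminator is NUL with --print0 and newline
# otherwise, and the one-letter code is dropped with --no-status.  The helper
# name is hypothetical and not part of commands.py.
def _example_status_line(filename, code='M', print0=False, no_status=False):
    end = print0 and '\0' or '\n'
    if no_status:
        fmt = "%%s%s" % end
        return fmt % filename
    fmt = "%s %%s%s" % (code, end)
    return fmt % filename

# _example_status_line('foo.c') == 'M foo.c\n'
# _example_status_line('foo.c', no_status=True, print0=True) == 'foo.c\x00'
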
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if rev_:
        r = hex(repo.lookup(rev_))
    else:
        p1, p2 = repo.dirstate.parents()
        if p1 == nullid:
            raise util.Abort(_('no revision to tag'))
        if p2 != nullid:
            raise util.Abort(_('outstanding uncommitted merges'))
        r = hex(p1)

    repo.tag(name, r, opts['local'], opts['message'], opts['user'],
             opts['date'])

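# Editor's note: as the docstring above says, regular tags live in a versioned
# .hgtags file; each line holds a 40-character hex changeset id followed by a
# space and the tag name.  A hypothetical reader for that format (not part of
# commands.py), shown only to illustrate the layout:
def _example_read_hgtags(text):
    tags = {}
    for line in text.splitlines():
        if not line.strip():
            continue
        node, tagname = line.split(" ", 1)
        tags[tagname.strip()] = node   # later lines override earlier ones
    return tags
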
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    for t, n in l:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            r = "    ?:?"
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            ui.write("%-30s %s\n" % (t, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    n = repo.changelog.tip()
    br = None
    if opts['branches']:
        br = repo.branchlookup([n])
    show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
    if opts['patch']:
        patch.diff(repo, repo.changelog.parents(n)[0], n)

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    header = f.read(6)
    if not header.startswith("HG"):
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
    elif not header.startswith("HG10"):
        raise util.Abort(_("%s: unknown bundle version") % fname)
    elif header == "HG10BZ":
        def generator(f):
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")
            for chunk in f:
                yield zd.decompress(chunk)
    elif header == "HG10UN":
        def generator(f):
            for chunk in f:
                yield chunk
    else:
        raise util.Abort(_("%s: unknown bundle compression type")
                         % fname)
    gen = generator(util.filechunkiter(f, 4096))
    modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
                                   'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])

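# Editor's note: unbundle above recognises a bundle by its 6-byte header
# ("HG10BZ" for bzip2-compressed data, "HG10UN" for uncompressed) and, in the
# BZ case, re-feeds the decompressor the two magic bytes "BZ" that were
# consumed while reading the header.  A hypothetical, self-contained version
# of that sniffing logic (the function name is illustrative only):
import bz2

def _example_bundle_chunks(fileobj):
    header = fileobj.read(6)
    if not header.startswith(b"HG10"):
        raise ValueError("not a HG10 bundle")
    chunks = iter(lambda: fileobj.read(4096), b"")
    if header == b"HG10BZ":
        zd = bz2.BZ2Decompressor()
        zd.decompress(b"BZ")       # restore the magic eaten with the header
        return (zd.decompress(chunk) for chunk in chunks)
    if header == b"HG10UN":
        return chunks
    raise ValueError("unknown bundle compression")
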
def undo(ui, repo):
    """undo the last commit or pull (DEPRECATED)

    (DEPRECATED)
    This command is now deprecated and will be removed in a future
    release. Please use the rollback command instead. For usage
    instructions, see the rollback command.
    """
    ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
    repo.rollback()

def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    node = _lookup(repo, node, branch)
    if merge:
        ui.warn(_('(the -m/--merge option is deprecated; '
                  'use the merge command instead)\n'))
        return hg.merge(repo, node, force=force)
    elif clean:
        return hg.clean(repo, node)
    else:
        return hg.update(repo, node)

def _lookup(repo, node, branch=None):
    if branch:
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            repo.ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                show_changeset(ui, repo, {}).show(changenode=x, brinfo=br)
            raise util.Abort("")
        if len(found) == 1:
            node = found[0]
            repo.ui.warn(_("Using head %s for branch %s\n")
                         % (short(node), branch))
        else:
            raise util.Abort(_("branch %s not found\n") % (branch))
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return node

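# Editor's note: _lookup above resolves its target with the old
# "node and repo.lookup(node) or repo.changelog.tip()" idiom, i.e. "look the
# name up if one was given, otherwise fall back to tip".  A hypothetical,
# stand-alone equivalent written without the and/or shortcut (which can
# misbehave when the wanted value is itself falsy); the names are not part of
# commands.py:
def _example_resolve(name, lookup, tip):
    if name:
        return lookup(name)
    return tip()

# _example_resolve('1.0', lookup=lambda n: 'node-for-' + n, tip=lambda: 'tip-node')
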
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

# Command options and aliases are listed here, alphabetically

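# Editor's note: each entry in the table below maps a command name (a leading
# "^" marks commands listed in short help, and "|" separates aliases) to a
# (function, option list, usage synopsis) tuple, where every option is a
# (short flag, long name, default, help text) tuple.  A hypothetical sketch of
# how such a table can be dispatched; the helper name is illustrative only and
# not part of commands.py:
def _example_find_command(cmdtable, name):
    for key, entry in cmdtable.items():
        aliases = key.lstrip("^").split("|")
        if name in aliases:
            func, options, synopsis = entry
            return func, options, synopsis
    raise KeyError("unknown command: %s" % name)
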
table = {
    "^add":
        (add,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg add [OPTION]... [FILE]...')),
    "debugaddremove|addremove":
        (addremove,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg backout [OPTION]... REV')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated'))],
         _('hg bundle FILE DEST')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read the commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk":
        (debugwalk,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [-a] [-o OUTFILESPEC] REV...')),
    "debugforget|forget":
        (forget,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg forget [OPTION]... FILE...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('b', 'branches', None, _('show branches')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-b] [-r <rev>]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))],
         _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg incoming [-p] [-n] [-M] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         [('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('b', 'branches', None, _('show branches')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('k', 'keyword', [], _('search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg log [OPTION]... [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "merge":
        (merge,
         [('b', 'branch', '', _('merge with head of a specific branch')),
          ('f', 'force', None, _('force a merge with outstanding changes'))],
         _('hg merge [-b TAG] [-f] [REV]')),
    "outgoing|out": (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('b', 'branches', None, _('show branches')),
          ('r', 'rev', '', _('show parents from the specified rev')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-b] [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update the working directory to tip after pull')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('d', 'date', '', _('date code')),
          ('u', 'user', '', _('user')),
          ('F', 'files', '', _('file list')),
          ('m', 'message', '', _('commit message')),
          ('l', 'logfile', '', _('commit message file'))],
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record remove that has already occurred')),
          ('f', 'force', None, _('remove file even if modified')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg rename [OPTION]... SOURCE... DEST')),
    "^revert":
        (revert,
         [('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
          ('I', 'include', [], _('include names matching given patterns')),
          ('X', 'exclude', [], _('exclude names matching given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg revert [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
         _('hg serve [OPTION]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3114 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3116 ('c', 'clean', None, _('show only files without changes')),
3115 ('c', 'clean', None, _('show only files without changes')),
3117 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3116 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3118 ('i', 'ignored', None, _('show ignored files')),
3117 ('i', 'ignored', None, _('show ignored files')),
3119 ('n', 'no-status', None, _('hide status prefix')),
3118 ('n', 'no-status', None, _('hide status prefix')),
3120 ('C', 'copies', None, _('show source of copied files')),
3119 ('C', 'copies', None, _('show source of copied files')),
3121 ('0', 'print0', None,
3120 ('0', 'print0', None,
3122 _('end filenames with NUL, for use with xargs')),
3121 _('end filenames with NUL, for use with xargs')),
3123 ('I', 'include', [], _('include names matching the given patterns')),
3122 ('I', 'include', [], _('include names matching the given patterns')),
3124 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3123 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3125 _('hg status [OPTION]... [FILE]...')),
3124 _('hg status [OPTION]... [FILE]...')),
3126 "tag":
3125 "tag":
3127 (tag,
3126 (tag,
3128 [('l', 'local', None, _('make the tag local')),
3127 [('l', 'local', None, _('make the tag local')),
3129 ('m', 'message', '', _('message for tag commit log entry')),
3128 ('m', 'message', '', _('message for tag commit log entry')),
3130 ('d', 'date', '', _('record datecode as commit date')),
3129 ('d', 'date', '', _('record datecode as commit date')),
3131 ('u', 'user', '', _('record user as committer')),
3130 ('u', 'user', '', _('record user as committer')),
3132 ('r', 'rev', '', _('revision to tag'))],
3131 ('r', 'rev', '', _('revision to tag'))],
3133 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3132 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3134 "tags": (tags, [], _('hg tags')),
3133 "tags": (tags, [], _('hg tags')),
3135 "tip":
3134 "tip":
3136 (tip,
3135 (tip,
3137 [('b', 'branches', None, _('show branches')),
3136 [('b', 'branches', None, _('show branches')),
3138 ('', 'style', '', _('display using template map file')),
3137 ('', 'style', '', _('display using template map file')),
3139 ('p', 'patch', None, _('show patch')),
3138 ('p', 'patch', None, _('show patch')),
3140 ('', 'template', '', _('display with template'))],
3139 ('', 'template', '', _('display with template'))],
3141 _('hg tip [-b] [-p]')),
3140 _('hg tip [-b] [-p]')),
3142 "unbundle":
3141 "unbundle":
3143 (unbundle,
3142 (unbundle,
3144 [('u', 'update', None,
3143 [('u', 'update', None,
3145 _('update the working directory to tip after unbundle'))],
3144 _('update the working directory to tip after unbundle'))],
3146 _('hg unbundle [-u] FILE')),
3145 _('hg unbundle [-u] FILE')),
3147 "debugundo|undo": (undo, [], _('hg undo')),
3146 "debugundo|undo": (undo, [], _('hg undo')),
3148 "^update|up|checkout|co":
3147 "^update|up|checkout|co":
3149 (update,
3148 (update,
3150 [('b', 'branch', '', _('checkout the head of a specific branch')),
3149 [('b', 'branch', '', _('checkout the head of a specific branch')),
3151 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3150 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3152 ('C', 'clean', None, _('overwrite locally modified files')),
3151 ('C', 'clean', None, _('overwrite locally modified files')),
3153 ('f', 'force', None, _('force a merge with outstanding changes'))],
3152 ('f', 'force', None, _('force a merge with outstanding changes'))],
3154 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3153 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3155 "verify": (verify, [], _('hg verify')),
3154 "verify": (verify, [], _('hg verify')),
3156 "version": (show_version, [], _('hg version')),
3155 "version": (show_version, [], _('hg version')),
3157 }
3156 }
3158
3157
3159 globalopts = [
3158 globalopts = [
3160 ('R', 'repository', '',
3159 ('R', 'repository', '',
3161 _('repository root directory or symbolic path name')),
3160 _('repository root directory or symbolic path name')),
3162 ('', 'cwd', '', _('change working directory')),
3161 ('', 'cwd', '', _('change working directory')),
3163 ('y', 'noninteractive', None,
3162 ('y', 'noninteractive', None,
3164 _('do not prompt, assume \'yes\' for any required answers')),
3163 _('do not prompt, assume \'yes\' for any required answers')),
3165 ('q', 'quiet', None, _('suppress output')),
3164 ('q', 'quiet', None, _('suppress output')),
3166 ('v', 'verbose', None, _('enable additional output')),
3165 ('v', 'verbose', None, _('enable additional output')),
3167 ('', 'config', [], _('set/override config option')),
3166 ('', 'config', [], _('set/override config option')),
3168 ('', 'debug', None, _('enable debugging output')),
3167 ('', 'debug', None, _('enable debugging output')),
3169 ('', 'debugger', None, _('start debugger')),
3168 ('', 'debugger', None, _('start debugger')),
3170 ('', 'lsprof', None, _('print improved command execution profile')),
3169 ('', 'lsprof', None, _('print improved command execution profile')),
3171 ('', 'traceback', None, _('print traceback on exception')),
3170 ('', 'traceback', None, _('print traceback on exception')),
3172 ('', 'time', None, _('time how long the command takes')),
3171 ('', 'time', None, _('time how long the command takes')),
3173 ('', 'profile', None, _('print command execution profile')),
3172 ('', 'profile', None, _('print command execution profile')),
3174 ('', 'version', None, _('output version information and exit')),
3173 ('', 'version', None, _('output version information and exit')),
3175 ('h', 'help', None, _('display help and exit')),
3174 ('h', 'help', None, _('display help and exit')),
3176 ]
3175 ]
3177
3176
3178 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3177 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3179 " debugindex debugindexdot")
3178 " debugindex debugindexdot")
3180 optionalrepo = ("paths serve debugconfig")
3179 optionalrepo = ("paths serve debugconfig")
3181
3180
3182 def findpossible(cmd):
3181 def findpossible(cmd):
3183 """
3182 """
3184 Return cmd -> (aliases, command table entry)
3183 Return cmd -> (aliases, command table entry)
3185 for each matching command.
3184 for each matching command.
3186 Return debug commands (or their aliases) only if no normal command matches.
3185 Return debug commands (or their aliases) only if no normal command matches.
3187 """
3186 """
3188 choice = {}
3187 choice = {}
3189 debugchoice = {}
3188 debugchoice = {}
3190 for e in table.keys():
3189 for e in table.keys():
3191 aliases = e.lstrip("^").split("|")
3190 aliases = e.lstrip("^").split("|")
3192 found = None
3191 found = None
3193 if cmd in aliases:
3192 if cmd in aliases:
3194 found = cmd
3193 found = cmd
3195 else:
3194 else:
3196 for a in aliases:
3195 for a in aliases:
3197 if a.startswith(cmd):
3196 if a.startswith(cmd):
3198 found = a
3197 found = a
3199 break
3198 break
3200 if found is not None:
3199 if found is not None:
3201 if aliases[0].startswith("debug"):
3200 if aliases[0].startswith("debug"):
3202 debugchoice[found] = (aliases, table[e])
3201 debugchoice[found] = (aliases, table[e])
3203 else:
3202 else:
3204 choice[found] = (aliases, table[e])
3203 choice[found] = (aliases, table[e])
3205
3204
3206 if not choice and debugchoice:
3205 if not choice and debugchoice:
3207 choice = debugchoice
3206 choice = debugchoice
3208
3207
3209 return choice
3208 return choice
3210
3209
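A brief illustration of the matching above (sample invocations, not part of this changeset): an exact alias wins, an unambiguous prefix is expanded, and a prefix shared by several commands is rejected.

    $ hg st     # 'st' is an explicit alias of status, so it matches exactly
    $ hg stat   # only 'status' starts with 'stat', so the prefix resolves to it
    $ hg s      # several commands start with 's' (e.g. serve, status), so
                # findcmd() raises AmbiguousCommand and dispatch() reports
                # "hg: command 's' is ambiguous"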
3211 def findcmd(cmd):
3210 def findcmd(cmd):
3212 """Return (aliases, command table entry) for command string."""
3211 """Return (aliases, command table entry) for command string."""
3213 choice = findpossible(cmd)
3212 choice = findpossible(cmd)
3214
3213
3215 if choice.has_key(cmd):
3214 if choice.has_key(cmd):
3216 return choice[cmd]
3215 return choice[cmd]
3217
3216
3218 if len(choice) > 1:
3217 if len(choice) > 1:
3219 clist = choice.keys()
3218 clist = choice.keys()
3220 clist.sort()
3219 clist.sort()
3221 raise AmbiguousCommand(cmd, clist)
3220 raise AmbiguousCommand(cmd, clist)
3222
3221
3223 if choice:
3222 if choice:
3224 return choice.values()[0]
3223 return choice.values()[0]
3225
3224
3226 raise UnknownCommand(cmd)
3225 raise UnknownCommand(cmd)
3227
3226
3228 def catchterm(*args):
3227 def catchterm(*args):
3229 raise util.SignalInterrupt
3228 raise util.SignalInterrupt
3230
3229
3231 def run():
3230 def run():
3232 sys.exit(dispatch(sys.argv[1:]))
3231 sys.exit(dispatch(sys.argv[1:]))
3233
3232
3234 class ParseError(Exception):
3233 class ParseError(Exception):
3235 """Exception raised on errors in parsing the command line."""
3234 """Exception raised on errors in parsing the command line."""
3236
3235
3237 def parse(ui, args):
3236 def parse(ui, args):
3238 options = {}
3237 options = {}
3239 cmdoptions = {}
3238 cmdoptions = {}
3240
3239
3241 try:
3240 try:
3242 args = fancyopts.fancyopts(args, globalopts, options)
3241 args = fancyopts.fancyopts(args, globalopts, options)
3243 except fancyopts.getopt.GetoptError, inst:
3242 except fancyopts.getopt.GetoptError, inst:
3244 raise ParseError(None, inst)
3243 raise ParseError(None, inst)
3245
3244
3246 if args:
3245 if args:
3247 cmd, args = args[0], args[1:]
3246 cmd, args = args[0], args[1:]
3248 aliases, i = findcmd(cmd)
3247 aliases, i = findcmd(cmd)
3249 cmd = aliases[0]
3248 cmd = aliases[0]
3250 defaults = ui.config("defaults", cmd)
3249 defaults = ui.config("defaults", cmd)
3251 if defaults:
3250 if defaults:
3252 args = defaults.split() + args
3251 args = defaults.split() + args
3253 c = list(i[1])
3252 c = list(i[1])
3254 else:
3253 else:
3255 cmd = None
3254 cmd = None
3256 c = []
3255 c = []
3257
3256
3258 # combine global options into local
3257 # combine global options into local
3259 for o in globalopts:
3258 for o in globalopts:
3260 c.append((o[0], o[1], options[o[1]], o[3]))
3259 c.append((o[0], o[1], options[o[1]], o[3]))
3261
3260
3262 try:
3261 try:
3263 args = fancyopts.fancyopts(args, c, cmdoptions)
3262 args = fancyopts.fancyopts(args, c, cmdoptions)
3264 except fancyopts.getopt.GetoptError, inst:
3263 except fancyopts.getopt.GetoptError, inst:
3265 raise ParseError(cmd, inst)
3264 raise ParseError(cmd, inst)
3266
3265
3267 # separate global options back out
3266 # separate global options back out
3268 for o in globalopts:
3267 for o in globalopts:
3269 n = o[1]
3268 n = o[1]
3270 options[n] = cmdoptions[n]
3269 options[n] = cmdoptions[n]
3271 del cmdoptions[n]
3270 del cmdoptions[n]
3272
3271
3273 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3272 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3274
3273
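A minimal example of the [defaults] lookup above; the option names come from the status entry in the command table, while the hgrc content itself is illustrative:

    [defaults]
    status = --modified --added

With this in an hgrc, parse(u, ['status']) prepends defaults.split() to the argument list, so the command behaves as if '--modified --added' had been typed on the command line.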
3275 external = {}
3274 external = {}
3276
3275
3277 def findext(name):
3276 def findext(name):
3278 '''return module with given extension name'''
3277 '''return module with given extension name'''
3279 try:
3278 try:
3280 return sys.modules[external[name]]
3279 return sys.modules[external[name]]
3281 except KeyError:
3280 except KeyError:
3282 for k, v in external.iteritems():
3281 for k, v in external.iteritems():
3283 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3282 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3284 return sys.modules[v]
3283 return sys.modules[v]
3285 raise KeyError(name)
3284 raise KeyError(name)
3286
3285
3287 def dispatch(args):
3286 def dispatch(args):
3288 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3287 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3289 num = getattr(signal, name, None)
3288 num = getattr(signal, name, None)
3290 if num: signal.signal(num, catchterm)
3289 if num: signal.signal(num, catchterm)
3291
3290
3292 try:
3291 try:
3293 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3292 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3294 except util.Abort, inst:
3293 except util.Abort, inst:
3295 sys.stderr.write(_("abort: %s\n") % inst)
3294 sys.stderr.write(_("abort: %s\n") % inst)
3296 return -1
3295 return -1
3297
3296
3298 for ext_name, load_from_name in u.extensions():
3297 for ext_name, load_from_name in u.extensions():
3299 try:
3298 try:
3300 if load_from_name:
3299 if load_from_name:
3301 # the module will be loaded in sys.modules
3300 # the module will be loaded in sys.modules
3302 # choose a unique name so that it doesn't
3301 # choose a unique name so that it doesn't
3303 # conflict with other modules
3302 # conflict with other modules
3304 module_name = "hgext_%s" % ext_name.replace('.', '_')
3303 module_name = "hgext_%s" % ext_name.replace('.', '_')
3305 mod = imp.load_source(module_name, load_from_name)
3304 mod = imp.load_source(module_name, load_from_name)
3306 else:
3305 else:
3307 def importh(name):
3306 def importh(name):
3308 mod = __import__(name)
3307 mod = __import__(name)
3309 components = name.split('.')
3308 components = name.split('.')
3310 for comp in components[1:]:
3309 for comp in components[1:]:
3311 mod = getattr(mod, comp)
3310 mod = getattr(mod, comp)
3312 return mod
3311 return mod
3313 try:
3312 try:
3314 mod = importh("hgext.%s" % ext_name)
3313 mod = importh("hgext.%s" % ext_name)
3315 except ImportError:
3314 except ImportError:
3316 mod = importh(ext_name)
3315 mod = importh(ext_name)
3317 external[ext_name] = mod.__name__
3316 external[ext_name] = mod.__name__
3318 except (util.SignalInterrupt, KeyboardInterrupt):
3317 except (util.SignalInterrupt, KeyboardInterrupt):
3319 raise
3318 raise
3320 except Exception, inst:
3319 except Exception, inst:
3321 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3320 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3322 if u.print_exc():
3321 if u.print_exc():
3323 return 1
3322 return 1
3324
3323
3325 for name in external.itervalues():
3324 for name in external.itervalues():
3326 mod = sys.modules[name]
3325 mod = sys.modules[name]
3327 uisetup = getattr(mod, 'uisetup', None)
3326 uisetup = getattr(mod, 'uisetup', None)
3328 if uisetup:
3327 if uisetup:
3329 uisetup(u)
3328 uisetup(u)
3330 cmdtable = getattr(mod, 'cmdtable', {})
3329 cmdtable = getattr(mod, 'cmdtable', {})
3331 for t in cmdtable:
3330 for t in cmdtable:
3332 if t in table:
3331 if t in table:
3333 u.warn(_("module %s overrides %s\n") % (name, t))
3332 u.warn(_("module %s overrides %s\n") % (name, t))
3334 table.update(cmdtable)
3333 table.update(cmdtable)
3335
3334
3336 try:
3335 try:
3337 cmd, func, args, options, cmdoptions = parse(u, args)
3336 cmd, func, args, options, cmdoptions = parse(u, args)
3338 if options["time"]:
3337 if options["time"]:
3339 def get_times():
3338 def get_times():
3340 t = os.times()
3339 t = os.times()
3341 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3340 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3342 t = (t[0], t[1], t[2], t[3], time.clock())
3341 t = (t[0], t[1], t[2], t[3], time.clock())
3343 return t
3342 return t
3344 s = get_times()
3343 s = get_times()
3345 def print_time():
3344 def print_time():
3346 t = get_times()
3345 t = get_times()
3347 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3346 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3348 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3347 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3349 atexit.register(print_time)
3348 atexit.register(print_time)
3350
3349
3351 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3350 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3352 not options["noninteractive"], options["traceback"],
3351 not options["noninteractive"], options["traceback"],
3353 options["config"])
3352 options["config"])
3354
3353
3355 # enter the debugger before command execution
3354 # enter the debugger before command execution
3356 if options['debugger']:
3355 if options['debugger']:
3357 pdb.set_trace()
3356 pdb.set_trace()
3358
3357
3359 try:
3358 try:
3360 if options['cwd']:
3359 if options['cwd']:
3361 try:
3360 try:
3362 os.chdir(options['cwd'])
3361 os.chdir(options['cwd'])
3363 except OSError, inst:
3362 except OSError, inst:
3364 raise util.Abort('%s: %s' %
3363 raise util.Abort('%s: %s' %
3365 (options['cwd'], inst.strerror))
3364 (options['cwd'], inst.strerror))
3366
3365
3367 path = u.expandpath(options["repository"]) or ""
3366 path = u.expandpath(options["repository"]) or ""
3368 repo = path and hg.repository(u, path=path) or None
3367 repo = path and hg.repository(u, path=path) or None
3369
3368
3370 if options['help']:
3369 if options['help']:
3371 return help_(u, cmd, options['version'])
3370 return help_(u, cmd, options['version'])
3372 elif options['version']:
3371 elif options['version']:
3373 return show_version(u)
3372 return show_version(u)
3374 elif not cmd:
3373 elif not cmd:
3375 return help_(u, 'shortlist')
3374 return help_(u, 'shortlist')
3376
3375
3377 if cmd not in norepo.split():
3376 if cmd not in norepo.split():
3378 try:
3377 try:
3379 if not repo:
3378 if not repo:
3380 repo = hg.repository(u, path=path)
3379 repo = hg.repository(u, path=path)
3381 u = repo.ui
3380 u = repo.ui
3382 for name in external.itervalues():
3381 for name in external.itervalues():
3383 mod = sys.modules[name]
3382 mod = sys.modules[name]
3384 if hasattr(mod, 'reposetup'):
3383 if hasattr(mod, 'reposetup'):
3385 mod.reposetup(u, repo)
3384 mod.reposetup(u, repo)
3386 hg.repo_setup_hooks.append(mod.reposetup)
3385 hg.repo_setup_hooks.append(mod.reposetup)
3387 except hg.RepoError:
3386 except hg.RepoError:
3388 if cmd not in optionalrepo.split():
3387 if cmd not in optionalrepo.split():
3389 raise
3388 raise
3390 d = lambda: func(u, repo, *args, **cmdoptions)
3389 d = lambda: func(u, repo, *args, **cmdoptions)
3391 else:
3390 else:
3392 d = lambda: func(u, *args, **cmdoptions)
3391 d = lambda: func(u, *args, **cmdoptions)
3393
3392
3394 # reupdate the options, repo/.hg/hgrc may have changed them
3393 # reupdate the options, repo/.hg/hgrc may have changed them
3395 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3394 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3396 not options["noninteractive"], options["traceback"],
3395 not options["noninteractive"], options["traceback"],
3397 options["config"])
3396 options["config"])
3398
3397
3399 try:
3398 try:
3400 if options['profile']:
3399 if options['profile']:
3401 import hotshot, hotshot.stats
3400 import hotshot, hotshot.stats
3402 prof = hotshot.Profile("hg.prof")
3401 prof = hotshot.Profile("hg.prof")
3403 try:
3402 try:
3404 try:
3403 try:
3405 return prof.runcall(d)
3404 return prof.runcall(d)
3406 except:
3405 except:
3407 try:
3406 try:
3408 u.warn(_('exception raised - generating '
3407 u.warn(_('exception raised - generating '
3409 'profile anyway\n'))
3408 'profile anyway\n'))
3410 except:
3409 except:
3411 pass
3410 pass
3412 raise
3411 raise
3413 finally:
3412 finally:
3414 prof.close()
3413 prof.close()
3415 stats = hotshot.stats.load("hg.prof")
3414 stats = hotshot.stats.load("hg.prof")
3416 stats.strip_dirs()
3415 stats.strip_dirs()
3417 stats.sort_stats('time', 'calls')
3416 stats.sort_stats('time', 'calls')
3418 stats.print_stats(40)
3417 stats.print_stats(40)
3419 elif options['lsprof']:
3418 elif options['lsprof']:
3420 try:
3419 try:
3421 from mercurial import lsprof
3420 from mercurial import lsprof
3422 except ImportError:
3421 except ImportError:
3423 raise util.Abort(_(
3422 raise util.Abort(_(
3424 'lsprof not available - install from '
3423 'lsprof not available - install from '
3425 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3424 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3426 p = lsprof.Profiler()
3425 p = lsprof.Profiler()
3427 p.enable(subcalls=True)
3426 p.enable(subcalls=True)
3428 try:
3427 try:
3429 return d()
3428 return d()
3430 finally:
3429 finally:
3431 p.disable()
3430 p.disable()
3432 stats = lsprof.Stats(p.getstats())
3431 stats = lsprof.Stats(p.getstats())
3433 stats.sort()
3432 stats.sort()
3434 stats.pprint(top=10, file=sys.stderr, climit=5)
3433 stats.pprint(top=10, file=sys.stderr, climit=5)
3435 else:
3434 else:
3436 return d()
3435 return d()
3437 finally:
3436 finally:
3438 u.flush()
3437 u.flush()
3439 except:
3438 except:
3440 # enter the debugger when we hit an exception
3439 # enter the debugger when we hit an exception
3441 if options['debugger']:
3440 if options['debugger']:
3442 pdb.post_mortem(sys.exc_info()[2])
3441 pdb.post_mortem(sys.exc_info()[2])
3443 u.print_exc()
3442 u.print_exc()
3444 raise
3443 raise
3445 except ParseError, inst:
3444 except ParseError, inst:
3446 if inst.args[0]:
3445 if inst.args[0]:
3447 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3446 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3448 help_(u, inst.args[0])
3447 help_(u, inst.args[0])
3449 else:
3448 else:
3450 u.warn(_("hg: %s\n") % inst.args[1])
3449 u.warn(_("hg: %s\n") % inst.args[1])
3451 help_(u, 'shortlist')
3450 help_(u, 'shortlist')
3452 except AmbiguousCommand, inst:
3451 except AmbiguousCommand, inst:
3453 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3452 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3454 (inst.args[0], " ".join(inst.args[1])))
3453 (inst.args[0], " ".join(inst.args[1])))
3455 except UnknownCommand, inst:
3454 except UnknownCommand, inst:
3456 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3455 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3457 help_(u, 'shortlist')
3456 help_(u, 'shortlist')
3458 except hg.RepoError, inst:
3457 except hg.RepoError, inst:
3459 u.warn(_("abort: %s!\n") % inst)
3458 u.warn(_("abort: %s!\n") % inst)
3460 except lock.LockHeld, inst:
3459 except lock.LockHeld, inst:
3461 if inst.errno == errno.ETIMEDOUT:
3460 if inst.errno == errno.ETIMEDOUT:
3462 reason = _('timed out waiting for lock held by %s') % inst.locker
3461 reason = _('timed out waiting for lock held by %s') % inst.locker
3463 else:
3462 else:
3464 reason = _('lock held by %s') % inst.locker
3463 reason = _('lock held by %s') % inst.locker
3465 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3464 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3466 except lock.LockUnavailable, inst:
3465 except lock.LockUnavailable, inst:
3467 u.warn(_("abort: could not lock %s: %s\n") %
3466 u.warn(_("abort: could not lock %s: %s\n") %
3468 (inst.desc or inst.filename, inst.strerror))
3467 (inst.desc or inst.filename, inst.strerror))
3469 except revlog.RevlogError, inst:
3468 except revlog.RevlogError, inst:
3470 u.warn(_("abort: "), inst, "!\n")
3469 u.warn(_("abort: "), inst, "!\n")
3471 except util.SignalInterrupt:
3470 except util.SignalInterrupt:
3472 u.warn(_("killed!\n"))
3471 u.warn(_("killed!\n"))
3473 except KeyboardInterrupt:
3472 except KeyboardInterrupt:
3474 try:
3473 try:
3475 u.warn(_("interrupted!\n"))
3474 u.warn(_("interrupted!\n"))
3476 except IOError, inst:
3475 except IOError, inst:
3477 if inst.errno == errno.EPIPE:
3476 if inst.errno == errno.EPIPE:
3478 if u.debugflag:
3477 if u.debugflag:
3479 u.warn(_("\nbroken pipe\n"))
3478 u.warn(_("\nbroken pipe\n"))
3480 else:
3479 else:
3481 raise
3480 raise
3482 except IOError, inst:
3481 except IOError, inst:
3483 if hasattr(inst, "code"):
3482 if hasattr(inst, "code"):
3484 u.warn(_("abort: %s\n") % inst)
3483 u.warn(_("abort: %s\n") % inst)
3485 elif hasattr(inst, "reason"):
3484 elif hasattr(inst, "reason"):
3486 u.warn(_("abort: error: %s\n") % inst.reason[1])
3485 u.warn(_("abort: error: %s\n") % inst.reason[1])
3487 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3486 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3488 if u.debugflag:
3487 if u.debugflag:
3489 u.warn(_("broken pipe\n"))
3488 u.warn(_("broken pipe\n"))
3490 elif getattr(inst, "strerror", None):
3489 elif getattr(inst, "strerror", None):
3491 if getattr(inst, "filename", None):
3490 if getattr(inst, "filename", None):
3492 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3491 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3493 else:
3492 else:
3494 u.warn(_("abort: %s\n") % inst.strerror)
3493 u.warn(_("abort: %s\n") % inst.strerror)
3495 else:
3494 else:
3496 raise
3495 raise
3497 except OSError, inst:
3496 except OSError, inst:
3498 if hasattr(inst, "filename"):
3497 if hasattr(inst, "filename"):
3499 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3498 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3500 else:
3499 else:
3501 u.warn(_("abort: %s\n") % inst.strerror)
3500 u.warn(_("abort: %s\n") % inst.strerror)
3502 except util.Abort, inst:
3501 except util.Abort, inst:
3503 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3502 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3504 except TypeError, inst:
3503 except TypeError, inst:
3505 # was this an argument error?
3504 # was this an argument error?
3506 tb = traceback.extract_tb(sys.exc_info()[2])
3505 tb = traceback.extract_tb(sys.exc_info()[2])
3507 if len(tb) > 2: # no
3506 if len(tb) > 2: # no
3508 raise
3507 raise
3509 u.debug(inst, "\n")
3508 u.debug(inst, "\n")
3510 u.warn(_("%s: invalid arguments\n") % cmd)
3509 u.warn(_("%s: invalid arguments\n") % cmd)
3511 help_(u, cmd)
3510 help_(u, cmd)
3512 except SystemExit, inst:
3511 except SystemExit, inst:
3513 # Commands shouldn't sys.exit directly, but give a return code.
3512 # Commands shouldn't sys.exit directly, but give a return code.
3514 # Just in case catch this and pass exit code to caller.
3513 # Just in case catch this and pass exit code to caller.
3515 return inst.code
3514 return inst.code
3516 except:
3515 except:
3517 u.warn(_("** unknown exception encountered, details follow\n"))
3516 u.warn(_("** unknown exception encountered, details follow\n"))
3518 u.warn(_("** report bug details to "
3517 u.warn(_("** report bug details to "
3519 "http://www.selenic.com/mercurial/bts\n"))
3518 "http://www.selenic.com/mercurial/bts\n"))
3520 u.warn(_("** or mercurial@selenic.com\n"))
3519 u.warn(_("** or mercurial@selenic.com\n"))
3521 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3520 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3522 % version.get_version())
3521 % version.get_version())
3523 raise
3522 raise
3524
3523
3525 return -1
3524 return -1
@@ -1,1757 +1,1747 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("no repo found"))
30 raise repo.RepoError(_("no repo found"))
31 path = p
31 path = p
32 self.path = os.path.join(path, ".hg")
32 self.path = os.path.join(path, ".hg")
33
33
34 if not create and not os.path.isdir(self.path):
34 if not create and not os.path.isdir(self.path):
35 raise repo.RepoError(_("repository %s not found") % path)
35 raise repo.RepoError(_("repository %s not found") % path)
36
36
37 self.root = os.path.abspath(path)
37 self.root = os.path.abspath(path)
38 self.origroot = path
38 self.origroot = path
39 self.ui = ui.ui(parentui=parentui)
39 self.ui = ui.ui(parentui=parentui)
40 self.opener = util.opener(self.path)
40 self.opener = util.opener(self.path)
41 self.wopener = util.opener(self.root)
41 self.wopener = util.opener(self.root)
42
42
43 try:
43 try:
44 self.ui.readconfig(self.join("hgrc"), self.root)
44 self.ui.readconfig(self.join("hgrc"), self.root)
45 except IOError:
45 except IOError:
46 pass
46 pass
47
47
48 v = self.ui.revlogopts
48 v = self.ui.revlogopts
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 fl = v.get('flags', None)
51 fl = v.get('flags', None)
52 flags = 0
52 flags = 0
53 if fl != None:
53 if fl != None:
54 for x in fl.split():
54 for x in fl.split():
55 flags |= revlog.flagstr(x)
55 flags |= revlog.flagstr(x)
56 elif self.revlogv1:
56 elif self.revlogv1:
57 flags = revlog.REVLOG_DEFAULT_FLAGS
57 flags = revlog.REVLOG_DEFAULT_FLAGS
58
58
59 v = self.revlogversion | flags
59 v = self.revlogversion | flags
60 self.manifest = manifest.manifest(self.opener, v)
60 self.manifest = manifest.manifest(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
62
62
63 # the changelog might not have the inline index flag
63 # the changelog might not have the inline index flag
64 # on. If the format of the changelog is the same as found in
64 # on. If the format of the changelog is the same as found in
65 # .hgrc, apply any flags found in the .hgrc as well.
65 # .hgrc, apply any flags found in the .hgrc as well.
66 # Otherwise, just use the version from the changelog
66 # Otherwise, just use the version from the changelog
67 v = self.changelog.version
67 v = self.changelog.version
68 if v == self.revlogversion:
68 if v == self.revlogversion:
69 v |= flags
69 v |= flags
70 self.revlogversion = v
70 self.revlogversion = v
71
71
72 self.tagscache = None
72 self.tagscache = None
73 self.nodetagscache = None
73 self.nodetagscache = None
74 self.encodepats = None
74 self.encodepats = None
75 self.decodepats = None
75 self.decodepats = None
76 self.transhandle = None
76 self.transhandle = None
77
77
78 if create:
78 if create:
79 if not os.path.exists(path):
79 if not os.path.exists(path):
80 os.mkdir(path)
80 os.mkdir(path)
81 os.mkdir(self.path)
81 os.mkdir(self.path)
82 os.mkdir(self.join("data"))
82 os.mkdir(self.join("data"))
83
83
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85
85
86 def url(self):
86 def url(self):
87 return 'file:' + self.root
87 return 'file:' + self.root
88
88
89 def hook(self, name, throw=False, **args):
89 def hook(self, name, throw=False, **args):
90 def callhook(hname, funcname):
90 def callhook(hname, funcname):
91 '''call python hook. hook is callable object, looked up as
91 '''call python hook. hook is callable object, looked up as
92 name in python module. if callable returns "true", hook
92 name in python module. if callable returns "true", hook
93 fails, else passes. if hook raises exception, treated as
93 fails, else passes. if hook raises exception, treated as
94 hook failure. exception propagates if throw is "true".
94 hook failure. exception propagates if throw is "true".
95
95
96 reason for "true" meaning "hook failed" is so that
96 reason for "true" meaning "hook failed" is so that
97 unmodified commands (e.g. mercurial.commands.update) can
97 unmodified commands (e.g. mercurial.commands.update) can
98 be run as hooks without wrappers to convert return values.'''
98 be run as hooks without wrappers to convert return values.'''
99
99
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
101 d = funcname.rfind('.')
101 d = funcname.rfind('.')
102 if d == -1:
102 if d == -1:
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
104 % (hname, funcname))
104 % (hname, funcname))
105 modname = funcname[:d]
105 modname = funcname[:d]
106 try:
106 try:
107 obj = __import__(modname)
107 obj = __import__(modname)
108 except ImportError:
108 except ImportError:
109 try:
109 try:
110 # extensions are loaded with hgext_ prefix
110 # extensions are loaded with hgext_ prefix
111 obj = __import__("hgext_%s" % modname)
111 obj = __import__("hgext_%s" % modname)
112 except ImportError:
112 except ImportError:
113 raise util.Abort(_('%s hook is invalid '
113 raise util.Abort(_('%s hook is invalid '
114 '(import of "%s" failed)') %
114 '(import of "%s" failed)') %
115 (hname, modname))
115 (hname, modname))
116 try:
116 try:
117 for p in funcname.split('.')[1:]:
117 for p in funcname.split('.')[1:]:
118 obj = getattr(obj, p)
118 obj = getattr(obj, p)
119 except AttributeError, err:
119 except AttributeError, err:
120 raise util.Abort(_('%s hook is invalid '
120 raise util.Abort(_('%s hook is invalid '
121 '("%s" is not defined)') %
121 '("%s" is not defined)') %
122 (hname, funcname))
122 (hname, funcname))
123 if not callable(obj):
123 if not callable(obj):
124 raise util.Abort(_('%s hook is invalid '
124 raise util.Abort(_('%s hook is invalid '
125 '("%s" is not callable)') %
125 '("%s" is not callable)') %
126 (hname, funcname))
126 (hname, funcname))
127 try:
127 try:
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
129 except (KeyboardInterrupt, util.SignalInterrupt):
129 except (KeyboardInterrupt, util.SignalInterrupt):
130 raise
130 raise
131 except Exception, exc:
131 except Exception, exc:
132 if isinstance(exc, util.Abort):
132 if isinstance(exc, util.Abort):
133 self.ui.warn(_('error: %s hook failed: %s\n') %
133 self.ui.warn(_('error: %s hook failed: %s\n') %
134 (hname, exc.args[0] % exc.args[1:]))
134 (hname, exc.args[0] % exc.args[1:]))
135 else:
135 else:
136 self.ui.warn(_('error: %s hook raised an exception: '
136 self.ui.warn(_('error: %s hook raised an exception: '
137 '%s\n') % (hname, exc))
137 '%s\n') % (hname, exc))
138 if throw:
138 if throw:
139 raise
139 raise
140 self.ui.print_exc()
140 self.ui.print_exc()
141 return True
141 return True
142 if r:
142 if r:
143 if throw:
143 if throw:
144 raise util.Abort(_('%s hook failed') % hname)
144 raise util.Abort(_('%s hook failed') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
146 return r
146 return r
147
147
148 def runhook(name, cmd):
148 def runhook(name, cmd):
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
151 r = util.system(cmd, environ=env, cwd=self.root)
151 r = util.system(cmd, environ=env, cwd=self.root)
152 if r:
152 if r:
153 desc, r = util.explain_exit(r)
153 desc, r = util.explain_exit(r)
154 if throw:
154 if throw:
155 raise util.Abort(_('%s hook %s') % (name, desc))
155 raise util.Abort(_('%s hook %s') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
157 return r
157 return r
158
158
159 r = False
159 r = False
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
161 if hname.split(".", 1)[0] == name and cmd]
161 if hname.split(".", 1)[0] == name and cmd]
162 hooks.sort()
162 hooks.sort()
163 for hname, cmd in hooks:
163 for hname, cmd in hooks:
164 if cmd.startswith('python:'):
164 if cmd.startswith('python:'):
165 r = callhook(hname, cmd[7:].strip()) or r
165 r = callhook(hname, cmd[7:].strip()) or r
166 else:
166 else:
167 r = runhook(hname, cmd) or r
167 r = runhook(hname, cmd) or r
168 return r
168 return r
169
169
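An illustrative [hooks] section for the dispatcher above; the hook names ('pretag', 'tag') are the ones fired in this file, while the commands themselves are placeholders:

    [hooks]
    # shell hook: handled by runhook(), keyword args are exported as HG_* variables
    tag = echo "tagged $HG_TAG as $HG_NODE"
    # in-process hook: the 'python:' prefix routes it through callhook();
    # 'mymodule.check' is a hypothetical dotted function name
    pretag.check = python:mymodule.check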
170 tag_disallowed = ':\r\n'
170 tag_disallowed = ':\r\n'
171
171
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
173 '''tag a revision with a symbolic name.
173 '''tag a revision with a symbolic name.
174
174
175 if local is True, the tag is stored in a per-repository file.
175 if local is True, the tag is stored in a per-repository file.
176 otherwise, it is stored in the .hgtags file, and a new
176 otherwise, it is stored in the .hgtags file, and a new
177 changeset is committed with the change.
177 changeset is committed with the change.
178
178
179 keyword arguments:
179 keyword arguments:
180
180
181 local: whether to store tag in non-version-controlled file
181 local: whether to store tag in non-version-controlled file
182 (default False)
182 (default False)
183
183
184 message: commit message to use if committing
184 message: commit message to use if committing
185
185
186 user: name of user to use if committing
186 user: name of user to use if committing
187
187
188 date: date tuple to use if committing'''
188 date: date tuple to use if committing'''
189
189
190 for c in self.tag_disallowed:
190 for c in self.tag_disallowed:
191 if c in name:
191 if c in name:
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
193
193
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
195
195
196 if local:
196 if local:
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
198 self.hook('tag', node=node, tag=name, local=local)
198 self.hook('tag', node=node, tag=name, local=local)
199 return
199 return
200
200
201 for x in self.changes():
201 for x in self.status()[:5]:
202 if '.hgtags' in x:
202 if '.hgtags' in x:
203 raise util.Abort(_('working copy of .hgtags is changed '
203 raise util.Abort(_('working copy of .hgtags is changed '
204 '(please commit .hgtags manually)'))
204 '(please commit .hgtags manually)'))
205
205
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
207 if self.dirstate.state('.hgtags') == '?':
207 if self.dirstate.state('.hgtags') == '?':
208 self.add(['.hgtags'])
208 self.add(['.hgtags'])
209
209
210 if not message:
210 if not message:
211 message = _('Added tag %s for changeset %s') % (name, node)
211 message = _('Added tag %s for changeset %s') % (name, node)
212
212
213 self.commit(['.hgtags'], message, user, date)
213 self.commit(['.hgtags'], message, user, date)
214 self.hook('tag', node=node, tag=name, local=local)
214 self.hook('tag', node=node, tag=name, local=local)
215
215
216 def tags(self):
216 def tags(self):
217 '''return a mapping of tag to node'''
217 '''return a mapping of tag to node'''
218 if not self.tagscache:
218 if not self.tagscache:
219 self.tagscache = {}
219 self.tagscache = {}
220
220
221 def parsetag(line, context):
221 def parsetag(line, context):
222 if not line:
222 if not line:
223 return
223 return
224 s = line.split(" ", 1)
224 s = line.split(" ", 1)
225 if len(s) != 2:
225 if len(s) != 2:
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 return
227 return
228 node, key = s
228 node, key = s
229 key = key.strip()
229 key = key.strip()
230 try:
230 try:
231 bin_n = bin(node)
231 bin_n = bin(node)
232 except TypeError:
232 except TypeError:
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 (context, node))
234 (context, node))
235 return
235 return
236 if bin_n not in self.changelog.nodemap:
236 if bin_n not in self.changelog.nodemap:
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 (context, key))
238 (context, key))
239 return
239 return
240 self.tagscache[key] = bin_n
240 self.tagscache[key] = bin_n
241
241
242 # read the tags file from each head, ending with the tip,
242 # read the tags file from each head, ending with the tip,
243 # and add each tag found to the map, with "newer" ones
243 # and add each tag found to the map, with "newer" ones
244 # taking precedence
244 # taking precedence
245 heads = self.heads()
245 heads = self.heads()
246 heads.reverse()
246 heads.reverse()
247 fl = self.file(".hgtags")
247 fl = self.file(".hgtags")
248 for node in heads:
248 for node in heads:
249 change = self.changelog.read(node)
249 change = self.changelog.read(node)
250 rev = self.changelog.rev(node)
250 rev = self.changelog.rev(node)
251 fn, ff = self.manifest.find(change[0], '.hgtags')
251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 if fn is None: continue
252 if fn is None: continue
253 count = 0
253 count = 0
254 for l in fl.read(fn).splitlines():
254 for l in fl.read(fn).splitlines():
255 count += 1
255 count += 1
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 (rev, short(node), count))
257 (rev, short(node), count))
258 try:
258 try:
259 f = self.opener("localtags")
259 f = self.opener("localtags")
260 count = 0
260 count = 0
261 for l in f:
261 for l in f:
262 count += 1
262 count += 1
263 parsetag(l, _("localtags, line %d") % count)
263 parsetag(l, _("localtags, line %d") % count)
264 except IOError:
264 except IOError:
265 pass
265 pass
266
266
267 self.tagscache['tip'] = self.changelog.tip()
267 self.tagscache['tip'] = self.changelog.tip()
268
268
269 return self.tagscache
269 return self.tagscache
270
270
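The two tag files parsed above share one line format; an illustrative entry (the hash is a made-up placeholder):

    0123456789abcdef0123456789abcdef01234567 release-1.0

Each line is '<hex changeset node> <tag name>'. Entries read from later heads override earlier ones, localtags entries override .hgtags entries, and 'tip' is always added last.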
271 def tagslist(self):
271 def tagslist(self):
272 '''return a list of tags ordered by revision'''
272 '''return a list of tags ordered by revision'''
273 l = []
273 l = []
274 for t, n in self.tags().items():
274 for t, n in self.tags().items():
275 try:
275 try:
276 r = self.changelog.rev(n)
276 r = self.changelog.rev(n)
277 except:
277 except:
278 r = -2 # sort to the beginning of the list if unknown
278 r = -2 # sort to the beginning of the list if unknown
279 l.append((r, t, n))
279 l.append((r, t, n))
280 l.sort()
280 l.sort()
281 return [(t, n) for r, t, n in l]
281 return [(t, n) for r, t, n in l]
282
282
283 def nodetags(self, node):
283 def nodetags(self, node):
284 '''return the tags associated with a node'''
284 '''return the tags associated with a node'''
285 if not self.nodetagscache:
285 if not self.nodetagscache:
286 self.nodetagscache = {}
286 self.nodetagscache = {}
287 for t, n in self.tags().items():
287 for t, n in self.tags().items():
288 self.nodetagscache.setdefault(n, []).append(t)
288 self.nodetagscache.setdefault(n, []).append(t)
289 return self.nodetagscache.get(node, [])
289 return self.nodetagscache.get(node, [])
290
290
291 def lookup(self, key):
291 def lookup(self, key):
292 try:
292 try:
293 return self.tags()[key]
293 return self.tags()[key]
294 except KeyError:
294 except KeyError:
295 if key == '.':
295 if key == '.':
296 key = self.dirstate.parents()[0]
296 key = self.dirstate.parents()[0]
297 if key == nullid:
297 if key == nullid:
298 raise repo.RepoError(_("no revision checked out"))
298 raise repo.RepoError(_("no revision checked out"))
299 try:
299 try:
300 return self.changelog.lookup(key)
300 return self.changelog.lookup(key)
301 except:
301 except:
302 raise repo.RepoError(_("unknown revision '%s'") % key)
302 raise repo.RepoError(_("unknown revision '%s'") % key)
303
303
304 def dev(self):
304 def dev(self):
305 return os.lstat(self.path).st_dev
305 return os.lstat(self.path).st_dev
306
306
307 def local(self):
307 def local(self):
308 return True
308 return True
309
309
310 def join(self, f):
310 def join(self, f):
311 return os.path.join(self.path, f)
311 return os.path.join(self.path, f)
312
312
313 def wjoin(self, f):
313 def wjoin(self, f):
314 return os.path.join(self.root, f)
314 return os.path.join(self.root, f)
315
315
316 def file(self, f):
316 def file(self, f):
317 if f[0] == '/':
317 if f[0] == '/':
318 f = f[1:]
318 f = f[1:]
319 return filelog.filelog(self.opener, f, self.revlogversion)
319 return filelog.filelog(self.opener, f, self.revlogversion)
320
320
321 def changectx(self, changeid):
321 def changectx(self, changeid):
322 return context.changectx(self, changeid)
322 return context.changectx(self, changeid)
323
323
324 def filectx(self, path, changeid=None, fileid=None):
324 def filectx(self, path, changeid=None, fileid=None):
325 """changeid can be a changeset revision, node, or tag.
325 """changeid can be a changeset revision, node, or tag.
326 fileid can be a file revision or node."""
326 fileid can be a file revision or node."""
327 return context.filectx(self, path, changeid, fileid)
327 return context.filectx(self, path, changeid, fileid)
328
328
329 def getcwd(self):
329 def getcwd(self):
330 return self.dirstate.getcwd()
330 return self.dirstate.getcwd()
331
331
332 def wfile(self, f, mode='r'):
332 def wfile(self, f, mode='r'):
333 return self.wopener(f, mode)
333 return self.wopener(f, mode)
334
334
335 def wread(self, filename):
335 def wread(self, filename):
336 if self.encodepats == None:
336 if self.encodepats == None:
337 l = []
337 l = []
338 for pat, cmd in self.ui.configitems("encode"):
338 for pat, cmd in self.ui.configitems("encode"):
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
340 l.append((mf, cmd))
340 l.append((mf, cmd))
341 self.encodepats = l
341 self.encodepats = l
342
342
343 data = self.wopener(filename, 'r').read()
343 data = self.wopener(filename, 'r').read()
344
344
345 for mf, cmd in self.encodepats:
345 for mf, cmd in self.encodepats:
346 if mf(filename):
346 if mf(filename):
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
348 data = util.filter(data, cmd)
348 data = util.filter(data, cmd)
349 break
349 break
350
350
351 return data
351 return data
352
352
353 def wwrite(self, filename, data, fd=None):
353 def wwrite(self, filename, data, fd=None):
354 if self.decodepats == None:
354 if self.decodepats == None:
355 l = []
355 l = []
356 for pat, cmd in self.ui.configitems("decode"):
356 for pat, cmd in self.ui.configitems("decode"):
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
358 l.append((mf, cmd))
358 l.append((mf, cmd))
359 self.decodepats = l
359 self.decodepats = l
360
360
361 for mf, cmd in self.decodepats:
361 for mf, cmd in self.decodepats:
362 if mf(filename):
362 if mf(filename):
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
364 data = util.filter(data, cmd)
364 data = util.filter(data, cmd)
365 break
365 break
366
366
367 if fd:
367 if fd:
368 return fd.write(data)
368 return fd.write(data)
369 return self.wopener(filename, 'w').write(data)
369 return self.wopener(filename, 'w').write(data)
370
370
371 def transaction(self):
371 def transaction(self):
372 tr = self.transhandle
372 tr = self.transhandle
373 if tr != None and tr.running():
373 if tr != None and tr.running():
374 return tr.nest()
374 return tr.nest()
375
375
376 # save dirstate for rollback
376 # save dirstate for rollback
377 try:
377 try:
378 ds = self.opener("dirstate").read()
378 ds = self.opener("dirstate").read()
379 except IOError:
379 except IOError:
380 ds = ""
380 ds = ""
381 self.opener("journal.dirstate", "w").write(ds)
381 self.opener("journal.dirstate", "w").write(ds)
382
382
383 tr = transaction.transaction(self.ui.warn, self.opener,
383 tr = transaction.transaction(self.ui.warn, self.opener,
384 self.join("journal"),
384 self.join("journal"),
385 aftertrans(self.path))
385 aftertrans(self.path))
386 self.transhandle = tr
386 self.transhandle = tr
387 return tr
387 return tr
388
388
389 def recover(self):
389 def recover(self):
390 l = self.lock()
390 l = self.lock()
391 if os.path.exists(self.join("journal")):
391 if os.path.exists(self.join("journal")):
392 self.ui.status(_("rolling back interrupted transaction\n"))
392 self.ui.status(_("rolling back interrupted transaction\n"))
393 transaction.rollback(self.opener, self.join("journal"))
393 transaction.rollback(self.opener, self.join("journal"))
394 self.reload()
394 self.reload()
395 return True
395 return True
396 else:
396 else:
397 self.ui.warn(_("no interrupted transaction available\n"))
397 self.ui.warn(_("no interrupted transaction available\n"))
398 return False
398 return False
399
399
400 def rollback(self, wlock=None):
400 def rollback(self, wlock=None):
401 if not wlock:
401 if not wlock:
402 wlock = self.wlock()
402 wlock = self.wlock()
403 l = self.lock()
403 l = self.lock()
404 if os.path.exists(self.join("undo")):
404 if os.path.exists(self.join("undo")):
405 self.ui.status(_("rolling back last transaction\n"))
405 self.ui.status(_("rolling back last transaction\n"))
406 transaction.rollback(self.opener, self.join("undo"))
406 transaction.rollback(self.opener, self.join("undo"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
408 self.reload()
408 self.reload()
409 self.wreload()
409 self.wreload()
410 else:
410 else:
411 self.ui.warn(_("no rollback information available\n"))
411 self.ui.warn(_("no rollback information available\n"))
412
412
413 def wreload(self):
413 def wreload(self):
414 self.dirstate.read()
414 self.dirstate.read()
415
415
416 def reload(self):
416 def reload(self):
417 self.changelog.load()
417 self.changelog.load()
418 self.manifest.load()
418 self.manifest.load()
419 self.tagscache = None
419 self.tagscache = None
420 self.nodetagscache = None
420 self.nodetagscache = None
421
421
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
423 desc=None):
423 desc=None):
424 try:
424 try:
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
426 except lock.LockHeld, inst:
426 except lock.LockHeld, inst:
427 if not wait:
427 if not wait:
428 raise
428 raise
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
430 (desc, inst.args[0]))
430 (desc, inst.args[0]))
431 # default to 600 seconds timeout
431 # default to 600 seconds timeout
432 l = lock.lock(self.join(lockname),
432 l = lock.lock(self.join(lockname),
433 int(self.ui.config("ui", "timeout") or 600),
433 int(self.ui.config("ui", "timeout") or 600),
434 releasefn, desc=desc)
434 releasefn, desc=desc)
435 if acquirefn:
435 if acquirefn:
436 acquirefn()
436 acquirefn()
437 return l
437 return l
438
438
439 def lock(self, wait=1):
439 def lock(self, wait=1):
440 return self.do_lock("lock", wait, acquirefn=self.reload,
440 return self.do_lock("lock", wait, acquirefn=self.reload,
441 desc=_('repository %s') % self.origroot)
441 desc=_('repository %s') % self.origroot)
442
442
443 def wlock(self, wait=1):
443 def wlock(self, wait=1):
444 return self.do_lock("wlock", wait, self.dirstate.write,
444 return self.do_lock("wlock", wait, self.dirstate.write,
445 self.wreload,
445 self.wreload,
446 desc=_('working directory of %s') % self.origroot)
446 desc=_('working directory of %s') % self.origroot)
447
447
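For reference, the timeout consulted by do_lock() above can be set in hgrc; the value shown is simply the code's own fallback:

    [ui]
    timeout = 600

do_lock() first tries a non-blocking acquire; when that fails and wait is set, it warns who holds the lock and retries with int(self.ui.config("ui", "timeout") or 600) seconds.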
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
449 "determine whether a new filenode is needed"
449 "determine whether a new filenode is needed"
450 fp1 = manifest1.get(filename, nullid)
450 fp1 = manifest1.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
452
452
453 if fp2 != nullid:
453 if fp2 != nullid:
454 # is one parent an ancestor of the other?
454 # is one parent an ancestor of the other?
455 fpa = filelog.ancestor(fp1, fp2)
455 fpa = filelog.ancestor(fp1, fp2)
456 if fpa == fp1:
456 if fpa == fp1:
457 fp1, fp2 = fp2, nullid
457 fp1, fp2 = fp2, nullid
458 elif fpa == fp2:
458 elif fpa == fp2:
459 fp2 = nullid
459 fp2 = nullid
460
460
461 # is the file unmodified from the parent? report existing entry
461 # is the file unmodified from the parent? report existing entry
462 if fp2 == nullid and text == filelog.read(fp1):
462 if fp2 == nullid and text == filelog.read(fp1):
463 return (fp1, None, None)
463 return (fp1, None, None)
464
464
465 return (None, fp1, fp2)
465 return (None, fp1, fp2)
466
466
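# Illustrative sketch (editorial): checkfilemerge() above drops a merge
# parent that is an ancestor of the other, so a trivial "merge" does not
# force a new filenode.  A toy version over a hand-built parent map
# (hypothetical data, not real filelog nodes):
nullid = None
parents = {"a": [], "b": ["a"], "c": ["b"], "d": ["b"]}

def ancestors(node):
    seen, stack = set(), [node]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents[n])
    return seen

def reduce_parents(fp1, fp2):
    """Mirror the fp1/fp2 reduction done before deciding on a new filenode."""
    if fp2 is not nullid:
        if fp1 in ancestors(fp2):        # fp1 is an ancestor of fp2
            fp1, fp2 = fp2, nullid
        elif fp2 in ancestors(fp1):      # fp2 is an ancestor of fp1
            fp2 = nullid
    return fp1, fp2

print(reduce_parents("b", "c"))          # -> ('c', None): nothing to merge
print(reduce_parents("c", "d"))          # -> ('c', 'd'): a real merge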
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
468 orig_parent = self.dirstate.parents()[0] or nullid
468 orig_parent = self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
471 c1 = self.changelog.read(p1)
471 c1 = self.changelog.read(p1)
472 c2 = self.changelog.read(p2)
472 c2 = self.changelog.read(p2)
473 m1 = self.manifest.read(c1[0]).copy()
473 m1 = self.manifest.read(c1[0]).copy()
474 m2 = self.manifest.read(c2[0])
474 m2 = self.manifest.read(c2[0])
475 changed = []
475 changed = []
476
476
477 if orig_parent == p1:
477 if orig_parent == p1:
478 update_dirstate = 1
478 update_dirstate = 1
479 else:
479 else:
480 update_dirstate = 0
480 update_dirstate = 0
481
481
482 if not wlock:
482 if not wlock:
483 wlock = self.wlock()
483 wlock = self.wlock()
484 l = self.lock()
484 l = self.lock()
485 tr = self.transaction()
485 tr = self.transaction()
486 linkrev = self.changelog.count()
486 linkrev = self.changelog.count()
487 for f in files:
487 for f in files:
488 try:
488 try:
489 t = self.wread(f)
489 t = self.wread(f)
490 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
490 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
491 r = self.file(f)
491 r = self.file(f)
492
492
493 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
493 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
494 if entry:
494 if entry:
495 m1[f] = entry
495 m1[f] = entry
496 continue
496 continue
497
497
498 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
498 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
499 changed.append(f)
499 changed.append(f)
500 if update_dirstate:
500 if update_dirstate:
501 self.dirstate.update([f], "n")
501 self.dirstate.update([f], "n")
502 except IOError:
502 except IOError:
503 try:
503 try:
504 del m1[f]
504 del m1[f]
505 if update_dirstate:
505 if update_dirstate:
506 self.dirstate.forget([f])
506 self.dirstate.forget([f])
507 except:
507 except:
508 # deleted from p2?
508 # deleted from p2?
509 pass
509 pass
510
510
511 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
511 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
512 user = user or self.ui.username()
512 user = user or self.ui.username()
513 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
513 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
514 tr.close()
514 tr.close()
515 if update_dirstate:
515 if update_dirstate:
516 self.dirstate.setparents(n, nullid)
516 self.dirstate.setparents(n, nullid)
517
517
518 def commit(self, files=None, text="", user=None, date=None,
518 def commit(self, files=None, text="", user=None, date=None,
519 match=util.always, force=False, lock=None, wlock=None,
519 match=util.always, force=False, lock=None, wlock=None,
520 force_editor=False):
520 force_editor=False):
521 commit = []
521 commit = []
522 remove = []
522 remove = []
523 changed = []
523 changed = []
524
524
525 if files:
525 if files:
526 for f in files:
526 for f in files:
527 s = self.dirstate.state(f)
527 s = self.dirstate.state(f)
528 if s in 'nmai':
528 if s in 'nmai':
529 commit.append(f)
529 commit.append(f)
530 elif s == 'r':
530 elif s == 'r':
531 remove.append(f)
531 remove.append(f)
532 else:
532 else:
533 self.ui.warn(_("%s not tracked!\n") % f)
533 self.ui.warn(_("%s not tracked!\n") % f)
534 else:
534 else:
535 modified, added, removed, deleted, unknown = self.changes(match=match)
535 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
536 commit = modified + added
536 commit = modified + added
537 remove = removed
537 remove = removed
538
538
539 p1, p2 = self.dirstate.parents()
539 p1, p2 = self.dirstate.parents()
540 c1 = self.changelog.read(p1)
540 c1 = self.changelog.read(p1)
541 c2 = self.changelog.read(p2)
541 c2 = self.changelog.read(p2)
542 m1 = self.manifest.read(c1[0]).copy()
542 m1 = self.manifest.read(c1[0]).copy()
543 m2 = self.manifest.read(c2[0])
543 m2 = self.manifest.read(c2[0])
544
544
545 if not commit and not remove and not force and p2 == nullid:
545 if not commit and not remove and not force and p2 == nullid:
546 self.ui.status(_("nothing changed\n"))
546 self.ui.status(_("nothing changed\n"))
547 return None
547 return None
548
548
549 xp1 = hex(p1)
549 xp1 = hex(p1)
550 if p2 == nullid: xp2 = ''
550 if p2 == nullid: xp2 = ''
551 else: xp2 = hex(p2)
551 else: xp2 = hex(p2)
552
552
553 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
553 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
554
554
555 if not wlock:
555 if not wlock:
556 wlock = self.wlock()
556 wlock = self.wlock()
557 if not lock:
557 if not lock:
558 lock = self.lock()
558 lock = self.lock()
559 tr = self.transaction()
559 tr = self.transaction()
560
560
561 # check in files
561 # check in files
562 new = {}
562 new = {}
563 linkrev = self.changelog.count()
563 linkrev = self.changelog.count()
564 commit.sort()
564 commit.sort()
565 for f in commit:
565 for f in commit:
566 self.ui.note(f + "\n")
566 self.ui.note(f + "\n")
567 try:
567 try:
568 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
568 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
569 t = self.wread(f)
569 t = self.wread(f)
570 except IOError:
570 except IOError:
571 self.ui.warn(_("trouble committing %s!\n") % f)
571 self.ui.warn(_("trouble committing %s!\n") % f)
572 raise
572 raise
573
573
574 r = self.file(f)
574 r = self.file(f)
575
575
576 meta = {}
576 meta = {}
577 cp = self.dirstate.copied(f)
577 cp = self.dirstate.copied(f)
578 if cp:
578 if cp:
579 meta["copy"] = cp
579 meta["copy"] = cp
580 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
580 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
581 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
581 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
582 fp1, fp2 = nullid, nullid
582 fp1, fp2 = nullid, nullid
583 else:
583 else:
584 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
584 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
585 if entry:
585 if entry:
586 new[f] = entry
586 new[f] = entry
587 continue
587 continue
588
588
589 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
589 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
590 # remember what we've added so that we can later calculate
590 # remember what we've added so that we can later calculate
591 # the files to pull from a set of changesets
591 # the files to pull from a set of changesets
592 changed.append(f)
592 changed.append(f)
593
593
594 # update manifest
594 # update manifest
595 m1.update(new)
595 m1.update(new)
596 for f in remove:
596 for f in remove:
597 if f in m1:
597 if f in m1:
598 del m1[f]
598 del m1[f]
599 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
599 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
600 (new, remove))
600 (new, remove))
601
601
602 # add changeset
602 # add changeset
603 new = new.keys()
603 new = new.keys()
604 new.sort()
604 new.sort()
605
605
606 user = user or self.ui.username()
606 user = user or self.ui.username()
607 if not text or force_editor:
607 if not text or force_editor:
608 edittext = []
608 edittext = []
609 if text:
609 if text:
610 edittext.append(text)
610 edittext.append(text)
611 edittext.append("")
611 edittext.append("")
612 if p2 != nullid:
612 if p2 != nullid:
613 edittext.append("HG: branch merge")
613 edittext.append("HG: branch merge")
614 edittext.extend(["HG: changed %s" % f for f in changed])
614 edittext.extend(["HG: changed %s" % f for f in changed])
615 edittext.extend(["HG: removed %s" % f for f in remove])
615 edittext.extend(["HG: removed %s" % f for f in remove])
616 if not changed and not remove:
616 if not changed and not remove:
617 edittext.append("HG: no files changed")
617 edittext.append("HG: no files changed")
618 edittext.append("")
618 edittext.append("")
619 # run editor in the repository root
619 # run editor in the repository root
620 olddir = os.getcwd()
620 olddir = os.getcwd()
621 os.chdir(self.root)
621 os.chdir(self.root)
622 text = self.ui.edit("\n".join(edittext), user)
622 text = self.ui.edit("\n".join(edittext), user)
623 os.chdir(olddir)
623 os.chdir(olddir)
624
624
625 lines = [line.rstrip() for line in text.rstrip().splitlines()]
625 lines = [line.rstrip() for line in text.rstrip().splitlines()]
626 while lines and not lines[0]:
626 while lines and not lines[0]:
627 del lines[0]
627 del lines[0]
628 if not lines:
628 if not lines:
629 return None
629 return None
630 text = '\n'.join(lines)
630 text = '\n'.join(lines)
631 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
631 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
632 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
632 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
633 parent2=xp2)
633 parent2=xp2)
634 tr.close()
634 tr.close()
635
635
636 self.dirstate.setparents(n)
636 self.dirstate.setparents(n)
637 self.dirstate.update(new, "n")
637 self.dirstate.update(new, "n")
638 self.dirstate.forget(remove)
638 self.dirstate.forget(remove)
639
639
640 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
640 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
641 return n
641 return n
642
642
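# Illustrative sketch (editorial): commit() above runs three hooks around the
# transaction -- "precommit" before anything is written, "pretxncommit" once
# the changeset exists but before the transaction closes (so it can still
# abort), and "commit" after everything is durable.  A stand-alone model of
# that ordering, with hypothetical hook callables:
def run_hooks(hooks, name, **args):
    for hook in hooks.get(name, []):
        if hook(**args) is False:        # a failing pre* hook aborts
            raise RuntimeError("%s hook failed" % name)

def toy_commit(hooks, text):
    run_hooks(hooks, "precommit")
    node = "%040x" % abs(hash(text))     # stand-in for the new changeset id
    run_hooks(hooks, "pretxncommit", node=node)
    # ... the real code closes the transaction here ...
    run_hooks(hooks, "commit", node=node)
    return node

hooks = {"commit": [lambda node: print("committed", node)]}
print(toy_commit(hooks, "example change"))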
643 def walk(self, node=None, files=[], match=util.always, badmatch=None):
643 def walk(self, node=None, files=[], match=util.always, badmatch=None):
644 if node:
644 if node:
645 fdict = dict.fromkeys(files)
645 fdict = dict.fromkeys(files)
646 for fn in self.manifest.read(self.changelog.read(node)[0]):
646 for fn in self.manifest.read(self.changelog.read(node)[0]):
647 fdict.pop(fn, None)
647 fdict.pop(fn, None)
648 if match(fn):
648 if match(fn):
649 yield 'm', fn
649 yield 'm', fn
650 for fn in fdict:
650 for fn in fdict:
651 if badmatch and badmatch(fn):
651 if badmatch and badmatch(fn):
652 if match(fn):
652 if match(fn):
653 yield 'b', fn
653 yield 'b', fn
654 else:
654 else:
655 self.ui.warn(_('%s: No such file in rev %s\n') % (
655 self.ui.warn(_('%s: No such file in rev %s\n') % (
656 util.pathto(self.getcwd(), fn), short(node)))
656 util.pathto(self.getcwd(), fn), short(node)))
657 else:
657 else:
658 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
658 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
659 yield src, fn
659 yield src, fn
660
660
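# Illustrative sketch (editorial): walk() above yields (source, filename)
# pairs -- 'm' for names found in the revision's manifest and 'b' for
# explicitly requested names that are missing there but accepted by the
# badmatch callback.  A reduced generator over plain sets (hypothetical
# helper, not the dirstate walker):
def walk(manifest, files, match=lambda fn: True, badmatch=None):
    requested = set(files)
    for fn in sorted(manifest):
        requested.discard(fn)
        if match(fn):
            yield 'm', fn
    for fn in sorted(requested):         # asked for, but not in the manifest
        if badmatch and badmatch(fn) and match(fn):
            yield 'b', fn

manifest = {"a.txt", "src/b.py"}
print(list(walk(manifest, ["a.txt", "missing.txt"], badmatch=lambda fn: True)))
# -> [('m', 'a.txt'), ('m', 'src/b.py'), ('b', 'missing.txt')]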
661 def status(self, node1=None, node2=None, files=[], match=util.always,
661 def status(self, node1=None, node2=None, files=[], match=util.always,
662 wlock=None, list_ignored=False, list_clean=False):
662 wlock=None, list_ignored=False, list_clean=False):
663 """return status of files between two nodes or node and working directory
663 """return status of files between two nodes or node and working directory
664
664
665 If node1 is None, use the first dirstate parent instead.
665 If node1 is None, use the first dirstate parent instead.
666 If node2 is None, compare node1 with working directory.
666 If node2 is None, compare node1 with working directory.
667 """
667 """
668
668
669 def fcmp(fn, mf):
669 def fcmp(fn, mf):
670 t1 = self.wread(fn)
670 t1 = self.wread(fn)
671 t2 = self.file(fn).read(mf.get(fn, nullid))
671 t2 = self.file(fn).read(mf.get(fn, nullid))
672 return cmp(t1, t2)
672 return cmp(t1, t2)
673
673
674 def mfmatches(node):
674 def mfmatches(node):
675 change = self.changelog.read(node)
675 change = self.changelog.read(node)
676 mf = dict(self.manifest.read(change[0]))
676 mf = dict(self.manifest.read(change[0]))
677 for fn in mf.keys():
677 for fn in mf.keys():
678 if not match(fn):
678 if not match(fn):
679 del mf[fn]
679 del mf[fn]
680 return mf
680 return mf
681
681
682 modified, added, removed, deleted, unknown = [], [], [], [], []
682 modified, added, removed, deleted, unknown = [], [], [], [], []
683 ignored, clean = [], []
683 ignored, clean = [], []
684
684
685 compareworking = False
685 compareworking = False
686 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
686 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
687 compareworking = True
687 compareworking = True
688
688
689 if not compareworking:
689 if not compareworking:
690 # read the manifest from node1 before the manifest from node2,
690 # read the manifest from node1 before the manifest from node2,
691 # so that we'll hit the manifest cache if we're going through
691 # so that we'll hit the manifest cache if we're going through
692 # all the revisions in parent->child order.
692 # all the revisions in parent->child order.
693 mf1 = mfmatches(node1)
693 mf1 = mfmatches(node1)
694
694
695 # are we comparing the working directory?
695 # are we comparing the working directory?
696 if not node2:
696 if not node2:
697 if not wlock:
697 if not wlock:
698 try:
698 try:
699 wlock = self.wlock(wait=0)
699 wlock = self.wlock(wait=0)
700 except lock.LockException:
700 except lock.LockException:
701 wlock = None
701 wlock = None
702 (lookup, modified, added, removed, deleted, unknown,
702 (lookup, modified, added, removed, deleted, unknown,
703 ignored, clean) = self.dirstate.status(files, match,
703 ignored, clean) = self.dirstate.status(files, match,
704 list_ignored, list_clean)
704 list_ignored, list_clean)
705
705
706 # are we comparing working dir against its parent?
706 # are we comparing working dir against its parent?
707 if compareworking:
707 if compareworking:
708 if lookup:
708 if lookup:
709 # do a full compare of any files that might have changed
709 # do a full compare of any files that might have changed
710 mf2 = mfmatches(self.dirstate.parents()[0])
710 mf2 = mfmatches(self.dirstate.parents()[0])
711 for f in lookup:
711 for f in lookup:
712 if fcmp(f, mf2):
712 if fcmp(f, mf2):
713 modified.append(f)
713 modified.append(f)
714 elif wlock is not None:
714 elif wlock is not None:
715 self.dirstate.update([f], "n")
715 self.dirstate.update([f], "n")
716 else:
716 else:
717 # we are comparing working dir against non-parent
717 # we are comparing working dir against non-parent
718 # generate a pseudo-manifest for the working dir
718 # generate a pseudo-manifest for the working dir
719 mf2 = mfmatches(self.dirstate.parents()[0])
719 mf2 = mfmatches(self.dirstate.parents()[0])
720 for f in lookup + modified + added:
720 for f in lookup + modified + added:
721 mf2[f] = ""
721 mf2[f] = ""
722 for f in removed:
722 for f in removed:
723 if f in mf2:
723 if f in mf2:
724 del mf2[f]
724 del mf2[f]
725 else:
725 else:
726 # we are comparing two revisions
726 # we are comparing two revisions
727 mf2 = mfmatches(node2)
727 mf2 = mfmatches(node2)
728
728
729 if not compareworking:
729 if not compareworking:
730 # flush lists from dirstate before comparing manifests
730 # flush lists from dirstate before comparing manifests
731 modified, added, clean = [], [], []
731 modified, added, clean = [], [], []
732
732
733 # make sure to sort the files so we talk to the disk in a
733 # make sure to sort the files so we talk to the disk in a
734 # reasonable order
734 # reasonable order
735 mf2keys = mf2.keys()
735 mf2keys = mf2.keys()
736 mf2keys.sort()
736 mf2keys.sort()
737 for fn in mf2keys:
737 for fn in mf2keys:
738 if mf1.has_key(fn):
738 if mf1.has_key(fn):
739 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
739 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
740 modified.append(fn)
740 modified.append(fn)
741 elif list_clean:
741 elif list_clean:
742 clean.append(fn)
742 clean.append(fn)
743 del mf1[fn]
743 del mf1[fn]
744 else:
744 else:
745 added.append(fn)
745 added.append(fn)
746
746
747 removed = mf1.keys()
747 removed = mf1.keys()
748
748
749 # sort and return results:
749 # sort and return results:
750 for l in modified, added, removed, deleted, unknown, ignored, clean:
750 for l in modified, added, removed, deleted, unknown, ignored, clean:
751 l.sort()
751 l.sort()
752 return (modified, added, removed, deleted, unknown, ignored, clean)
752 return (modified, added, removed, deleted, unknown, ignored, clean)
753
753
754 def changes(self, node1=None, node2=None, files=[], match=util.always,
755 wlock=None, list_ignored=False, list_clean=False):
756 '''DEPRECATED - use status instead'''
757 marduit = self.status(node1, node2, files, match, wlock,
758 list_ignored, list_clean)
759 if list_ignored:
760 return marduit[:-1]
761 else:
762 return marduit[:-2]
763
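# Illustrative sketch (editorial): with the deprecated changes() wrapper gone,
# callers slice the seven-element status() tuple themselves, exactly as the
# commit() hunk above now does with status(match=match)[:5].  Assuming a
# status() that returns the usual 7-tuple:
def status():
    # (modified, added, removed, deleted, unknown, ignored, clean)
    return (["a.txt"], [], [], [], ["junk.o"], [], ["b.txt"])

modified, added, removed, deleted, unknown = status()[:5]
print(modified, unknown)                 # the old changes() view of the data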
764 def add(self, list, wlock=None):
754 def add(self, list, wlock=None):
765 if not wlock:
755 if not wlock:
766 wlock = self.wlock()
756 wlock = self.wlock()
767 for f in list:
757 for f in list:
768 p = self.wjoin(f)
758 p = self.wjoin(f)
769 if not os.path.exists(p):
759 if not os.path.exists(p):
770 self.ui.warn(_("%s does not exist!\n") % f)
760 self.ui.warn(_("%s does not exist!\n") % f)
771 elif not os.path.isfile(p):
761 elif not os.path.isfile(p):
772 self.ui.warn(_("%s not added: only files supported currently\n")
762 self.ui.warn(_("%s not added: only files supported currently\n")
773 % f)
763 % f)
774 elif self.dirstate.state(f) in 'an':
764 elif self.dirstate.state(f) in 'an':
775 self.ui.warn(_("%s already tracked!\n") % f)
765 self.ui.warn(_("%s already tracked!\n") % f)
776 else:
766 else:
777 self.dirstate.update([f], "a")
767 self.dirstate.update([f], "a")
778
768
779 def forget(self, list, wlock=None):
769 def forget(self, list, wlock=None):
780 if not wlock:
770 if not wlock:
781 wlock = self.wlock()
771 wlock = self.wlock()
782 for f in list:
772 for f in list:
783 if self.dirstate.state(f) not in 'ai':
773 if self.dirstate.state(f) not in 'ai':
784 self.ui.warn(_("%s not added!\n") % f)
774 self.ui.warn(_("%s not added!\n") % f)
785 else:
775 else:
786 self.dirstate.forget([f])
776 self.dirstate.forget([f])
787
777
788 def remove(self, list, unlink=False, wlock=None):
778 def remove(self, list, unlink=False, wlock=None):
789 if unlink:
779 if unlink:
790 for f in list:
780 for f in list:
791 try:
781 try:
792 util.unlink(self.wjoin(f))
782 util.unlink(self.wjoin(f))
793 except OSError, inst:
783 except OSError, inst:
794 if inst.errno != errno.ENOENT:
784 if inst.errno != errno.ENOENT:
795 raise
785 raise
796 if not wlock:
786 if not wlock:
797 wlock = self.wlock()
787 wlock = self.wlock()
798 for f in list:
788 for f in list:
799 p = self.wjoin(f)
789 p = self.wjoin(f)
800 if os.path.exists(p):
790 if os.path.exists(p):
801 self.ui.warn(_("%s still exists!\n") % f)
791 self.ui.warn(_("%s still exists!\n") % f)
802 elif self.dirstate.state(f) == 'a':
792 elif self.dirstate.state(f) == 'a':
803 self.dirstate.forget([f])
793 self.dirstate.forget([f])
804 elif f not in self.dirstate:
794 elif f not in self.dirstate:
805 self.ui.warn(_("%s not tracked!\n") % f)
795 self.ui.warn(_("%s not tracked!\n") % f)
806 else:
796 else:
807 self.dirstate.update([f], "r")
797 self.dirstate.update([f], "r")
808
798
809 def undelete(self, list, wlock=None):
799 def undelete(self, list, wlock=None):
810 p = self.dirstate.parents()[0]
800 p = self.dirstate.parents()[0]
811 mn = self.changelog.read(p)[0]
801 mn = self.changelog.read(p)[0]
812 m = self.manifest.read(mn)
802 m = self.manifest.read(mn)
813 if not wlock:
803 if not wlock:
814 wlock = self.wlock()
804 wlock = self.wlock()
815 for f in list:
805 for f in list:
816 if self.dirstate.state(f) not in "r":
806 if self.dirstate.state(f) not in "r":
817 self.ui.warn("%s not removed!\n" % f)
807 self.ui.warn("%s not removed!\n" % f)
818 else:
808 else:
819 t = self.file(f).read(m[f])
809 t = self.file(f).read(m[f])
820 self.wwrite(f, t)
810 self.wwrite(f, t)
821 util.set_exec(self.wjoin(f), m.execf(f))
811 util.set_exec(self.wjoin(f), m.execf(f))
822 self.dirstate.update([f], "n")
812 self.dirstate.update([f], "n")
823
813
824 def copy(self, source, dest, wlock=None):
814 def copy(self, source, dest, wlock=None):
825 p = self.wjoin(dest)
815 p = self.wjoin(dest)
826 if not os.path.exists(p):
816 if not os.path.exists(p):
827 self.ui.warn(_("%s does not exist!\n") % dest)
817 self.ui.warn(_("%s does not exist!\n") % dest)
828 elif not os.path.isfile(p):
818 elif not os.path.isfile(p):
829 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
819 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
830 else:
820 else:
831 if not wlock:
821 if not wlock:
832 wlock = self.wlock()
822 wlock = self.wlock()
833 if self.dirstate.state(dest) == '?':
823 if self.dirstate.state(dest) == '?':
834 self.dirstate.update([dest], "a")
824 self.dirstate.update([dest], "a")
835 self.dirstate.copy(source, dest)
825 self.dirstate.copy(source, dest)
836
826
837 def heads(self, start=None):
827 def heads(self, start=None):
838 heads = self.changelog.heads(start)
828 heads = self.changelog.heads(start)
839 # sort the output in rev descending order
829 # sort the output in rev descending order
840 heads = [(-self.changelog.rev(h), h) for h in heads]
830 heads = [(-self.changelog.rev(h), h) for h in heads]
841 heads.sort()
831 heads.sort()
842 return [n for (r, n) in heads]
832 return [n for (r, n) in heads]
843
833
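# Illustrative sketch (editorial): heads() above sorts by negated revision
# number so the newest head comes first -- a decorate-sort-undecorate over a
# hypothetical node -> rev mapping:
rev = {"old": 2, "mid": 5, "new": 9}
heads = [(-rev[h], h) for h in ["old", "new", "mid"]]
heads.sort()
print([n for (r, n) in heads])           # -> ['new', 'mid', 'old']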
844 # branchlookup returns a dict giving a list of branches for
834 # branchlookup returns a dict giving a list of branches for
845 # each head. A branch is defined as the tag of a node or
835 # each head. A branch is defined as the tag of a node or
846 # the branch of the node's parents. If a node has multiple
836 # the branch of the node's parents. If a node has multiple
847 # branch tags, tags are eliminated if they are visible from other
837 # branch tags, tags are eliminated if they are visible from other
848 # branch tags.
838 # branch tags.
849 #
839 #
850 # So, for this graph: a->b->c->d->e
840 # So, for this graph: a->b->c->d->e
851 # \ /
841 # \ /
852 # aa -----/
842 # aa -----/
853 # a has tag 2.6.12
843 # a has tag 2.6.12
854 # d has tag 2.6.13
844 # d has tag 2.6.13
855 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
845 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
856 # for 2.6.12 can be reached from the node for 2.6.13, the former is eliminated
846 # for 2.6.12 can be reached from the node for 2.6.13, the former is eliminated
857 # from the list.
847 # from the list.
858 #
848 #
859 # It is possible that more than one head will have the same branch tag.
849 # It is possible that more than one head will have the same branch tag.
860 # callers need to check the result for multiple heads under the same
850 # callers need to check the result for multiple heads under the same
861 # branch tag if that is a problem for them (ie checkout of a specific
851 # branch tag if that is a problem for them (ie checkout of a specific
862 # branch).
852 # branch).
863 #
853 #
864 # passing in a specific branch will limit the depth of the search
854 # passing in a specific branch will limit the depth of the search
865 # through the parents. It won't limit the branches returned in the
855 # through the parents. It won't limit the branches returned in the
866 # result though.
856 # result though.
867 def branchlookup(self, heads=None, branch=None):
857 def branchlookup(self, heads=None, branch=None):
868 if not heads:
858 if not heads:
869 heads = self.heads()
859 heads = self.heads()
870 headt = [ h for h in heads ]
860 headt = [ h for h in heads ]
871 chlog = self.changelog
861 chlog = self.changelog
872 branches = {}
862 branches = {}
873 merges = []
863 merges = []
874 seenmerge = {}
864 seenmerge = {}
875
865
876 # traverse the tree once for each head, recording in the branches
866 # traverse the tree once for each head, recording in the branches
877 # dict which tags are visible from this head. The branches
867 # dict which tags are visible from this head. The branches
878 # dict also records which tags are visible from each tag
868 # dict also records which tags are visible from each tag
879 # while we traverse.
869 # while we traverse.
880 while headt or merges:
870 while headt or merges:
881 if merges:
871 if merges:
882 n, found = merges.pop()
872 n, found = merges.pop()
883 visit = [n]
873 visit = [n]
884 else:
874 else:
885 h = headt.pop()
875 h = headt.pop()
886 visit = [h]
876 visit = [h]
887 found = [h]
877 found = [h]
888 seen = {}
878 seen = {}
889 while visit:
879 while visit:
890 n = visit.pop()
880 n = visit.pop()
891 if n in seen:
881 if n in seen:
892 continue
882 continue
893 pp = chlog.parents(n)
883 pp = chlog.parents(n)
894 tags = self.nodetags(n)
884 tags = self.nodetags(n)
895 if tags:
885 if tags:
896 for x in tags:
886 for x in tags:
897 if x == 'tip':
887 if x == 'tip':
898 continue
888 continue
899 for f in found:
889 for f in found:
900 branches.setdefault(f, {})[n] = 1
890 branches.setdefault(f, {})[n] = 1
901 branches.setdefault(n, {})[n] = 1
891 branches.setdefault(n, {})[n] = 1
902 break
892 break
903 if n not in found:
893 if n not in found:
904 found.append(n)
894 found.append(n)
905 if branch in tags:
895 if branch in tags:
906 continue
896 continue
907 seen[n] = 1
897 seen[n] = 1
908 if pp[1] != nullid and n not in seenmerge:
898 if pp[1] != nullid and n not in seenmerge:
909 merges.append((pp[1], [x for x in found]))
899 merges.append((pp[1], [x for x in found]))
910 seenmerge[n] = 1
900 seenmerge[n] = 1
911 if pp[0] != nullid:
901 if pp[0] != nullid:
912 visit.append(pp[0])
902 visit.append(pp[0])
913 # traverse the branches dict, eliminating branch tags from each
903 # traverse the branches dict, eliminating branch tags from each
914 # head that are visible from another branch tag for that head.
904 # head that are visible from another branch tag for that head.
915 out = {}
905 out = {}
916 viscache = {}
906 viscache = {}
917 for h in heads:
907 for h in heads:
918 def visible(node):
908 def visible(node):
919 if node in viscache:
909 if node in viscache:
920 return viscache[node]
910 return viscache[node]
921 ret = {}
911 ret = {}
922 visit = [node]
912 visit = [node]
923 while visit:
913 while visit:
924 x = visit.pop()
914 x = visit.pop()
925 if x in viscache:
915 if x in viscache:
926 ret.update(viscache[x])
916 ret.update(viscache[x])
927 elif x not in ret:
917 elif x not in ret:
928 ret[x] = 1
918 ret[x] = 1
929 if x in branches:
919 if x in branches:
930 visit[len(visit):] = branches[x].keys()
920 visit[len(visit):] = branches[x].keys()
931 viscache[node] = ret
921 viscache[node] = ret
932 return ret
922 return ret
933 if h not in branches:
923 if h not in branches:
934 continue
924 continue
935 # O(n^2), but somewhat limited. This only searches the
925 # O(n^2), but somewhat limited. This only searches the
936 # tags visible from a specific head, not all the tags in the
926 # tags visible from a specific head, not all the tags in the
937 # whole repo.
927 # whole repo.
938 for b in branches[h]:
928 for b in branches[h]:
939 vis = False
929 vis = False
940 for bb in branches[h].keys():
930 for bb in branches[h].keys():
941 if b != bb:
931 if b != bb:
942 if b in visible(bb):
932 if b in visible(bb):
943 vis = True
933 vis = True
944 break
934 break
945 if not vis:
935 if not vis:
946 l = out.setdefault(h, [])
936 l = out.setdefault(h, [])
947 l[len(l):] = self.nodetags(b)
937 l[len(l):] = self.nodetags(b)
948 return out
938 return out
949
939
950 def branches(self, nodes):
940 def branches(self, nodes):
951 if not nodes:
941 if not nodes:
952 nodes = [self.changelog.tip()]
942 nodes = [self.changelog.tip()]
953 b = []
943 b = []
954 for n in nodes:
944 for n in nodes:
955 t = n
945 t = n
956 while 1:
946 while 1:
957 p = self.changelog.parents(n)
947 p = self.changelog.parents(n)
958 if p[1] != nullid or p[0] == nullid:
948 if p[1] != nullid or p[0] == nullid:
959 b.append((t, n, p[0], p[1]))
949 b.append((t, n, p[0], p[1]))
960 break
950 break
961 n = p[0]
951 n = p[0]
962 return b
952 return b
963
953
964 def between(self, pairs):
954 def between(self, pairs):
965 r = []
955 r = []
966
956
967 for top, bottom in pairs:
957 for top, bottom in pairs:
968 n, l, i = top, [], 0
958 n, l, i = top, [], 0
969 f = 1
959 f = 1
970
960
971 while n != bottom:
961 while n != bottom:
972 p = self.changelog.parents(n)[0]
962 p = self.changelog.parents(n)[0]
973 if i == f:
963 if i == f:
974 l.append(n)
964 l.append(n)
975 f = f * 2
965 f = f * 2
976 n = p
966 n = p
977 i += 1
967 i += 1
978
968
979 r.append(l)
969 r.append(l)
980
970
981 return r
971 return r
982
972
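# Illustrative sketch (editorial): between() above walks first parents from
# 'top' towards 'bottom' and keeps only the nodes at exponentially growing
# distances (1, 2, 4, 8, ...), which is what lets the discovery code narrow
# a long linear branch with few round trips.  Over a toy history where each
# revision's first parent is simply n - 1:
def between(top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f *= 2
        n -= 1                           # first parent in the toy history
        i += 1
    return l

print(between(20, 0))                    # -> [19, 18, 16, 12, 4]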
983 def findincoming(self, remote, base=None, heads=None, force=False):
973 def findincoming(self, remote, base=None, heads=None, force=False):
984 """Return list of roots of the subsets of missing nodes from remote
974 """Return list of roots of the subsets of missing nodes from remote
985
975
986 If base dict is specified, assume that these nodes and their parents
976 If base dict is specified, assume that these nodes and their parents
987 exist on the remote side and that no child of a node of base exists
977 exist on the remote side and that no child of a node of base exists
988 in both remote and self.
978 in both remote and self.
989 Furthermore base will be updated to include the nodes that exist
979 Furthermore base will be updated to include the nodes that exist
990 in self and remote but whose children do not exist in self and remote.
980 in self and remote but whose children do not exist in self and remote.
991 If a list of heads is specified, return only nodes which are heads
981 If a list of heads is specified, return only nodes which are heads
992 or ancestors of these heads.
982 or ancestors of these heads.
993
983
994 All the ancestors of base are in self and in remote.
984 All the ancestors of base are in self and in remote.
995 All the descendants of the list returned are missing in self.
985 All the descendants of the list returned are missing in self.
996 (and so we know that the rest of the nodes are missing in remote, see
986 (and so we know that the rest of the nodes are missing in remote, see
997 outgoing)
987 outgoing)
998 """
988 """
999 m = self.changelog.nodemap
989 m = self.changelog.nodemap
1000 search = []
990 search = []
1001 fetch = {}
991 fetch = {}
1002 seen = {}
992 seen = {}
1003 seenbranch = {}
993 seenbranch = {}
1004 if base == None:
994 if base == None:
1005 base = {}
995 base = {}
1006
996
1007 if not heads:
997 if not heads:
1008 heads = remote.heads()
998 heads = remote.heads()
1009
999
1010 if self.changelog.tip() == nullid:
1000 if self.changelog.tip() == nullid:
1011 base[nullid] = 1
1001 base[nullid] = 1
1012 if heads != [nullid]:
1002 if heads != [nullid]:
1013 return [nullid]
1003 return [nullid]
1014 return []
1004 return []
1015
1005
1016 # assume we're closer to the tip than the root
1006 # assume we're closer to the tip than the root
1017 # and start by examining the heads
1007 # and start by examining the heads
1018 self.ui.status(_("searching for changes\n"))
1008 self.ui.status(_("searching for changes\n"))
1019
1009
1020 unknown = []
1010 unknown = []
1021 for h in heads:
1011 for h in heads:
1022 if h not in m:
1012 if h not in m:
1023 unknown.append(h)
1013 unknown.append(h)
1024 else:
1014 else:
1025 base[h] = 1
1015 base[h] = 1
1026
1016
1027 if not unknown:
1017 if not unknown:
1028 return []
1018 return []
1029
1019
1030 req = dict.fromkeys(unknown)
1020 req = dict.fromkeys(unknown)
1031 reqcnt = 0
1021 reqcnt = 0
1032
1022
1033 # search through remote branches
1023 # search through remote branches
1034 # a 'branch' here is a linear segment of history, with four parts:
1024 # a 'branch' here is a linear segment of history, with four parts:
1035 # head, root, first parent, second parent
1025 # head, root, first parent, second parent
1036 # (a branch always has two parents (or none) by definition)
1026 # (a branch always has two parents (or none) by definition)
1037 unknown = remote.branches(unknown)
1027 unknown = remote.branches(unknown)
1038 while unknown:
1028 while unknown:
1039 r = []
1029 r = []
1040 while unknown:
1030 while unknown:
1041 n = unknown.pop(0)
1031 n = unknown.pop(0)
1042 if n[0] in seen:
1032 if n[0] in seen:
1043 continue
1033 continue
1044
1034
1045 self.ui.debug(_("examining %s:%s\n")
1035 self.ui.debug(_("examining %s:%s\n")
1046 % (short(n[0]), short(n[1])))
1036 % (short(n[0]), short(n[1])))
1047 if n[0] == nullid: # found the end of the branch
1037 if n[0] == nullid: # found the end of the branch
1048 pass
1038 pass
1049 elif n in seenbranch:
1039 elif n in seenbranch:
1050 self.ui.debug(_("branch already found\n"))
1040 self.ui.debug(_("branch already found\n"))
1051 continue
1041 continue
1052 elif n[1] and n[1] in m: # do we know the base?
1042 elif n[1] and n[1] in m: # do we know the base?
1053 self.ui.debug(_("found incomplete branch %s:%s\n")
1043 self.ui.debug(_("found incomplete branch %s:%s\n")
1054 % (short(n[0]), short(n[1])))
1044 % (short(n[0]), short(n[1])))
1055 search.append(n) # schedule branch range for scanning
1045 search.append(n) # schedule branch range for scanning
1056 seenbranch[n] = 1
1046 seenbranch[n] = 1
1057 else:
1047 else:
1058 if n[1] not in seen and n[1] not in fetch:
1048 if n[1] not in seen and n[1] not in fetch:
1059 if n[2] in m and n[3] in m:
1049 if n[2] in m and n[3] in m:
1060 self.ui.debug(_("found new changeset %s\n") %
1050 self.ui.debug(_("found new changeset %s\n") %
1061 short(n[1]))
1051 short(n[1]))
1062 fetch[n[1]] = 1 # earliest unknown
1052 fetch[n[1]] = 1 # earliest unknown
1063 for p in n[2:4]:
1053 for p in n[2:4]:
1064 if p in m:
1054 if p in m:
1065 base[p] = 1 # latest known
1055 base[p] = 1 # latest known
1066
1056
1067 for p in n[2:4]:
1057 for p in n[2:4]:
1068 if p not in req and p not in m:
1058 if p not in req and p not in m:
1069 r.append(p)
1059 r.append(p)
1070 req[p] = 1
1060 req[p] = 1
1071 seen[n[0]] = 1
1061 seen[n[0]] = 1
1072
1062
1073 if r:
1063 if r:
1074 reqcnt += 1
1064 reqcnt += 1
1075 self.ui.debug(_("request %d: %s\n") %
1065 self.ui.debug(_("request %d: %s\n") %
1076 (reqcnt, " ".join(map(short, r))))
1066 (reqcnt, " ".join(map(short, r))))
1077 for p in range(0, len(r), 10):
1067 for p in range(0, len(r), 10):
1078 for b in remote.branches(r[p:p+10]):
1068 for b in remote.branches(r[p:p+10]):
1079 self.ui.debug(_("received %s:%s\n") %
1069 self.ui.debug(_("received %s:%s\n") %
1080 (short(b[0]), short(b[1])))
1070 (short(b[0]), short(b[1])))
1081 unknown.append(b)
1071 unknown.append(b)
1082
1072
1083 # do binary search on the branches we found
1073 # do binary search on the branches we found
1084 while search:
1074 while search:
1085 n = search.pop(0)
1075 n = search.pop(0)
1086 reqcnt += 1
1076 reqcnt += 1
1087 l = remote.between([(n[0], n[1])])[0]
1077 l = remote.between([(n[0], n[1])])[0]
1088 l.append(n[1])
1078 l.append(n[1])
1089 p = n[0]
1079 p = n[0]
1090 f = 1
1080 f = 1
1091 for i in l:
1081 for i in l:
1092 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1082 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1093 if i in m:
1083 if i in m:
1094 if f <= 2:
1084 if f <= 2:
1095 self.ui.debug(_("found new branch changeset %s\n") %
1085 self.ui.debug(_("found new branch changeset %s\n") %
1096 short(p))
1086 short(p))
1097 fetch[p] = 1
1087 fetch[p] = 1
1098 base[i] = 1
1088 base[i] = 1
1099 else:
1089 else:
1100 self.ui.debug(_("narrowed branch search to %s:%s\n")
1090 self.ui.debug(_("narrowed branch search to %s:%s\n")
1101 % (short(p), short(i)))
1091 % (short(p), short(i)))
1102 search.append((p, i))
1092 search.append((p, i))
1103 break
1093 break
1104 p, f = i, f * 2
1094 p, f = i, f * 2
1105
1095
1106 # sanity check our fetch list
1096 # sanity check our fetch list
1107 for f in fetch.keys():
1097 for f in fetch.keys():
1108 if f in m:
1098 if f in m:
1109 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1099 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1110
1100
1111 if base.keys() == [nullid]:
1101 if base.keys() == [nullid]:
1112 if force:
1102 if force:
1113 self.ui.warn(_("warning: repository is unrelated\n"))
1103 self.ui.warn(_("warning: repository is unrelated\n"))
1114 else:
1104 else:
1115 raise util.Abort(_("repository is unrelated"))
1105 raise util.Abort(_("repository is unrelated"))
1116
1106
1117 self.ui.note(_("found new changesets starting at ") +
1107 self.ui.note(_("found new changesets starting at ") +
1118 " ".join([short(f) for f in fetch]) + "\n")
1108 " ".join([short(f) for f in fetch]) + "\n")
1119
1109
1120 self.ui.debug(_("%d total queries\n") % reqcnt)
1110 self.ui.debug(_("%d total queries\n") % reqcnt)
1121
1111
1122 return fetch.keys()
1112 return fetch.keys()
1123
1113
1124 def findoutgoing(self, remote, base=None, heads=None, force=False):
1114 def findoutgoing(self, remote, base=None, heads=None, force=False):
1125 """Return list of nodes that are roots of subsets not in remote
1115 """Return list of nodes that are roots of subsets not in remote
1126
1116
1127 If base dict is specified, assume that these nodes and their parents
1117 If base dict is specified, assume that these nodes and their parents
1128 exist on the remote side.
1118 exist on the remote side.
1129 If a list of heads is specified, return only nodes which are heads
1119 If a list of heads is specified, return only nodes which are heads
1130 or ancestors of these heads, and return a second element which
1120 or ancestors of these heads, and return a second element which
1131 contains all remote heads which get new children.
1121 contains all remote heads which get new children.
1132 """
1122 """
1133 if base == None:
1123 if base == None:
1134 base = {}
1124 base = {}
1135 self.findincoming(remote, base, heads, force=force)
1125 self.findincoming(remote, base, heads, force=force)
1136
1126
1137 self.ui.debug(_("common changesets up to ")
1127 self.ui.debug(_("common changesets up to ")
1138 + " ".join(map(short, base.keys())) + "\n")
1128 + " ".join(map(short, base.keys())) + "\n")
1139
1129
1140 remain = dict.fromkeys(self.changelog.nodemap)
1130 remain = dict.fromkeys(self.changelog.nodemap)
1141
1131
1142 # prune everything remote has from the tree
1132 # prune everything remote has from the tree
1143 del remain[nullid]
1133 del remain[nullid]
1144 remove = base.keys()
1134 remove = base.keys()
1145 while remove:
1135 while remove:
1146 n = remove.pop(0)
1136 n = remove.pop(0)
1147 if n in remain:
1137 if n in remain:
1148 del remain[n]
1138 del remain[n]
1149 for p in self.changelog.parents(n):
1139 for p in self.changelog.parents(n):
1150 remove.append(p)
1140 remove.append(p)
1151
1141
1152 # find every node whose parents have been pruned
1142 # find every node whose parents have been pruned
1153 subset = []
1143 subset = []
1154 # find every remote head that will get new children
1144 # find every remote head that will get new children
1155 updated_heads = {}
1145 updated_heads = {}
1156 for n in remain:
1146 for n in remain:
1157 p1, p2 = self.changelog.parents(n)
1147 p1, p2 = self.changelog.parents(n)
1158 if p1 not in remain and p2 not in remain:
1148 if p1 not in remain and p2 not in remain:
1159 subset.append(n)
1149 subset.append(n)
1160 if heads:
1150 if heads:
1161 if p1 in heads:
1151 if p1 in heads:
1162 updated_heads[p1] = True
1152 updated_heads[p1] = True
1163 if p2 in heads:
1153 if p2 in heads:
1164 updated_heads[p2] = True
1154 updated_heads[p2] = True
1165
1155
1166 # this is the set of all roots we have to push
1156 # this is the set of all roots we have to push
1167 if heads:
1157 if heads:
1168 return subset, updated_heads.keys()
1158 return subset, updated_heads.keys()
1169 else:
1159 else:
1170 return subset
1160 return subset
1171
1161
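# Illustrative sketch (editorial): findoutgoing() above prunes every ancestor
# of the common "base" nodes from the local node set and reports the roots of
# what remains -- nodes none of whose parents survived the pruning.  A toy
# version over an explicit parent map (hypothetical data):
parents = {"a": [], "b": ["a"], "c": ["b"], "d": ["c"], "e": ["c"]}

def outgoing_roots(base):
    remain = dict.fromkeys(parents)
    remove = list(base)
    while remove:                        # prune everything remote has
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            remove.extend(parents[n])
    return [n for n in remain
            if all(p not in remain for p in parents[n])]

print(outgoing_roots(["b"]))             # -> ['c']: c and its descendants are missing remotely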
1172 def pull(self, remote, heads=None, force=False, lock=None):
1162 def pull(self, remote, heads=None, force=False, lock=None):
1173 mylock = False
1163 mylock = False
1174 if not lock:
1164 if not lock:
1175 lock = self.lock()
1165 lock = self.lock()
1176 mylock = True
1166 mylock = True
1177
1167
1178 try:
1168 try:
1179 fetch = self.findincoming(remote, force=force)
1169 fetch = self.findincoming(remote, force=force)
1180 if fetch == [nullid]:
1170 if fetch == [nullid]:
1181 self.ui.status(_("requesting all changes\n"))
1171 self.ui.status(_("requesting all changes\n"))
1182
1172
1183 if not fetch:
1173 if not fetch:
1184 self.ui.status(_("no changes found\n"))
1174 self.ui.status(_("no changes found\n"))
1185 return 0
1175 return 0
1186
1176
1187 if heads is None:
1177 if heads is None:
1188 cg = remote.changegroup(fetch, 'pull')
1178 cg = remote.changegroup(fetch, 'pull')
1189 else:
1179 else:
1190 cg = remote.changegroupsubset(fetch, heads, 'pull')
1180 cg = remote.changegroupsubset(fetch, heads, 'pull')
1191 return self.addchangegroup(cg, 'pull', remote.url())
1181 return self.addchangegroup(cg, 'pull', remote.url())
1192 finally:
1182 finally:
1193 if mylock:
1183 if mylock:
1194 lock.release()
1184 lock.release()
1195
1185
1196 def push(self, remote, force=False, revs=None):
1186 def push(self, remote, force=False, revs=None):
1197 # there are two ways to push to remote repo:
1187 # there are two ways to push to remote repo:
1198 #
1188 #
1199 # addchangegroup assumes local user can lock remote
1189 # addchangegroup assumes local user can lock remote
1200 # repo (local filesystem, old ssh servers).
1190 # repo (local filesystem, old ssh servers).
1201 #
1191 #
1202 # unbundle assumes local user cannot lock remote repo (new ssh
1192 # unbundle assumes local user cannot lock remote repo (new ssh
1203 # servers, http servers).
1193 # servers, http servers).
1204
1194
1205 if remote.capable('unbundle'):
1195 if remote.capable('unbundle'):
1206 return self.push_unbundle(remote, force, revs)
1196 return self.push_unbundle(remote, force, revs)
1207 return self.push_addchangegroup(remote, force, revs)
1197 return self.push_addchangegroup(remote, force, revs)
1208
1198
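# Illustrative sketch (editorial): push() above picks the wire strategy from
# the peer's capabilities -- unbundle when the local user cannot lock the
# remote repository (http, newer ssh servers), addchangegroup otherwise.  A
# minimal dispatcher over a hypothetical peer object:
class Peer(object):
    def __init__(self, caps):
        self.caps = set(caps)
    def capable(self, name):
        return name in self.caps

def push(peer):
    if peer.capable('unbundle'):
        return "push_unbundle"           # remote re-checks its heads itself
    return "push_addchangegroup"         # we take the remote lock ourselves

print(push(Peer(["unbundle", "lookup"])))    # -> push_unbundle
print(push(Peer([])))                        # -> push_addchangegroup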
1209 def prepush(self, remote, force, revs):
1199 def prepush(self, remote, force, revs):
1210 base = {}
1200 base = {}
1211 remote_heads = remote.heads()
1201 remote_heads = remote.heads()
1212 inc = self.findincoming(remote, base, remote_heads, force=force)
1202 inc = self.findincoming(remote, base, remote_heads, force=force)
1213 if not force and inc:
1203 if not force and inc:
1214 self.ui.warn(_("abort: unsynced remote changes!\n"))
1204 self.ui.warn(_("abort: unsynced remote changes!\n"))
1215 self.ui.status(_("(did you forget to sync?"
1205 self.ui.status(_("(did you forget to sync?"
1216 " use push -f to force)\n"))
1206 " use push -f to force)\n"))
1217 return None, 1
1207 return None, 1
1218
1208
1219 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1209 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1220 if revs is not None:
1210 if revs is not None:
1221 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1211 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1222 else:
1212 else:
1223 bases, heads = update, self.changelog.heads()
1213 bases, heads = update, self.changelog.heads()
1224
1214
1225 if not bases:
1215 if not bases:
1226 self.ui.status(_("no changes found\n"))
1216 self.ui.status(_("no changes found\n"))
1227 return None, 1
1217 return None, 1
1228 elif not force:
1218 elif not force:
1229 # FIXME we don't properly detect creation of new heads
1219 # FIXME we don't properly detect creation of new heads
1230 # in the push -r case, assume the user knows what he's doing
1220 # in the push -r case, assume the user knows what he's doing
1231 if not revs and len(remote_heads) < len(heads) \
1221 if not revs and len(remote_heads) < len(heads) \
1232 and remote_heads != [nullid]:
1222 and remote_heads != [nullid]:
1233 self.ui.warn(_("abort: push creates new remote branches!\n"))
1223 self.ui.warn(_("abort: push creates new remote branches!\n"))
1234 self.ui.status(_("(did you forget to merge?"
1224 self.ui.status(_("(did you forget to merge?"
1235 " use push -f to force)\n"))
1225 " use push -f to force)\n"))
1236 return None, 1
1226 return None, 1
1237
1227
1238 if revs is None:
1228 if revs is None:
1239 cg = self.changegroup(update, 'push')
1229 cg = self.changegroup(update, 'push')
1240 else:
1230 else:
1241 cg = self.changegroupsubset(update, revs, 'push')
1231 cg = self.changegroupsubset(update, revs, 'push')
1242 return cg, remote_heads
1232 return cg, remote_heads
1243
1233
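# Illustrative sketch (editorial): prepush() above refuses a plain push that
# would leave the remote with more heads than it already has, unless --force
# or an explicit -r list is given -- the "push creates new remote branches"
# guard.  In miniature (nullid stands in for an empty remote):
nullid = "\0" * 20

def allow_push(local_heads, remote_heads, force=False, revs=None):
    if force or revs:
        return True
    if len(remote_heads) < len(local_heads) and remote_heads != [nullid]:
        return False                     # would create a new remote head
    return True

print(allow_push(["h1", "h2"], ["h1"]))              # -> False: merge first
print(allow_push(["h1", "h2"], ["h1"], force=True))  # -> True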
1244 def push_addchangegroup(self, remote, force, revs):
1234 def push_addchangegroup(self, remote, force, revs):
1245 lock = remote.lock()
1235 lock = remote.lock()
1246
1236
1247 ret = self.prepush(remote, force, revs)
1237 ret = self.prepush(remote, force, revs)
1248 if ret[0] is not None:
1238 if ret[0] is not None:
1249 cg, remote_heads = ret
1239 cg, remote_heads = ret
1250 return remote.addchangegroup(cg, 'push', self.url())
1240 return remote.addchangegroup(cg, 'push', self.url())
1251 return ret[1]
1241 return ret[1]
1252
1242
1253 def push_unbundle(self, remote, force, revs):
1243 def push_unbundle(self, remote, force, revs):
1254 # local repo finds heads on server, finds out what revs it
1244 # local repo finds heads on server, finds out what revs it
1255 # must push. once revs transferred, if server finds it has
1245 # must push. once revs transferred, if server finds it has
1256 # different heads (someone else won commit/push race), server
1246 # different heads (someone else won commit/push race), server
1257 # aborts.
1247 # aborts.
1258
1248
1259 ret = self.prepush(remote, force, revs)
1249 ret = self.prepush(remote, force, revs)
1260 if ret[0] is not None:
1250 if ret[0] is not None:
1261 cg, remote_heads = ret
1251 cg, remote_heads = ret
1262 if force: remote_heads = ['force']
1252 if force: remote_heads = ['force']
1263 return remote.unbundle(cg, remote_heads, 'push')
1253 return remote.unbundle(cg, remote_heads, 'push')
1264 return ret[1]
1254 return ret[1]
1265
1255
1266 def changegroupsubset(self, bases, heads, source):
1256 def changegroupsubset(self, bases, heads, source):
1267 """This function generates a changegroup consisting of all the nodes
1257 """This function generates a changegroup consisting of all the nodes
1268 that are descendants of any of the bases, and ancestors of any of
1258 that are descendants of any of the bases, and ancestors of any of
1269 the heads.
1259 the heads.
1270
1260
1271 It is fairly complex as determining which filenodes and which
1261 It is fairly complex as determining which filenodes and which
1272 manifest nodes need to be included for the changeset to be complete
1262 manifest nodes need to be included for the changeset to be complete
1273 is non-trivial.
1263 is non-trivial.
1274
1264
1275 Another wrinkle is doing the reverse, figuring out which changeset in
1265 Another wrinkle is doing the reverse, figuring out which changeset in
1276 the changegroup a particular filenode or manifestnode belongs to."""
1266 the changegroup a particular filenode or manifestnode belongs to."""
1277
1267
1278 self.hook('preoutgoing', throw=True, source=source)
1268 self.hook('preoutgoing', throw=True, source=source)
1279
1269
1280 # Set up some initial variables
1270 # Set up some initial variables
1281 # Make it easy to refer to self.changelog
1271 # Make it easy to refer to self.changelog
1282 cl = self.changelog
1272 cl = self.changelog
1283 # msng is short for missing - compute the list of changesets in this
1273 # msng is short for missing - compute the list of changesets in this
1284 # changegroup.
1274 # changegroup.
1285 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1275 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1286 # Some bases may turn out to be superfluous, and some heads may be
1276 # Some bases may turn out to be superfluous, and some heads may be
1287 # too. nodesbetween will return the minimal set of bases and heads
1277 # too. nodesbetween will return the minimal set of bases and heads
1288 # necessary to re-create the changegroup.
1278 # necessary to re-create the changegroup.
1289
1279
1290 # Known heads are the list of heads that it is assumed the recipient
1280 # Known heads are the list of heads that it is assumed the recipient
1291 # of this changegroup will know about.
1281 # of this changegroup will know about.
1292 knownheads = {}
1282 knownheads = {}
1293 # We assume that all parents of bases are known heads.
1283 # We assume that all parents of bases are known heads.
1294 for n in bases:
1284 for n in bases:
1295 for p in cl.parents(n):
1285 for p in cl.parents(n):
1296 if p != nullid:
1286 if p != nullid:
1297 knownheads[p] = 1
1287 knownheads[p] = 1
1298 knownheads = knownheads.keys()
1288 knownheads = knownheads.keys()
1299 if knownheads:
1289 if knownheads:
1300 # Now that we know what heads are known, we can compute which
1290 # Now that we know what heads are known, we can compute which
1301 # changesets are known. The recipient must know about all
1291 # changesets are known. The recipient must know about all
1302 # changesets required to reach the known heads from the null
1292 # changesets required to reach the known heads from the null
1303 # changeset.
1293 # changeset.
1304 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1294 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1305 junk = None
1295 junk = None
1306 # Transform the list into an ersatz set.
1296 # Transform the list into an ersatz set.
1307 has_cl_set = dict.fromkeys(has_cl_set)
1297 has_cl_set = dict.fromkeys(has_cl_set)
1308 else:
1298 else:
1309 # If there were no known heads, the recipient cannot be assumed to
1299 # If there were no known heads, the recipient cannot be assumed to
1310 # know about any changesets.
1300 # know about any changesets.
1311 has_cl_set = {}
1301 has_cl_set = {}
1312
1302
1313 # Make it easy to refer to self.manifest
1303 # Make it easy to refer to self.manifest
1314 mnfst = self.manifest
1304 mnfst = self.manifest
1315 # We don't know which manifests are missing yet
1305 # We don't know which manifests are missing yet
1316 msng_mnfst_set = {}
1306 msng_mnfst_set = {}
1317 # Nor do we know which filenodes are missing.
1307 # Nor do we know which filenodes are missing.
1318 msng_filenode_set = {}
1308 msng_filenode_set = {}
1319
1309
1320 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1310 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1321 junk = None
1311 junk = None
1322
1312
1323 # A changeset always belongs to itself, so the changenode lookup
1313 # A changeset always belongs to itself, so the changenode lookup
1324 # function for a changenode is identity.
1314 # function for a changenode is identity.
1325 def identity(x):
1315 def identity(x):
1326 return x
1316 return x
1327
1317
1328 # A function generating function. Sets up an environment for the
1318 # A function generating function. Sets up an environment for the
1329 # inner function.
1319 # inner function.
1330 def cmp_by_rev_func(revlog):
1320 def cmp_by_rev_func(revlog):
1331 # Compare two nodes by their revision number in the environment's
1321 # Compare two nodes by their revision number in the environment's
1332 # revision history. Since the revision number both represents the
1322 # revision history. Since the revision number both represents the
1333 # most efficient order to read the nodes in, and represents a
1323 # most efficient order to read the nodes in, and represents a
1334 # topological sorting of the nodes, this function is often useful.
1324 # topological sorting of the nodes, this function is often useful.
1335 def cmp_by_rev(a, b):
1325 def cmp_by_rev(a, b):
1336 return cmp(revlog.rev(a), revlog.rev(b))
1326 return cmp(revlog.rev(a), revlog.rev(b))
1337 return cmp_by_rev
1327 return cmp_by_rev
1338
1328
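# Illustrative sketch (editorial): cmp_by_rev_func() above exists so node
# lists can be sorted in revision order, which is both the cheapest order to
# read them in and a topological order.  With a key-based sort the same idea
# is simply (hypothetical node -> rev mapping):
revindex = {"n3": 3, "n1": 1, "n7": 7}

nodes = ["n7", "n1", "n3"]
nodes.sort(key=revindex.get)             # read in increasing revision order
print(nodes)                             # -> ['n1', 'n3', 'n7']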
1339 # If we determine that a particular file or manifest node must be a
1329 # If we determine that a particular file or manifest node must be a
1340 # node that the recipient of the changegroup will already have, we can
1330 # node that the recipient of the changegroup will already have, we can
1341 # also assume the recipient will have all the parents. This function
1331 # also assume the recipient will have all the parents. This function
1342 # prunes them from the set of missing nodes.
1332 # prunes them from the set of missing nodes.
1343 def prune_parents(revlog, hasset, msngset):
1333 def prune_parents(revlog, hasset, msngset):
1344 haslst = hasset.keys()
1334 haslst = hasset.keys()
1345 haslst.sort(cmp_by_rev_func(revlog))
1335 haslst.sort(cmp_by_rev_func(revlog))
1346 for node in haslst:
1336 for node in haslst:
1347 parentlst = [p for p in revlog.parents(node) if p != nullid]
1337 parentlst = [p for p in revlog.parents(node) if p != nullid]
1348 while parentlst:
1338 while parentlst:
1349 n = parentlst.pop()
1339 n = parentlst.pop()
1350 if n not in hasset:
1340 if n not in hasset:
1351 hasset[n] = 1
1341 hasset[n] = 1
1352 p = [p for p in revlog.parents(n) if p != nullid]
1342 p = [p for p in revlog.parents(n) if p != nullid]
1353 parentlst.extend(p)
1343 parentlst.extend(p)
1354 for n in hasset:
1344 for n in hasset:
1355 msngset.pop(n, None)
1345 msngset.pop(n, None)
1356
1346
1357 # This is a function generating function used to set up an environment
1347 # This is a function generating function used to set up an environment
1358 # for the inner function to execute in.
1348 # for the inner function to execute in.
1359 def manifest_and_file_collector(changedfileset):
1349 def manifest_and_file_collector(changedfileset):
1360 # This is an information gathering function that gathers
1350 # This is an information gathering function that gathers
1361 # information from each changeset node that goes out as part of
1351 # information from each changeset node that goes out as part of
1362 # the changegroup. The information gathered is a list of which
1352 # the changegroup. The information gathered is a list of which
1363 # manifest nodes are potentially required (the recipient may
1353 # manifest nodes are potentially required (the recipient may
1364 # already have them) and total list of all files which were
1354 # already have them) and total list of all files which were
1365 # changed in any changeset in the changegroup.
1355 # changed in any changeset in the changegroup.
1366 #
1356 #
1367 # We also remember the first changenode we saw any manifest
1357 # We also remember the first changenode we saw any manifest
1368 # referenced by so we can later determine which changenode 'owns'
1358 # referenced by so we can later determine which changenode 'owns'
1369 # the manifest.
1359 # the manifest.
1370 def collect_manifests_and_files(clnode):
1360 def collect_manifests_and_files(clnode):
1371 c = cl.read(clnode)
1361 c = cl.read(clnode)
1372 for f in c[3]:
1362 for f in c[3]:
1373 # This is to make sure we only have one instance of each
1363 # This is to make sure we only have one instance of each
1374 # filename string for each filename.
1364 # filename string for each filename.
1375 changedfileset.setdefault(f, f)
1365 changedfileset.setdefault(f, f)
1376 msng_mnfst_set.setdefault(c[0], clnode)
1366 msng_mnfst_set.setdefault(c[0], clnode)
1377 return collect_manifests_and_files
1367 return collect_manifests_and_files
1378
1368
1379 # Figure out which manifest nodes (of the ones we think might be part
1369 # Figure out which manifest nodes (of the ones we think might be part
1380 # of the changegroup) the recipient must know about and remove them
1370 # of the changegroup) the recipient must know about and remove them
1381 # from the changegroup.
1371 # from the changegroup.
1382 def prune_manifests():
1372 def prune_manifests():
1383 has_mnfst_set = {}
1373 has_mnfst_set = {}
1384 for n in msng_mnfst_set:
1374 for n in msng_mnfst_set:
1385 # If a 'missing' manifest thinks it belongs to a changenode
1375 # If a 'missing' manifest thinks it belongs to a changenode
1386 # the recipient is assumed to have, obviously the recipient
1376 # the recipient is assumed to have, obviously the recipient
1387 # must have that manifest.
1377 # must have that manifest.
1388 linknode = cl.node(mnfst.linkrev(n))
1378 linknode = cl.node(mnfst.linkrev(n))
1389 if linknode in has_cl_set:
1379 if linknode in has_cl_set:
1390 has_mnfst_set[n] = 1
1380 has_mnfst_set[n] = 1
1391 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1381 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1392
1382
1393 # Use the information collected in collect_manifests_and_files to say
1383 # Use the information collected in collect_manifests_and_files to say
1394 # which changenode any manifestnode belongs to.
1384 # which changenode any manifestnode belongs to.
1395 def lookup_manifest_link(mnfstnode):
1385 def lookup_manifest_link(mnfstnode):
1396 return msng_mnfst_set[mnfstnode]
1386 return msng_mnfst_set[mnfstnode]
1397
1387
1398 # A function generating function that sets up the initial environment
1388 # A function generating function that sets up the initial environment
1399 # for the inner function.
1389 # for the inner function.
1400 def filenode_collector(changedfiles):
1390 def filenode_collector(changedfiles):
1401 next_rev = [0]
1391 next_rev = [0]
1402 # This gathers information from each manifestnode included in the
1392 # This gathers information from each manifestnode included in the
1403 # changegroup about which filenodes the manifest node references
1393 # changegroup about which filenodes the manifest node references
1404 # so we can include those in the changegroup too.
1394 # so we can include those in the changegroup too.
1405 #
1395 #
1406 # It also remembers which changenode each filenode belongs to. It
1396 # It also remembers which changenode each filenode belongs to. It
1407 # does this by assuming a filenode belongs to the changenode that
1397 # does this by assuming a filenode belongs to the changenode that
1408 # the first manifest referencing it belongs to.
1398 # the first manifest referencing it belongs to.
1409 def collect_msng_filenodes(mnfstnode):
1399 def collect_msng_filenodes(mnfstnode):
1410 r = mnfst.rev(mnfstnode)
1400 r = mnfst.rev(mnfstnode)
1411 if r == next_rev[0]:
1401 if r == next_rev[0]:
1412 # If the last rev we looked at was the one just previous,
1402 # If the last rev we looked at was the one just previous,
1413 # we only need to see a diff.
1403 # we only need to see a diff.
1414 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1404 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1415 # For each line in the delta
1405 # For each line in the delta
1416 for dline in delta.splitlines():
1406 for dline in delta.splitlines():
1417 # get the filename and filenode for that line
1407 # get the filename and filenode for that line
1418 f, fnode = dline.split('\0')
1408 f, fnode = dline.split('\0')
1419 fnode = bin(fnode[:40])
1409 fnode = bin(fnode[:40])
1420 f = changedfiles.get(f, None)
1410 f = changedfiles.get(f, None)
1421 # And if the file is in the list of files we care
1411 # And if the file is in the list of files we care
1422 # about.
1412 # about.
1423 if f is not None:
1413 if f is not None:
1424 # Get the changenode this manifest belongs to
1414 # Get the changenode this manifest belongs to
1425 clnode = msng_mnfst_set[mnfstnode]
1415 clnode = msng_mnfst_set[mnfstnode]
1426 # Create the set of filenodes for the file if
1416 # Create the set of filenodes for the file if
1427 # there isn't one already.
1417 # there isn't one already.
1428 ndset = msng_filenode_set.setdefault(f, {})
1418 ndset = msng_filenode_set.setdefault(f, {})
1429 # And set the filenode's changelog node to the
1419 # And set the filenode's changelog node to the
1430 # manifest's if it hasn't been set already.
1420 # manifest's if it hasn't been set already.
1431 ndset.setdefault(fnode, clnode)
1421 ndset.setdefault(fnode, clnode)
1432 else:
1422 else:
1433 # Otherwise we need a full manifest.
1423 # Otherwise we need a full manifest.
1434 m = mnfst.read(mnfstnode)
1424 m = mnfst.read(mnfstnode)
1435 # For every file we care about.
1425 # For every file we care about.
1436 for f in changedfiles:
1426 for f in changedfiles:
1437 fnode = m.get(f, None)
1427 fnode = m.get(f, None)
1438 # If it's in the manifest
1428 # If it's in the manifest
1439 if fnode is not None:
1429 if fnode is not None:
1440 # See comments above.
1430 # See comments above.
1441 clnode = msng_mnfst_set[mnfstnode]
1431 clnode = msng_mnfst_set[mnfstnode]
1442 ndset = msng_filenode_set.setdefault(f, {})
1432 ndset = msng_filenode_set.setdefault(f, {})
1443 ndset.setdefault(fnode, clnode)
1433 ndset.setdefault(fnode, clnode)
1444 # Remember the revision we hope to see next.
1434 # Remember the revision we hope to see next.
1445 next_rev[0] = r + 1
1435 next_rev[0] = r + 1
1446 return collect_msng_filenodes
1436 return collect_msng_filenodes
1447
1437
1448 # We have a list of filenodes we think we need for a file, let's remove
1438 # We have a list of filenodes we think we need for a file, let's remove
1449 # all those we know the recipient must have.
1439 # all those we know the recipient must have.
1450 def prune_filenodes(f, filerevlog):
1440 def prune_filenodes(f, filerevlog):
1451 msngset = msng_filenode_set[f]
1441 msngset = msng_filenode_set[f]
1452 hasset = {}
1442 hasset = {}
1453 # If a 'missing' filenode thinks it belongs to a changenode we
1443 # If a 'missing' filenode thinks it belongs to a changenode we
1454 # assume the recipient must have, then the recipient must have
1444 # assume the recipient must have, then the recipient must have
1455 # that filenode.
1445 # that filenode.
1456 for n in msngset:
1446 for n in msngset:
1457 clnode = cl.node(filerevlog.linkrev(n))
1447 clnode = cl.node(filerevlog.linkrev(n))
1458 if clnode in has_cl_set:
1448 if clnode in has_cl_set:
1459 hasset[n] = 1
1449 hasset[n] = 1
1460 prune_parents(filerevlog, hasset, msngset)
1450 prune_parents(filerevlog, hasset, msngset)
1461
1451
1462 # A function generating function that sets up a context for the
1452 # A function generating function that sets up a context for the
1463 # inner function.
1453 # inner function.
1464 def lookup_filenode_link_func(fname):
1454 def lookup_filenode_link_func(fname):
1465 msngset = msng_filenode_set[fname]
1455 msngset = msng_filenode_set[fname]
1466 # Lookup the changenode the filenode belongs to.
1456 # Lookup the changenode the filenode belongs to.
1467 def lookup_filenode_link(fnode):
1457 def lookup_filenode_link(fnode):
1468 return msngset[fnode]
1458 return msngset[fnode]
1469 return lookup_filenode_link
1459 return lookup_filenode_link
1470
1460
1471 # Now that we have all these utility functions to help out and
1461 # Now that we have all these utility functions to help out and
1472 # logically divide up the task, generate the group.
1462 # logically divide up the task, generate the group.
1473 def gengroup():
1463 def gengroup():
1474 # The set of changed files starts empty.
1464 # The set of changed files starts empty.
1475 changedfiles = {}
1465 changedfiles = {}
1476 # Create a changenode group generator that will call our functions
1466 # Create a changenode group generator that will call our functions
1477 # back to lookup the owning changenode and collect information.
1467 # back to lookup the owning changenode and collect information.
1478 group = cl.group(msng_cl_lst, identity,
1468 group = cl.group(msng_cl_lst, identity,
1479 manifest_and_file_collector(changedfiles))
1469 manifest_and_file_collector(changedfiles))
1480 for chnk in group:
1470 for chnk in group:
1481 yield chnk
1471 yield chnk
1482
1472
1483 # The list of manifests has been collected by the generator
1473 # The list of manifests has been collected by the generator
1484 # calling our functions back.
1474 # calling our functions back.
1485 prune_manifests()
1475 prune_manifests()
1486 msng_mnfst_lst = msng_mnfst_set.keys()
1476 msng_mnfst_lst = msng_mnfst_set.keys()
1487 # Sort the manifestnodes by revision number.
1477 # Sort the manifestnodes by revision number.
1488 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1478 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1489 # Create a generator for the manifestnodes that calls our lookup
1479 # Create a generator for the manifestnodes that calls our lookup
1490 # and data collection functions back.
1480 # and data collection functions back.
1491 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1481 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1492 filenode_collector(changedfiles))
1482 filenode_collector(changedfiles))
1493 for chnk in group:
1483 for chnk in group:
1494 yield chnk
1484 yield chnk
1495
1485
1496 # These are no longer needed, dereference and toss the memory for
1486 # These are no longer needed, dereference and toss the memory for
1497 # them.
1487 # them.
1498 msng_mnfst_lst = None
1488 msng_mnfst_lst = None
1499 msng_mnfst_set.clear()
1489 msng_mnfst_set.clear()
1500
1490
1501 changedfiles = changedfiles.keys()
1491 changedfiles = changedfiles.keys()
1502 changedfiles.sort()
1492 changedfiles.sort()
1503 # Go through all our files in order sorted by name.
1493 # Go through all our files in order sorted by name.
1504 for fname in changedfiles:
1494 for fname in changedfiles:
1505 filerevlog = self.file(fname)
1495 filerevlog = self.file(fname)
1506 # Toss out the filenodes that the recipient isn't really
1496 # Toss out the filenodes that the recipient isn't really
1507 # missing.
1497 # missing.
1508 if msng_filenode_set.has_key(fname):
1498 if msng_filenode_set.has_key(fname):
1509 prune_filenodes(fname, filerevlog)
1499 prune_filenodes(fname, filerevlog)
1510 msng_filenode_lst = msng_filenode_set[fname].keys()
1500 msng_filenode_lst = msng_filenode_set[fname].keys()
1511 else:
1501 else:
1512 msng_filenode_lst = []
1502 msng_filenode_lst = []
1513 # If any filenodes are left, generate the group for them,
1503 # If any filenodes are left, generate the group for them,
1514 # otherwise don't bother.
1504 # otherwise don't bother.
1515 if len(msng_filenode_lst) > 0:
1505 if len(msng_filenode_lst) > 0:
1516 yield changegroup.genchunk(fname)
1506 yield changegroup.genchunk(fname)
1517 # Sort the filenodes by their revision #
1507 # Sort the filenodes by their revision #
1518 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1508 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1519 # Create a group generator and only pass in a changenode
1509 # Create a group generator and only pass in a changenode
1520 # lookup function as we need to collect no information
1510 # lookup function as we need to collect no information
1521 # from filenodes.
1511 # from filenodes.
1522 group = filerevlog.group(msng_filenode_lst,
1512 group = filerevlog.group(msng_filenode_lst,
1523 lookup_filenode_link_func(fname))
1513 lookup_filenode_link_func(fname))
1524 for chnk in group:
1514 for chnk in group:
1525 yield chnk
1515 yield chnk
1526 if msng_filenode_set.has_key(fname):
1516 if msng_filenode_set.has_key(fname):
1527 # Don't need this anymore, toss it to free memory.
1517 # Don't need this anymore, toss it to free memory.
1528 del msng_filenode_set[fname]
1518 del msng_filenode_set[fname]
1529 # Signal that no more groups are left.
1519 # Signal that no more groups are left.
1530 yield changegroup.closechunk()
1520 yield changegroup.closechunk()
1531
1521
1532 if msng_cl_lst:
1522 if msng_cl_lst:
1533 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1523 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1534
1524
1535 return util.chunkbuffer(gengroup())
1525 return util.chunkbuffer(gengroup())
1536
1526
1537 def changegroup(self, basenodes, source):
1527 def changegroup(self, basenodes, source):
1538 """Generate a changegroup of all nodes that we have that a recipient
1528 """Generate a changegroup of all nodes that we have that a recipient
1539 doesn't.
1529 doesn't.
1540
1530
1541 This is much easier than the previous function as we can assume that
1531 This is much easier than the previous function as we can assume that
1542 the recipient has any changenode we aren't sending them."""
1532 the recipient has any changenode we aren't sending them."""
1543
1533
1544 self.hook('preoutgoing', throw=True, source=source)
1534 self.hook('preoutgoing', throw=True, source=source)
1545
1535
1546 cl = self.changelog
1536 cl = self.changelog
1547 nodes = cl.nodesbetween(basenodes, None)[0]
1537 nodes = cl.nodesbetween(basenodes, None)[0]
1548 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1538 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1549
1539
1550 def identity(x):
1540 def identity(x):
1551 return x
1541 return x
1552
1542
1553 def gennodelst(revlog):
1543 def gennodelst(revlog):
1554 for r in xrange(0, revlog.count()):
1544 for r in xrange(0, revlog.count()):
1555 n = revlog.node(r)
1545 n = revlog.node(r)
1556 if revlog.linkrev(n) in revset:
1546 if revlog.linkrev(n) in revset:
1557 yield n
1547 yield n
1558
1548
1559 def changed_file_collector(changedfileset):
1549 def changed_file_collector(changedfileset):
1560 def collect_changed_files(clnode):
1550 def collect_changed_files(clnode):
1561 c = cl.read(clnode)
1551 c = cl.read(clnode)
1562 for fname in c[3]:
1552 for fname in c[3]:
1563 changedfileset[fname] = 1
1553 changedfileset[fname] = 1
1564 return collect_changed_files
1554 return collect_changed_files
1565
1555
1566 def lookuprevlink_func(revlog):
1556 def lookuprevlink_func(revlog):
1567 def lookuprevlink(n):
1557 def lookuprevlink(n):
1568 return cl.node(revlog.linkrev(n))
1558 return cl.node(revlog.linkrev(n))
1569 return lookuprevlink
1559 return lookuprevlink
1570
1560
1571 def gengroup():
1561 def gengroup():
1572 # construct a list of all changed files
1562 # construct a list of all changed files
1573 changedfiles = {}
1563 changedfiles = {}
1574
1564
1575 for chnk in cl.group(nodes, identity,
1565 for chnk in cl.group(nodes, identity,
1576 changed_file_collector(changedfiles)):
1566 changed_file_collector(changedfiles)):
1577 yield chnk
1567 yield chnk
1578 changedfiles = changedfiles.keys()
1568 changedfiles = changedfiles.keys()
1579 changedfiles.sort()
1569 changedfiles.sort()
1580
1570
1581 mnfst = self.manifest
1571 mnfst = self.manifest
1582 nodeiter = gennodelst(mnfst)
1572 nodeiter = gennodelst(mnfst)
1583 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1573 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1584 yield chnk
1574 yield chnk
1585
1575
1586 for fname in changedfiles:
1576 for fname in changedfiles:
1587 filerevlog = self.file(fname)
1577 filerevlog = self.file(fname)
1588 nodeiter = gennodelst(filerevlog)
1578 nodeiter = gennodelst(filerevlog)
1589 nodeiter = list(nodeiter)
1579 nodeiter = list(nodeiter)
1590 if nodeiter:
1580 if nodeiter:
1591 yield changegroup.genchunk(fname)
1581 yield changegroup.genchunk(fname)
1592 lookup = lookuprevlink_func(filerevlog)
1582 lookup = lookuprevlink_func(filerevlog)
1593 for chnk in filerevlog.group(nodeiter, lookup):
1583 for chnk in filerevlog.group(nodeiter, lookup):
1594 yield chnk
1584 yield chnk
1595
1585
1596 yield changegroup.closechunk()
1586 yield changegroup.closechunk()
1597
1587
1598 if nodes:
1588 if nodes:
1599 self.hook('outgoing', node=hex(nodes[0]), source=source)
1589 self.hook('outgoing', node=hex(nodes[0]), source=source)
1600
1590
1601 return util.chunkbuffer(gengroup())
1591 return util.chunkbuffer(gengroup())
1602
1592
1603 def addchangegroup(self, source, srctype, url):
1593 def addchangegroup(self, source, srctype, url):
1604 """add changegroup to repo.
1594 """add changegroup to repo.
1605 returns number of heads modified or added + 1."""
1595 returns number of heads modified or added + 1."""
1606
1596
1607 def csmap(x):
1597 def csmap(x):
1608 self.ui.debug(_("add changeset %s\n") % short(x))
1598 self.ui.debug(_("add changeset %s\n") % short(x))
1609 return cl.count()
1599 return cl.count()
1610
1600
1611 def revmap(x):
1601 def revmap(x):
1612 return cl.rev(x)
1602 return cl.rev(x)
1613
1603
1614 if not source:
1604 if not source:
1615 return 0
1605 return 0
1616
1606
1617 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1607 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1618
1608
1619 changesets = files = revisions = 0
1609 changesets = files = revisions = 0
1620
1610
1621 tr = self.transaction()
1611 tr = self.transaction()
1622
1612
1623 # write changelog data to temp files so concurrent readers will not see
1613 # write changelog data to temp files so concurrent readers will not see
1624 # an inconsistent view
1614 # an inconsistent view
1625 cl = None
1615 cl = None
1626 try:
1616 try:
1627 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1617 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1628
1618
1629 oldheads = len(cl.heads())
1619 oldheads = len(cl.heads())
1630
1620
1631 # pull off the changeset group
1621 # pull off the changeset group
1632 self.ui.status(_("adding changesets\n"))
1622 self.ui.status(_("adding changesets\n"))
1633 cor = cl.count() - 1
1623 cor = cl.count() - 1
1634 chunkiter = changegroup.chunkiter(source)
1624 chunkiter = changegroup.chunkiter(source)
1635 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1625 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1636 raise util.Abort(_("received changelog group is empty"))
1626 raise util.Abort(_("received changelog group is empty"))
1637 cnr = cl.count() - 1
1627 cnr = cl.count() - 1
1638 changesets = cnr - cor
1628 changesets = cnr - cor
1639
1629
1640 # pull off the manifest group
1630 # pull off the manifest group
1641 self.ui.status(_("adding manifests\n"))
1631 self.ui.status(_("adding manifests\n"))
1642 chunkiter = changegroup.chunkiter(source)
1632 chunkiter = changegroup.chunkiter(source)
1643 # no need to check for empty manifest group here:
1633 # no need to check for empty manifest group here:
1644 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1634 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1645 # no new manifest will be created and the manifest group will
1635 # no new manifest will be created and the manifest group will
1646 # be empty during the pull
1636 # be empty during the pull
1647 self.manifest.addgroup(chunkiter, revmap, tr)
1637 self.manifest.addgroup(chunkiter, revmap, tr)
1648
1638
1649 # process the files
1639 # process the files
1650 self.ui.status(_("adding file changes\n"))
1640 self.ui.status(_("adding file changes\n"))
1651 while 1:
1641 while 1:
1652 f = changegroup.getchunk(source)
1642 f = changegroup.getchunk(source)
1653 if not f:
1643 if not f:
1654 break
1644 break
1655 self.ui.debug(_("adding %s revisions\n") % f)
1645 self.ui.debug(_("adding %s revisions\n") % f)
1656 fl = self.file(f)
1646 fl = self.file(f)
1657 o = fl.count()
1647 o = fl.count()
1658 chunkiter = changegroup.chunkiter(source)
1648 chunkiter = changegroup.chunkiter(source)
1659 if fl.addgroup(chunkiter, revmap, tr) is None:
1649 if fl.addgroup(chunkiter, revmap, tr) is None:
1660 raise util.Abort(_("received file revlog group is empty"))
1650 raise util.Abort(_("received file revlog group is empty"))
1661 revisions += fl.count() - o
1651 revisions += fl.count() - o
1662 files += 1
1652 files += 1
1663
1653
1664 cl.writedata()
1654 cl.writedata()
1665 finally:
1655 finally:
1666 if cl:
1656 if cl:
1667 cl.cleanup()
1657 cl.cleanup()
1668
1658
1669 # make changelog see real files again
1659 # make changelog see real files again
1670 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1660 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1671 self.changelog.checkinlinesize(tr)
1661 self.changelog.checkinlinesize(tr)
1672
1662
1673 newheads = len(self.changelog.heads())
1663 newheads = len(self.changelog.heads())
1674 heads = ""
1664 heads = ""
1675 if oldheads and newheads != oldheads:
1665 if oldheads and newheads != oldheads:
1676 heads = _(" (%+d heads)") % (newheads - oldheads)
1666 heads = _(" (%+d heads)") % (newheads - oldheads)
1677
1667
1678 self.ui.status(_("added %d changesets"
1668 self.ui.status(_("added %d changesets"
1679 " with %d changes to %d files%s\n")
1669 " with %d changes to %d files%s\n")
1680 % (changesets, revisions, files, heads))
1670 % (changesets, revisions, files, heads))
1681
1671
1682 if changesets > 0:
1672 if changesets > 0:
1683 self.hook('pretxnchangegroup', throw=True,
1673 self.hook('pretxnchangegroup', throw=True,
1684 node=hex(self.changelog.node(cor+1)), source=srctype,
1674 node=hex(self.changelog.node(cor+1)), source=srctype,
1685 url=url)
1675 url=url)
1686
1676
1687 tr.close()
1677 tr.close()
1688
1678
1689 if changesets > 0:
1679 if changesets > 0:
1690 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1680 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1691 source=srctype, url=url)
1681 source=srctype, url=url)
1692
1682
1693 for i in range(cor + 1, cnr + 1):
1683 for i in range(cor + 1, cnr + 1):
1694 self.hook("incoming", node=hex(self.changelog.node(i)),
1684 self.hook("incoming", node=hex(self.changelog.node(i)),
1695 source=srctype, url=url)
1685 source=srctype, url=url)
1696
1686
1697 return newheads - oldheads + 1
1687 return newheads - oldheads + 1
1698
1688
1699
1689
1700 def stream_in(self, remote):
1690 def stream_in(self, remote):
1701 fp = remote.stream_out()
1691 fp = remote.stream_out()
1702 resp = int(fp.readline())
1692 resp = int(fp.readline())
1703 if resp != 0:
1693 if resp != 0:
1704 raise util.Abort(_('operation forbidden by server'))
1694 raise util.Abort(_('operation forbidden by server'))
1705 self.ui.status(_('streaming all changes\n'))
1695 self.ui.status(_('streaming all changes\n'))
1706 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1696 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1707 self.ui.status(_('%d files to transfer, %s of data\n') %
1697 self.ui.status(_('%d files to transfer, %s of data\n') %
1708 (total_files, util.bytecount(total_bytes)))
1698 (total_files, util.bytecount(total_bytes)))
1709 start = time.time()
1699 start = time.time()
1710 for i in xrange(total_files):
1700 for i in xrange(total_files):
1711 name, size = fp.readline().split('\0', 1)
1701 name, size = fp.readline().split('\0', 1)
1712 size = int(size)
1702 size = int(size)
1713 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1703 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1714 ofp = self.opener(name, 'w')
1704 ofp = self.opener(name, 'w')
1715 for chunk in util.filechunkiter(fp, limit=size):
1705 for chunk in util.filechunkiter(fp, limit=size):
1716 ofp.write(chunk)
1706 ofp.write(chunk)
1717 ofp.close()
1707 ofp.close()
1718 elapsed = time.time() - start
1708 elapsed = time.time() - start
1719 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1709 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1720 (util.bytecount(total_bytes), elapsed,
1710 (util.bytecount(total_bytes), elapsed,
1721 util.bytecount(total_bytes / elapsed)))
1711 util.bytecount(total_bytes / elapsed)))
1722 self.reload()
1712 self.reload()
1723 return len(self.heads()) + 1
1713 return len(self.heads()) + 1
1724
1714
1725 def clone(self, remote, heads=[], stream=False):
1715 def clone(self, remote, heads=[], stream=False):
1726 '''clone remote repository.
1716 '''clone remote repository.
1727
1717
1728 keyword arguments:
1718 keyword arguments:
1729 heads: list of revs to clone (forces use of pull)
1719 heads: list of revs to clone (forces use of pull)
1730 stream: use streaming clone if possible'''
1720 stream: use streaming clone if possible'''
1731
1721
1732 # now, all clients that can request uncompressed clones can
1722 # now, all clients that can request uncompressed clones can
1733 # read repo formats supported by all servers that can serve
1723 # read repo formats supported by all servers that can serve
1734 # them.
1724 # them.
1735
1725
1736 # if revlog format changes, client will have to check version
1726 # if revlog format changes, client will have to check version
1737 # and format flags on "stream" capability, and use
1727 # and format flags on "stream" capability, and use
1738 # uncompressed only if compatible.
1728 # uncompressed only if compatible.
1739
1729
1740 if stream and not heads and remote.capable('stream'):
1730 if stream and not heads and remote.capable('stream'):
1741 return self.stream_in(remote)
1731 return self.stream_in(remote)
1742 return self.pull(remote, heads)
1732 return self.pull(remote, heads)
1743
1733
1744 # used to avoid circular references so destructors work
1734 # used to avoid circular references so destructors work
1745 def aftertrans(base):
1735 def aftertrans(base):
1746 p = base
1736 p = base
1747 def a():
1737 def a():
1748 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1738 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1749 util.rename(os.path.join(p, "journal.dirstate"),
1739 util.rename(os.path.join(p, "journal.dirstate"),
1750 os.path.join(p, "undo.dirstate"))
1740 os.path.join(p, "undo.dirstate"))
1751 return a
1741 return a
1752
1742
1753 def instance(ui, path, create):
1743 def instance(ui, path, create):
1754 return localrepository(ui, util.drop_scheme('file', path), create)
1744 return localrepository(ui, util.drop_scheme('file', path), create)
1755
1745
1756 def islocal(path):
1746 def islocal(path):
1757 return True
1747 return True
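A note on the pruning logic above: prune_parents, prune_manifests and prune_filenodes all rely on the same observation -- once a node is known to be present on the recipient, every ancestor of that node can also be dropped from the "missing" set. The stand-alone sketch below (a toy parent table and a hypothetical prune_known helper, not part of localrepo.py) illustrates that walk under those assumptions:

# Illustrative sketch only -- a toy stand-in for the revlog parent
# lookups used by prune_parents() above; not part of the file.
nullid = None

def prune_known(parents, hasset, msngset):
    # parents: dict mapping node -> tuple of parent nodes (toy revlog)
    # hasset: nodes the recipient is known to have (dict used as a set)
    # msngset: candidate "missing" nodes, pruned in place
    stack = list(hasset)
    while stack:
        n = stack.pop()
        for p in parents.get(n, ()):
            if p != nullid and p not in hasset:
                hasset[p] = 1
                stack.append(p)
    for n in hasset:
        msngset.pop(n, None)

# toy usage: 'c' is known to be present, so 'c' and its ancestors
# 'a' and 'b' are pruned, leaving only 'd' as genuinely missing.
parents = {'b': ('a',), 'c': ('b',), 'd': ('c',)}
hasset = {'c': 1}
msngset = {'a': 1, 'b': 1, 'c': 1, 'd': 1}
prune_known(parents, hasset, msngset)
assert sorted(msngset.keys()) == ['d']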
@@ -1,334 +1,334 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "util os tempfile")
11 demandload(globals(), "util os tempfile")
12
12
13 def merge3(repo, fn, my, other, p1, p2):
13 def merge3(repo, fn, my, other, p1, p2):
14 """perform a 3-way merge in the working directory"""
14 """perform a 3-way merge in the working directory"""
15
15
16 def temp(prefix, node):
16 def temp(prefix, node):
17 pre = "%s~%s." % (os.path.basename(fn), prefix)
17 pre = "%s~%s." % (os.path.basename(fn), prefix)
18 (fd, name) = tempfile.mkstemp(prefix=pre)
18 (fd, name) = tempfile.mkstemp(prefix=pre)
19 f = os.fdopen(fd, "wb")
19 f = os.fdopen(fd, "wb")
20 repo.wwrite(fn, fl.read(node), f)
20 repo.wwrite(fn, fl.read(node), f)
21 f.close()
21 f.close()
22 return name
22 return name
23
23
24 fl = repo.file(fn)
24 fl = repo.file(fn)
25 base = fl.ancestor(my, other)
25 base = fl.ancestor(my, other)
26 a = repo.wjoin(fn)
26 a = repo.wjoin(fn)
27 b = temp("base", base)
27 b = temp("base", base)
28 c = temp("other", other)
28 c = temp("other", other)
29
29
30 repo.ui.note(_("resolving %s\n") % fn)
30 repo.ui.note(_("resolving %s\n") % fn)
31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
32 (fn, short(my), short(other), short(base)))
32 (fn, short(my), short(other), short(base)))
33
33
34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
35 or "hgmerge")
35 or "hgmerge")
36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
37 environ={'HG_FILE': fn,
37 environ={'HG_FILE': fn,
38 'HG_MY_NODE': p1,
38 'HG_MY_NODE': p1,
39 'HG_OTHER_NODE': p2,
39 'HG_OTHER_NODE': p2,
40 'HG_FILE_MY_NODE': hex(my),
40 'HG_FILE_MY_NODE': hex(my),
41 'HG_FILE_OTHER_NODE': hex(other),
41 'HG_FILE_OTHER_NODE': hex(other),
42 'HG_FILE_BASE_NODE': hex(base)})
42 'HG_FILE_BASE_NODE': hex(base)})
43 if r:
43 if r:
44 repo.ui.warn(_("merging %s failed!\n") % fn)
44 repo.ui.warn(_("merging %s failed!\n") % fn)
45
45
46 os.unlink(b)
46 os.unlink(b)
47 os.unlink(c)
47 os.unlink(c)
48 return r
48 return r
49
49
50 def update(repo, node, branchmerge=False, force=False, partial=None,
50 def update(repo, node, branchmerge=False, force=False, partial=None,
51 wlock=None, show_stats=True, remind=True):
51 wlock=None, show_stats=True, remind=True):
52
52
53 overwrite = force and not branchmerge
53 overwrite = force and not branchmerge
54 forcemerge = force and branchmerge
54 forcemerge = force and branchmerge
55
55
56 if not wlock:
56 if not wlock:
57 wlock = repo.wlock()
57 wlock = repo.wlock()
58
58
59 ### check phase
59 ### check phase
60
60
61 pl = repo.dirstate.parents()
61 pl = repo.dirstate.parents()
62 if not overwrite and pl[1] != nullid:
62 if not overwrite and pl[1] != nullid:
63 raise util.Abort(_("outstanding uncommitted merges"))
63 raise util.Abort(_("outstanding uncommitted merges"))
64
64
65 p1, p2 = pl[0], node
65 p1, p2 = pl[0], node
66 pa = repo.changelog.ancestor(p1, p2)
66 pa = repo.changelog.ancestor(p1, p2)
67
67
68 # is there a linear path from p1 to p2?
68 # is there a linear path from p1 to p2?
69 linear_path = (pa == p1 or pa == p2)
69 linear_path = (pa == p1 or pa == p2)
70 if branchmerge and linear_path:
70 if branchmerge and linear_path:
71 raise util.Abort(_("there is nothing to merge, just use "
71 raise util.Abort(_("there is nothing to merge, just use "
72 "'hg update' or look at 'hg heads'"))
72 "'hg update' or look at 'hg heads'"))
73
73
74 if not overwrite and not linear_path and not branchmerge:
74 if not overwrite and not linear_path and not branchmerge:
75 raise util.Abort(_("update spans branches, use 'hg merge' "
75 raise util.Abort(_("update spans branches, use 'hg merge' "
76 "or 'hg update -C' to lose changes"))
76 "or 'hg update -C' to lose changes"))
77
77
78 modified, added, removed, deleted, unknown = repo.changes()
78 modified, added, removed, deleted, unknown = repo.status()[:5]
79 if branchmerge and not forcemerge:
79 if branchmerge and not forcemerge:
80 if modified or added or removed:
80 if modified or added or removed:
81 raise util.Abort(_("outstanding uncommitted changes"))
81 raise util.Abort(_("outstanding uncommitted changes"))
82
82
83 m1n = repo.changelog.read(p1)[0]
83 m1n = repo.changelog.read(p1)[0]
84 m2n = repo.changelog.read(p2)[0]
84 m2n = repo.changelog.read(p2)[0]
85 man = repo.manifest.ancestor(m1n, m2n)
85 man = repo.manifest.ancestor(m1n, m2n)
86 m1 = repo.manifest.read(m1n)
86 m1 = repo.manifest.read(m1n)
87 m2 = repo.manifest.read(m2n).copy()
87 m2 = repo.manifest.read(m2n).copy()
88 ma = repo.manifest.read(man)
88 ma = repo.manifest.read(man)
89
89
90 if not forcemerge and not overwrite:
90 if not forcemerge and not overwrite:
91 for f in unknown:
91 for f in unknown:
92 if f in m2:
92 if f in m2:
93 t1 = repo.wread(f)
93 t1 = repo.wread(f)
94 t2 = repo.file(f).read(m2[f])
94 t2 = repo.file(f).read(m2[f])
95 if cmp(t1, t2) != 0:
95 if cmp(t1, t2) != 0:
96 raise util.Abort(_("'%s' already exists in the working"
96 raise util.Abort(_("'%s' already exists in the working"
97 " dir and differs from remote") % f)
97 " dir and differs from remote") % f)
98
98
99 # resolve the manifest to determine which files
99 # resolve the manifest to determine which files
100 # we care about merging
100 # we care about merging
101 repo.ui.note(_("resolving manifests\n"))
101 repo.ui.note(_("resolving manifests\n"))
102 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
102 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
103 (overwrite, branchmerge, partial and True or False, linear_path))
103 (overwrite, branchmerge, partial and True or False, linear_path))
104 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
104 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
105 (short(man), short(m1n), short(m2n)))
105 (short(man), short(m1n), short(m2n)))
106
106
107 merge = {}
107 merge = {}
108 get = {}
108 get = {}
109 remove = []
109 remove = []
110
110
111 # construct a working dir manifest
111 # construct a working dir manifest
112 mw = m1.copy()
112 mw = m1.copy()
113 umap = dict.fromkeys(unknown)
113 umap = dict.fromkeys(unknown)
114
114
115 for f in added + modified + unknown:
115 for f in added + modified + unknown:
116 mw[f] = ""
116 mw[f] = ""
117 mw.set(f, util.is_exec(repo.wjoin(f), mw.execf(f)))
117 mw.set(f, util.is_exec(repo.wjoin(f), mw.execf(f)))
118
118
119 for f in deleted + removed:
119 for f in deleted + removed:
120 if f in mw:
120 if f in mw:
121 del mw[f]
121 del mw[f]
122
122
123 # If we're jumping between revisions (as opposed to merging),
123 # If we're jumping between revisions (as opposed to merging),
124 # and if neither the working directory nor the target rev has
124 # and if neither the working directory nor the target rev has
125 # the file, then we need to remove it from the dirstate, to
125 # the file, then we need to remove it from the dirstate, to
126 # prevent the dirstate from listing the file when it is no
126 # prevent the dirstate from listing the file when it is no
127 # longer in the manifest.
127 # longer in the manifest.
128 if not partial and linear_path and f not in m2:
128 if not partial and linear_path and f not in m2:
129 repo.dirstate.forget((f,))
129 repo.dirstate.forget((f,))
130
130
131 # Compare manifests
131 # Compare manifests
132 for f, n in mw.iteritems():
132 for f, n in mw.iteritems():
133 if partial and not partial(f):
133 if partial and not partial(f):
134 continue
134 continue
135 if f in m2:
135 if f in m2:
136 s = 0
136 s = 0
137
137
138 # is the wfile new since m1, and match m2?
138 # is the wfile new since m1, and match m2?
139 if f not in m1:
139 if f not in m1:
140 t1 = repo.wread(f)
140 t1 = repo.wread(f)
141 t2 = repo.file(f).read(m2[f])
141 t2 = repo.file(f).read(m2[f])
142 if cmp(t1, t2) == 0:
142 if cmp(t1, t2) == 0:
143 n = m2[f]
143 n = m2[f]
144 del t1, t2
144 del t1, t2
145
145
146 # are files different?
146 # are files different?
147 if n != m2[f]:
147 if n != m2[f]:
148 a = ma.get(f, nullid)
148 a = ma.get(f, nullid)
149 # are both different from the ancestor?
149 # are both different from the ancestor?
150 if n != a and m2[f] != a:
150 if n != a and m2[f] != a:
151 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
151 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
152 # merge executable bits
152 # merge executable bits
153 # "if we changed or they changed, change in merge"
153 # "if we changed or they changed, change in merge"
154 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
154 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
155 mode = ((a^b) | (a^c)) ^ a
155 mode = ((a^b) | (a^c)) ^ a
156 merge[f] = (mode, m1.get(f, nullid), m2[f])
156 merge[f] = (mode, m1.get(f, nullid), m2[f])
157 s = 1
157 s = 1
158 # are we clobbering?
158 # are we clobbering?
159 # is remote's version newer?
159 # is remote's version newer?
160 # or are we going back in time?
160 # or are we going back in time?
161 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
161 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
162 repo.ui.debug(_(" remote %s is newer, get\n") % f)
162 repo.ui.debug(_(" remote %s is newer, get\n") % f)
163 get[f] = (m2.execf(f), m2[f])
163 get[f] = (m2.execf(f), m2[f])
164 s = 1
164 s = 1
165 elif f in umap or f in added:
165 elif f in umap or f in added:
166 # this unknown file is the same as the checkout
166 # this unknown file is the same as the checkout
167 # we need to reset the dirstate if the file was added
167 # we need to reset the dirstate if the file was added
168 get[f] = (m2.execf(f), m2[f])
168 get[f] = (m2.execf(f), m2[f])
169
169
170 if not s and mw.execf(f) != m2.execf(f):
170 if not s and mw.execf(f) != m2.execf(f):
171 if overwrite:
171 if overwrite:
172 repo.ui.debug(_(" updating permissions for %s\n") % f)
172 repo.ui.debug(_(" updating permissions for %s\n") % f)
173 util.set_exec(repo.wjoin(f), m2.execf(f))
173 util.set_exec(repo.wjoin(f), m2.execf(f))
174 else:
174 else:
175 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
175 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
176 mode = ((a^b) | (a^c)) ^ a
176 mode = ((a^b) | (a^c)) ^ a
177 if mode != b:
177 if mode != b:
178 repo.ui.debug(_(" updating permissions for %s\n")
178 repo.ui.debug(_(" updating permissions for %s\n")
179 % f)
179 % f)
180 util.set_exec(repo.wjoin(f), mode)
180 util.set_exec(repo.wjoin(f), mode)
181 del m2[f]
181 del m2[f]
182 elif f in ma:
182 elif f in ma:
183 if n != ma[f]:
183 if n != ma[f]:
184 r = _("d")
184 r = _("d")
185 if not overwrite and (linear_path or branchmerge):
185 if not overwrite and (linear_path or branchmerge):
186 r = repo.ui.prompt(
186 r = repo.ui.prompt(
187 (_(" local changed %s which remote deleted\n") % f) +
187 (_(" local changed %s which remote deleted\n") % f) +
188 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
188 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
189 if r == _("d"):
189 if r == _("d"):
190 remove.append(f)
190 remove.append(f)
191 else:
191 else:
192 repo.ui.debug(_("other deleted %s\n") % f)
192 repo.ui.debug(_("other deleted %s\n") % f)
193 remove.append(f) # other deleted it
193 remove.append(f) # other deleted it
194 else:
194 else:
195 # file is created on branch or in working directory
195 # file is created on branch or in working directory
196 if overwrite and f not in umap:
196 if overwrite and f not in umap:
197 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
197 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
198 remove.append(f)
198 remove.append(f)
199 elif n == m1.get(f, nullid): # same as parent
199 elif n == m1.get(f, nullid): # same as parent
200 if p2 == pa: # going backwards?
200 if p2 == pa: # going backwards?
201 repo.ui.debug(_("remote deleted %s\n") % f)
201 repo.ui.debug(_("remote deleted %s\n") % f)
202 remove.append(f)
202 remove.append(f)
203 else:
203 else:
204 repo.ui.debug(_("local modified %s, keeping\n") % f)
204 repo.ui.debug(_("local modified %s, keeping\n") % f)
205 else:
205 else:
206 repo.ui.debug(_("working dir created %s, keeping\n") % f)
206 repo.ui.debug(_("working dir created %s, keeping\n") % f)
207
207
208 for f, n in m2.iteritems():
208 for f, n in m2.iteritems():
209 if partial and not partial(f):
209 if partial and not partial(f):
210 continue
210 continue
211 if f[0] == "/":
211 if f[0] == "/":
212 continue
212 continue
213 if f in ma and n != ma[f]:
213 if f in ma and n != ma[f]:
214 r = _("k")
214 r = _("k")
215 if not overwrite and (linear_path or branchmerge):
215 if not overwrite and (linear_path or branchmerge):
216 r = repo.ui.prompt(
216 r = repo.ui.prompt(
217 (_("remote changed %s which local deleted\n") % f) +
217 (_("remote changed %s which local deleted\n") % f) +
218 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
218 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
219 if r == _("k"):
219 if r == _("k"):
220 get[f] = (m2.execf(f), n)
220 get[f] = (m2.execf(f), n)
221 elif f not in ma:
221 elif f not in ma:
222 repo.ui.debug(_("remote created %s\n") % f)
222 repo.ui.debug(_("remote created %s\n") % f)
223 get[f] = (m2.execf(f), n)
223 get[f] = (m2.execf(f), n)
224 else:
224 else:
225 if overwrite or p2 == pa: # going backwards?
225 if overwrite or p2 == pa: # going backwards?
226 repo.ui.debug(_("local deleted %s, recreating\n") % f)
226 repo.ui.debug(_("local deleted %s, recreating\n") % f)
227 get[f] = (m2.execf(f), n)
227 get[f] = (m2.execf(f), n)
228 else:
228 else:
229 repo.ui.debug(_("local deleted %s\n") % f)
229 repo.ui.debug(_("local deleted %s\n") % f)
230
230
231 del mw, m1, m2, ma
231 del mw, m1, m2, ma
232
232
233 if overwrite:
233 if overwrite:
234 for f in merge:
234 for f in merge:
235 get[f] = merge[f][:2]
235 get[f] = merge[f][:2]
236 merge = {}
236 merge = {}
237
237
238 if linear_path or overwrite:
238 if linear_path or overwrite:
239 # we don't need to do any magic, just jump to the new rev
239 # we don't need to do any magic, just jump to the new rev
240 p1, p2 = p2, nullid
240 p1, p2 = p2, nullid
241
241
242 xp1 = hex(p1)
242 xp1 = hex(p1)
243 xp2 = hex(p2)
243 xp2 = hex(p2)
244 if p2 == nullid: xxp2 = ''
244 if p2 == nullid: xxp2 = ''
245 else: xxp2 = xp2
245 else: xxp2 = xp2
246
246
247 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
247 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
248
248
249 # get the files we don't need to change
249 # get the files we don't need to change
250 files = get.keys()
250 files = get.keys()
251 files.sort()
251 files.sort()
252 for f in files:
252 for f in files:
253 flag, node = get[f]
253 flag, node = get[f]
254 if f[0] == "/":
254 if f[0] == "/":
255 continue
255 continue
256 repo.ui.note(_("getting %s\n") % f)
256 repo.ui.note(_("getting %s\n") % f)
257 t = repo.file(f).read(node)
257 t = repo.file(f).read(node)
258 repo.wwrite(f, t)
258 repo.wwrite(f, t)
259 util.set_exec(repo.wjoin(f), flag)
259 util.set_exec(repo.wjoin(f), flag)
260 if not partial:
260 if not partial:
261 if branchmerge:
261 if branchmerge:
262 repo.dirstate.update([f], 'n', st_mtime=-1)
262 repo.dirstate.update([f], 'n', st_mtime=-1)
263 else:
263 else:
264 repo.dirstate.update([f], 'n')
264 repo.dirstate.update([f], 'n')
265
265
266 # merge the tricky bits
266 # merge the tricky bits
267 unresolved = []
267 unresolved = []
268 files = merge.keys()
268 files = merge.keys()
269 files.sort()
269 files.sort()
270 for f in files:
270 for f in files:
271 repo.ui.status(_("merging %s\n") % f)
271 repo.ui.status(_("merging %s\n") % f)
272 flag, my, other = merge[f]
272 flag, my, other = merge[f]
273 ret = merge3(repo, f, my, other, xp1, xp2)
273 ret = merge3(repo, f, my, other, xp1, xp2)
274 if ret:
274 if ret:
275 unresolved.append(f)
275 unresolved.append(f)
276 util.set_exec(repo.wjoin(f), flag)
276 util.set_exec(repo.wjoin(f), flag)
277 if not partial:
277 if not partial:
278 if branchmerge:
278 if branchmerge:
279 # We've done a branch merge, mark this file as merged
279 # We've done a branch merge, mark this file as merged
280 # so that we properly record the merger later
280 # so that we properly record the merger later
281 repo.dirstate.update([f], 'm')
281 repo.dirstate.update([f], 'm')
282 else:
282 else:
283 # We've update-merged a locally modified file, so
283 # We've update-merged a locally modified file, so
284 # we set the dirstate to emulate a normal checkout
284 # we set the dirstate to emulate a normal checkout
285 # of that file some time in the past. Thus our
285 # of that file some time in the past. Thus our
286 # merge will appear as a normal local file
286 # merge will appear as a normal local file
287 # modification.
287 # modification.
288 f_len = len(repo.file(f).read(other))
288 f_len = len(repo.file(f).read(other))
289 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
289 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
290
290
291 remove.sort()
291 remove.sort()
292 for f in remove:
292 for f in remove:
293 repo.ui.note(_("removing %s\n") % f)
293 repo.ui.note(_("removing %s\n") % f)
294 util.audit_path(f)
294 util.audit_path(f)
295 try:
295 try:
296 util.unlink(repo.wjoin(f))
296 util.unlink(repo.wjoin(f))
297 except OSError, inst:
297 except OSError, inst:
298 if inst.errno != errno.ENOENT:
298 if inst.errno != errno.ENOENT:
299 repo.ui.warn(_("update failed to remove %s: %s!\n") %
299 repo.ui.warn(_("update failed to remove %s: %s!\n") %
300 (f, inst.strerror))
300 (f, inst.strerror))
301 if not partial:
301 if not partial:
302 if branchmerge:
302 if branchmerge:
303 repo.dirstate.update(remove, 'r')
303 repo.dirstate.update(remove, 'r')
304 else:
304 else:
305 repo.dirstate.forget(remove)
305 repo.dirstate.forget(remove)
306
306
307 if not partial:
307 if not partial:
308 repo.dirstate.setparents(p1, p2)
308 repo.dirstate.setparents(p1, p2)
309
309
310 if show_stats:
310 if show_stats:
311 stats = ((len(get), _("updated")),
311 stats = ((len(get), _("updated")),
312 (len(merge) - len(unresolved), _("merged")),
312 (len(merge) - len(unresolved), _("merged")),
313 (len(remove), _("removed")),
313 (len(remove), _("removed")),
314 (len(unresolved), _("unresolved")))
314 (len(unresolved), _("unresolved")))
315 note = ", ".join([_("%d files %s") % s for s in stats])
315 note = ", ".join([_("%d files %s") % s for s in stats])
316 repo.ui.status("%s\n" % note)
316 repo.ui.status("%s\n" % note)
317 if not partial:
317 if not partial:
318 if branchmerge:
318 if branchmerge:
319 if unresolved:
319 if unresolved:
320 repo.ui.status(_("There are unresolved merges,"
320 repo.ui.status(_("There are unresolved merges,"
321 " you can redo the full merge using:\n"
321 " you can redo the full merge using:\n"
322 " hg update -C %s\n"
322 " hg update -C %s\n"
323 " hg merge %s\n"
323 " hg merge %s\n"
324 % (repo.changelog.rev(p1),
324 % (repo.changelog.rev(p1),
325 repo.changelog.rev(p2))))
325 repo.changelog.rev(p2))))
326 elif remind:
326 elif remind:
327 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
327 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
328 elif unresolved:
328 elif unresolved:
329 repo.ui.status(_("There are unresolved merges with"
329 repo.ui.status(_("There are unresolved merges with"
330 " locally modified files.\n"))
330 " locally modified files.\n"))
331
331
332 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
332 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
333 return len(unresolved)
333 return len(unresolved)
334
334
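The executable-bit rule used in the manifest comparison above, mode = ((a^b) | (a^c)) ^ a, encodes "if we changed it or they changed it, change it in the merge". The following quick check of that identity over all eight input combinations is illustrative only (the merged_exec helper is hypothetical, not part of merge.py):

# Illustrative check of the exec-bit merge rule quoted from merge.py;
# a = ancestor's exec bit, b = local (working dir), c = remote.
def merged_exec(a, b, c):
    return ((a ^ b) | (a ^ c)) ^ a

for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            if b != a:
                expected = b    # we changed it, our change wins
            else:
                expected = c    # unchanged locally, take the remote bit
            assert merged_exec(a, b, c) == expected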
@@ -1,366 +1,366 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from node import *
10 from node import *
11 demandload(globals(), "cmdutil mdiff util")
11 demandload(globals(), "cmdutil mdiff util")
12 demandload(globals(), "cStringIO email.Parser os re shutil sys tempfile")
12 demandload(globals(), "cStringIO email.Parser os re shutil sys tempfile")
13
13
14 def extract(ui, fileobj):
14 def extract(ui, fileobj):
15 '''extract patch from data read from fileobj.
15 '''extract patch from data read from fileobj.
16
16
17 patch can be normal patch or contained in email message.
17 patch can be normal patch or contained in email message.
18
18
19 return tuple (filename, message, user, date). any item in returned
19 return tuple (filename, message, user, date). any item in returned
20 tuple can be None. if filename is None, fileobj did not contain
20 tuple can be None. if filename is None, fileobj did not contain
21 patch. caller must unlink filename when done.'''
21 patch. caller must unlink filename when done.'''
22
22
23 # attempt to detect the start of a patch
23 # attempt to detect the start of a patch
24 # (this heuristic is borrowed from quilt)
24 # (this heuristic is borrowed from quilt)
25 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
25 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
26 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
26 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
27 '(---|\*\*\*)[ \t])', re.MULTILINE)
27 '(---|\*\*\*)[ \t])', re.MULTILINE)
28
28
29 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
29 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
30 tmpfp = os.fdopen(fd, 'w')
30 tmpfp = os.fdopen(fd, 'w')
31 try:
31 try:
32 hgpatch = False
32 hgpatch = False
33
33
34 msg = email.Parser.Parser().parse(fileobj)
34 msg = email.Parser.Parser().parse(fileobj)
35
35
36 message = msg['Subject']
36 message = msg['Subject']
37 user = msg['From']
37 user = msg['From']
38 # should try to parse msg['Date']
38 # should try to parse msg['Date']
39 date = None
39 date = None
40
40
41 if message:
41 if message:
42 message = message.replace('\n\t', ' ')
42 message = message.replace('\n\t', ' ')
43 ui.debug('Subject: %s\n' % message)
43 ui.debug('Subject: %s\n' % message)
44 if user:
44 if user:
45 ui.debug('From: %s\n' % user)
45 ui.debug('From: %s\n' % user)
46 diffs_seen = 0
46 diffs_seen = 0
47 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
47 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
48
48
49 for part in msg.walk():
49 for part in msg.walk():
50 content_type = part.get_content_type()
50 content_type = part.get_content_type()
51 ui.debug('Content-Type: %s\n' % content_type)
51 ui.debug('Content-Type: %s\n' % content_type)
52 if content_type not in ok_types:
52 if content_type not in ok_types:
53 continue
53 continue
54 payload = part.get_payload(decode=True)
54 payload = part.get_payload(decode=True)
55 m = diffre.search(payload)
55 m = diffre.search(payload)
56 if m:
56 if m:
57 ui.debug(_('found patch at byte %d\n') % m.start(0))
57 ui.debug(_('found patch at byte %d\n') % m.start(0))
58 diffs_seen += 1
58 diffs_seen += 1
59 cfp = cStringIO.StringIO()
59 cfp = cStringIO.StringIO()
60 if message:
60 if message:
61 cfp.write(message)
61 cfp.write(message)
62 cfp.write('\n')
62 cfp.write('\n')
63 for line in payload[:m.start(0)].splitlines():
63 for line in payload[:m.start(0)].splitlines():
64 if line.startswith('# HG changeset patch'):
64 if line.startswith('# HG changeset patch'):
65 ui.debug(_('patch generated by hg export\n'))
65 ui.debug(_('patch generated by hg export\n'))
66 hgpatch = True
66 hgpatch = True
67 # drop earlier commit message content
67 # drop earlier commit message content
68 cfp.seek(0)
68 cfp.seek(0)
69 cfp.truncate()
69 cfp.truncate()
70 elif hgpatch:
70 elif hgpatch:
71 if line.startswith('# User '):
71 if line.startswith('# User '):
72 user = line[7:]
72 user = line[7:]
73 ui.debug('From: %s\n' % user)
73 ui.debug('From: %s\n' % user)
74 elif line.startswith("# Date "):
74 elif line.startswith("# Date "):
75 date = line[7:]
75 date = line[7:]
76 if not line.startswith('# '):
76 if not line.startswith('# '):
77 cfp.write(line)
77 cfp.write(line)
78 cfp.write('\n')
78 cfp.write('\n')
79 message = cfp.getvalue()
79 message = cfp.getvalue()
80 if tmpfp:
80 if tmpfp:
81 tmpfp.write(payload)
81 tmpfp.write(payload)
82 if not payload.endswith('\n'):
82 if not payload.endswith('\n'):
83 tmpfp.write('\n')
83 tmpfp.write('\n')
84 elif not diffs_seen and message and content_type == 'text/plain':
84 elif not diffs_seen and message and content_type == 'text/plain':
85 message += '\n' + payload
85 message += '\n' + payload
86 except:
86 except:
87 tmpfp.close()
87 tmpfp.close()
88 os.unlink(tmpname)
88 os.unlink(tmpname)
89 raise
89 raise
90
90
91 tmpfp.close()
91 tmpfp.close()
92 if not diffs_seen:
92 if not diffs_seen:
93 os.unlink(tmpname)
93 os.unlink(tmpname)
94 return None, message, user, date
94 return None, message, user, date
95 return tmpname, message, user, date
95 return tmpname, message, user, date
96
96
97 def readgitpatch(patchname):
97 def readgitpatch(patchname):
98 """extract git-style metadata about patches from <patchname>"""
98 """extract git-style metadata about patches from <patchname>"""
99 class gitpatch:
99 class gitpatch:
100 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
100 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
101 def __init__(self, path):
101 def __init__(self, path):
102 self.path = path
102 self.path = path
103 self.oldpath = None
103 self.oldpath = None
104 self.mode = None
104 self.mode = None
105 self.op = 'MODIFY'
105 self.op = 'MODIFY'
106 self.copymod = False
106 self.copymod = False
107 self.lineno = 0
107 self.lineno = 0
108
108
109 # Filter patch for git information
109 # Filter patch for git information
110 gitre = re.compile('diff --git a/(.*) b/(.*)')
110 gitre = re.compile('diff --git a/(.*) b/(.*)')
111 pf = file(patchname)
111 pf = file(patchname)
112 gp = None
112 gp = None
113 gitpatches = []
113 gitpatches = []
114 # Can have a git patch with only metadata, causing patch to complain
114 # Can have a git patch with only metadata, causing patch to complain
115 dopatch = False
115 dopatch = False
116
116
117 lineno = 0
117 lineno = 0
118 for line in pf:
118 for line in pf:
119 lineno += 1
119 lineno += 1
120 if line.startswith('diff --git'):
120 if line.startswith('diff --git'):
121 m = gitre.match(line)
121 m = gitre.match(line)
122 if m:
122 if m:
123 if gp:
123 if gp:
124 gitpatches.append(gp)
124 gitpatches.append(gp)
125 src, dst = m.group(1,2)
125 src, dst = m.group(1,2)
126 gp = gitpatch(dst)
126 gp = gitpatch(dst)
127 gp.lineno = lineno
127 gp.lineno = lineno
128 elif gp:
128 elif gp:
129 if line.startswith('--- '):
129 if line.startswith('--- '):
130 if gp.op in ('COPY', 'RENAME'):
130 if gp.op in ('COPY', 'RENAME'):
131 gp.copymod = True
131 gp.copymod = True
132 dopatch = 'filter'
132 dopatch = 'filter'
133 gitpatches.append(gp)
133 gitpatches.append(gp)
134 gp = None
134 gp = None
135 if not dopatch:
135 if not dopatch:
136 dopatch = True
136 dopatch = True
137 continue
137 continue
138 if line.startswith('rename from '):
138 if line.startswith('rename from '):
139 gp.op = 'RENAME'
139 gp.op = 'RENAME'
140 gp.oldpath = line[12:].rstrip()
140 gp.oldpath = line[12:].rstrip()
141 elif line.startswith('rename to '):
141 elif line.startswith('rename to '):
142 gp.path = line[10:].rstrip()
142 gp.path = line[10:].rstrip()
143 elif line.startswith('copy from '):
143 elif line.startswith('copy from '):
144 gp.op = 'COPY'
144 gp.op = 'COPY'
145 gp.oldpath = line[10:].rstrip()
145 gp.oldpath = line[10:].rstrip()
146 elif line.startswith('copy to '):
146 elif line.startswith('copy to '):
147 gp.path = line[8:].rstrip()
147 gp.path = line[8:].rstrip()
148 elif line.startswith('deleted file'):
148 elif line.startswith('deleted file'):
149 gp.op = 'DELETE'
149 gp.op = 'DELETE'
150 elif line.startswith('new file mode '):
150 elif line.startswith('new file mode '):
151 gp.op = 'ADD'
151 gp.op = 'ADD'
152 gp.mode = int(line.rstrip()[-3:], 8)
152 gp.mode = int(line.rstrip()[-3:], 8)
153 elif line.startswith('new mode '):
153 elif line.startswith('new mode '):
154 gp.mode = int(line.rstrip()[-3:], 8)
154 gp.mode = int(line.rstrip()[-3:], 8)
155 if gp:
155 if gp:
156 gitpatches.append(gp)
156 gitpatches.append(gp)
157
157
158 if not gitpatches:
158 if not gitpatches:
159 dopatch = True
159 dopatch = True
160
160
161 return (dopatch, gitpatches)
161 return (dopatch, gitpatches)
162
162
163 def dogitpatch(patchname, gitpatches):
163 def dogitpatch(patchname, gitpatches):
164 """Preprocess git patch so that vanilla patch can handle it"""
164 """Preprocess git patch so that vanilla patch can handle it"""
165 pf = file(patchname)
165 pf = file(patchname)
166 pfline = 1
166 pfline = 1
167
167
168 fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
168 fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
169 tmpfp = os.fdopen(fd, 'w')
169 tmpfp = os.fdopen(fd, 'w')
170
170
171 try:
171 try:
172 for i in range(len(gitpatches)):
172 for i in range(len(gitpatches)):
173 p = gitpatches[i]
173 p = gitpatches[i]
174 if not p.copymod:
174 if not p.copymod:
175 continue
175 continue
176
176
177 if os.path.exists(p.path):
177 if os.path.exists(p.path):
178 raise util.Abort(_("cannot create %s: destination already exists") %
178 raise util.Abort(_("cannot create %s: destination already exists") %
179 p.path)
179 p.path)
180
180
181 (src, dst) = [os.path.join(os.getcwd(), n)
181 (src, dst) = [os.path.join(os.getcwd(), n)
182 for n in (p.oldpath, p.path)]
182 for n in (p.oldpath, p.path)]
183
183
184 targetdir = os.path.dirname(dst)
184 targetdir = os.path.dirname(dst)
185 if not os.path.isdir(targetdir):
185 if not os.path.isdir(targetdir):
186 os.makedirs(targetdir)
186 os.makedirs(targetdir)
187 try:
187 try:
188 shutil.copyfile(src, dst)
188 shutil.copyfile(src, dst)
189 shutil.copymode(src, dst)
189 shutil.copymode(src, dst)
190 except shutil.Error, inst:
190 except shutil.Error, inst:
191 raise util.Abort(str(inst))
191 raise util.Abort(str(inst))
192
192
193 # rewrite patch hunk
193 # rewrite patch hunk
194 while pfline < p.lineno:
194 while pfline < p.lineno:
195 tmpfp.write(pf.readline())
195 tmpfp.write(pf.readline())
196 pfline += 1
196 pfline += 1
197 tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
197 tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
198 line = pf.readline()
198 line = pf.readline()
199 pfline += 1
199 pfline += 1
200 while not line.startswith('--- a/'):
200 while not line.startswith('--- a/'):
201 tmpfp.write(line)
201 tmpfp.write(line)
202 line = pf.readline()
202 line = pf.readline()
203 pfline += 1
203 pfline += 1
204 tmpfp.write('--- a/%s\n' % p.path)
204 tmpfp.write('--- a/%s\n' % p.path)
205
205
206 line = pf.readline()
206 line = pf.readline()
207 while line:
207 while line:
208 tmpfp.write(line)
208 tmpfp.write(line)
209 line = pf.readline()
209 line = pf.readline()
210 except:
210 except:
211 tmpfp.close()
211 tmpfp.close()
212 os.unlink(patchname)
212 os.unlink(patchname)
213 raise
213 raise
214
214
215 tmpfp.close()
215 tmpfp.close()
216 return patchname
216 return patchname
217
217
218 def patch(strip, patchname, ui, cwd=None):
218 def patch(strip, patchname, ui, cwd=None):
219 """apply the patch <patchname> to the working directory.
219 """apply the patch <patchname> to the working directory.
220 a dict of patched files is returned"""
220 a dict of patched files is returned"""
221
221
222 (dopatch, gitpatches) = readgitpatch(patchname)
222 (dopatch, gitpatches) = readgitpatch(patchname)
223
223
224 files = {}
224 files = {}
225 if dopatch:
225 if dopatch:
226 if dopatch == 'filter':
226 if dopatch == 'filter':
227 patchname = dogitpatch(patchname, gitpatches)
227 patchname = dogitpatch(patchname, gitpatches)
228 patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
228 patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
229 args = []
229 args = []
230 if cwd:
230 if cwd:
231 args.append('-d %s' % util.shellquote(cwd))
231 args.append('-d %s' % util.shellquote(cwd))
232 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
232 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
233 util.shellquote(patchname)))
233 util.shellquote(patchname)))
234
234
235 if dopatch == 'filter':
235 if dopatch == 'filter':
236 False and os.unlink(patchname)
236 False and os.unlink(patchname)
237
237
238 for line in fp:
238 for line in fp:
239 line = line.rstrip()
239 line = line.rstrip()
240 ui.status("%s\n" % line)
240 ui.status("%s\n" % line)
241 if line.startswith('patching file '):
241 if line.startswith('patching file '):
242 pf = util.parse_patch_output(line)
242 pf = util.parse_patch_output(line)
243 files.setdefault(pf, (None, None))
243 files.setdefault(pf, (None, None))
244 code = fp.close()
244 code = fp.close()
245 if code:
245 if code:
246 raise util.Abort(_("patch command failed: %s") %
246 raise util.Abort(_("patch command failed: %s") %
247 util.explain_exit(code)[0])
247 util.explain_exit(code)[0])
248
248
249 for gp in gitpatches:
249 for gp in gitpatches:
250 files[gp.path] = (gp.op, gp)
250 files[gp.path] = (gp.op, gp)
251
251
252 return files
252 return files
253
253
254 def diff(repo, node1=None, node2=None, files=None, match=util.always,
254 def diff(repo, node1=None, node2=None, files=None, match=util.always,
255 fp=None, changes=None, opts=None):
255 fp=None, changes=None, opts=None):
256 '''print diff of changes to files between two nodes, or node and
256 '''print diff of changes to files between two nodes, or node and
257 working directory.
257 working directory.
258
258
259 if node1 is None, use first dirstate parent instead.
259 if node1 is None, use first dirstate parent instead.
260 if node2 is None, compare node1 with working directory.'''
260 if node2 is None, compare node1 with working directory.'''
261
261
262 if opts is None:
262 if opts is None:
263 opts = mdiff.defaultopts
263 opts = mdiff.defaultopts
264 if fp is None:
264 if fp is None:
265 fp = repo.ui
265 fp = repo.ui
266
266
267 if not node1:
267 if not node1:
268 node1 = repo.dirstate.parents()[0]
268 node1 = repo.dirstate.parents()[0]
269 # reading the data for node1 early allows it to play nicely
269 # reading the data for node1 early allows it to play nicely
270 # with repo.changes and the revlog cache.
270 # with repo.status and the revlog cache.
271 change = repo.changelog.read(node1)
271 change = repo.changelog.read(node1)
272 mmap = repo.manifest.read(change[0])
272 mmap = repo.manifest.read(change[0])
273 date1 = util.datestr(change[2])
273 date1 = util.datestr(change[2])
274
274
275 if not changes:
275 if not changes:
276 changes = repo.changes(node1, node2, files, match=match)
276 changes = repo.status(node1, node2, files, match=match)[:5]
277 modified, added, removed, deleted, unknown = changes
277 modified, added, removed, deleted, unknown = changes
278 if files:
278 if files:
279 def filterfiles(filters):
279 def filterfiles(filters):
280 l = [x for x in files if x in filters]
280 l = [x for x in files if x in filters]
281
281
282 for t in filters:
282 for t in filters:
283 if t and t[-1] != "/":
283 if t and t[-1] != "/":
284 t += "/"
284 t += "/"
285 l += [x for x in files if x.startswith(t)]
285 l += [x for x in files if x.startswith(t)]
286 return l
286 return l
287
287
288 modified, added, removed = map(lambda x: filterfiles(x),
288 modified, added, removed = map(lambda x: filterfiles(x),
289 (modified, added, removed))
289 (modified, added, removed))
290
290
291 if not modified and not added and not removed:
291 if not modified and not added and not removed:
292 return
292 return
293
293
294 if node2:
294 if node2:
295 change = repo.changelog.read(node2)
295 change = repo.changelog.read(node2)
296 mmap2 = repo.manifest.read(change[0])
296 mmap2 = repo.manifest.read(change[0])
297 _date2 = util.datestr(change[2])
297 _date2 = util.datestr(change[2])
298 def date2(f):
298 def date2(f):
299 return _date2
299 return _date2
300 def read(f):
300 def read(f):
301 return repo.file(f).read(mmap2[f])
301 return repo.file(f).read(mmap2[f])
302 else:
302 else:
303 tz = util.makedate()[1]
303 tz = util.makedate()[1]
304 _date2 = util.datestr()
304 _date2 = util.datestr()
305 def date2(f):
305 def date2(f):
306 try:
306 try:
307 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
307 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
308 except OSError, err:
308 except OSError, err:
309 if err.errno != errno.ENOENT: raise
309 if err.errno != errno.ENOENT: raise
310 return _date2
310 return _date2
311 def read(f):
311 def read(f):
312 return repo.wread(f)
312 return repo.wread(f)
313
313
314 if repo.ui.quiet:
314 if repo.ui.quiet:
315 r = None
315 r = None
316 else:
316 else:
317 hexfunc = repo.ui.verbose and hex or short
317 hexfunc = repo.ui.verbose and hex or short
318 r = [hexfunc(node) for node in [node1, node2] if node]
318 r = [hexfunc(node) for node in [node1, node2] if node]
319
319
320 all = modified + added + removed
320 all = modified + added + removed
321 all.sort()
321 all.sort()
322 for f in all:
322 for f in all:
323 to = None
323 to = None
324 tn = None
324 tn = None
325 if f in mmap:
325 if f in mmap:
326 to = repo.file(f).read(mmap[f])
326 to = repo.file(f).read(mmap[f])
327 if f not in removed:
327 if f not in removed:
328 tn = read(f)
328 tn = read(f)
329 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts))
329 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts))
330
330
331 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
331 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
332 opts=None):
332 opts=None):
333 '''export changesets as hg patches.'''
333 '''export changesets as hg patches.'''
334
334
335 total = len(revs)
335 total = len(revs)
336 revwidth = max(map(len, revs))
336 revwidth = max(map(len, revs))
337
337
338 def single(node, seqno, fp):
338 def single(node, seqno, fp):
339 parents = [p for p in repo.changelog.parents(node) if p != nullid]
339 parents = [p for p in repo.changelog.parents(node) if p != nullid]
340 if switch_parent:
340 if switch_parent:
341 parents.reverse()
341 parents.reverse()
342 prev = (parents and parents[0]) or nullid
342 prev = (parents and parents[0]) or nullid
343 change = repo.changelog.read(node)
343 change = repo.changelog.read(node)
344
344
345 if not fp:
345 if not fp:
346 fp = cmdutil.make_file(repo, template, node, total=total,
346 fp = cmdutil.make_file(repo, template, node, total=total,
347 seqno=seqno, revwidth=revwidth)
347 seqno=seqno, revwidth=revwidth)
348 if fp not in (sys.stdout, repo.ui):
348 if fp not in (sys.stdout, repo.ui):
349 repo.ui.note("%s\n" % fp.name)
349 repo.ui.note("%s\n" % fp.name)
350
350
351 fp.write("# HG changeset patch\n")
351 fp.write("# HG changeset patch\n")
352 fp.write("# User %s\n" % change[1])
352 fp.write("# User %s\n" % change[1])
353 fp.write("# Date %d %d\n" % change[2])
353 fp.write("# Date %d %d\n" % change[2])
354 fp.write("# Node ID %s\n" % hex(node))
354 fp.write("# Node ID %s\n" % hex(node))
355 fp.write("# Parent %s\n" % hex(prev))
355 fp.write("# Parent %s\n" % hex(prev))
356 if len(parents) > 1:
356 if len(parents) > 1:
357 fp.write("# Parent %s\n" % hex(parents[1]))
357 fp.write("# Parent %s\n" % hex(parents[1]))
358 fp.write(change[4].rstrip())
358 fp.write(change[4].rstrip())
359 fp.write("\n\n")
359 fp.write("\n\n")
360
360
361 diff(repo, prev, node, fp=fp, opts=opts)
361 diff(repo, prev, node, fp=fp, opts=opts)
362 if fp not in (sys.stdout, repo.ui):
362 if fp not in (sys.stdout, repo.ui):
363 fp.close()
363 fp.close()
364
364
365 for seqno, cset in enumerate(revs):
365 for seqno, cset in enumerate(revs):
366 single(cset, seqno, fp)
366 single(cset, seqno, fp)
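The helpers above form a small pipeline: readgitpatch() detects git extensions in a patch, dogitpatch() rewrites copy/rename hunks so a plain patch(1) binary can apply them, patch() drives the external patch command and collects the touched files, and diff()/export() generate patches from history. The sketch below is illustrative only: it assumes it runs in this module's namespace, and the repository path, patch file name and strip level are made up.

# hypothetical driver code (era-appropriate Python 2), not part of this changeset
import sys
from mercurial import ui as uimod, hg

u = uimod.ui()
repo = hg.repository(u, '.')                      # assumes cwd is a repository

# apply an on-disk patch with -p1; returns {path: (operation, git patch info)}
touched = patch(1, 'some-fix.patch', u, cwd=repo.root)

# unified diff of the working directory against its first parent
diff(repo, fp=sys.stdout)

# write the tip changeset as an hg patch file named hg-<short hash>.patch
export(repo, [repo.lookup('tip')], template='hg-%h.patch')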
@@ -1,532 +1,532 b''
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from node import *
10 from node import *
11 demandload(globals(), "cStringIO cgi re sys os time urllib util textwrap")
11 demandload(globals(), "cStringIO cgi re sys os time urllib util textwrap")
12
12
13 esctable = {
13 esctable = {
14 '\\': '\\',
14 '\\': '\\',
15 'r': '\r',
15 'r': '\r',
16 't': '\t',
16 't': '\t',
17 'n': '\n',
17 'n': '\n',
18 'v': '\v',
18 'v': '\v',
19 }
19 }
20
20
21 def parsestring(s, quoted=True):
21 def parsestring(s, quoted=True):
22 '''parse a string using simple c-like syntax.
22 '''parse a string using simple c-like syntax.
23 string must be in quotes if quoted is True.'''
23 string must be in quotes if quoted is True.'''
24 fp = cStringIO.StringIO()
24 fp = cStringIO.StringIO()
25 if quoted:
25 if quoted:
26 first = s[0]
26 first = s[0]
27 if len(s) < 2: raise SyntaxError(_('string too short'))
27 if len(s) < 2: raise SyntaxError(_('string too short'))
28 if first not in "'\"": raise SyntaxError(_('invalid quote'))
28 if first not in "'\"": raise SyntaxError(_('invalid quote'))
29 if s[-1] != first: raise SyntaxError(_('unmatched quotes'))
29 if s[-1] != first: raise SyntaxError(_('unmatched quotes'))
30 s = s[1:-1]
30 s = s[1:-1]
31 escape = False
31 escape = False
32 for c in s:
32 for c in s:
33 if escape:
33 if escape:
34 fp.write(esctable.get(c, c))
34 fp.write(esctable.get(c, c))
35 escape = False
35 escape = False
36 elif c == '\\': escape = True
36 elif c == '\\': escape = True
37 elif quoted and c == first: raise SyntaxError(_('string ends early'))
37 elif quoted and c == first: raise SyntaxError(_('string ends early'))
38 else: fp.write(c)
38 else: fp.write(c)
39 if escape: raise SyntaxError(_('unterminated escape'))
39 if escape: raise SyntaxError(_('unterminated escape'))
40 return fp.getvalue()
40 return fp.getvalue()
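# for illustration only (not part of this changeset): parsestring('"a\\tb"')
# yields 'a', a real tab, 'b'; parsestring('no quotes', quoted=False) returns
# the text unchanged; unmatched quotes or a trailing backslash raise SyntaxError.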
41
41
42 class templater(object):
42 class templater(object):
43 '''template expansion engine.
43 '''template expansion engine.
44
44
45 template expansion works like this. a map file contains key=value
45 template expansion works like this. a map file contains key=value
46 pairs. if value is quoted, it is treated as string. otherwise, it
46 pairs. if value is quoted, it is treated as string. otherwise, it
47 is treated as name of template file.
47 is treated as name of template file.
48
48
49 templater is asked to expand a key in map. it looks up key, and
49 templater is asked to expand a key in map. it looks up key, and
50 looks for strings like this: {foo}. it expands {foo} by looking up
50 looks for strings like this: {foo}. it expands {foo} by looking up
51 foo in map, and substituting it. expansion is recursive: it stops
51 foo in map, and substituting it. expansion is recursive: it stops
52 when there is no more {foo} to replace.
52 when there is no more {foo} to replace.
53
53
54 expansion also allows formatting and filtering.
54 expansion also allows formatting and filtering.
55
55
56 format uses key to expand each item in list. syntax is
56 format uses key to expand each item in list. syntax is
57 {key%format}.
57 {key%format}.
58
58
59 filter uses function to transform value. syntax is
59 filter uses function to transform value. syntax is
60 {key|filter1|filter2|...}.'''
60 {key|filter1|filter2|...}.'''
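# a hypothetical map entry and expansion, for illustration only (not part of
# this changeset), using the common_filters defined later in this file:
#
#   changeset = 'rev {rev}: {desc|firstline|strip}\n'
#
# expanding 'changeset' with rev='0' and desc='fix a bug\nlonger text' yields
# 'rev 0: fix a bug\n'; filters chain left to right, and '{key%format}'
# expands each item of a list-valued key through the 'format' map entry.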
61
61
62 def __init__(self, mapfile, filters={}, defaults={}, cache={}):
62 def __init__(self, mapfile, filters={}, defaults={}, cache={}):
63 '''set up template engine.
63 '''set up template engine.
64 mapfile is name of file to read map definitions from.
64 mapfile is name of file to read map definitions from.
65 filters is dict of functions. each transforms a value into another.
65 filters is dict of functions. each transforms a value into another.
66 defaults is dict of default map definitions.'''
66 defaults is dict of default map definitions.'''
67 self.mapfile = mapfile or 'template'
67 self.mapfile = mapfile or 'template'
68 self.cache = cache.copy()
68 self.cache = cache.copy()
69 self.map = {}
69 self.map = {}
70 self.base = (mapfile and os.path.dirname(mapfile)) or ''
70 self.base = (mapfile and os.path.dirname(mapfile)) or ''
71 self.filters = filters
71 self.filters = filters
72 self.defaults = defaults
72 self.defaults = defaults
73
73
74 if not mapfile:
74 if not mapfile:
75 return
75 return
76 i = 0
76 i = 0
77 for l in file(mapfile):
77 for l in file(mapfile):
78 l = l.strip()
78 l = l.strip()
79 i += 1
79 i += 1
80 if not l or l[0] in '#;': continue
80 if not l or l[0] in '#;': continue
81 m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l)
81 m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l)
82 if m:
82 if m:
83 key, val = m.groups()
83 key, val = m.groups()
84 if val[0] in "'\"":
84 if val[0] in "'\"":
85 try:
85 try:
86 self.cache[key] = parsestring(val)
86 self.cache[key] = parsestring(val)
87 except SyntaxError, inst:
87 except SyntaxError, inst:
88 raise SyntaxError('%s:%s: %s' %
88 raise SyntaxError('%s:%s: %s' %
89 (mapfile, i, inst.args[0]))
89 (mapfile, i, inst.args[0]))
90 else:
90 else:
91 self.map[key] = os.path.join(self.base, val)
91 self.map[key] = os.path.join(self.base, val)
92 else:
92 else:
93 raise SyntaxError(_("%s:%s: parse error") % (mapfile, i))
93 raise SyntaxError(_("%s:%s: parse error") % (mapfile, i))
94
94
95 def __contains__(self, key):
95 def __contains__(self, key):
96 return key in self.cache
96 return key in self.cache
97
97
98 def __call__(self, t, **map):
98 def __call__(self, t, **map):
99 '''perform expansion.
99 '''perform expansion.
100 t is name of map element to expand.
100 t is name of map element to expand.
101 map supplies additional elements to use during expansion.'''
101 map supplies additional elements to use during expansion.'''
102 m = self.defaults.copy()
102 m = self.defaults.copy()
103 m.update(map)
103 m.update(map)
104 try:
104 try:
105 tmpl = self.cache[t]
105 tmpl = self.cache[t]
106 except KeyError:
106 except KeyError:
107 try:
107 try:
108 tmpl = self.cache[t] = file(self.map[t]).read()
108 tmpl = self.cache[t] = file(self.map[t]).read()
109 except IOError, inst:
109 except IOError, inst:
110 raise IOError(inst.args[0], _('template file %s: %s') %
110 raise IOError(inst.args[0], _('template file %s: %s') %
111 (self.map[t], inst.args[1]))
111 (self.map[t], inst.args[1]))
112 return self.template(tmpl, self.filters, **m)
112 return self.template(tmpl, self.filters, **m)
113
113
114 template_re = re.compile(r"[#{]([a-zA-Z_][a-zA-Z0-9_]*)"
114 template_re = re.compile(r"[#{]([a-zA-Z_][a-zA-Z0-9_]*)"
115 r"((%[a-zA-Z_][a-zA-Z0-9_]*)*)"
115 r"((%[a-zA-Z_][a-zA-Z0-9_]*)*)"
116 r"((\|[a-zA-Z_][a-zA-Z0-9_]*)*)[#}]")
116 r"((\|[a-zA-Z_][a-zA-Z0-9_]*)*)[#}]")
117
117
118 def template(self, tmpl, filters={}, **map):
118 def template(self, tmpl, filters={}, **map):
119 lm = map.copy()
119 lm = map.copy()
120 while tmpl:
120 while tmpl:
121 m = self.template_re.search(tmpl)
121 m = self.template_re.search(tmpl)
122 if m:
122 if m:
123 start, end = m.span(0)
123 start, end = m.span(0)
124 s, e = tmpl[start], tmpl[end - 1]
124 s, e = tmpl[start], tmpl[end - 1]
125 key = m.group(1)
125 key = m.group(1)
126 if ((s == '#' and e != '#') or (s == '{' and e != '}')):
126 if ((s == '#' and e != '#') or (s == '{' and e != '}')):
127 raise SyntaxError(_("'%s'/'%s' mismatch expanding '%s'") %
127 raise SyntaxError(_("'%s'/'%s' mismatch expanding '%s'") %
128 (s, e, key))
128 (s, e, key))
129 if start:
129 if start:
130 yield tmpl[:start]
130 yield tmpl[:start]
131 v = map.get(key, "")
131 v = map.get(key, "")
132 v = callable(v) and v(**map) or v
132 v = callable(v) and v(**map) or v
133
133
134 format = m.group(2)
134 format = m.group(2)
135 fl = m.group(4)
135 fl = m.group(4)
136
136
137 if format:
137 if format:
138 q = v.__iter__
138 q = v.__iter__
139 for i in q():
139 for i in q():
140 lm.update(i)
140 lm.update(i)
141 yield self(format[1:], **lm)
141 yield self(format[1:], **lm)
142
142
143 v = ""
143 v = ""
144
144
145 elif fl:
145 elif fl:
146 for f in fl.split("|")[1:]:
146 for f in fl.split("|")[1:]:
147 v = filters[f](v)
147 v = filters[f](v)
148
148
149 yield v
149 yield v
150 tmpl = tmpl[end:]
150 tmpl = tmpl[end:]
151 else:
151 else:
152 yield tmpl
152 yield tmpl
153 break
153 break
154
154
155 agescales = [("second", 1),
155 agescales = [("second", 1),
156 ("minute", 60),
156 ("minute", 60),
157 ("hour", 3600),
157 ("hour", 3600),
158 ("day", 3600 * 24),
158 ("day", 3600 * 24),
159 ("week", 3600 * 24 * 7),
159 ("week", 3600 * 24 * 7),
160 ("month", 3600 * 24 * 30),
160 ("month", 3600 * 24 * 30),
161 ("year", 3600 * 24 * 365)]
161 ("year", 3600 * 24 * 365)]
162
162
163 agescales.reverse()
163 agescales.reverse()
164
164
165 def age(date):
165 def age(date):
166 '''turn a (timestamp, tzoff) tuple into an age string.'''
166 '''turn a (timestamp, tzoff) tuple into an age string.'''
167
167
168 def plural(t, c):
168 def plural(t, c):
169 if c == 1:
169 if c == 1:
170 return t
170 return t
171 return t + "s"
171 return t + "s"
172 def fmt(t, c):
172 def fmt(t, c):
173 return "%d %s" % (c, plural(t, c))
173 return "%d %s" % (c, plural(t, c))
174
174
175 now = time.time()
175 now = time.time()
176 then = date[0]
176 then = date[0]
177 delta = max(1, int(now - then))
177 delta = max(1, int(now - then))
178
178
179 for t, s in agescales:
179 for t, s in agescales:
180 n = delta / s
180 n = delta / s
181 if n >= 2 or s == 1:
181 if n >= 2 or s == 1:
182 return fmt(t, n)
182 return fmt(t, n)
183
183
184 def stringify(thing):
184 def stringify(thing):
185 '''turn nested template iterator into string.'''
185 '''turn nested template iterator into string.'''
186 cs = cStringIO.StringIO()
186 cs = cStringIO.StringIO()
187 def walk(things):
187 def walk(things):
188 for t in things:
188 for t in things:
189 if hasattr(t, '__iter__'):
189 if hasattr(t, '__iter__'):
190 walk(t)
190 walk(t)
191 else:
191 else:
192 cs.write(t)
192 cs.write(t)
193 walk(thing)
193 walk(thing)
194 return cs.getvalue()
194 return cs.getvalue()
195
195
196 para_re = None
196 para_re = None
197 space_re = None
197 space_re = None
198
198
199 def fill(text, width):
199 def fill(text, width):
200 '''fill many paragraphs.'''
200 '''fill many paragraphs.'''
201 global para_re, space_re
201 global para_re, space_re
202 if para_re is None:
202 if para_re is None:
203 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
203 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
204 space_re = re.compile(r' +')
204 space_re = re.compile(r' +')
205
205
206 def findparas():
206 def findparas():
207 start = 0
207 start = 0
208 while True:
208 while True:
209 m = para_re.search(text, start)
209 m = para_re.search(text, start)
210 if not m:
210 if not m:
211 w = len(text)
211 w = len(text)
212 while w > start and text[w-1].isspace(): w -= 1
212 while w > start and text[w-1].isspace(): w -= 1
213 yield text[start:w], text[w:]
213 yield text[start:w], text[w:]
214 break
214 break
215 yield text[start:m.start(0)], m.group(1)
215 yield text[start:m.start(0)], m.group(1)
216 start = m.end(1)
216 start = m.end(1)
217
217
218 fp = cStringIO.StringIO()
218 fp = cStringIO.StringIO()
219 for para, rest in findparas():
219 for para, rest in findparas():
220 fp.write(space_re.sub(' ', textwrap.fill(para, width)))
220 fp.write(space_re.sub(' ', textwrap.fill(para, width)))
221 fp.write(rest)
221 fp.write(rest)
222 return fp.getvalue()
222 return fp.getvalue()
223
223
224 def firstline(text):
224 def firstline(text):
225 '''return the first line of text'''
225 '''return the first line of text'''
226 try:
226 try:
227 return text.splitlines(1)[0].rstrip('\r\n')
227 return text.splitlines(1)[0].rstrip('\r\n')
228 except IndexError:
228 except IndexError:
229 return ''
229 return ''
230
230
231 def isodate(date):
231 def isodate(date):
232 '''turn a (timestamp, tzoff) tuple into an iso 8601 date and time.'''
232 '''turn a (timestamp, tzoff) tuple into an iso 8601 date and time.'''
233 return util.datestr(date, format='%Y-%m-%d %H:%M')
233 return util.datestr(date, format='%Y-%m-%d %H:%M')
234
234
235 def hgdate(date):
235 def hgdate(date):
236 '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.'''
236 '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.'''
237 return "%d %d" % date
237 return "%d %d" % date
238
238
239 def nl2br(text):
239 def nl2br(text):
240 '''replace raw newlines with xhtml line breaks.'''
240 '''replace raw newlines with xhtml line breaks.'''
241 return text.replace('\n', '<br/>\n')
241 return text.replace('\n', '<br/>\n')
242
242
243 def obfuscate(text):
243 def obfuscate(text):
244 text = unicode(text, 'utf-8', 'replace')
244 text = unicode(text, 'utf-8', 'replace')
245 return ''.join(['&#%d;' % ord(c) for c in text])
245 return ''.join(['&#%d;' % ord(c) for c in text])
246
246
247 def domain(author):
247 def domain(author):
248 '''get domain of author, or empty string if none.'''
248 '''get domain of author, or empty string if none.'''
249 f = author.find('@')
249 f = author.find('@')
250 if f == -1: return ''
250 if f == -1: return ''
251 author = author[f+1:]
251 author = author[f+1:]
252 f = author.find('>')
252 f = author.find('>')
253 if f >= 0: author = author[:f]
253 if f >= 0: author = author[:f]
254 return author
254 return author
255
255
256 def email(author):
256 def email(author):
257 '''get email of author.'''
257 '''get email of author.'''
258 r = author.find('>')
258 r = author.find('>')
259 if r == -1: r = None
259 if r == -1: r = None
260 return author[author.find('<')+1:r]
260 return author[author.find('<')+1:r]
261
261
262 def person(author):
262 def person(author):
263 '''get name of author, or else username.'''
263 '''get name of author, or else username.'''
264 f = author.find('<')
264 f = author.find('<')
265 if f == -1: return util.shortuser(author)
265 if f == -1: return util.shortuser(author)
266 return author[:f].rstrip()
266 return author[:f].rstrip()
267
267
268 def shortdate(date):
268 def shortdate(date):
269 '''turn (timestamp, tzoff) tuple into iso 8601 date.'''
269 '''turn (timestamp, tzoff) tuple into iso 8601 date.'''
270 return util.datestr(date, format='%Y-%m-%d', timezone=False)
270 return util.datestr(date, format='%Y-%m-%d', timezone=False)
271
271
272 def indent(text, prefix):
272 def indent(text, prefix):
273 '''indent each non-empty line of text after first with prefix.'''
273 '''indent each non-empty line of text after first with prefix.'''
274 fp = cStringIO.StringIO()
274 fp = cStringIO.StringIO()
275 lines = text.splitlines()
275 lines = text.splitlines()
276 num_lines = len(lines)
276 num_lines = len(lines)
277 for i in xrange(num_lines):
277 for i in xrange(num_lines):
278 l = lines[i]
278 l = lines[i]
279 if i and l.strip(): fp.write(prefix)
279 if i and l.strip(): fp.write(prefix)
280 fp.write(l)
280 fp.write(l)
281 if i < num_lines - 1 or text.endswith('\n'):
281 if i < num_lines - 1 or text.endswith('\n'):
282 fp.write('\n')
282 fp.write('\n')
283 return fp.getvalue()
283 return fp.getvalue()
284
284
285 common_filters = {
285 common_filters = {
286 "addbreaks": nl2br,
286 "addbreaks": nl2br,
287 "basename": os.path.basename,
287 "basename": os.path.basename,
288 "age": age,
288 "age": age,
289 "date": lambda x: util.datestr(x),
289 "date": lambda x: util.datestr(x),
290 "domain": domain,
290 "domain": domain,
291 "email": email,
291 "email": email,
292 "escape": lambda x: cgi.escape(x, True),
292 "escape": lambda x: cgi.escape(x, True),
293 "fill68": lambda x: fill(x, width=68),
293 "fill68": lambda x: fill(x, width=68),
294 "fill76": lambda x: fill(x, width=76),
294 "fill76": lambda x: fill(x, width=76),
295 "firstline": firstline,
295 "firstline": firstline,
296 "tabindent": lambda x: indent(x, '\t'),
296 "tabindent": lambda x: indent(x, '\t'),
297 "hgdate": hgdate,
297 "hgdate": hgdate,
298 "isodate": isodate,
298 "isodate": isodate,
299 "obfuscate": obfuscate,
299 "obfuscate": obfuscate,
300 "permissions": lambda x: x and "-rwxr-xr-x" or "-rw-r--r--",
300 "permissions": lambda x: x and "-rwxr-xr-x" or "-rw-r--r--",
301 "person": person,
301 "person": person,
302 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
302 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
303 "short": lambda x: x[:12],
303 "short": lambda x: x[:12],
304 "shortdate": shortdate,
304 "shortdate": shortdate,
305 "stringify": stringify,
305 "stringify": stringify,
306 "strip": lambda x: x.strip(),
306 "strip": lambda x: x.strip(),
307 "urlescape": lambda x: urllib.quote(x),
307 "urlescape": lambda x: urllib.quote(x),
308 "user": lambda x: util.shortuser(x),
308 "user": lambda x: util.shortuser(x),
309 }
309 }
310
310
311 def templatepath(name=None):
311 def templatepath(name=None):
312 '''return location of template file or directory (if no name).
312 '''return location of template file or directory (if no name).
313 returns None if not found.'''
313 returns None if not found.'''
314
314
315 # executable version (py2exe) doesn't support __file__
315 # executable version (py2exe) doesn't support __file__
316 if hasattr(sys, 'frozen'):
316 if hasattr(sys, 'frozen'):
317 module = sys.executable
317 module = sys.executable
318 else:
318 else:
319 module = __file__
319 module = __file__
320 for f in 'templates', '../templates':
320 for f in 'templates', '../templates':
321 fl = f.split('/')
321 fl = f.split('/')
322 if name: fl.append(name)
322 if name: fl.append(name)
323 p = os.path.join(os.path.dirname(module), *fl)
323 p = os.path.join(os.path.dirname(module), *fl)
324 if (name and os.path.exists(p)) or os.path.isdir(p):
324 if (name and os.path.exists(p)) or os.path.isdir(p):
325 return os.path.normpath(p)
325 return os.path.normpath(p)
326
326
327 class changeset_templater(object):
327 class changeset_templater(object):
328 '''format changeset information.'''
328 '''format changeset information.'''
329
329
330 def __init__(self, ui, repo, mapfile, dest=None):
330 def __init__(self, ui, repo, mapfile, dest=None):
331 self.t = templater(mapfile, common_filters,
331 self.t = templater(mapfile, common_filters,
332 cache={'parent': '{rev}:{node|short} ',
332 cache={'parent': '{rev}:{node|short} ',
333 'manifest': '{rev}:{node|short}'})
333 'manifest': '{rev}:{node|short}'})
334 self.ui = ui
334 self.ui = ui
335 self.dest = dest
335 self.dest = dest
336 self.repo = repo
336 self.repo = repo
337
337
338 def use_template(self, t):
338 def use_template(self, t):
339 '''set template string to use'''
339 '''set template string to use'''
340 self.t.cache['changeset'] = t
340 self.t.cache['changeset'] = t
341
341
342 def write(self, thing, header=False):
342 def write(self, thing, header=False):
343 '''write expanded template.
343 '''write expanded template.
344 uses in-order recursive traversal of iterators.'''
344 uses in-order recursive traversal of iterators.'''
345 dest = self.dest or self.ui
345 dest = self.dest or self.ui
346 for t in thing:
346 for t in thing:
347 if hasattr(t, '__iter__'):
347 if hasattr(t, '__iter__'):
348 self.write(t, header=header)
348 self.write(t, header=header)
349 elif header:
349 elif header:
350 dest.write_header(t)
350 dest.write_header(t)
351 else:
351 else:
352 dest.write(t)
352 dest.write(t)
353
353
354 def write_header(self, thing):
354 def write_header(self, thing):
355 self.write(thing, header=True)
355 self.write(thing, header=True)
356
356
357 def show(self, rev=0, changenode=None, brinfo=None, changes=None,
357 def show(self, rev=0, changenode=None, brinfo=None, changes=None,
358 **props):
358 **props):
359 '''show a single changeset or file revision'''
359 '''show a single changeset or file revision'''
360 log = self.repo.changelog
360 log = self.repo.changelog
361 if changenode is None:
361 if changenode is None:
362 changenode = log.node(rev)
362 changenode = log.node(rev)
363 elif not rev:
363 elif not rev:
364 rev = log.rev(changenode)
364 rev = log.rev(changenode)
365 if changes is None:
365 if changes is None:
366 changes = log.read(changenode)
366 changes = log.read(changenode)
367
367
368 def showlist(name, values, plural=None, **args):
368 def showlist(name, values, plural=None, **args):
369 '''expand set of values.
369 '''expand set of values.
370 name is name of key in template map.
370 name is name of key in template map.
371 values is list of strings or dicts.
371 values is list of strings or dicts.
372 plural is plural of name, if not simply name + 's'.
372 plural is plural of name, if not simply name + 's'.
373
373
374 expansion works like this, given name 'foo'.
374 expansion works like this, given name 'foo'.
375
375
376 if values is empty, expand 'no_foos'.
376 if values is empty, expand 'no_foos'.
377
377
378 if 'foo' not in template map, return values as a string,
378 if 'foo' not in template map, return values as a string,
379 joined by space.
379 joined by space.
380
380
381 expand 'start_foos'.
381 expand 'start_foos'.
382
382
383 for each value, expand 'foo'. if 'last_foo' in template
383 for each value, expand 'foo'. if 'last_foo' in template
384 map, expand it instead of 'foo' for last key.
384 map, expand it instead of 'foo' for last key.
385
385
386 expand 'end_foos'.
386 expand 'end_foos'.
387 '''
387 '''
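# for illustration only (hypothetical map entries): with name 'tag' and
# values ['stable', 'v1.0'], a map containing tag = 'tag: {tag}\n' expands
# each value through 'tag'; 'start_tags'/'end_tags' wrap the list, 'last_tag'
# (if present) replaces 'tag' for the final value, and with no 'tag' entry
# at all the values are simply joined with spaces: 'stable v1.0'.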
388 if plural: names = plural
388 if plural: names = plural
389 else: names = name + 's'
389 else: names = name + 's'
390 if not values:
390 if not values:
391 noname = 'no_' + names
391 noname = 'no_' + names
392 if noname in self.t:
392 if noname in self.t:
393 yield self.t(noname, **args)
393 yield self.t(noname, **args)
394 return
394 return
395 if name not in self.t:
395 if name not in self.t:
396 if isinstance(values[0], str):
396 if isinstance(values[0], str):
397 yield ' '.join(values)
397 yield ' '.join(values)
398 else:
398 else:
399 for v in values:
399 for v in values:
400 yield dict(v, **args)
400 yield dict(v, **args)
401 return
401 return
402 startname = 'start_' + names
402 startname = 'start_' + names
403 if startname in self.t:
403 if startname in self.t:
404 yield self.t(startname, **args)
404 yield self.t(startname, **args)
405 vargs = args.copy()
405 vargs = args.copy()
406 def one(v, tag=name):
406 def one(v, tag=name):
407 try:
407 try:
408 vargs.update(v)
408 vargs.update(v)
409 except (AttributeError, ValueError):
409 except (AttributeError, ValueError):
410 try:
410 try:
411 for a, b in v:
411 for a, b in v:
412 vargs[a] = b
412 vargs[a] = b
413 except ValueError:
413 except ValueError:
414 vargs[name] = v
414 vargs[name] = v
415 return self.t(tag, **vargs)
415 return self.t(tag, **vargs)
416 lastname = 'last_' + name
416 lastname = 'last_' + name
417 if lastname in self.t:
417 if lastname in self.t:
418 last = values.pop()
418 last = values.pop()
419 else:
419 else:
420 last = None
420 last = None
421 for v in values:
421 for v in values:
422 yield one(v)
422 yield one(v)
423 if last is not None:
423 if last is not None:
424 yield one(last, tag=lastname)
424 yield one(last, tag=lastname)
425 endname = 'end_' + names
425 endname = 'end_' + names
426 if endname in self.t:
426 if endname in self.t:
427 yield self.t(endname, **args)
427 yield self.t(endname, **args)
428
428
429 if brinfo:
429 if brinfo:
430 def showbranches(**args):
430 def showbranches(**args):
431 if changenode in brinfo:
431 if changenode in brinfo:
432 for x in showlist('branch', brinfo[changenode],
432 for x in showlist('branch', brinfo[changenode],
433 plural='branches', **args):
433 plural='branches', **args):
434 yield x
434 yield x
435 else:
435 else:
436 showbranches = ''
436 showbranches = ''
437
437
438 if self.ui.debugflag:
438 if self.ui.debugflag:
439 def showmanifest(**args):
439 def showmanifest(**args):
440 args = args.copy()
440 args = args.copy()
441 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
441 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
442 node=hex(changes[0])))
442 node=hex(changes[0])))
443 yield self.t('manifest', **args)
443 yield self.t('manifest', **args)
444 else:
444 else:
445 showmanifest = ''
445 showmanifest = ''
446
446
447 def showparents(**args):
447 def showparents(**args):
448 parents = [[('rev', log.rev(p)), ('node', hex(p))]
448 parents = [[('rev', log.rev(p)), ('node', hex(p))]
449 for p in log.parents(changenode)
449 for p in log.parents(changenode)
450 if self.ui.debugflag or p != nullid]
450 if self.ui.debugflag or p != nullid]
451 if (not self.ui.debugflag and len(parents) == 1 and
451 if (not self.ui.debugflag and len(parents) == 1 and
452 parents[0][0][1] == rev - 1):
452 parents[0][0][1] == rev - 1):
453 return
453 return
454 for x in showlist('parent', parents, **args):
454 for x in showlist('parent', parents, **args):
455 yield x
455 yield x
456
456
457 def showtags(**args):
457 def showtags(**args):
458 for x in showlist('tag', self.repo.nodetags(changenode), **args):
458 for x in showlist('tag', self.repo.nodetags(changenode), **args):
459 yield x
459 yield x
460
460
461 if self.ui.debugflag:
461 if self.ui.debugflag:
462 files = self.repo.changes(log.parents(changenode)[0], changenode)
462 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
463 def showfiles(**args):
463 def showfiles(**args):
464 for x in showlist('file', files[0], **args): yield x
464 for x in showlist('file', files[0], **args): yield x
465 def showadds(**args):
465 def showadds(**args):
466 for x in showlist('file_add', files[1], **args): yield x
466 for x in showlist('file_add', files[1], **args): yield x
467 def showdels(**args):
467 def showdels(**args):
468 for x in showlist('file_del', files[2], **args): yield x
468 for x in showlist('file_del', files[2], **args): yield x
469 else:
469 else:
470 def showfiles(**args):
470 def showfiles(**args):
471 for x in showlist('file', changes[3], **args): yield x
471 for x in showlist('file', changes[3], **args): yield x
472 showadds = ''
472 showadds = ''
473 showdels = ''
473 showdels = ''
474
474
475 defprops = {
475 defprops = {
476 'author': changes[1],
476 'author': changes[1],
477 'branches': showbranches,
477 'branches': showbranches,
478 'date': changes[2],
478 'date': changes[2],
479 'desc': changes[4],
479 'desc': changes[4],
480 'file_adds': showadds,
480 'file_adds': showadds,
481 'file_dels': showdels,
481 'file_dels': showdels,
482 'files': showfiles,
482 'files': showfiles,
483 'manifest': showmanifest,
483 'manifest': showmanifest,
484 'node': hex(changenode),
484 'node': hex(changenode),
485 'parents': showparents,
485 'parents': showparents,
486 'rev': rev,
486 'rev': rev,
487 'tags': showtags,
487 'tags': showtags,
488 }
488 }
489 props = props.copy()
489 props = props.copy()
490 props.update(defprops)
490 props.update(defprops)
491
491
492 try:
492 try:
493 if self.ui.debugflag and 'header_debug' in self.t:
493 if self.ui.debugflag and 'header_debug' in self.t:
494 key = 'header_debug'
494 key = 'header_debug'
495 elif self.ui.quiet and 'header_quiet' in self.t:
495 elif self.ui.quiet and 'header_quiet' in self.t:
496 key = 'header_quiet'
496 key = 'header_quiet'
497 elif self.ui.verbose and 'header_verbose' in self.t:
497 elif self.ui.verbose and 'header_verbose' in self.t:
498 key = 'header_verbose'
498 key = 'header_verbose'
499 elif 'header' in self.t:
499 elif 'header' in self.t:
500 key = 'header'
500 key = 'header'
501 else:
501 else:
502 key = ''
502 key = ''
503 if key:
503 if key:
504 self.write_header(self.t(key, **props))
504 self.write_header(self.t(key, **props))
505 if self.ui.debugflag and 'changeset_debug' in self.t:
505 if self.ui.debugflag and 'changeset_debug' in self.t:
506 key = 'changeset_debug'
506 key = 'changeset_debug'
507 elif self.ui.quiet and 'changeset_quiet' in self.t:
507 elif self.ui.quiet and 'changeset_quiet' in self.t:
508 key = 'changeset_quiet'
508 key = 'changeset_quiet'
509 elif self.ui.verbose and 'changeset_verbose' in self.t:
509 elif self.ui.verbose and 'changeset_verbose' in self.t:
510 key = 'changeset_verbose'
510 key = 'changeset_verbose'
511 else:
511 else:
512 key = 'changeset'
512 key = 'changeset'
513 self.write(self.t(key, **props))
513 self.write(self.t(key, **props))
514 except KeyError, inst:
514 except KeyError, inst:
515 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
515 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
516 inst.args[0]))
516 inst.args[0]))
517 except SyntaxError, inst:
517 except SyntaxError, inst:
518 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
518 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
519
519
520 class stringio(object):
520 class stringio(object):
521 '''wrap cStringIO for use by changeset_templater.'''
521 '''wrap cStringIO for use by changeset_templater.'''
522 def __init__(self):
522 def __init__(self):
523 self.fp = cStringIO.StringIO()
523 self.fp = cStringIO.StringIO()
524
524
525 def write(self, *args):
525 def write(self, *args):
526 for a in args:
526 for a in args:
527 self.fp.write(a)
527 self.fp.write(a)
528
528
529 write_header = write
529 write_header = write
530
530
531 def __getattr__(self, key):
531 def __getattr__(self, key):
532 return getattr(self.fp, key)
532 return getattr(self.fp, key)
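Taken together, templater, common_filters and stringify are enough to expand templates outside of changeset_templater as well. The sketch below is illustrative only; the map entry and keyword values are made up, and it assumes the era's Python 2 interpreter.

# hypothetical standalone use of the engine above (Python 2), not part of this changeset
import sys
from mercurial import templater

t = templater.templater(None, templater.common_filters,
                        cache={'changeset': 'rev {rev}: {desc|firstline}\n'})
sys.stdout.write(templater.stringify(
    t('changeset', rev='42', desc='add a new feature\n\nlonger body')))
# prints: rev 42: add a new feature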