remove localrepository.changes....
Vadim Gelfer
r2875:3d6efcbb default
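The recurring change in the hunks below is that the removed localrepository.changes() call is replaced by localrepository.status(), with a slice keeping only the leading (modified, added, removed, deleted, unknown) fields the old call returned. A minimal compatibility sketch, assuming the 0.9-era API shown in these diffs (the helper name is illustrative, not part of Mercurial):

from mercurial import util

def changes_compat(repo, node1=None, node2=None, files=None, match=util.always):
    # illustrative shim: emulate the removed repo.changes() on top of
    # repo.status(), which returns a longer tuple than the old call did
    return repo.status(node1, node2, files, match=match)[:5]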
@@ -1,153 +1,153
1 1 # extdiff.py - external diff program support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # allow to use external programs to compare revisions, or revision
9 9 # with working dir. program is called with two arguments: paths to
10 10 # directories containing snapshots of files to compare.
11 11 #
12 12 # to enable:
13 13 #
14 14 # [extensions]
15 15 # hgext.extdiff =
16 16 #
17 17 # also allows to configure new diff commands, so you do not need to
18 18 # type "hg extdiff -p kdiff3" always.
19 19 #
20 20 # [extdiff]
21 21 # # add new command called vdiff, runs kdiff3
22 22 # cmd.vdiff = kdiff3
23 23 # # add new command called meld, runs meld (no need to name twice)
24 24 # cmd.meld =
25 25 # # add new command called vimdiff, runs gvimdiff with DirDiff plugin
26 26 # #(see http://www.vim.org/scripts/script.php?script_id=102)
27 27 # cmd.vimdiff = LC_ALL=C gvim -f '+bdel 1 2' '+ execute "DirDiff ".argv(0)." ".argv(1)'
28 28 #
29 29 # you can use -I/-X and list of file or directory names like normal
30 30 # "hg diff" command. extdiff makes snapshots of only needed files, so
31 31 # compare program will be fast.
32 32
33 33 from mercurial.demandload import demandload
34 34 from mercurial.i18n import gettext as _
35 35 from mercurial.node import *
36 36 demandload(globals(), 'mercurial:commands,util os shutil tempfile')
37 37
38 38 def dodiff(ui, repo, diffcmd, pats, opts):
39 39 def snapshot_node(files, node):
40 40 '''snapshot files as of some revision'''
41 41 changes = repo.changelog.read(node)
42 42 mf = repo.manifest.read(changes[0])
43 43 dirname = '%s.%s' % (os.path.basename(repo.root), short(node))
44 44 base = os.path.join(tmproot, dirname)
45 45 os.mkdir(base)
46 46 if not ui.quiet:
47 47 ui.write_err(_('making snapshot of %d files from rev %s\n') %
48 48 (len(files), short(node)))
49 49 for fn in files:
50 50 wfn = util.pconvert(fn)
51 51 ui.note(' %s\n' % wfn)
52 52 dest = os.path.join(base, wfn)
53 53 destdir = os.path.dirname(dest)
54 54 if not os.path.isdir(destdir):
55 55 os.makedirs(destdir)
56 56 repo.wwrite(wfn, repo.file(fn).read(mf[fn]), open(dest, 'w'))
57 57 return dirname
58 58
59 59 def snapshot_wdir(files):
60 60 '''snapshot files from working directory.
61 61 if not using snapshot, -I/-X does not work and recursive diff
62 62 in tools like kdiff3 and meld displays too many files.'''
63 63 dirname = os.path.basename(repo.root)
64 64 base = os.path.join(tmproot, dirname)
65 65 os.mkdir(base)
66 66 if not ui.quiet:
67 67 ui.write_err(_('making snapshot of %d files from working dir\n') %
68 68 (len(files)))
69 69 for fn in files:
70 70 wfn = util.pconvert(fn)
71 71 ui.note(' %s\n' % wfn)
72 72 dest = os.path.join(base, wfn)
73 73 destdir = os.path.dirname(dest)
74 74 if not os.path.isdir(destdir):
75 75 os.makedirs(destdir)
76 76 fp = open(dest, 'w')
77 77 for chunk in util.filechunkiter(repo.wopener(wfn)):
78 78 fp.write(chunk)
79 79 return dirname
80 80
81 81 node1, node2 = commands.revpair(ui, repo, opts['rev'])
82 82 files, matchfn, anypats = commands.matchpats(repo, pats, opts)
83 modified, added, removed, deleted, unknown = repo.changes(
84 node1, node2, files, match=matchfn)
83 modified, added, removed, deleted, unknown = repo.status(
84 node1, node2, files, match=matchfn)[:5]
85 85 if not (modified or added or removed):
86 86 return 0
87 87
88 88 tmproot = tempfile.mkdtemp(prefix='extdiff.')
89 89 try:
90 90 dir1 = snapshot_node(modified + removed, node1)
91 91 if node2:
92 92 dir2 = snapshot_node(modified + added, node2)
93 93 else:
94 94 dir2 = snapshot_wdir(modified + added)
95 95 util.system('%s %s "%s" "%s"' %
96 96 (diffcmd, ' '.join(opts['option']), dir1, dir2),
97 97 cwd=tmproot)
98 98 return 1
99 99 finally:
100 100 ui.note(_('cleaning up temp directory\n'))
101 101 shutil.rmtree(tmproot)
102 102
103 103 def extdiff(ui, repo, *pats, **opts):
104 104 '''use external program to diff repository (or selected files)
105 105
106 106 Show differences between revisions for the specified files, using
107 107 an external program. The default program used is "diff -Npru".
108 108 To select a different program, use the -p option. The program
109 109 will be passed the names of two directories to compare. To pass
110 110 additional options to the program, use the -o option. These will
111 111 be passed before the names of the directories to compare.
112 112
113 113 When two revision arguments are given, then changes are
114 114 shown between those revisions. If only one revision is
115 115 specified then that revision is compared to the working
116 116 directory, and, when no revisions are specified, the
117 117 working directory files are compared to its parent.'''
118 118 return dodiff(ui, repo, opts['program'] or 'diff -Npru', pats, opts)
119 119
120 120 cmdtable = {
121 121 "extdiff":
122 122 (extdiff,
123 123 [('p', 'program', '', _('comparison program to run')),
124 124 ('o', 'option', [], _('pass option to comparison program')),
125 125 ('r', 'rev', [], _('revision')),
126 126 ('I', 'include', [], _('include names matching the given patterns')),
127 127 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
128 128 _('hg extdiff [OPT]... [FILE]...')),
129 129 }
130 130
131 131 def uisetup(ui):
132 132 for cmd, path in ui.configitems('extdiff'):
133 133 if not cmd.startswith('cmd.'): continue
134 134 cmd = cmd[4:]
135 135 if not path: path = cmd
136 136 def save(cmd, path):
137 137 '''use closure to save diff command to use'''
138 138 def mydiff(ui, repo, *pats, **opts):
139 139 return dodiff(ui, repo, path, pats, opts)
140 140 mydiff.__doc__ = '''use %s to diff repository (or selected files)
141 141
142 142 Show differences between revisions for the specified
143 143 files, using the %s program.
144 144
145 145 When two revision arguments are given, then changes are
146 146 shown between those revisions. If only one revision is
147 147 specified then that revision is compared to the working
148 148 directory, and, when no revisions are specified, the
149 149 working directory files are compared to its parent.''' % (cmd, cmd)
150 150 return mydiff
151 151 cmdtable[cmd] = (save(cmd, path),
152 152 cmdtable['extdiff'][1][1:],
153 153 _('hg %s [OPT]... [FILE]...') % cmd)
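A sketch of what uisetup() above produces for the header-comment example configuration, assuming cmd.vdiff = kdiff3 under [extdiff] (the function below is only the save() closure written out for illustration, not code from this changeset):

def vdiff(ui, repo, *pats, **opts):
    # snapshot the requested revisions and run "kdiff3 <dir1> <dir2>"
    return dodiff(ui, repo, 'kdiff3', pats, opts)

The generated command also inherits extdiff's option table minus -p, so hg vdiff -r REV1 -r REV2 takes the same options as hg extdiff.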
@@ -1,269 +1,269
1 1 # GnuPG signing extension for Mercurial
2 2 #
3 3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, tempfile, binascii
9 9 from mercurial import util
10 10 from mercurial import node as hgnode
11 11 from mercurial.i18n import gettext as _
12 12
13 13 class gpg:
14 14 def __init__(self, path, key=None):
15 15 self.path = path
16 16 self.key = (key and " --local-user \"%s\"" % key) or ""
17 17
18 18 def sign(self, data):
19 19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 20 return util.filter(data, gpgcmd)
21 21
22 22 def verify(self, data, sig):
23 23 """ returns of the good and bad signatures"""
24 24 sigfile = datafile = None
25 25 try:
26 26 # create temporary files
27 27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 28 fp = os.fdopen(fd, 'wb')
29 29 fp.write(sig)
30 30 fp.close()
31 31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 32 fp = os.fdopen(fd, 'wb')
33 33 fp.write(data)
34 34 fp.close()
35 35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 37 ret = util.filter("", gpgcmd)
38 38 finally:
39 39 for f in (sigfile, datafile):
40 40 try:
41 41 if f: os.unlink(f)
42 42 except: pass
43 43 keys = []
44 44 key, fingerprint = None, None
45 45 err = ""
46 46 for l in ret.splitlines():
47 47 # see DETAILS in the gnupg documentation
48 48 # filter the logger output
49 49 if not l.startswith("[GNUPG:]"):
50 50 continue
51 51 l = l[9:]
52 52 if l.startswith("ERRSIG"):
53 53 err = _("error while verifying signature")
54 54 break
55 55 elif l.startswith("VALIDSIG"):
56 56 # fingerprint of the primary key
57 57 fingerprint = l.split()[10]
58 58 elif (l.startswith("GOODSIG") or
59 59 l.startswith("EXPSIG") or
60 60 l.startswith("EXPKEYSIG") or
61 61 l.startswith("BADSIG")):
62 62 if key is not None:
63 63 keys.append(key + [fingerprint])
64 64 key = l.split(" ", 2)
65 65 fingerprint = None
66 66 if err:
67 67 return err, []
68 68 if key is not None:
69 69 keys.append(key + [fingerprint])
70 70 return err, keys
71 71
72 72 def newgpg(ui, **opts):
73 73 """create a new gpg instance"""
74 74 gpgpath = ui.config("gpg", "cmd", "gpg")
75 75 gpgkey = opts.get('key')
76 76 if not gpgkey:
77 77 gpgkey = ui.config("gpg", "key", None)
78 78 return gpg(gpgpath, gpgkey)
79 79
80 80 def sigwalk(repo):
81 81 """
82 82 walk over every sigs, yields a couple
83 83 ((node, version, sig), (filename, linenumber))
84 84 """
85 85 def parsefile(fileiter, context):
86 86 ln = 1
87 87 for l in fileiter:
88 88 if not l:
89 89 continue
90 90 yield (l.split(" ", 2), (context, ln))
91 91 ln +=1
92 92
93 93 fl = repo.file(".hgsigs")
94 94 h = fl.heads()
95 95 h.reverse()
96 96 # read the heads
97 97 for r in h:
98 98 fn = ".hgsigs|%s" % hgnode.short(r)
99 99 for item in parsefile(fl.read(r).splitlines(), fn):
100 100 yield item
101 101 try:
102 102 # read local signatures
103 103 fn = "localsigs"
104 104 for item in parsefile(repo.opener(fn), fn):
105 105 yield item
106 106 except IOError:
107 107 pass
108 108
109 109 def getkeys(ui, repo, mygpg, sigdata, context):
110 110 """get the keys who signed a data"""
111 111 fn, ln = context
112 112 node, version, sig = sigdata
113 113 prefix = "%s:%d" % (fn, ln)
114 114 node = hgnode.bin(node)
115 115
116 116 data = node2txt(repo, node, version)
117 117 sig = binascii.a2b_base64(sig)
118 118 err, keys = mygpg.verify(data, sig)
119 119 if err:
120 120 ui.warn("%s:%d %s\n" % (fn, ln , err))
121 121 return None
122 122
123 123 validkeys = []
124 124 # warn for expired key and/or sigs
125 125 for key in keys:
126 126 if key[0] == "BADSIG":
127 127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
128 128 continue
129 129 if key[0] == "EXPSIG":
130 130 ui.write(_("%s Note: Signature has expired"
131 131 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 132 elif key[0] == "EXPKEYSIG":
133 133 ui.write(_("%s Note: This key has expired"
134 134 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 135 validkeys.append((key[1], key[2], key[3]))
136 136 return validkeys
137 137
138 138 def sigs(ui, repo):
139 139 """list signed changesets"""
140 140 mygpg = newgpg(ui)
141 141 revs = {}
142 142
143 143 for data, context in sigwalk(repo):
144 144 node, version, sig = data
145 145 fn, ln = context
146 146 try:
147 147 n = repo.lookup(node)
148 148 except KeyError:
149 149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
150 150 continue
151 151 r = repo.changelog.rev(n)
152 152 keys = getkeys(ui, repo, mygpg, data, context)
153 153 if not keys:
154 154 continue
155 155 revs.setdefault(r, [])
156 156 revs[r].extend(keys)
157 157 nodes = list(revs)
158 158 nodes.reverse()
159 159 for rev in nodes:
160 160 for k in revs[rev]:
161 161 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
162 162 ui.write("%-30s %s\n" % (keystr(ui, k), r))
163 163
164 164 def check(ui, repo, rev):
165 165 """verify all the signatures there may be for a particular revision"""
166 166 mygpg = newgpg(ui)
167 167 rev = repo.lookup(rev)
168 168 hexrev = hgnode.hex(rev)
169 169 keys = []
170 170
171 171 for data, context in sigwalk(repo):
172 172 node, version, sig = data
173 173 if node == hexrev:
174 174 k = getkeys(ui, repo, mygpg, data, context)
175 175 if k:
176 176 keys.extend(k)
177 177
178 178 if not keys:
179 179 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
180 180 return
181 181
182 182 # print summary
183 183 ui.write("%s is signed by:\n" % hgnode.short(rev))
184 184 for key in keys:
185 185 ui.write(" %s\n" % keystr(ui, key))
186 186
187 187 def keystr(ui, key):
188 188 """associate a string to a key (username, comment)"""
189 189 keyid, user, fingerprint = key
190 190 comment = ui.config("gpg", fingerprint, None)
191 191 if comment:
192 192 return "%s (%s)" % (user, comment)
193 193 else:
194 194 return user
195 195
196 196 def sign(ui, repo, *revs, **opts):
197 197 """add a signature for the current tip or a given revision"""
198 198 mygpg = newgpg(ui, **opts)
199 199 sigver = "0"
200 200 sigmessage = ""
201 201 if revs:
202 202 nodes = [repo.lookup(n) for n in revs]
203 203 else:
204 204 nodes = [repo.changelog.tip()]
205 205
206 206 for n in nodes:
207 207 hexnode = hgnode.hex(n)
208 208 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
209 209 hgnode.short(n)))
210 210 # build data
211 211 data = node2txt(repo, n, sigver)
212 212 sig = mygpg.sign(data)
213 213 if not sig:
214 214 raise util.Abort(_("Error while signing"))
215 215 sig = binascii.b2a_base64(sig)
216 216 sig = sig.replace("\n", "")
217 217 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
218 218
219 219 # write it
220 220 if opts['local']:
221 221 repo.opener("localsigs", "ab").write(sigmessage)
222 222 return
223 223
224 for x in repo.changes():
224 for x in repo.status()[:5]:
225 225 if ".hgsigs" in x and not opts["force"]:
226 226 raise util.Abort(_("working copy of .hgsigs is changed "
227 227 "(please commit .hgsigs manually "
228 228 "or use --force)"))
229 229
230 230 repo.wfile(".hgsigs", "ab").write(sigmessage)
231 231
232 232 if repo.dirstate.state(".hgsigs") == '?':
233 233 repo.add([".hgsigs"])
234 234
235 235 if opts["no_commit"]:
236 236 return
237 237
238 238 message = opts['message']
239 239 if not message:
240 240 message = "\n".join([_("Added signature for changeset %s")
241 241 % hgnode.hex(n)
242 242 for n in nodes])
243 243 try:
244 244 repo.commit([".hgsigs"], message, opts['user'], opts['date'])
245 245 except ValueError, inst:
246 246 raise util.Abort(str(inst))
247 247
248 248 def node2txt(repo, node, ver):
249 249 """map a manifest into some text"""
250 250 if ver == "0":
251 251 return "%s\n" % hgnode.hex(node)
252 252 else:
253 253 raise util.Abort(_("unknown signature version"))
254 254
255 255 cmdtable = {
256 256 "sign":
257 257 (sign,
258 258 [('l', 'local', None, _("make the signature local")),
259 259 ('f', 'force', None, _("sign even if the sigfile is modified")),
260 260 ('', 'no-commit', None, _("do not commit the sigfile after signing")),
261 261 ('m', 'message', "", _("commit message")),
262 262 ('d', 'date', "", _("date code")),
263 263 ('u', 'user', "", _("user")),
264 264 ('k', 'key', "", _("the key id to sign with"))],
265 265 _("hg sign [OPTION]... [REVISION]...")),
266 266 "sigcheck": (check, [], _('hg sigcheck REVISION')),
267 267 "sigs": (sigs, [], _('hg sigs')),
268 268 }
269 269
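For orientation, the record format that ties sign(), sigwalk() and getkeys() together: each line appended to .hgsigs or localsigs is "<40-hex changeset id> <signature version> <base64 detached signature>", read back with a plain split. A small sketch with made-up values:

import binascii

line = "0123456789abcdef0123456789abcdef01234567 0 iQCVAwUA"  # made-up values
node_hex, version, sig_b64 = line.split(" ", 2)  # same split sigwalk() uses
sig = binascii.a2b_base64(sig_b64)               # getkeys() rebuilds the raw signature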
@@ -1,299 +1,299
1 1 # bisect extension for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
4 4 # Inspired by git bisect, extension skeleton taken from mq.py.
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from mercurial.i18n import gettext as _
10 10 from mercurial.demandload import demandload
11 11 demandload(globals(), "os sys sets mercurial:hg,util,commands")
12 12
13 13 versionstr = "0.0.3"
14 14
15 15 def lookup_rev(ui, repo, rev=None):
16 16 """returns rev or the checked-out revision if rev is None"""
17 17 if not rev is None:
18 18 return repo.lookup(rev)
19 19 parents = [p for p in repo.dirstate.parents() if p != hg.nullid]
20 20 if len(parents) != 1:
21 21 raise util.Abort(_("unexpected number of parents, "
22 22 "please commit or revert"))
23 23 return parents.pop()
24 24
25 25 def check_clean(ui, repo):
26 modified, added, removed, deleted, unknown = repo.changes()
27 if modified or added or removed:
28 ui.warn("Repository is not clean, please commit or revert\n")
29 sys.exit(1)
26 modified, added, removed, deleted, unknown = repo.status()[:5]
27 if modified or added or removed:
28 ui.warn("Repository is not clean, please commit or revert\n")
29 sys.exit(1)
30 30
31 31 class bisect(object):
32 32 """dichotomic search in the DAG of changesets"""
33 33 def __init__(self, ui, repo):
34 34 self.repo = repo
35 35 self.path = repo.join("bisect")
36 36 self.opener = util.opener(self.path)
37 37 self.ui = ui
38 38 self.goodrevs = []
39 39 self.badrev = None
40 40 self.good_dirty = 0
41 41 self.bad_dirty = 0
42 42 self.good_path = "good"
43 43 self.bad_path = "bad"
44 44
45 45 if os.path.exists(os.path.join(self.path, self.good_path)):
46 46 self.goodrevs = self.opener(self.good_path).read().splitlines()
47 47 self.goodrevs = [hg.bin(x) for x in self.goodrevs]
48 48 if os.path.exists(os.path.join(self.path, self.bad_path)):
49 49 r = self.opener(self.bad_path).read().splitlines()
50 50 if r:
51 51 self.badrev = hg.bin(r.pop(0))
52 52
53 53 def write(self):
54 54 if not os.path.isdir(self.path):
55 55 return
56 56 f = self.opener(self.good_path, "w")
57 57 f.write("\n".join([hg.hex(r) for r in self.goodrevs]))
58 58 if len(self.goodrevs) > 0:
59 59 f.write("\n")
60 60 f = self.opener(self.bad_path, "w")
61 61 if self.badrev:
62 62 f.write(hg.hex(self.badrev) + "\n")
63 63
64 64 def init(self):
65 65 """start a new bisection"""
66 66 if os.path.isdir(self.path):
67 67 raise util.Abort(_("bisect directory already exists\n"))
68 68 os.mkdir(self.path)
69 69 check_clean(self.ui, self.repo)
70 70 return 0
71 71
72 72 def reset(self):
73 73 """finish a bisection"""
74 74 if os.path.isdir(self.path):
75 75 sl = [os.path.join(self.path, p)
76 76 for p in [self.bad_path, self.good_path]]
77 77 for s in sl:
78 78 if os.path.exists(s):
79 79 os.unlink(s)
80 80 os.rmdir(self.path)
81 81 # Not sure about this
82 82 #self.ui.write("Going back to tip\n")
83 83 #self.repo.update(self.repo.changelog.tip())
84 84 return 1
85 85
86 86 def num_ancestors(self, head=None, stop=None):
87 87 """
88 88 returns a dict with the mapping:
89 89 node -> number of ancestors (self included)
90 90 for all nodes who are ancestor of head and
91 91 not in stop.
92 92 """
93 93 if head is None:
94 94 head = self.badrev
95 95 return self.__ancestors_and_nb_ancestors(head, stop)[1]
96 96
97 97 def ancestors(self, head=None, stop=None):
98 98 """
99 99 returns the set of the ancestors of head (self included)
100 100 who are not in stop.
101 101 """
102 102 if head is None:
103 103 head = self.badrev
104 104 return self.__ancestors_and_nb_ancestors(head, stop)[0]
105 105
106 106 def __ancestors_and_nb_ancestors(self, head, stop=None):
107 107 """
108 108 if stop is None then ancestors of goodrevs are used as
109 109 lower limit.
110 110
111 111 returns (anc, n_child) where anc is the set of the ancestors of head
112 112 and n_child is a dictionary with the following mapping:
113 113 node -> number of ancestors (self included)
114 114 """
115 115 cl = self.repo.changelog
116 116 if not stop:
117 117 stop = sets.Set([])
118 118 for i in xrange(len(self.goodrevs)-1, -1, -1):
119 119 g = self.goodrevs[i]
120 120 if g in stop:
121 121 continue
122 122 stop.update(cl.reachable(g))
123 123 def num_children(a):
124 124 """
125 125 returns a dictionnary with the following mapping
126 126 node -> [number of children, empty set]
127 127 """
128 128 d = {a: [0, sets.Set([])]}
129 129 for i in xrange(cl.rev(a)+1):
130 130 n = cl.node(i)
131 131 if not d.has_key(n):
132 132 d[n] = [0, sets.Set([])]
133 133 parents = [p for p in cl.parents(n) if p != hg.nullid]
134 134 for p in parents:
135 135 d[p][0] += 1
136 136 return d
137 137
138 138 if head in stop:
139 139 raise util.Abort(_("Unconsistent state, %s:%s is good and bad")
140 140 % (cl.rev(head), hg.short(head)))
141 141 n_child = num_children(head)
142 142 for i in xrange(cl.rev(head)+1):
143 143 n = cl.node(i)
144 144 parents = [p for p in cl.parents(n) if p != hg.nullid]
145 145 for p in parents:
146 146 n_child[p][0] -= 1
147 147 if not n in stop:
148 148 n_child[n][1].union_update(n_child[p][1])
149 149 if n_child[p][0] == 0:
150 150 n_child[p] = len(n_child[p][1])
151 151 if not n in stop:
152 152 n_child[n][1].add(n)
153 153 if n_child[n][0] == 0:
154 154 if n == head:
155 155 anc = n_child[n][1]
156 156 n_child[n] = len(n_child[n][1])
157 157 return anc, n_child
158 158
159 159 def next(self):
160 160 if not self.badrev:
161 161 raise util.Abort(_("You should give at least one bad revision"))
162 162 if not self.goodrevs:
163 163 self.ui.warn(_("No good revision given\n"))
164 164 self.ui.warn(_("Marking the first revision as good\n"))
165 165 ancestors, num_ancestors = self.__ancestors_and_nb_ancestors(
166 166 self.badrev)
167 167 tot = len(ancestors)
168 168 if tot == 1:
169 169 if ancestors.pop() != self.badrev:
170 170 raise util.Abort(_("Could not find the first bad revision"))
171 171 self.ui.write(_("The first bad revision is:\n"))
172 172 displayer = commands.show_changeset(self.ui, self.repo, {})
173 173 displayer.show(changenode=self.badrev)
174 174 return None
175 175 best_rev = None
176 176 best_len = -1
177 177 for n in ancestors:
178 178 l = num_ancestors[n]
179 179 l = min(l, tot - l)
180 180 if l > best_len:
181 181 best_len = l
182 182 best_rev = n
183 183 assert best_rev is not None
184 184 nb_tests = 0
185 185 q, r = divmod(tot, 2)
186 186 while q:
187 187 nb_tests += 1
188 188 q, r = divmod(q, 2)
189 189 msg = _("Testing changeset %s:%s (%s changesets remaining, "
190 190 "~%s tests)\n") % (self.repo.changelog.rev(best_rev),
191 191 hg.short(best_rev), tot, nb_tests)
192 192 self.ui.write(msg)
193 193 return best_rev
194 194
195 195 def autonext(self):
196 196 """find and update to the next revision to test"""
197 197 check_clean(self.ui, self.repo)
198 198 rev = self.next()
199 199 if rev is not None:
200 200 return hg.clean(self.repo, rev)
201 201
202 202 def good(self, rev):
203 203 self.goodrevs.append(rev)
204 204
205 205 def autogood(self, rev=None):
206 206 """mark revision as good and update to the next revision to test"""
207 207 check_clean(self.ui, self.repo)
208 208 rev = lookup_rev(self.ui, self.repo, rev)
209 209 self.good(rev)
210 210 if self.badrev:
211 211 return self.autonext()
212 212
213 213 def bad(self, rev):
214 214 self.badrev = rev
215 215
216 216 def autobad(self, rev=None):
217 217 """mark revision as bad and update to the next revision to test"""
218 218 check_clean(self.ui, self.repo)
219 219 rev = lookup_rev(self.ui, self.repo, rev)
220 220 self.bad(rev)
221 221 if self.goodrevs:
222 222 self.autonext()
223 223
224 224 # should we put it in the class ?
225 225 def test(ui, repo, rev):
226 226 """test the bisection code"""
227 227 b = bisect(ui, repo)
228 228 rev = repo.lookup(rev)
229 229 ui.write("testing with rev %s\n" % hg.hex(rev))
230 230 anc = b.ancestors()
231 231 while len(anc) > 1:
232 232 if not rev in anc:
233 233 ui.warn("failure while bisecting\n")
234 234 sys.exit(1)
235 235 ui.write("it worked :)\n")
236 236 new_rev = b.next()
237 237 ui.write("choosing if good or bad\n")
238 238 if rev in b.ancestors(head=new_rev):
239 239 b.bad(new_rev)
240 240 ui.write("it is bad\n")
241 241 else:
242 242 b.good(new_rev)
243 243 ui.write("it is good\n")
244 244 anc = b.ancestors()
245 245 #repo.update(new_rev, force=True)
246 246 for v in anc:
247 247 if v != rev:
248 248 ui.warn("fail to found cset! :(\n")
249 249 return 1
250 250 ui.write("Found bad cset: %s\n" % hg.hex(b.badrev))
251 251 ui.write("Everything is ok :)\n")
252 252 return 0
253 253
254 254 def bisect_run(ui, repo, cmd=None, *args):
255 255 """bisect extension: dichotomic search in the DAG of changesets
256 256 for subcommands see "hg bisect help\"
257 257 """
258 258 def help_(cmd=None, *args):
259 259 """show help for a given bisect subcommand or all subcommands"""
260 260 cmdtable = bisectcmdtable
261 261 if cmd:
262 262 doc = cmdtable[cmd][0].__doc__
263 263 synopsis = cmdtable[cmd][2]
264 264 ui.write(synopsis + "\n")
265 265 ui.write("\n" + doc + "\n")
266 266 return
267 267 ui.write(_("list of subcommands for the bisect extension\n\n"))
268 268 cmds = cmdtable.keys()
269 269 cmds.sort()
270 270 m = max([len(c) for c in cmds])
271 271 for cmd in cmds:
272 272 doc = cmdtable[cmd][0].__doc__.splitlines(0)[0].rstrip()
273 273 ui.write(" %-*s %s\n" % (m, cmd, doc))
274 274
275 275 b = bisect(ui, repo)
276 276 bisectcmdtable = {
277 277 "init": (b.init, 0, _("hg bisect init")),
278 278 "bad": (b.autobad, 1, _("hg bisect bad [<rev>]")),
279 279 "good": (b.autogood, 1, _("hg bisect good [<rev>]")),
280 280 "next": (b.autonext, 0, _("hg bisect next")),
281 281 "reset": (b.reset, 0, _("hg bisect reset")),
282 282 "help": (help_, 1, _("hg bisect help [<subcommand>]")),
283 283 }
284 284
285 285 if not bisectcmdtable.has_key(cmd):
286 286 ui.warn(_("bisect: Unknown sub-command\n"))
287 287 return help_()
288 288 if len(args) > bisectcmdtable[cmd][1]:
289 289 ui.warn(_("bisect: Too many arguments\n"))
290 290 return help_()
291 291 try:
292 292 return bisectcmdtable[cmd][0](*args)
293 293 finally:
294 294 b.write()
295 295
296 296 cmdtable = {
297 297 "bisect": (bisect_run, [], _("hg bisect [help|init|reset|next|good|bad]")),
298 298 #"bisect-test": (test, [], "hg bisect-test rev"),
299 299 }
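The search strategy in bisect.next() above reduces to: among the candidate changesets, pick the one whose ancestor count splits the candidates most evenly, then report roughly log2(candidates) remaining tests. A simplified, standalone illustration (strings stand in for changelog nodes; not the extension's actual code):

def pick_best(num_ancestors, tot):
    # num_ancestors maps node -> number of ancestors including itself,
    # tot is how many candidate changesets remain
    best_rev, best_len = None, -1
    for n, count in num_ancestors.items():
        l = min(count, tot - count)   # size of the smaller half after testing n
        if l > best_len:
            best_len, best_rev = l, n
    return best_rev

# a linear history a -> b -> c -> d -> e -> f:
counts = {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6}
print(pick_best(counts, 6))   # -> 'c', the midpoint of the chain

# the "~N tests" estimate is repeated halving, i.e. about floor(log2(tot)):
q, nb_tests = 6 // 2, 0
while q:
    nb_tests += 1
    q //= 2   # nb_tests ends up as 2 for six candidates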
@@ -1,336 +1,336
1 1 # Minimal support for git commands on an hg repository
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import time, sys, signal, os
9 9 from mercurial import hg, mdiff, fancyopts, commands, ui, util
10 10
11 11 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
12 12 changes=None, text=False):
13 13 def date(c):
14 14 return time.asctime(time.gmtime(c[2][0]))
15 15
16 16 if not changes:
17 changes = repo.changes(node1, node2, files, match=match)
17 changes = repo.status(node1, node2, files, match=match)[:5]
18 18 modified, added, removed, deleted, unknown = changes
19 19 if files:
20 20 modified, added, removed = map(lambda x: filterfiles(files, x),
21 21 (modified, added, removed))
22 22
23 23 if not modified and not added and not removed:
24 24 return
25 25
26 26 if node2:
27 27 change = repo.changelog.read(node2)
28 28 mmap2 = repo.manifest.read(change[0])
29 29 date2 = date(change)
30 30 def read(f):
31 31 return repo.file(f).read(mmap2[f])
32 32 else:
33 33 date2 = time.asctime()
34 34 if not node1:
35 35 node1 = repo.dirstate.parents()[0]
36 36 def read(f):
37 37 return repo.wfile(f).read()
38 38
39 39 change = repo.changelog.read(node1)
40 40 mmap = repo.manifest.read(change[0])
41 41 date1 = date(change)
42 42
43 43 for f in modified:
44 44 to = None
45 45 if f in mmap:
46 46 to = repo.file(f).read(mmap[f])
47 47 tn = read(f)
48 48 fp.write("diff --git a/%s b/%s\n" % (f, f))
49 49 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
50 50 for f in added:
51 51 to = None
52 52 tn = read(f)
53 53 fp.write("diff --git /dev/null b/%s\n" % (f))
54 54 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
55 55 for f in removed:
56 56 to = repo.file(f).read(mmap[f])
57 57 tn = None
58 58 fp.write("diff --git a/%s /dev/null\n" % (f))
59 59 fp.write(mdiff.unidiff(to, date1, tn, date2, f, None, text=text))
60 60
61 61 def difftree(ui, repo, node1=None, node2=None, **opts):
62 62 """diff trees from two commits"""
63 63 def __difftree(repo, node1, node2):
64 64 def date(c):
65 65 return time.asctime(time.gmtime(c[2][0]))
66 66
67 67 if node2:
68 68 change = repo.changelog.read(node2)
69 69 mmap2 = repo.manifest.read(change[0])
70 modified, added, removed, deleted, unknown = repo.changes(node1, node2)
70 modified, added, removed, deleted, unknown = repo.status(node1, node2)[:5]
71 71 def read(f): return repo.file(f).read(mmap2[f])
72 72 date2 = date(change)
73 73 else:
74 74 date2 = time.asctime()
75 modified, added, removed, deleted, unknown = repo.changes(node1)
75 modified, added, removed, deleted, unknown = repo.status(node1)[:5]
76 76 if not node1:
77 77 node1 = repo.dirstate.parents()[0]
78 78 def read(f): return file(os.path.join(repo.root, f)).read()
79 79
80 80 change = repo.changelog.read(node1)
81 81 mmap = repo.manifest.read(change[0])
82 82 date1 = date(change)
83 83 empty = "0" * 40;
84 84
85 85 for f in modified:
86 86 # TODO get file permissions
87 87 print ":100664 100664 %s %s M\t%s\t%s" % (hg.hex(mmap[f]),
88 88 hg.hex(mmap2[f]), f, f)
89 89 for f in added:
90 90 print ":000000 100664 %s %s N\t%s\t%s" % (empty, hg.hex(mmap2[f]), f, f)
91 91 for f in removed:
92 92 print ":100664 000000 %s %s D\t%s\t%s" % (hg.hex(mmap[f]), empty, f, f)
93 93 ##
94 94
95 95 while True:
96 96 if opts['stdin']:
97 97 try:
98 98 line = raw_input().split(' ')
99 99 node1 = line[0]
100 100 if len(line) > 1:
101 101 node2 = line[1]
102 102 else:
103 103 node2 = None
104 104 except EOFError:
105 105 break
106 106 node1 = repo.lookup(node1)
107 107 if node2:
108 108 node2 = repo.lookup(node2)
109 109 else:
110 110 node2 = node1
111 111 node1 = repo.changelog.parents(node1)[0]
112 112 if opts['patch']:
113 113 if opts['pretty']:
114 114 catcommit(repo, node2, "")
115 115 dodiff(sys.stdout, ui, repo, node1, node2)
116 116 else:
117 117 __difftree(repo, node1, node2)
118 118 if not opts['stdin']:
119 119 break
120 120
121 121 def catcommit(repo, n, prefix, changes=None):
122 122 nlprefix = '\n' + prefix;
123 123 (p1, p2) = repo.changelog.parents(n)
124 124 (h, h1, h2) = map(hg.hex, (n, p1, p2))
125 125 (i1, i2) = map(repo.changelog.rev, (p1, p2))
126 126 if not changes:
127 127 changes = repo.changelog.read(n)
128 128 print "tree %s" % (hg.hex(changes[0]))
129 129 if i1 != -1: print "parent %s" % (h1)
130 130 if i2 != -1: print "parent %s" % (h2)
131 131 date_ar = changes[2]
132 132 date = int(float(date_ar[0]))
133 133 lines = changes[4].splitlines()
134 134 if lines and lines[-1].startswith('committer:'):
135 135 committer = lines[-1].split(': ')[1].rstrip()
136 136 else:
137 137 committer = changes[1]
138 138
139 139 print "author %s %s %s" % (changes[1], date, date_ar[1])
140 140 print "committer %s %s %s" % (committer, date, date_ar[1])
141 141 print ""
142 142 if prefix != "":
143 143 print "%s%s" % (prefix, changes[4].replace('\n', nlprefix).strip())
144 144 else:
145 145 print changes[4]
146 146 if prefix:
147 147 sys.stdout.write('\0')
148 148
149 149 def base(ui, repo, node1, node2):
150 150 """Output common ancestor information"""
151 151 node1 = repo.lookup(node1)
152 152 node2 = repo.lookup(node2)
153 153 n = repo.changelog.ancestor(node1, node2)
154 154 print hg.hex(n)
155 155
156 156 def catfile(ui, repo, type=None, r=None, **opts):
157 157 """cat a specific revision"""
158 158 # in stdin mode, every line except the commit is prefixed with two
159 159 # spaces. This way the our caller can find the commit without magic
160 160 # strings
161 161 #
162 162 prefix = ""
163 163 if opts['stdin']:
164 164 try:
165 165 (type, r) = raw_input().split(' ');
166 166 prefix = " "
167 167 except EOFError:
168 168 return
169 169
170 170 else:
171 171 if not type or not r:
172 172 ui.warn("cat-file: type or revision not supplied\n")
173 173 commands.help_(ui, 'cat-file')
174 174
175 175 while r:
176 176 if type != "commit":
177 177 sys.stderr.write("aborting hg cat-file only understands commits\n")
178 178 sys.exit(1);
179 179 n = repo.lookup(r)
180 180 catcommit(repo, n, prefix)
181 181 if opts['stdin']:
182 182 try:
183 183 (type, r) = raw_input().split(' ');
184 184 except EOFError:
185 185 break
186 186 else:
187 187 break
188 188
189 189 # git rev-tree is a confusing thing. You can supply a number of
190 190 # commit sha1s on the command line, and it walks the commit history
191 191 # telling you which commits are reachable from the supplied ones via
192 192 # a bitmask based on arg position.
193 193 # you can specify a commit to stop at by starting the sha1 with ^
194 194 def revtree(args, repo, full="tree", maxnr=0, parents=False):
195 195 def chlogwalk():
196 196 ch = repo.changelog
197 197 count = ch.count()
198 198 i = count
199 199 l = [0] * 100
200 200 chunk = 100
201 201 while True:
202 202 if chunk > i:
203 203 chunk = i
204 204 i = 0
205 205 else:
206 206 i -= chunk
207 207
208 208 for x in xrange(0, chunk):
209 209 if i + x >= count:
210 210 l[chunk - x:] = [0] * (chunk - x)
211 211 break
212 212 if full != None:
213 213 l[x] = ch.read(ch.node(i + x))
214 214 else:
215 215 l[x] = 1
216 216 for x in xrange(chunk-1, -1, -1):
217 217 if l[x] != 0:
218 218 yield (i + x, full != None and l[x] or None)
219 219 if i == 0:
220 220 break
221 221
222 222 # calculate and return the reachability bitmask for sha
223 223 def is_reachable(ar, reachable, sha):
224 224 if len(ar) == 0:
225 225 return 1
226 226 mask = 0
227 227 for i in range(len(ar)):
228 228 if sha in reachable[i]:
229 229 mask |= 1 << i
230 230
231 231 return mask
232 232
233 233 reachable = []
234 234 stop_sha1 = []
235 235 want_sha1 = []
236 236 count = 0
237 237
238 238 # figure out which commits they are asking for and which ones they
239 239 # want us to stop on
240 240 for i in range(len(args)):
241 241 if args[i].startswith('^'):
242 242 s = repo.lookup(args[i][1:])
243 243 stop_sha1.append(s)
244 244 want_sha1.append(s)
245 245 elif args[i] != 'HEAD':
246 246 want_sha1.append(repo.lookup(args[i]))
247 247
248 248 # calculate the graph for the supplied commits
249 249 for i in range(len(want_sha1)):
250 250 reachable.append({});
251 251 n = want_sha1[i];
252 252 visit = [n];
253 253 reachable[i][n] = 1
254 254 while visit:
255 255 n = visit.pop(0)
256 256 if n in stop_sha1:
257 257 continue
258 258 for p in repo.changelog.parents(n):
259 259 if p not in reachable[i]:
260 260 reachable[i][p] = 1
261 261 visit.append(p)
262 262 if p in stop_sha1:
263 263 continue
264 264
265 265 # walk the repository looking for commits that are in our
266 266 # reachability graph
267 267 #for i in range(repo.changelog.count()-1, -1, -1):
268 268 for i, changes in chlogwalk():
269 269 n = repo.changelog.node(i)
270 270 mask = is_reachable(want_sha1, reachable, n)
271 271 if mask:
272 272 parentstr = ""
273 273 if parents:
274 274 pp = repo.changelog.parents(n)
275 275 if pp[0] != hg.nullid:
276 276 parentstr += " " + hg.hex(pp[0])
277 277 if pp[1] != hg.nullid:
278 278 parentstr += " " + hg.hex(pp[1])
279 279 if not full:
280 280 print hg.hex(n) + parentstr
281 281 elif full is "commit":
282 282 print hg.hex(n) + parentstr
283 283 catcommit(repo, n, ' ', changes)
284 284 else:
285 285 (p1, p2) = repo.changelog.parents(n)
286 286 (h, h1, h2) = map(hg.hex, (n, p1, p2))
287 287 (i1, i2) = map(repo.changelog.rev, (p1, p2))
288 288
289 289 date = changes[2][0]
290 290 print "%s %s:%s" % (date, h, mask),
291 291 mask = is_reachable(want_sha1, reachable, p1)
292 292 if i1 != -1 and mask > 0:
293 293 print "%s:%s " % (h1, mask),
294 294 mask = is_reachable(want_sha1, reachable, p2)
295 295 if i2 != -1 and mask > 0:
296 296 print "%s:%s " % (h2, mask),
297 297 print ""
298 298 if maxnr and count >= maxnr:
299 299 break
300 300 count += 1
301 301
302 302 # git rev-list tries to order things by date, and has the ability to stop
303 303 # at a given commit without walking the whole repo. TODO add the stop
304 304 # parameter
305 305 def revlist(ui, repo, *revs, **opts):
306 306 """print revisions"""
307 307 if opts['header']:
308 308 full = "commit"
309 309 else:
310 310 full = None
311 311 copy = [x for x in revs]
312 312 revtree(copy, repo, full, opts['max_count'], opts['parents'])
313 313
314 314 def view(ui, repo, *etc):
315 315 "start interactive history viewer"
316 316 os.chdir(repo.root)
317 317 os.system(ui.config("hgk", "path", "hgk") + " " + " ".join(etc))
318 318
319 319 cmdtable = {
320 320 "view": (view, [], 'hg view'),
321 321 "debug-diff-tree": (difftree, [('p', 'patch', None, 'generate patch'),
322 322 ('r', 'recursive', None, 'recursive'),
323 323 ('P', 'pretty', None, 'pretty'),
324 324 ('s', 'stdin', None, 'stdin'),
325 325 ('C', 'copy', None, 'detect copies'),
326 326 ('S', 'search', "", 'search')],
327 327 "hg git-diff-tree [options] node1 node2"),
328 328 "debug-cat-file": (catfile, [('s', 'stdin', None, 'stdin')],
329 329 "hg debug-cat-file [options] type file"),
330 330 "debug-merge-base": (base, [], "hg debug-merge-base node node"),
331 331 "debug-rev-list": (revlist, [('H', 'header', None, 'header'),
332 332 ('t', 'topo-order', None, 'topo-order'),
333 333 ('p', 'parents', None, 'parents'),
334 334 ('n', 'max-count', 0, 'max-count')],
335 335 "hg debug-rev-list [options] revs"),
336 336 }
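As a reference for the git-style output that __difftree() above prints (file modes are hard-coded to 100664, per the TODO about permissions), a toy reproduction with made-up manifest hashes:

old_hex, new_hex, empty = "a" * 40, "b" * 40, "0" * 40   # made-up values
f = "hgext/hgk.py"                                       # illustrative path
print(":100664 100664 %s %s M\t%s\t%s" % (old_hex, new_hex, f, f))  # modified
print(":000000 100664 %s %s N\t%s\t%s" % (empty, new_hex, f, f))    # added
print(":100664 000000 %s %s D\t%s\t%s" % (old_hex, empty, f, f))    # removed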
@@ -1,1999 +1,1998
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.demandload import *
33 33 from mercurial.i18n import gettext as _
34 34 demandload(globals(), "os sys re struct traceback errno bz2")
35 35 demandload(globals(), "mercurial:commands,hg,patch,revlog,ui,util")
36 36
37 37 commands.norepo += " qclone qversion"
38 38
39 39 class statusentry:
40 40 def __init__(self, rev, name=None):
41 41 if not name:
42 42 fields = rev.split(':')
43 43 if len(fields) == 2:
44 44 self.rev, self.name = fields
45 45 else:
46 46 self.rev, self.name = None, None
47 47 else:
48 48 self.rev, self.name = rev, name
49 49
50 50 def __str__(self):
51 51 return self.rev + ':' + self.name
52 52
53 53 class queue:
54 54 def __init__(self, ui, path, patchdir=None):
55 55 self.basepath = path
56 56 self.path = patchdir or os.path.join(path, "patches")
57 57 self.opener = util.opener(self.path)
58 58 self.ui = ui
59 59 self.applied = []
60 60 self.full_series = []
61 61 self.applied_dirty = 0
62 62 self.series_dirty = 0
63 63 self.series_path = "series"
64 64 self.status_path = "status"
65 65 self.guards_path = "guards"
66 66 self.active_guards = None
67 67 self.guards_dirty = False
68 68 self._diffopts = None
69 69
70 70 if os.path.exists(self.join(self.series_path)):
71 71 self.full_series = self.opener(self.series_path).read().splitlines()
72 72 self.parse_series()
73 73
74 74 if os.path.exists(self.join(self.status_path)):
75 75 lines = self.opener(self.status_path).read().splitlines()
76 76 self.applied = [statusentry(l) for l in lines]
77 77
78 78 def diffopts(self):
79 79 if self._diffopts is None:
80 80 self._diffopts = self.ui.diffopts()
81 81 return self._diffopts
82 82
83 83 def join(self, *p):
84 84 return os.path.join(self.path, *p)
85 85
86 86 def find_series(self, patch):
87 87 pre = re.compile("(\s*)([^#]+)")
88 88 index = 0
89 89 for l in self.full_series:
90 90 m = pre.match(l)
91 91 if m:
92 92 s = m.group(2)
93 93 s = s.rstrip()
94 94 if s == patch:
95 95 return index
96 96 index += 1
97 97 return None
98 98
99 99 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
100 100
101 101 def parse_series(self):
102 102 self.series = []
103 103 self.series_guards = []
104 104 for l in self.full_series:
105 105 h = l.find('#')
106 106 if h == -1:
107 107 patch = l
108 108 comment = ''
109 109 elif h == 0:
110 110 continue
111 111 else:
112 112 patch = l[:h]
113 113 comment = l[h:]
114 114 patch = patch.strip()
115 115 if patch:
116 116 self.series.append(patch)
117 117 self.series_guards.append(self.guard_re.findall(comment))
118 118
119 119 def check_guard(self, guard):
120 120 bad_chars = '# \t\r\n\f'
121 121 first = guard[0]
122 122 for c in '-+':
123 123 if first == c:
124 124 return (_('guard %r starts with invalid character: %r') %
125 125 (guard, c))
126 126 for c in bad_chars:
127 127 if c in guard:
128 128 return _('invalid character in guard %r: %r') % (guard, c)
129 129
130 130 def set_active(self, guards):
131 131 for guard in guards:
132 132 bad = self.check_guard(guard)
133 133 if bad:
134 134 raise util.Abort(bad)
135 135 guards = dict.fromkeys(guards).keys()
136 136 guards.sort()
137 137 self.ui.debug('active guards: %s\n' % ' '.join(guards))
138 138 self.active_guards = guards
139 139 self.guards_dirty = True
140 140
141 141 def active(self):
142 142 if self.active_guards is None:
143 143 self.active_guards = []
144 144 try:
145 145 guards = self.opener(self.guards_path).read().split()
146 146 except IOError, err:
147 147 if err.errno != errno.ENOENT: raise
148 148 guards = []
149 149 for i, guard in enumerate(guards):
150 150 bad = self.check_guard(guard)
151 151 if bad:
152 152 self.ui.warn('%s:%d: %s\n' %
153 153 (self.join(self.guards_path), i + 1, bad))
154 154 else:
155 155 self.active_guards.append(guard)
156 156 return self.active_guards
157 157
158 158 def set_guards(self, idx, guards):
159 159 for g in guards:
160 160 if len(g) < 2:
161 161 raise util.Abort(_('guard %r too short') % g)
162 162 if g[0] not in '-+':
163 163 raise util.Abort(_('guard %r starts with invalid char') % g)
164 164 bad = self.check_guard(g[1:])
165 165 if bad:
166 166 raise util.Abort(bad)
167 167 drop = self.guard_re.sub('', self.full_series[idx])
168 168 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
169 169 self.parse_series()
170 170 self.series_dirty = True
171 171
172 172 def pushable(self, idx):
173 173 if isinstance(idx, str):
174 174 idx = self.series.index(idx)
175 175 patchguards = self.series_guards[idx]
176 176 if not patchguards:
177 177 return True, None
178 178 default = False
179 179 guards = self.active()
180 180 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
181 181 if exactneg:
182 182 return False, exactneg[0]
183 183 pos = [g for g in patchguards if g[0] == '+']
184 184 exactpos = [g for g in pos if g[1:] in guards]
185 185 if pos:
186 186 if exactpos:
187 187 return True, exactpos[0]
188 188 return False, pos
189 189 return True, ''
190 190
191 191 def explain_pushable(self, idx, all_patches=False):
192 192 write = all_patches and self.ui.write or self.ui.warn
193 193 if all_patches or self.ui.verbose:
194 194 if isinstance(idx, str):
195 195 idx = self.series.index(idx)
196 196 pushable, why = self.pushable(idx)
197 197 if all_patches and pushable:
198 198 if why is None:
199 199 write(_('allowing %s - no guards in effect\n') %
200 200 self.series[idx])
201 201 else:
202 202 if not why:
203 203 write(_('allowing %s - no matching negative guards\n') %
204 204 self.series[idx])
205 205 else:
206 206 write(_('allowing %s - guarded by %r\n') %
207 207 (self.series[idx], why))
208 208 if not pushable:
209 209 if why:
210 210 write(_('skipping %s - guarded by %r\n') %
211 211 (self.series[idx], ' '.join(why)))
212 212 else:
213 213 write(_('skipping %s - no matching guards\n') %
214 214 self.series[idx])
215 215
216 216 def save_dirty(self):
217 217 def write_list(items, path):
218 218 fp = self.opener(path, 'w')
219 219 for i in items:
220 220 print >> fp, i
221 221 fp.close()
222 222 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
223 223 if self.series_dirty: write_list(self.full_series, self.series_path)
224 224 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
225 225
226 226 def readheaders(self, patch):
227 227 def eatdiff(lines):
228 228 while lines:
229 229 l = lines[-1]
230 230 if (l.startswith("diff -") or
231 231 l.startswith("Index:") or
232 232 l.startswith("===========")):
233 233 del lines[-1]
234 234 else:
235 235 break
236 236 def eatempty(lines):
237 237 while lines:
238 238 l = lines[-1]
239 239 if re.match('\s*$', l):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243
244 244 pf = self.join(patch)
245 245 message = []
246 246 comments = []
247 247 user = None
248 248 date = None
249 249 format = None
250 250 subject = None
251 251 diffstart = 0
252 252
253 253 for line in file(pf):
254 254 line = line.rstrip()
255 255 if diffstart:
256 256 if line.startswith('+++ '):
257 257 diffstart = 2
258 258 break
259 259 if line.startswith("--- "):
260 260 diffstart = 1
261 261 continue
262 262 elif format == "hgpatch":
263 263 # parse values when importing the result of an hg export
264 264 if line.startswith("# User "):
265 265 user = line[7:]
266 266 elif line.startswith("# Date "):
267 267 date = line[7:]
268 268 elif not line.startswith("# ") and line:
269 269 message.append(line)
270 270 format = None
271 271 elif line == '# HG changeset patch':
272 272 format = "hgpatch"
273 273 elif (format != "tagdone" and (line.startswith("Subject: ") or
274 274 line.startswith("subject: "))):
275 275 subject = line[9:]
276 276 format = "tag"
277 277 elif (format != "tagdone" and (line.startswith("From: ") or
278 278 line.startswith("from: "))):
279 279 user = line[6:]
280 280 format = "tag"
281 281 elif format == "tag" and line == "":
282 282 # when looking for tags (subject: from: etc) they
283 283 # end once you find a blank line in the source
284 284 format = "tagdone"
285 285 elif message or line:
286 286 message.append(line)
287 287 comments.append(line)
288 288
289 289 eatdiff(message)
290 290 eatdiff(comments)
291 291 eatempty(message)
292 292 eatempty(comments)
293 293
294 294 # make sure message isn't empty
295 295 if format and format.startswith("tag") and subject:
296 296 message.insert(0, "")
297 297 message.insert(0, subject)
298 298 return (message, comments, user, date, diffstart > 1)
299 299
300 300 def printdiff(self, repo, node1, node2=None, files=None,
301 301 fp=None, changes=None, opts=None):
302 302 patch.diff(repo, node1, node2, files,
303 303 fp=fp, changes=changes, opts=self.diffopts())
304 304
305 305 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
306 306 # first try just applying the patch
307 307 (err, n) = self.apply(repo, [ patch ], update_status=False,
308 308 strict=True, merge=rev, wlock=wlock)
309 309
310 310 if err == 0:
311 311 return (err, n)
312 312
313 313 if n is None:
314 314 raise util.Abort(_("apply failed for patch %s") % patch)
315 315
316 316 self.ui.warn("patch didn't work out, merging %s\n" % patch)
317 317
318 318 # apply failed, strip away that rev and merge.
319 319 hg.clean(repo, head, wlock=wlock)
320 320 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
321 321
322 322 c = repo.changelog.read(rev)
323 323 ret = hg.merge(repo, rev, wlock=wlock)
324 324 if ret:
325 325 raise util.Abort(_("update returned %d") % ret)
326 326 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
327 327 if n == None:
328 328 raise util.Abort(_("repo commit failed"))
329 329 try:
330 330 message, comments, user, date, patchfound = mergeq.readheaders(patch)
331 331 except:
332 332 raise util.Abort(_("unable to read %s") % patch)
333 333
334 334 patchf = self.opener(patch, "w")
335 335 if comments:
336 336 comments = "\n".join(comments) + '\n\n'
337 337 patchf.write(comments)
338 338 self.printdiff(repo, head, n, fp=patchf)
339 339 patchf.close()
340 340 return (0, n)
341 341
342 342 def qparents(self, repo, rev=None):
343 343 if rev is None:
344 344 (p1, p2) = repo.dirstate.parents()
345 345 if p2 == revlog.nullid:
346 346 return p1
347 347 if len(self.applied) == 0:
348 348 return None
349 349 return revlog.bin(self.applied[-1].rev)
350 350 pp = repo.changelog.parents(rev)
351 351 if pp[1] != revlog.nullid:
352 352 arevs = [ x.rev for x in self.applied ]
353 353 p0 = revlog.hex(pp[0])
354 354 p1 = revlog.hex(pp[1])
355 355 if p0 in arevs:
356 356 return pp[0]
357 357 if p1 in arevs:
358 358 return pp[1]
359 359 return pp[0]
360 360
361 361 def mergepatch(self, repo, mergeq, series, wlock):
362 362 if len(self.applied) == 0:
363 363 # each of the patches merged in will have two parents. This
364 364 # can confuse the qrefresh, qdiff, and strip code because it
365 365 # needs to know which parent is actually in the patch queue.
366 366 # so, we insert a merge marker with only one parent. This way
367 367 # the first patch in the queue is never a merge patch
368 368 #
369 369 pname = ".hg.patches.merge.marker"
370 370 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
371 371 wlock=wlock)
372 372 self.applied.append(statusentry(revlog.hex(n), pname))
373 373 self.applied_dirty = 1
374 374
375 375 head = self.qparents(repo)
376 376
377 377 for patch in series:
378 378 patch = mergeq.lookup(patch, strict=True)
379 379 if not patch:
380 380 self.ui.warn("patch %s does not exist\n" % patch)
381 381 return (1, None)
382 382 pushable, reason = self.pushable(patch)
383 383 if not pushable:
384 384 self.explain_pushable(patch, all_patches=True)
385 385 continue
386 386 info = mergeq.isapplied(patch)
387 387 if not info:
388 388 self.ui.warn("patch %s is not applied\n" % patch)
389 389 return (1, None)
390 390 rev = revlog.bin(info[1])
391 391 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
392 392 if head:
393 393 self.applied.append(statusentry(revlog.hex(head), patch))
394 394 self.applied_dirty = 1
395 395 if err:
396 396 return (err, head)
397 397 return (0, head)
398 398
399 399 def patch(self, repo, patchfile):
400 400 '''Apply patchfile to the working directory.
401 401 patchfile: file name of patch'''
402 402 try:
403 403 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
404 404 f = os.popen("%s -d %s -p1 --no-backup-if-mismatch < %s" %
405 405 (pp, util.shellquote(repo.root), util.shellquote(patchfile)))
406 406 except:
407 407 self.ui.warn("patch failed, unable to continue (try -v)\n")
408 408 return (None, [], False)
409 409 files = []
410 410 fuzz = False
411 411 for l in f:
412 412 l = l.rstrip('\r\n');
413 413 if self.ui.verbose:
414 414 self.ui.warn(l + "\n")
415 415 if l[:14] == 'patching file ':
416 416 pf = os.path.normpath(util.parse_patch_output(l))
417 417 if pf not in files:
418 418 files.append(pf)
419 419 printed_file = False
420 420 file_str = l
421 421 elif l.find('with fuzz') >= 0:
422 422 if not printed_file:
423 423 self.ui.warn(file_str + '\n')
424 424 printed_file = True
425 425 self.ui.warn(l + '\n')
426 426 fuzz = True
427 427 elif l.find('saving rejects to file') >= 0:
428 428 self.ui.warn(l + '\n')
429 429 elif l.find('FAILED') >= 0:
430 430 if not printed_file:
431 431 self.ui.warn(file_str + '\n')
432 432 printed_file = True
433 433 self.ui.warn(l + '\n')
434 434
435 435 return (not f.close(), files, fuzz)
436 436
437 437 def apply(self, repo, series, list=False, update_status=True,
438 438 strict=False, patchdir=None, merge=None, wlock=None):
439 439 # TODO unify with commands.py
440 440 if not patchdir:
441 441 patchdir = self.path
442 442 err = 0
443 443 if not wlock:
444 444 wlock = repo.wlock()
445 445 lock = repo.lock()
446 446 tr = repo.transaction()
447 447 n = None
448 448 for patch in series:
449 449 pushable, reason = self.pushable(patch)
450 450 if not pushable:
451 451 self.explain_pushable(patch, all_patches=True)
452 452 continue
453 453 self.ui.warn("applying %s\n" % patch)
454 454 pf = os.path.join(patchdir, patch)
455 455
456 456 try:
457 457 message, comments, user, date, patchfound = self.readheaders(patch)
458 458 except:
459 459 self.ui.warn("Unable to read %s\n" % pf)
460 460 err = 1
461 461 break
462 462
463 463 if not message:
464 464 message = "imported patch %s\n" % patch
465 465 else:
466 466 if list:
467 467 message.append("\nimported patch %s" % patch)
468 468 message = '\n'.join(message)
469 469
470 470 (patcherr, files, fuzz) = self.patch(repo, pf)
471 471 patcherr = not patcherr
472 472
473 473 if merge and len(files) > 0:
474 474 # Mark as merged and update dirstate parent info
475 475 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
476 476 p1, p2 = repo.dirstate.parents()
477 477 repo.dirstate.setparents(p1, merge)
478 478 if len(files) > 0:
479 479 cwd = repo.getcwd()
480 480 cfiles = files
481 481 if cwd:
482 482 cfiles = [util.pathto(cwd, f) for f in files]
483 483 commands.addremove_lock(self.ui, repo, cfiles,
484 484 opts={}, wlock=wlock)
485 485 n = repo.commit(files, message, user, date, force=1, lock=lock,
486 486 wlock=wlock)
487 487
488 488 if n == None:
489 489 raise util.Abort(_("repo commit failed"))
490 490
491 491 if update_status:
492 492 self.applied.append(statusentry(revlog.hex(n), patch))
493 493
494 494 if patcherr:
495 495 if not patchfound:
496 496 self.ui.warn("patch %s is empty\n" % patch)
497 497 err = 0
498 498 else:
499 499 self.ui.warn("patch failed, rejects left in working dir\n")
500 500 err = 1
501 501 break
502 502
503 503 if fuzz and strict:
504 504 self.ui.warn("fuzz found when applying patch, stopping\n")
505 505 err = 1
506 506 break
507 507 tr.close()
508 508 return (err, n)
509 509
510 510 def delete(self, repo, patch, force=False):
511 511 patch = self.lookup(patch, strict=True)
512 512 info = self.isapplied(patch)
513 513 if info:
514 514 raise util.Abort(_("cannot delete applied patch %s") % patch)
515 515 if patch not in self.series:
516 516 raise util.Abort(_("patch %s not in series file") % patch)
517 517 if force:
518 518 r = self.qrepo()
519 519 if r:
520 520 r.remove([patch], True)
521 521 else:
522 522 os.unlink(self.join(patch))
523 523 i = self.find_series(patch)
524 524 del self.full_series[i]
525 525 self.parse_series()
526 526 self.series_dirty = 1
527 527
528 528 def check_toppatch(self, repo):
529 529 if len(self.applied) > 0:
530 530 top = revlog.bin(self.applied[-1].rev)
531 531 pp = repo.dirstate.parents()
532 532 if top not in pp:
533 533 raise util.Abort(_("queue top not at same revision as working directory"))
534 534 return top
535 535 return None
536 def check_localchanges(self, repo):
537 (c, a, r, d, u) = repo.changes(None, None)
538 if c or a or d or r:
539 raise util.Abort(_("local changes found, refresh first"))
536 def check_localchanges(self, repo, force=False, refresh=True):
537 m, a, r, d = repo.status()[:4]
538 if m or a or r or d:
539 if not force:
540 if refresh:
541 raise util.Abort(_("local changes found, refresh first"))
542 else:
543 raise util.Abort(_("local changes found"))
544 return m, a, r, d
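This hunk folds the duplicated working-directory checks from new() and strip() into one helper. A standalone paraphrase, assuming the modules queue.py already pulls in via demandload (illustrative only, not the authoritative implementation):

from mercurial import util
from mercurial.i18n import gettext as _

def check_localchanges(repo, force=False, refresh=True):
    # abort on local modifications unless force is given; refresh selects
    # which abort message is shown, matching the two original call sites
    m, a, r, d = repo.status()[:4]
    if (m or a or r or d) and not force:
        if refresh:
            raise util.Abort(_("local changes found, refresh first"))
        raise util.Abort(_("local changes found"))
    return m, a, r, d

In the hunks that follow, new() calls it with force and commits the returned m + a + r lists into the fresh patch, while strip() calls it with refresh=False to get the shorter abort message.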
540 545 def new(self, repo, patch, msg=None, force=None):
541 546 if os.path.exists(self.join(patch)):
542 547 raise util.Abort(_('patch "%s" already exists') % patch)
543 commitfiles = []
544 (c, a, r, d, u) = repo.changes(None, None)
545 if c or a or d or r:
546 if not force:
547 raise util.Abort(_("local changes found, refresh first"))
548 commitfiles = c + a + r
548 m, a, r, d = self.check_localchanges(repo, force)
549 commitfiles = m + a + r
549 550 self.check_toppatch(repo)
550 551 wlock = repo.wlock()
551 552 insert = self.full_series_end()
552 553 if msg:
553 554 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
554 555 wlock=wlock)
555 556 else:
556 557 n = repo.commit(commitfiles,
557 558 "New patch: %s" % patch, force=True, wlock=wlock)
558 559 if n == None:
559 560 raise util.Abort(_("repo commit failed"))
560 561 self.full_series[insert:insert] = [patch]
561 562 self.applied.append(statusentry(revlog.hex(n), patch))
562 563 self.parse_series()
563 564 self.series_dirty = 1
564 565 self.applied_dirty = 1
565 566 p = self.opener(patch, "w")
566 567 if msg:
567 568 msg = msg + "\n"
568 569 p.write(msg)
569 570 p.close()
570 571 wlock = None
571 572 r = self.qrepo()
572 573 if r: r.add([patch])
573 574 if commitfiles:
574 575 self.refresh(repo, short=True)
575 576
576 577 def strip(self, repo, rev, update=True, backup="all", wlock=None):
577 578 def limitheads(chlog, stop):
578 579 """return the list of all nodes that have no children"""
579 580 p = {}
580 581 h = []
581 582 stoprev = 0
582 583 if stop in chlog.nodemap:
583 584 stoprev = chlog.rev(stop)
584 585
585 586 for r in range(chlog.count() - 1, -1, -1):
586 587 n = chlog.node(r)
587 588 if n not in p:
588 589 h.append(n)
589 590 if n == stop:
590 591 break
591 592 if r < stoprev:
592 593 break
593 594 for pn in chlog.parents(n):
594 595 p[pn] = 1
595 596 return h
596 597
597 598 def bundle(cg):
598 599 backupdir = repo.join("strip-backup")
599 600 if not os.path.isdir(backupdir):
600 601 os.mkdir(backupdir)
601 602 name = os.path.join(backupdir, "%s" % revlog.short(rev))
602 603 name = savename(name)
603 604 self.ui.warn("saving bundle to %s\n" % name)
604 605 # TODO, exclusive open
605 606 f = open(name, "wb")
606 607 try:
607 608 f.write("HG10")
608 609 z = bz2.BZ2Compressor(9)
609 610 while 1:
610 611 chunk = cg.read(4096)
611 612 if not chunk:
612 613 break
613 614 f.write(z.compress(chunk))
614 615 f.write(z.flush())
615 616 except:
616 617 os.unlink(name)
617 618 raise
618 619 f.close()
619 620 return name
620 621
621 622 def stripall(rev, revnum):
622 623 cl = repo.changelog
623 624 c = cl.read(rev)
624 625 mm = repo.manifest.read(c[0])
625 626 seen = {}
626 627
627 628 for x in xrange(revnum, cl.count()):
628 629 c = cl.read(cl.node(x))
629 630 for f in c[3]:
630 631 if f in seen:
631 632 continue
632 633 seen[f] = 1
633 634 if f in mm:
634 635 filerev = mm[f]
635 636 else:
636 637 filerev = 0
637 638 seen[f] = filerev
638 639 # we go in two steps here so the strip loop happens in a
639 640 # sensible order. When stripping many files, this helps keep
640 641 # our disk access patterns under control.
641 642 seen_list = seen.keys()
642 643 seen_list.sort()
643 644 for f in seen_list:
644 645 ff = repo.file(f)
645 646 filerev = seen[f]
646 647 if filerev != 0:
647 648 if filerev in ff.nodemap:
648 649 filerev = ff.rev(filerev)
649 650 else:
650 651 filerev = 0
651 652 ff.strip(filerev, revnum)
652 653
653 654 if not wlock:
654 655 wlock = repo.wlock()
655 656 lock = repo.lock()
656 657 chlog = repo.changelog
657 658 # TODO delete the undo files, and handle undo of merge sets
658 659 pp = chlog.parents(rev)
659 660 revnum = chlog.rev(rev)
660 661
661 662 if update:
662 (c, a, r, d, u) = repo.changes(None, None)
663 if c or a or d or r:
664 raise util.Abort(_("local changes found"))
663 self.check_localchanges(repo, refresh=False)
665 664 urev = self.qparents(repo, rev)
666 665 hg.clean(repo, urev, wlock=wlock)
667 666 repo.dirstate.write()
668 667
669 668 # save is a list of all the branches we are truncating away
670 669 # that we actually want to keep. changegroup will be used
671 670 # to preserve them and add them back after the truncate
672 671 saveheads = []
673 672 savebases = {}
674 673
675 674 heads = limitheads(chlog, rev)
676 675 seen = {}
677 676
678 677 # search through all the heads, finding those where the revision
679 678 # we want to strip away is an ancestor. Also look for merges
680 679 # that might be turned into new heads by the strip.
681 680 while heads:
682 681 h = heads.pop()
683 682 n = h
684 683 while True:
685 684 seen[n] = 1
686 685 pp = chlog.parents(n)
687 686 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
688 687 if pp[1] not in seen:
689 688 heads.append(pp[1])
690 689 if pp[0] == revlog.nullid:
691 690 break
692 691 if chlog.rev(pp[0]) < revnum:
693 692 break
694 693 n = pp[0]
695 694 if n == rev:
696 695 break
697 696 r = chlog.reachable(h, rev)
698 697 if rev not in r:
699 698 saveheads.append(h)
700 699 for x in r:
701 700 if chlog.rev(x) > revnum:
702 701 savebases[x] = 1
703 702
704 703 # create a changegroup for all the branches we need to keep
705 704 if backup == "all":
706 705 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
707 706 bundle(backupch)
708 707 if saveheads:
709 708 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
710 709 chgrpfile = bundle(backupch)
711 710
712 711 stripall(rev, revnum)
713 712
714 713 change = chlog.read(rev)
715 714 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
716 715 chlog.strip(revnum, revnum)
717 716 if saveheads:
718 717 self.ui.status("adding branch\n")
719 718 commands.unbundle(self.ui, repo, chgrpfile, update=False)
720 719 if backup != "strip":
721 720 os.unlink(chgrpfile)
722 721
723 722 def isapplied(self, patch):
724 723 """returns (index, rev, patch)"""
725 724 for i in xrange(len(self.applied)):
726 725 a = self.applied[i]
727 726 if a.name == patch:
728 727 return (i, a.rev, a.name)
729 728 return None
730 729
731 730 # if the exact patch name does not exist, we try a few
732 731 # variations. If strict is passed, we try only #1
733 732 #
734 733 # 1) a number to indicate an offset in the series file
735 734 # 2) a unique substring of the patch name was given
736 735 # 3) patchname[-+]num to indicate an offset in the series file
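# illustrative examples (patch names are hypothetical): with
# series = ['foo.patch', 'bar.patch', 'baz.patch'],
#   lookup('2')           -> 'baz.patch'   (offset into the series file)
#   lookup('baz')         -> 'baz.patch'   (unique substring of a name)
#   lookup('baz.patch-1') -> 'bar.patch'   (patch name minus an offset)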
737 736 def lookup(self, patch, strict=False):
738 737 patch = patch and str(patch)
739 738
740 739 def partial_name(s):
741 740 if s in self.series:
742 741 return s
743 742 matches = [x for x in self.series if s in x]
744 743 if len(matches) > 1:
745 744 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
746 745 for m in matches:
747 746 self.ui.warn(' %s\n' % m)
748 747 return None
749 748 if matches:
750 749 return matches[0]
751 750 if len(self.series) > 0 and len(self.applied) > 0:
752 751 if s == 'qtip':
753 752 return self.series[self.series_end()-1]
754 753 if s == 'qbase':
755 754 return self.series[0]
756 755 return None
757 756 if patch == None:
758 757 return None
759 758
760 759 # we don't want to return a partial match until we make
761 760 # sure the file name passed in does not exist (checked below)
762 761 res = partial_name(patch)
763 762 if res and res == patch:
764 763 return res
765 764
766 765 if not os.path.isfile(self.join(patch)):
767 766 try:
768 767 sno = int(patch)
769 768 except(ValueError, OverflowError):
770 769 pass
771 770 else:
772 771 if sno < len(self.series):
773 772 return self.series[sno]
774 773 if not strict:
775 774 # return any partial match made above
776 775 if res:
777 776 return res
778 777 minus = patch.rsplit('-', 1)
779 778 if len(minus) > 1:
780 779 res = partial_name(minus[0])
781 780 if res:
782 781 i = self.series.index(res)
783 782 try:
784 783 off = int(minus[1] or 1)
785 784 except(ValueError, OverflowError):
786 785 pass
787 786 else:
788 787 if i - off >= 0:
789 788 return self.series[i - off]
790 789 plus = patch.rsplit('+', 1)
791 790 if len(plus) > 1:
792 791 res = partial_name(plus[0])
793 792 if res:
794 793 i = self.series.index(res)
795 794 try:
796 795 off = int(plus[1] or 1)
797 796 except(ValueError, OverflowError):
798 797 pass
799 798 else:
800 799 if i + off < len(self.series):
801 800 return self.series[i + off]
802 801 raise util.Abort(_("patch %s not in series") % patch)
803 802
804 803 def push(self, repo, patch=None, force=False, list=False,
805 804 mergeq=None, wlock=None):
806 805 if not wlock:
807 806 wlock = repo.wlock()
808 807 patch = self.lookup(patch)
809 808 if patch and self.isapplied(patch):
810 809 self.ui.warn(_("patch %s is already applied\n") % patch)
811 810 sys.exit(1)
812 811 if self.series_end() == len(self.series):
813 812 self.ui.warn(_("patch series fully applied\n"))
814 813 sys.exit(1)
815 814 if not force:
816 815 self.check_localchanges(repo)
817 816
818 817 self.applied_dirty = 1;
819 818 start = self.series_end()
820 819 if start > 0:
821 820 self.check_toppatch(repo)
822 821 if not patch:
823 822 patch = self.series[start]
824 823 end = start + 1
825 824 else:
826 825 end = self.series.index(patch, start) + 1
827 826 s = self.series[start:end]
828 827 if mergeq:
829 828 ret = self.mergepatch(repo, mergeq, s, wlock)
830 829 else:
831 830 ret = self.apply(repo, s, list, wlock=wlock)
832 831 top = self.applied[-1].name
833 832 if ret[0]:
834 833 self.ui.write("Errors during apply, please fix and refresh %s\n" %
835 834 top)
836 835 else:
837 836 self.ui.write("Now at: %s\n" % top)
838 837 return ret[0]
839 838
840 839 def pop(self, repo, patch=None, force=False, update=True, all=False,
841 840 wlock=None):
842 841 def getfile(f, rev):
843 842 t = repo.file(f).read(rev)
844 843 try:
845 844 repo.wfile(f, "w").write(t)
846 845 except IOError:
847 846 try:
848 847 os.makedirs(os.path.dirname(repo.wjoin(f)))
849 848 except OSError, err:
850 849 if err.errno != errno.EEXIST: raise
851 850 repo.wfile(f, "w").write(t)
852 851
853 852 if not wlock:
854 853 wlock = repo.wlock()
855 854 if patch:
856 855 # index, rev, patch
857 856 info = self.isapplied(patch)
858 857 if not info:
859 858 patch = self.lookup(patch)
860 859 info = self.isapplied(patch)
861 860 if not info:
862 861 raise util.Abort(_("patch %s is not applied") % patch)
863 862 if len(self.applied) == 0:
864 863 self.ui.warn(_("no patches applied\n"))
865 864 sys.exit(1)
866 865
867 866 if not update:
868 867 parents = repo.dirstate.parents()
869 868 rr = [ revlog.bin(x.rev) for x in self.applied ]
870 869 for p in parents:
871 870 if p in rr:
872 871 self.ui.warn("qpop: forcing dirstate update\n")
873 872 update = True
874 873
875 874 if not force and update:
876 875 self.check_localchanges(repo)
877 876
878 877 self.applied_dirty = 1;
879 878 end = len(self.applied)
880 879 if not patch:
881 880 if all:
882 881 popi = 0
883 882 else:
884 883 popi = len(self.applied) - 1
885 884 else:
886 885 popi = info[0] + 1
887 886 if popi >= end:
888 887 self.ui.warn("qpop: %s is already at the top\n" % patch)
889 888 return
890 889 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
891 890
892 891 start = info[0]
893 892 rev = revlog.bin(info[1])
894 893
895 894 # we know there are no local changes, so we can make a simplified
896 895 # form of hg.update.
897 896 if update:
898 897 top = self.check_toppatch(repo)
899 898 qp = self.qparents(repo, rev)
900 899 changes = repo.changelog.read(qp)
901 900 mmap = repo.manifest.read(changes[0])
902 (c, a, r, d, u) = repo.changes(qp, top)
901 m, a, r, d, u = repo.status(qp, top)[:5]
903 902 if d:
904 903 raise util.Abort("deletions found between repo revs")
905 for f in c:
904 for f in m:
906 905 getfile(f, mmap[f])
907 906 for f in r:
908 907 getfile(f, mmap[f])
909 908 util.set_exec(repo.wjoin(f), mmap.execf(f))
910 repo.dirstate.update(c + r, 'n')
909 repo.dirstate.update(m + r, 'n')
911 910 for f in a:
912 911 try: os.unlink(repo.wjoin(f))
913 912 except: raise
914 913 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
915 914 except: pass
916 915 if a:
917 916 repo.dirstate.forget(a)
918 917 repo.dirstate.setparents(qp, revlog.nullid)
919 918 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
920 919 del self.applied[start:end]
921 920 if len(self.applied):
922 921 self.ui.write("Now at: %s\n" % self.applied[-1].name)
923 922 else:
924 923 self.ui.write("Patch queue now empty\n")
925 924
926 925 def diff(self, repo, files):
927 926 top = self.check_toppatch(repo)
928 927 if not top:
929 928 self.ui.write("No patches applied\n")
930 929 return
931 930 qp = self.qparents(repo, top)
932 931 self.printdiff(repo, qp, files=files)
933 932
934 933 def refresh(self, repo, msg='', short=False):
935 934 if len(self.applied) == 0:
936 935 self.ui.write("No patches applied\n")
937 936 return
938 937 wlock = repo.wlock()
939 938 self.check_toppatch(repo)
940 939 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
941 940 top = revlog.bin(top)
942 941 cparents = repo.changelog.parents(top)
943 942 patchparent = self.qparents(repo, top)
944 943 message, comments, user, date, patchfound = self.readheaders(patch)
945 944
946 945 patchf = self.opener(patch, "w")
947 946 msg = msg.rstrip()
948 947 if msg:
949 948 if comments:
950 949 # Remove existing message.
951 950 ci = 0
952 951 for mi in range(len(message)):
953 952 while message[mi] != comments[ci]:
954 953 ci += 1
955 954 del comments[ci]
956 955 comments.append(msg)
957 956 if comments:
958 957 comments = "\n".join(comments) + '\n\n'
959 958 patchf.write(comments)
960 959
961 960 tip = repo.changelog.tip()
962 961 if top == tip:
963 962 # if the top of our patch queue is also the tip, there is an
964 963 # optimization here. We update the dirstate in place and strip
965 964 # off the tip commit. Then just commit the current directory
966 965 # tree. We can also send repo.commit the list of files
967 966 # changed to speed up the diff
968 967 #
969 968 # in short mode, we only diff the files included in the
970 969 # patch already
971 970 #
972 971 # this should really read:
973 #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent)
972 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
974 973 # but we do it backwards to take advantage of manifest/chlog
975 # caching against the next repo.changes call
974 # caching against the next repo.status call
976 975 #
977 (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip)
976 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
978 977 if short:
979 filelist = cc + aa + dd
978 filelist = mm + aa + dd
980 979 else:
981 980 filelist = None
982 (c, a, r, d, u) = repo.changes(None, None, filelist)
981 m, a, r, d, u = repo.status(files=filelist)[:5]
983 982
984 983 # we might end up with files that were added between tip and
985 984 # the dirstate parent, but then changed in the local dirstate.
986 985 # in this case, we want them to only show up in the added section
987 for x in c:
986 for x in m:
988 987 if x not in aa:
989 cc.append(x)
988 mm.append(x)
990 989 # we might end up with files added by the local dirstate that
991 990 # were deleted by the patch. In this case, they should only
992 991 # show up in the changed section.
993 992 for x in a:
994 993 if x in dd:
995 994 del dd[dd.index(x)]
996 cc.append(x)
995 mm.append(x)
997 996 else:
998 997 aa.append(x)
999 998 # make sure any files deleted in the local dirstate
1000 999 # are not in the add or change column of the patch
1001 1000 forget = []
1002 1001 for x in d + r:
1003 1002 if x in aa:
1004 1003 del aa[aa.index(x)]
1005 1004 forget.append(x)
1006 1005 continue
1007 elif x in cc:
1008 del cc[cc.index(x)]
1006 elif x in mm:
1007 del mm[mm.index(x)]
1009 1008 dd.append(x)
1010 1009
1011 c = list(util.unique(cc))
1010 m = list(util.unique(mm))
1012 1011 r = list(util.unique(dd))
1013 1012 a = list(util.unique(aa))
1014 filelist = list(util.unique(c + r + a ))
1013 filelist = list(util.unique(m + r + a))
1015 1014 self.printdiff(repo, patchparent, files=filelist,
1016 changes=(c, a, r, [], u), fp=patchf)
1015 changes=(m, a, r, [], u), fp=patchf)
1017 1016 patchf.close()
1018 1017
1019 1018 changes = repo.changelog.read(tip)
1020 1019 repo.dirstate.setparents(*cparents)
1021 1020 repo.dirstate.update(a, 'a')
1022 1021 repo.dirstate.update(r, 'r')
1023 repo.dirstate.update(c, 'n')
1022 repo.dirstate.update(m, 'n')
1024 1023 repo.dirstate.forget(forget)
1025 1024
1026 1025 if not msg:
1027 1026 if not message:
1028 1027 message = "patch queue: %s\n" % patch
1029 1028 else:
1030 1029 message = "\n".join(message)
1031 1030 else:
1032 1031 message = msg
1033 1032
1034 1033 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1035 1034 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1036 1035 self.applied[-1] = statusentry(revlog.hex(n), patch)
1037 1036 self.applied_dirty = 1
1038 1037 else:
1039 1038 self.printdiff(repo, patchparent, fp=patchf)
1040 1039 patchf.close()
1041 1040 self.pop(repo, force=True, wlock=wlock)
1042 1041 self.push(repo, force=True, wlock=wlock)
1043 1042
1044 1043 def init(self, repo, create=False):
1045 1044 if os.path.isdir(self.path):
1046 1045 raise util.Abort(_("patch queue directory already exists"))
1047 1046 os.mkdir(self.path)
1048 1047 if create:
1049 1048 return self.qrepo(create=True)
1050 1049
1051 1050 def unapplied(self, repo, patch=None):
1052 1051 if patch and patch not in self.series:
1053 1052 raise util.Abort(_("patch %s is not in series file") % patch)
1054 1053 if not patch:
1055 1054 start = self.series_end()
1056 1055 else:
1057 1056 start = self.series.index(patch) + 1
1058 1057 unapplied = []
1059 1058 for i in xrange(start, len(self.series)):
1060 1059 pushable, reason = self.pushable(i)
1061 1060 if pushable:
1062 1061 unapplied.append((i, self.series[i]))
1063 1062 self.explain_pushable(i)
1064 1063 return unapplied
1065 1064
1066 1065 def qseries(self, repo, missing=None, summary=False):
1067 1066 start = self.series_end(all_patches=True)
1068 1067 if not missing:
1069 1068 for i in range(len(self.series)):
1070 1069 patch = self.series[i]
1071 1070 if self.ui.verbose:
1072 1071 if i < start:
1073 1072 status = 'A'
1074 1073 elif self.pushable(i)[0]:
1075 1074 status = 'U'
1076 1075 else:
1077 1076 status = 'G'
1078 1077 self.ui.write('%d %s ' % (i, status))
1079 1078 if summary:
1080 1079 msg = self.readheaders(patch)[0]
1081 1080 msg = msg and ': ' + msg[0] or ': '
1082 1081 else:
1083 1082 msg = ''
1084 1083 self.ui.write('%s%s\n' % (patch, msg))
1085 1084 else:
1086 1085 msng_list = []
1087 1086 for root, dirs, files in os.walk(self.path):
1088 1087 d = root[len(self.path) + 1:]
1089 1088 for f in files:
1090 1089 fl = os.path.join(d, f)
1091 1090 if (fl not in self.series and
1092 1091 fl not in (self.status_path, self.series_path)
1093 1092 and not fl.startswith('.')):
1094 1093 msng_list.append(fl)
1095 1094 msng_list.sort()
1096 1095 for x in msng_list:
1097 1096 if self.ui.verbose:
1098 1097 self.ui.write("D ")
1099 1098 self.ui.write("%s\n" % x)
1100 1099
1101 1100 def issaveline(self, l):
1102 1101 if l.name == '.hg.patches.save.line':
1103 1102 return True
1104 1103
1105 1104 def qrepo(self, create=False):
1106 1105 if create or os.path.isdir(self.join(".hg")):
1107 1106 return hg.repository(self.ui, path=self.path, create=create)
1108 1107
1109 1108 def restore(self, repo, rev, delete=None, qupdate=None):
1110 1109 c = repo.changelog.read(rev)
1111 1110 desc = c[4].strip()
1112 1111 lines = desc.splitlines()
1113 1112 i = 0
1114 1113 datastart = None
1115 1114 series = []
1116 1115 applied = []
1117 1116 qpp = None
1118 1117 for i in xrange(0, len(lines)):
1119 1118 if lines[i] == 'Patch Data:':
1120 1119 datastart = i + 1
1121 1120 elif lines[i].startswith('Dirstate:'):
1122 1121 l = lines[i].rstrip()
1123 1122 l = l[10:].split(' ')
1124 1123 qpp = [ hg.bin(x) for x in l ]
1125 1124 elif datastart != None:
1126 1125 l = lines[i].rstrip()
1127 1126 se = statusentry(l)
1128 1127 file_ = se.name
1129 1128 if se.rev:
1130 1129 applied.append(se)
1131 1130 series.append(file_)
1132 1131 if datastart == None:
1133 1132 self.ui.warn("No saved patch data found\n")
1134 1133 return 1
1135 1134 self.ui.warn("restoring status: %s\n" % lines[0])
1136 1135 self.full_series = series
1137 1136 self.applied = applied
1138 1137 self.parse_series()
1139 1138 self.series_dirty = 1
1140 1139 self.applied_dirty = 1
1141 1140 heads = repo.changelog.heads()
1142 1141 if delete:
1143 1142 if rev not in heads:
1144 1143 self.ui.warn("save entry has children, leaving it alone\n")
1145 1144 else:
1146 1145 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1147 1146 pp = repo.dirstate.parents()
1148 1147 if rev in pp:
1149 1148 update = True
1150 1149 else:
1151 1150 update = False
1152 1151 self.strip(repo, rev, update=update, backup='strip')
1153 1152 if qpp:
1154 1153 self.ui.warn("saved queue repository parents: %s %s\n" %
1155 1154 (hg.short(qpp[0]), hg.short(qpp[1])))
1156 1155 if qupdate:
1157 1156 print "queue directory updating"
1158 1157 r = self.qrepo()
1159 1158 if not r:
1160 1159 self.ui.warn("Unable to load queue repository\n")
1161 1160 return 1
1162 1161 hg.clean(r, qpp[0])
1163 1162
1164 1163 def save(self, repo, msg=None):
1165 1164 if len(self.applied) == 0:
1166 1165 self.ui.warn("save: no patches applied, exiting\n")
1167 1166 return 1
1168 1167 if self.issaveline(self.applied[-1]):
1169 1168 self.ui.warn("status is already saved\n")
1170 1169 return 1
1171 1170
1172 1171 ar = [ ':' + x for x in self.full_series ]
1173 1172 if not msg:
1174 1173 msg = "hg patches saved state"
1175 1174 else:
1176 1175 msg = "hg patches: " + msg.rstrip('\r\n')
1177 1176 r = self.qrepo()
1178 1177 if r:
1179 1178 pp = r.dirstate.parents()
1180 1179 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1181 1180 msg += "\n\nPatch Data:\n"
1182 1181 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1183 1182 "\n".join(ar) + '\n' or "")
1184 1183 n = repo.commit(None, text, user=None, force=1)
1185 1184 if not n:
1186 1185 self.ui.warn("repo commit failed\n")
1187 1186 return 1
1188 1187 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1189 1188 self.applied_dirty = 1
1190 1189
1191 1190 def full_series_end(self):
1192 1191 if len(self.applied) > 0:
1193 1192 p = self.applied[-1].name
1194 1193 end = self.find_series(p)
1195 1194 if end == None:
1196 1195 return len(self.full_series)
1197 1196 return end + 1
1198 1197 return 0
1199 1198
1200 1199 def series_end(self, all_patches=False):
1201 1200 end = 0
1202 1201 def next(start):
1203 1202 if all_patches:
1204 1203 return start
1205 1204 i = start
1206 1205 while i < len(self.series):
1207 1206 p, reason = self.pushable(i)
1208 1207 if p:
1209 1208 break
1210 1209 self.explain_pushable(i)
1211 1210 i += 1
1212 1211 return i
1213 1212 if len(self.applied) > 0:
1214 1213 p = self.applied[-1].name
1215 1214 try:
1216 1215 end = self.series.index(p)
1217 1216 except ValueError:
1218 1217 return 0
1219 1218 return next(end + 1)
1220 1219 return next(end)
1221 1220
1222 1221 def qapplied(self, repo, patch=None):
1223 1222 if patch and patch not in self.series:
1224 1223 raise util.Abort(_("patch %s is not in series file") % patch)
1225 1224 if not patch:
1226 1225 end = len(self.applied)
1227 1226 else:
1228 1227 end = self.series.index(patch) + 1
1229 1228 for x in xrange(end):
1230 1229 p = self.appliedname(x)
1231 1230 self.ui.write("%s\n" % p)
1232 1231
1233 1232 def appliedname(self, index):
1234 1233 pname = self.applied[index].name
1235 1234 if not self.ui.verbose:
1236 1235 p = pname
1237 1236 else:
1238 1237 p = str(self.series.index(pname)) + " " + pname
1239 1238 return p
1240 1239
1241 1240 def top(self, repo):
1242 1241 if len(self.applied):
1243 1242 p = self.appliedname(-1)
1244 1243 self.ui.write(p + '\n')
1245 1244 else:
1246 1245 self.ui.write("No patches applied\n")
1247 1246
1248 1247 def next(self, repo):
1249 1248 end = self.series_end()
1250 1249 if end == len(self.series):
1251 1250 self.ui.write("All patches applied\n")
1252 1251 else:
1253 1252 p = self.series[end]
1254 1253 if self.ui.verbose:
1255 1254 self.ui.write("%d " % self.series.index(p))
1256 1255 self.ui.write(p + '\n')
1257 1256
1258 1257 def prev(self, repo):
1259 1258 if len(self.applied) > 1:
1260 1259 p = self.appliedname(-2)
1261 1260 self.ui.write(p + '\n')
1262 1261 elif len(self.applied) == 1:
1263 1262 self.ui.write("Only one patch applied\n")
1264 1263 else:
1265 1264 self.ui.write("No patches applied\n")
1266 1265
1267 1266 def qimport(self, repo, files, patch=None, existing=None, force=None):
1268 1267 if len(files) > 1 and patch:
1269 1268 raise util.Abort(_('option "-n" not valid when importing multiple '
1270 1269 'files'))
1271 1270 i = 0
1272 1271 added = []
1273 1272 for filename in files:
1274 1273 if existing:
1275 1274 if not patch:
1276 1275 patch = filename
1277 1276 if not os.path.isfile(self.join(patch)):
1278 1277 raise util.Abort(_("patch %s does not exist") % patch)
1279 1278 else:
1280 1279 try:
1281 1280 text = file(filename).read()
1282 1281 except IOError:
1283 1282 raise util.Abort(_("unable to read %s") % patch)
1284 1283 if not patch:
1285 1284 patch = os.path.split(filename)[1]
1286 1285 if not force and os.path.exists(self.join(patch)):
1287 1286 raise util.Abort(_('patch "%s" already exists') % patch)
1288 1287 patchf = self.opener(patch, "w")
1289 1288 patchf.write(text)
1290 1289 if patch in self.series:
1291 1290 raise util.Abort(_('patch %s is already in the series file')
1292 1291 % patch)
1293 1292 index = self.full_series_end() + i
1294 1293 self.full_series[index:index] = [patch]
1295 1294 self.parse_series()
1296 1295 self.ui.warn("adding %s to series file\n" % patch)
1297 1296 i += 1
1298 1297 added.append(patch)
1299 1298 patch = None
1300 1299 self.series_dirty = 1
1301 1300 qrepo = self.qrepo()
1302 1301 if qrepo:
1303 1302 qrepo.add(added)
1304 1303
1305 1304 def delete(ui, repo, patch, **opts):
1306 1305 """remove a patch from the series file
1307 1306
1308 1307 The patch must not be applied.
1309 1308 With -f, deletes the patch file as well as the series entry."""
1310 1309 q = repo.mq
1311 1310 q.delete(repo, patch, force=opts.get('force'))
1312 1311 q.save_dirty()
1313 1312 return 0
1314 1313
1315 1314 def applied(ui, repo, patch=None, **opts):
1316 1315 """print the patches already applied"""
1317 1316 repo.mq.qapplied(repo, patch)
1318 1317 return 0
1319 1318
1320 1319 def unapplied(ui, repo, patch=None, **opts):
1321 1320 """print the patches not yet applied"""
1322 1321 for i, p in repo.mq.unapplied(repo, patch):
1323 1322 if ui.verbose:
1324 1323 ui.write("%d " % i)
1325 1324 ui.write("%s\n" % p)
1326 1325
1327 1326 def qimport(ui, repo, *filename, **opts):
1328 1327 """import a patch"""
1329 1328 q = repo.mq
1330 1329 q.qimport(repo, filename, patch=opts['name'],
1331 1330 existing=opts['existing'], force=opts['force'])
1332 1331 q.save_dirty()
1333 1332 return 0
1334 1333
1335 1334 def init(ui, repo, **opts):
1336 1335 """init a new queue repository
1337 1336
1338 1337 The queue repository is unversioned by default. If -c is
1339 1338 specified, qinit will create a separate nested repository
1340 1339 for patches. Use qcommit to commit changes to this queue
1341 1340 repository."""
1342 1341 q = repo.mq
1343 1342 r = q.init(repo, create=opts['create_repo'])
1344 1343 q.save_dirty()
1345 1344 if r:
1346 1345 fp = r.wopener('.hgignore', 'w')
1347 1346 print >> fp, 'syntax: glob'
1348 1347 print >> fp, 'status'
1349 1348 fp.close()
1350 1349 r.wopener('series', 'w').close()
1351 1350 r.add(['.hgignore', 'series'])
1352 1351 return 0
1353 1352
1354 1353 def clone(ui, source, dest=None, **opts):
1355 1354 '''clone main and patch repository at same time
1356 1355
1357 1356 If source is local, destination will have no patches applied. If
1358 1357 source is remote, this command cannot check whether patches are
1359 1358 applied in the source, so it cannot guarantee that patches are not
1360 1359 applied in the destination. If you clone a remote repository, make
1361 1360 sure it has no patches applied before you clone it.
1362 1361
1363 1362 Source patch repository is looked for in <src>/.hg/patches by
1364 1363 default. Use -p <url> to change.
1365 1364 '''
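# illustrative usage (URLs and names here are hypothetical):
#   hg qclone -p http://example.com/hg/proj-patches \
#             http://example.com/hg/proj proj-copy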
1366 1365 commands.setremoteconfig(ui, opts)
1367 1366 if dest is None:
1368 1367 dest = hg.defaultdest(source)
1369 1368 sr = hg.repository(ui, ui.expandpath(source))
1370 1369 qbase, destrev = None, None
1371 1370 if sr.local():
1372 1371 reposetup(ui, sr)
1373 1372 if sr.mq.applied:
1374 1373 qbase = revlog.bin(sr.mq.applied[0].rev)
1375 1374 if not hg.islocal(dest):
1376 1375 destrev = sr.parents(qbase)[0]
1377 1376 ui.note(_('cloning main repo\n'))
1378 1377 sr, dr = hg.clone(ui, sr, dest,
1379 1378 pull=opts['pull'],
1380 1379 rev=destrev,
1381 1380 update=False,
1382 1381 stream=opts['uncompressed'])
1383 1382 ui.note(_('cloning patch repo\n'))
1384 1383 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1385 1384 dr.url() + '/.hg/patches',
1386 1385 pull=opts['pull'],
1387 1386 update=not opts['noupdate'],
1388 1387 stream=opts['uncompressed'])
1389 1388 if dr.local():
1390 1389 if qbase:
1391 1390 ui.note(_('stripping applied patches from destination repo\n'))
1392 1391 reposetup(ui, dr)
1393 1392 dr.mq.strip(dr, qbase, update=False, backup=None)
1394 1393 if not opts['noupdate']:
1395 1394 ui.note(_('updating destination repo\n'))
1396 1395 hg.update(dr, dr.changelog.tip())
1397 1396
1398 1397 def commit(ui, repo, *pats, **opts):
1399 1398 """commit changes in the queue repository"""
1400 1399 q = repo.mq
1401 1400 r = q.qrepo()
1402 1401 if not r: raise util.Abort('no queue repository')
1403 1402 commands.commit(r.ui, r, *pats, **opts)
1404 1403
1405 1404 def series(ui, repo, **opts):
1406 1405 """print the entire series file"""
1407 1406 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1408 1407 return 0
1409 1408
1410 1409 def top(ui, repo, **opts):
1411 1410 """print the name of the current patch"""
1412 1411 repo.mq.top(repo)
1413 1412 return 0
1414 1413
1415 1414 def next(ui, repo, **opts):
1416 1415 """print the name of the next patch"""
1417 1416 repo.mq.next(repo)
1418 1417 return 0
1419 1418
1420 1419 def prev(ui, repo, **opts):
1421 1420 """print the name of the previous patch"""
1422 1421 repo.mq.prev(repo)
1423 1422 return 0
1424 1423
1425 1424 def new(ui, repo, patch, **opts):
1426 1425 """create a new patch
1427 1426
1428 1427 qnew creates a new patch on top of the currently-applied patch
1429 1428 (if any). It will refuse to run if there are any outstanding
1430 1429 changes unless -f is specified, in which case the patch will
1431 1430 be initialised with them.
1432 1431
1433 1432 -m or -l set the patch header as well as the commit message.
1434 1433 If neither is specified, the patch header is empty and the
1435 1434 commit message is 'New patch: PATCH'"""
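# illustrative usage (patch names are hypothetical):
#   hg qnew -m 'fix the frobnicator' fix-frob.patch
#   hg qnew -f local-tweaks.patch    # start the patch from outstanding changes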
1436 1435 q = repo.mq
1437 1436 message = commands.logmessage(opts)
1438 1437 q.new(repo, patch, msg=message, force=opts['force'])
1439 1438 q.save_dirty()
1440 1439 return 0
1441 1440
1442 1441 def refresh(ui, repo, **opts):
1443 1442 """update the current patch"""
1444 1443 q = repo.mq
1445 1444 message = commands.logmessage(opts)
1446 1445 if opts['edit']:
1447 1446 if message:
1448 1447 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1449 1448 patch = q.applied[-1].name
1450 1449 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1451 1450 message = ui.edit('\n'.join(message), user or ui.username())
1452 1451 q.refresh(repo, msg=message, short=opts['short'])
1453 1452 q.save_dirty()
1454 1453 return 0
1455 1454
1456 1455 def diff(ui, repo, *files, **opts):
1457 1456 """diff of the current patch"""
1458 1457 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1459 1458 repo.mq.diff(repo, list(files))
1460 1459 return 0
1461 1460
1462 1461 def fold(ui, repo, *files, **opts):
1463 1462 """fold the named patches into the current patch
1464 1463
1465 1464 Patches must not yet be applied. Each patch will be successively
1466 1465 applied to the current patch in the order given. If all the
1467 1466 patches apply successfully, the current patch will be refreshed
1468 1467 with the new cumulative patch, and the folded patches will
1469 1468 be deleted. With -f/--force, the folded patch files will
1470 1469 be removed afterwards.
1471 1470
1472 1471 The header for each folded patch will be concatenated with
1473 1472 the current patch header, separated by a line of '* * *'."""
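# illustrative usage (patch names are hypothetical):
#   hg qfold -e part2.patch part3.patch
# folds both patches into the current top patch and opens an editor on the
# combined header.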
1474 1473
1475 1474 q = repo.mq
1476 1475
1477 1476 if not files:
1478 1477 raise util.Abort(_('qfold requires at least one patch name'))
1479 1478 if not q.check_toppatch(repo):
1480 1479 raise util.Abort(_('No patches applied\n'))
1481 1480
1482 1481 message = commands.logmessage(opts)
1483 1482 if opts['edit']:
1484 1483 if message:
1485 1484 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1486 1485
1487 1486 parent = q.lookup('qtip')
1488 1487 patches = []
1489 1488 messages = []
1490 1489 for f in files:
1491 1490 patch = q.lookup(f)
1492 1491 if patch in patches or patch == parent:
1493 1492 ui.warn(_('Skipping already folded patch %s\n') % patch)
1494 1493 if q.isapplied(patch):
1495 1494 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1496 1495 patches.append(patch)
1497 1496
1498 1497 for patch in patches:
1499 1498 if not message:
1500 1499 messages.append(q.readheaders(patch)[0])
1501 1500 pf = q.join(patch)
1502 1501 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1503 1502 if not patchsuccess:
1504 1503 raise util.Abort(_('Error folding patch %s') % patch)
1505 1504
1506 1505 if not message:
1507 1506 message, comments, user = q.readheaders(parent)[0:3]
1508 1507 for msg in messages:
1509 1508 message.append('* * *')
1510 1509 message.extend(msg)
1511 1510 message = '\n'.join(message)
1512 1511
1513 1512 if opts['edit']:
1514 1513 message = ui.edit(message, user or ui.username())
1515 1514
1516 1515 q.refresh(repo, msg=message)
1517 1516
1518 1517 for patch in patches:
1519 1518 q.delete(repo, patch, force=opts['force'])
1520 1519
1521 1520 q.save_dirty()
1522 1521
1523 1522 def guard(ui, repo, *args, **opts):
1524 1523 '''set or print guards for a patch
1525 1524
1526 1525 guards control whether a patch can be pushed. a patch with no
1527 1526 guards is always pushed. a patch with positive guard ("+foo") is
1528 1527 pushed only if qselect command enables guard "foo". a patch with
1529 1528 negative guard ("-foo") is never pushed if qselect command enables
1530 1529 guard "foo".
1531 1530
1532 1531 with no arguments, default is to print current active guards.
1533 1532 with arguments, set active guards for patch.
1534 1533
1535 1534 to set negative guard "-foo" on topmost patch ("--" is needed so
1536 1535 hg will not interpret "-foo" as argument):
1537 1536 hg qguard -- -foo
1538 1537
1539 1538 to set guards on other patch:
1540 1539 hg qguard other.patch +2.6.17 -stable
1541 1540 '''
1542 1541 def status(idx):
1543 1542 guards = q.series_guards[idx] or ['unguarded']
1544 1543 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1545 1544 q = repo.mq
1546 1545 patch = None
1547 1546 args = list(args)
1548 1547 if opts['list']:
1549 1548 if args or opts['none']:
1550 1549 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1551 1550 for i in xrange(len(q.series)):
1552 1551 status(i)
1553 1552 return
1554 1553 if not args or args[0][0:1] in '-+':
1555 1554 if not q.applied:
1556 1555 raise util.Abort(_('no patches applied'))
1557 1556 patch = q.applied[-1].name
1558 1557 if patch is None and args[0][0:1] not in '-+':
1559 1558 patch = args.pop(0)
1560 1559 if patch is None:
1561 1560 raise util.Abort(_('no patch to work with'))
1562 1561 if args or opts['none']:
1563 1562 q.set_guards(q.find_series(patch), args)
1564 1563 q.save_dirty()
1565 1564 else:
1566 1565 status(q.series.index(q.lookup(patch)))
1567 1566
1568 1567 def header(ui, repo, patch=None):
1569 1568 """Print the header of the topmost or specified patch"""
1570 1569 q = repo.mq
1571 1570
1572 1571 if patch:
1573 1572 patch = q.lookup(patch)
1574 1573 else:
1575 1574 if not q.applied:
1576 1575 ui.write('No patches applied\n')
1577 1576 return
1578 1577 patch = q.lookup('qtip')
1579 1578 message = repo.mq.readheaders(patch)[0]
1580 1579
1581 1580 ui.write('\n'.join(message) + '\n')
1582 1581
1583 1582 def lastsavename(path):
1584 1583 (directory, base) = os.path.split(path)
1585 1584 names = os.listdir(directory)
1586 1585 namere = re.compile("%s.([0-9]+)" % base)
1587 1586 maxindex = None
1588 1587 maxname = None
1589 1588 for f in names:
1590 1589 m = namere.match(f)
1591 1590 if m:
1592 1591 index = int(m.group(1))
1593 1592 if maxindex == None or index > maxindex:
1594 1593 maxindex = index
1595 1594 maxname = f
1596 1595 if maxname:
1597 1596 return (os.path.join(directory, maxname), maxindex)
1598 1597 return (None, None)
1599 1598
1600 1599 def savename(path):
1601 1600 (last, index) = lastsavename(path)
1602 1601 if last is None:
1603 1602 index = 0
1604 1603 newpath = path + ".%d" % (index + 1)
1605 1604 return newpath
1606 1605
1607 1606 def push(ui, repo, patch=None, **opts):
1608 1607 """push the next patch onto the stack"""
1609 1608 q = repo.mq
1610 1609 mergeq = None
1611 1610
1612 1611 if opts['all']:
1613 1612 patch = q.series[-1]
1614 1613 if opts['merge']:
1615 1614 if opts['name']:
1616 1615 newpath = opts['name']
1617 1616 else:
1618 1617 newpath, i = lastsavename(q.path)
1619 1618 if not newpath:
1620 1619 ui.warn("no saved queues found, please use -n\n")
1621 1620 return 1
1622 1621 mergeq = queue(ui, repo.join(""), newpath)
1623 1622 ui.warn("merging with queue at: %s\n" % mergeq.path)
1624 1623 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1625 1624 mergeq=mergeq)
1626 1625 q.save_dirty()
1627 1626 return ret
1628 1627
1629 1628 def pop(ui, repo, patch=None, **opts):
1630 1629 """pop the current patch off the stack"""
1631 1630 localupdate = True
1632 1631 if opts['name']:
1633 1632 q = queue(ui, repo.join(""), repo.join(opts['name']))
1634 1633 ui.warn('using patch queue: %s\n' % q.path)
1635 1634 localupdate = False
1636 1635 else:
1637 1636 q = repo.mq
1638 1637 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1639 1638 q.save_dirty()
1640 1639 return 0
1641 1640
1642 1641 def rename(ui, repo, patch, name=None, **opts):
1643 1642 """rename a patch
1644 1643
1645 1644 With one argument, renames the current patch to PATCH1.
1646 1645 With two arguments, renames PATCH1 to PATCH2."""
1647 1646
1648 1647 q = repo.mq
1649 1648
1650 1649 if not name:
1651 1650 name = patch
1652 1651 patch = None
1653 1652
1654 1653 if name in q.series:
1655 1654 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1656 1655
1657 1656 absdest = q.join(name)
1658 1657 if os.path.exists(absdest):
1659 1658 raise util.Abort(_('%s already exists') % absdest)
1660 1659
1661 1660 if patch:
1662 1661 patch = q.lookup(patch)
1663 1662 else:
1664 1663 if not q.applied:
1665 1664 ui.write(_('No patches applied\n'))
1666 1665 return
1667 1666 patch = q.lookup('qtip')
1668 1667
1669 1668 if ui.verbose:
1670 1669 ui.write('Renaming %s to %s\n' % (patch, name))
1671 1670 i = q.find_series(patch)
1672 1671 q.full_series[i] = name
1673 1672 q.parse_series()
1674 1673 q.series_dirty = 1
1675 1674
1676 1675 info = q.isapplied(patch)
1677 1676 if info:
1678 1677 q.applied[info[0]] = statusentry(info[1], name)
1679 1678 q.applied_dirty = 1
1680 1679
1681 1680 util.rename(q.join(patch), absdest)
1682 1681 r = q.qrepo()
1683 1682 if r:
1684 1683 wlock = r.wlock()
1685 1684 if r.dirstate.state(name) == 'r':
1686 1685 r.undelete([name], wlock)
1687 1686 r.copy(patch, name, wlock)
1688 1687 r.remove([patch], False, wlock)
1689 1688
1690 1689 q.save_dirty()
1691 1690
1692 1691 def restore(ui, repo, rev, **opts):
1693 1692 """restore the queue state saved by a rev"""
1694 1693 rev = repo.lookup(rev)
1695 1694 q = repo.mq
1696 1695 q.restore(repo, rev, delete=opts['delete'],
1697 1696 qupdate=opts['update'])
1698 1697 q.save_dirty()
1699 1698 return 0
1700 1699
1701 1700 def save(ui, repo, **opts):
1702 1701 """save current queue state"""
1703 1702 q = repo.mq
1704 1703 message = commands.logmessage(opts)
1705 1704 ret = q.save(repo, msg=message)
1706 1705 if ret:
1707 1706 return ret
1708 1707 q.save_dirty()
1709 1708 if opts['copy']:
1710 1709 path = q.path
1711 1710 if opts['name']:
1712 1711 newpath = os.path.join(q.basepath, opts['name'])
1713 1712 if os.path.exists(newpath):
1714 1713 if not os.path.isdir(newpath):
1715 1714 raise util.Abort(_('destination %s exists and is not '
1716 1715 'a directory') % newpath)
1717 1716 if not opts['force']:
1718 1717 raise util.Abort(_('destination %s exists, '
1719 1718 'use -f to force') % newpath)
1720 1719 else:
1721 1720 newpath = savename(path)
1722 1721 ui.warn("copy %s to %s\n" % (path, newpath))
1723 1722 util.copyfiles(path, newpath)
1724 1723 if opts['empty']:
1725 1724 try:
1726 1725 os.unlink(q.join(q.status_path))
1727 1726 except:
1728 1727 pass
1729 1728 return 0
1730 1729
1731 1730 def strip(ui, repo, rev, **opts):
1732 1731 """strip a revision and all later revs on the same branch"""
1733 1732 rev = repo.lookup(rev)
1734 1733 backup = 'all'
1735 1734 if opts['backup']:
1736 1735 backup = 'strip'
1737 1736 elif opts['nobackup']:
1738 1737 backup = 'none'
1739 1738 repo.mq.strip(repo, rev, backup=backup)
1740 1739 return 0
1741 1740
1742 1741 def select(ui, repo, *args, **opts):
1743 1742 '''set or print guarded patches to push
1744 1743
1745 1744 use qguard command to set or print guards on patch. then use
1746 1745 qselect to tell mq which guards to use. example:
1747 1746
1748 1747 qguard foo.patch -stable (negative guard)
1749 1748 qguard bar.patch +stable (positive guard)
1750 1749 qselect stable
1751 1750
1752 1751 this sets "stable" guard. mq will skip foo.patch (because it has
1753 1752 negative match) but push bar.patch (because it has positive
1754 1753 match). patch is pushed if any positive guards match and no
1755 1754 negative guards match.
1756 1755
1757 1756 with no arguments, default is to print current active guards.
1758 1757 with arguments, set active guards as given.
1759 1758
1760 1759 use -n/--none to deactivate guards (no other arguments needed).
1761 1760 when no guards are active, patches with positive guards are skipped,
1762 1761 patches with negative guards are pushed.
1763 1762
1764 1763 qselect can change guards of applied patches. it does not pop
1765 1764 guarded patches by default. use --pop to pop back to last applied
1766 1765 patch that is not guarded. use --reapply (implies --pop) to push
1767 1766 back to current patch afterwards, but skip guarded patches.
1768 1767
1769 1768 use -s/--series to print list of all guards in series file (no
1770 1769 other arguments needed). use -v for more information.'''
1771 1770
1772 1771 q = repo.mq
1773 1772 guards = q.active()
1774 1773 if args or opts['none']:
1775 1774 old_unapplied = q.unapplied(repo)
1776 1775 old_guarded = [i for i in xrange(len(q.applied)) if
1777 1776 not q.pushable(i)[0]]
1778 1777 q.set_active(args)
1779 1778 q.save_dirty()
1780 1779 if not args:
1781 1780 ui.status(_('guards deactivated\n'))
1782 1781 if not opts['pop'] and not opts['reapply']:
1783 1782 unapplied = q.unapplied(repo)
1784 1783 guarded = [i for i in xrange(len(q.applied))
1785 1784 if not q.pushable(i)[0]]
1786 1785 if len(unapplied) != len(old_unapplied):
1787 1786 ui.status(_('number of unguarded, unapplied patches has '
1788 1787 'changed from %d to %d\n') %
1789 1788 (len(old_unapplied), len(unapplied)))
1790 1789 if len(guarded) != len(old_guarded):
1791 1790 ui.status(_('number of guarded, applied patches has changed '
1792 1791 'from %d to %d\n') %
1793 1792 (len(old_guarded), len(guarded)))
1794 1793 elif opts['series']:
1795 1794 guards = {}
1796 1795 noguards = 0
1797 1796 for gs in q.series_guards:
1798 1797 if not gs:
1799 1798 noguards += 1
1800 1799 for g in gs:
1801 1800 guards.setdefault(g, 0)
1802 1801 guards[g] += 1
1803 1802 if ui.verbose:
1804 1803 guards['NONE'] = noguards
1805 1804 guards = guards.items()
1806 1805 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1807 1806 if guards:
1808 1807 ui.note(_('guards in series file:\n'))
1809 1808 for guard, count in guards:
1810 1809 ui.note('%2d ' % count)
1811 1810 ui.write(guard, '\n')
1812 1811 else:
1813 1812 ui.note(_('no guards in series file\n'))
1814 1813 else:
1815 1814 if guards:
1816 1815 ui.note(_('active guards:\n'))
1817 1816 for g in guards:
1818 1817 ui.write(g, '\n')
1819 1818 else:
1820 1819 ui.write(_('no active guards\n'))
1821 1820 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1822 1821 popped = False
1823 1822 if opts['pop'] or opts['reapply']:
1824 1823 for i in xrange(len(q.applied)):
1825 1824 pushable, reason = q.pushable(i)
1826 1825 if not pushable:
1827 1826 ui.status(_('popping guarded patches\n'))
1828 1827 popped = True
1829 1828 if i == 0:
1830 1829 q.pop(repo, all=True)
1831 1830 else:
1832 1831 q.pop(repo, i-1)
1833 1832 break
1834 1833 if popped:
1835 1834 try:
1836 1835 if reapply:
1837 1836 ui.status(_('reapplying unguarded patches\n'))
1838 1837 q.push(repo, reapply)
1839 1838 finally:
1840 1839 q.save_dirty()
1841 1840
1842 1841 def reposetup(ui, repo):
1843 1842 class mqrepo(repo.__class__):
1844 1843 def abort_if_wdir_patched(self, errmsg, force=False):
1845 1844 if self.mq.applied and not force:
1846 1845 parent = revlog.hex(self.dirstate.parents()[0])
1847 1846 if parent in [s.rev for s in self.mq.applied]:
1848 1847 raise util.Abort(errmsg)
1849 1848
1850 1849 def commit(self, *args, **opts):
1851 1850 if len(args) >= 6:
1852 1851 force = args[5]
1853 1852 else:
1854 1853 force = opts.get('force')
1855 1854 self.abort_if_wdir_patched(
1856 1855 _('cannot commit over an applied mq patch'),
1857 1856 force)
1858 1857
1859 1858 return super(mqrepo, self).commit(*args, **opts)
1860 1859
1861 1860 def push(self, remote, force=False, revs=None):
1862 1861 if self.mq.applied and not force:
1863 1862 raise util.Abort(_('source has mq patches applied'))
1864 1863 return super(mqrepo, self).push(remote, force, revs)
1865 1864
1866 1865 def tags(self):
1867 1866 if self.tagscache:
1868 1867 return self.tagscache
1869 1868
1870 1869 tagscache = super(mqrepo, self).tags()
1871 1870
1872 1871 q = self.mq
1873 1872 if not q.applied:
1874 1873 return tagscache
1875 1874
1876 1875 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1877 1876 mqtags.append((mqtags[-1][0], 'qtip'))
1878 1877 mqtags.append((mqtags[0][0], 'qbase'))
1879 1878 for patch in mqtags:
1880 1879 if patch[1] in tagscache:
1881 1880 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1882 1881 else:
1883 1882 tagscache[patch[1]] = revlog.bin(patch[0])
1884 1883
1885 1884 return tagscache
1886 1885
1887 1886 if repo.local():
1888 1887 repo.__class__ = mqrepo
1889 1888 repo.mq = queue(ui, repo.join(""))
1890 1889
1891 1890 cmdtable = {
1892 1891 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1893 1892 "qclone": (clone,
1894 1893 [('', 'pull', None, _('use pull protocol to copy metadata')),
1895 1894 ('U', 'noupdate', None, _('do not update the new working directories')),
1896 1895 ('', 'uncompressed', None,
1897 1896 _('use uncompressed transfer (fast over LAN)')),
1898 1897 ('e', 'ssh', '', _('specify ssh command to use')),
1899 1898 ('p', 'patches', '', _('location of source patch repo')),
1900 1899 ('', 'remotecmd', '',
1901 1900 _('specify hg command to run on the remote side'))],
1902 1901 'hg qclone [OPTION]... SOURCE [DEST]'),
1903 1902 "qcommit|qci":
1904 1903 (commit,
1905 1904 commands.table["^commit|ci"][1],
1906 1905 'hg qcommit [OPTION]... [FILE]...'),
1907 1906 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1908 1907 "qdelete":
1909 1908 (delete,
1910 1909 [('f', 'force', None, _('delete patch file'))],
1911 1910 'hg qdelete [-f] PATCH'),
1912 1911 'qfold':
1913 1912 (fold,
1914 1913 [('e', 'edit', None, _('edit patch header')),
1915 1914 ('f', 'force', None, _('delete folded patch files')),
1916 1915 ('m', 'message', '', _('set patch header to <text>')),
1917 1916 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1918 1917 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1919 1918 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1920 1919 ('n', 'none', None, _('drop all guards'))],
1921 1920 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1922 1921 'qheader': (header, [],
1923 1922 _('hg qheader [PATCH]')),
1924 1923 "^qimport":
1925 1924 (qimport,
1926 1925 [('e', 'existing', None, 'import file in patch dir'),
1927 1926 ('n', 'name', '', 'patch file name'),
1928 1927 ('f', 'force', None, 'overwrite existing files')],
1929 1928 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1930 1929 "^qinit":
1931 1930 (init,
1932 1931 [('c', 'create-repo', None, 'create queue repository')],
1933 1932 'hg qinit [-c]'),
1934 1933 "qnew":
1935 1934 (new,
1936 1935 [('m', 'message', '', _('use <text> as commit message')),
1937 1936 ('l', 'logfile', '', _('read the commit message from <file>')),
1938 1937 ('f', 'force', None, _('import uncommitted changes into patch'))],
1939 1938 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1940 1939 "qnext": (next, [], 'hg qnext'),
1941 1940 "qprev": (prev, [], 'hg qprev'),
1942 1941 "^qpop":
1943 1942 (pop,
1944 1943 [('a', 'all', None, 'pop all patches'),
1945 1944 ('n', 'name', '', 'queue name to pop'),
1946 1945 ('f', 'force', None, 'forget any local changes')],
1947 1946 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1948 1947 "^qpush":
1949 1948 (push,
1950 1949 [('f', 'force', None, 'apply if the patch has rejects'),
1951 1950 ('l', 'list', None, 'list patch name in commit text'),
1952 1951 ('a', 'all', None, 'apply all patches'),
1953 1952 ('m', 'merge', None, 'merge from another queue'),
1954 1953 ('n', 'name', '', 'merge queue name')],
1955 1954 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1956 1955 "^qrefresh":
1957 1956 (refresh,
1958 1957 [('e', 'edit', None, _('edit commit message')),
1959 1958 ('m', 'message', '', _('change commit message with <text>')),
1960 1959 ('l', 'logfile', '', _('change commit message with <file> content')),
1961 1960 ('s', 'short', None, 'short refresh')],
1962 1961 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1963 1962 'qrename|qmv':
1964 1963 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1965 1964 "qrestore":
1966 1965 (restore,
1967 1966 [('d', 'delete', None, 'delete save entry'),
1968 1967 ('u', 'update', None, 'update queue working dir')],
1969 1968 'hg qrestore [-d] [-u] REV'),
1970 1969 "qsave":
1971 1970 (save,
1972 1971 [('m', 'message', '', _('use <text> as commit message')),
1973 1972 ('l', 'logfile', '', _('read the commit message from <file>')),
1974 1973 ('c', 'copy', None, 'copy patch directory'),
1975 1974 ('n', 'name', '', 'copy directory name'),
1976 1975 ('e', 'empty', None, 'clear queue status file'),
1977 1976 ('f', 'force', None, 'force copy')],
1978 1977 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1979 1978 "qselect": (select,
1980 1979 [('n', 'none', None, _('disable all guards')),
1981 1980 ('s', 'series', None, _('list all guards in series file')),
1982 1981 ('', 'pop', None,
1983 1982 _('pop to before first guarded applied patch')),
1984 1983 ('', 'reapply', None, _('pop, then reapply patches'))],
1985 1984 'hg qselect [OPTION...] [GUARD...]'),
1986 1985 "qseries":
1987 1986 (series,
1988 1987 [('m', 'missing', None, 'print patches not in series'),
1989 1988 ('s', 'summary', None, _('print first line of patch header'))],
1990 1989 'hg qseries [-m] [-s]'),
1991 1990 "^strip":
1992 1991 (strip,
1993 1992 [('f', 'force', None, 'force multi-head removal'),
1994 1993 ('b', 'backup', None, 'bundle unrelated changesets'),
1995 1994 ('n', 'nobackup', None, 'no backups')],
1996 1995 'hg strip [-f] [-b] [-n] REV'),
1997 1996 "qtop": (top, [], 'hg qtop'),
1998 1997 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1999 1998 }
@@ -1,3525 +1,3524 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival cStringIO changegroup")
16 16 demandload(globals(), "cmdutil hgweb.server sshserver")
17 17
18 18 class UnknownCommand(Exception):
19 19 """Exception raised if command is not in the command table."""
20 20 class AmbiguousCommand(Exception):
21 21 """Exception raised if command shortcut matches more than one command."""
22 22
23 23 def bail_if_changed(repo):
24 modified, added, removed, deleted, unknown = repo.changes()
24 modified, added, removed, deleted = repo.status()[:4]
25 25 if modified or added or removed or deleted:
26 26 raise util.Abort(_("outstanding uncommitted changes"))
27 27
28 28 def relpath(repo, args):
29 29 cwd = repo.getcwd()
30 30 if cwd:
31 31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 32 return args
33 33
34 34 def logmessage(opts):
35 35 """ get the log message according to -m and -l option """
36 36 message = opts['message']
37 37 logfile = opts['logfile']
38 38
39 39 if message and logfile:
40 40 raise util.Abort(_('options --message and --logfile are mutually '
41 41 'exclusive'))
42 42 if not message and logfile:
43 43 try:
44 44 if logfile == '-':
45 45 message = sys.stdin.read()
46 46 else:
47 47 message = open(logfile).read()
48 48 except IOError, inst:
49 49 raise util.Abort(_("can't read commit message '%s': %s") %
50 50 (logfile, inst.strerror))
51 51 return message
52 52
53 53 def matchpats(repo, pats=[], opts={}, head=''):
54 54 cwd = repo.getcwd()
55 55 if not pats and cwd:
56 56 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
57 57 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
58 58 cwd = ''
59 59 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
60 60 opts.get('exclude'), head)
61 61
62 62 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
63 63 files, matchfn, anypats = matchpats(repo, pats, opts, head)
64 64 exact = dict(zip(files, files))
65 65 def walk():
66 66 for src, fn in repo.walk(node=node, files=files, match=matchfn,
67 67 badmatch=badmatch):
68 68 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
69 69 return files, matchfn, walk()
70 70
71 71 def walk(repo, pats, opts, node=None, head='', badmatch=None):
72 72 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
73 73 for r in results:
74 74 yield r
75 75
76 76 def walkchangerevs(ui, repo, pats, opts):
77 77 '''Iterate over files and the revs they changed in.
78 78
79 79 Callers most commonly need to iterate backwards over the history
80 80 they are interested in. Doing so has awful (quadratic-looking)
81 81 performance, so we use iterators in a "windowed" way.
82 82
83 83 We walk a window of revisions in the desired order. Within the
84 84 window, we first walk forwards to gather data, then in the desired
85 85 order (usually backwards) to display it.
86 86
87 87 This function returns an (iterator, getchange, matchfn) tuple. The
88 88 getchange function returns the changelog entry for a numeric
89 89 revision. The iterator yields 3-tuples. They will be of one of
90 90 the following forms:
91 91
92 92 "window", incrementing, lastrev: stepping through a window,
93 93 positive if walking forwards through revs, last rev in the
94 94 sequence iterated over - use to reset state for the current window
95 95
96 96 "add", rev, fns: out-of-order traversal of the given file names
97 97 fns, which changed during revision rev - use to gather data for
98 98 possible display
99 99
100 100 "iter", rev, None: in-order traversal of the revs earlier iterated
101 101 over with "add" - use to display data'''
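# illustrative sketch of a typical caller (names are hypothetical):
#   chgiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
#   for st, rev, data in chgiter:
#       if st == 'window':
#           pass    # reset per-window display state
#       elif st == 'add':
#           pass    # gather data for rev from the file names in data
#       elif st == 'iter':
#           pass    # display the data previously gathered for rev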
102 102
103 103 def increasing_windows(start, end, windowsize=8, sizelimit=512):
104 104 if start < end:
105 105 while start < end:
106 106 yield start, min(windowsize, end-start)
107 107 start += windowsize
108 108 if windowsize < sizelimit:
109 109 windowsize *= 2
110 110 else:
111 111 while start > end:
112 112 yield start, min(windowsize, start-end-1)
113 113 start -= windowsize
114 114 if windowsize < sizelimit:
115 115 windowsize *= 2
116 116
117 117
118 118 files, matchfn, anypats = matchpats(repo, pats, opts)
119 119 follow = opts.get('follow') or opts.get('follow_first')
120 120
121 121 if repo.changelog.count() == 0:
122 122 return [], False, matchfn
123 123
124 124 if follow:
125 125 p = repo.dirstate.parents()[0]
126 126 if p == nullid:
127 127 ui.warn(_('No working directory revision; defaulting to tip\n'))
128 128 start = 'tip'
129 129 else:
130 130 start = repo.changelog.rev(p)
131 131 defrange = '%s:0' % start
132 132 else:
133 133 defrange = 'tip:0'
134 134 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
135 135 wanted = {}
136 136 slowpath = anypats
137 137 fncache = {}
138 138
139 139 chcache = {}
140 140 def getchange(rev):
141 141 ch = chcache.get(rev)
142 142 if ch is None:
143 143 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
144 144 return ch
145 145
146 146 if not slowpath and not files:
147 147 # No files, no patterns. Display all revs.
148 148 wanted = dict(zip(revs, revs))
149 149 copies = []
150 150 if not slowpath:
151 151 # Only files, no patterns. Check the history of each file.
152 152 def filerevgen(filelog, node):
153 153 cl_count = repo.changelog.count()
154 154 if node is None:
155 155 last = filelog.count() - 1
156 156 else:
157 157 last = filelog.rev(node)
158 158 for i, window in increasing_windows(last, -1):
159 159 revs = []
160 160 for j in xrange(i - window, i + 1):
161 161 n = filelog.node(j)
162 162 revs.append((filelog.linkrev(n),
163 163 follow and filelog.renamed(n)))
164 164 revs.reverse()
165 165 for rev in revs:
166 166 # only yield revs for which we have the changelog; this can
167 167 # happen while doing "hg log" during a pull or commit
168 168 if rev[0] < cl_count:
169 169 yield rev
170 170 def iterfiles():
171 171 for filename in files:
172 172 yield filename, None
173 173 for filename_node in copies:
174 174 yield filename_node
175 175 minrev, maxrev = min(revs), max(revs)
176 176 for file_, node in iterfiles():
177 177 filelog = repo.file(file_)
178 178 # A zero count may be a directory or deleted file, so
179 179 # try to find matching entries on the slow path.
180 180 if filelog.count() == 0:
181 181 slowpath = True
182 182 break
183 183 for rev, copied in filerevgen(filelog, node):
184 184 if rev <= maxrev:
185 185 if rev < minrev:
186 186 break
187 187 fncache.setdefault(rev, [])
188 188 fncache[rev].append(file_)
189 189 wanted[rev] = 1
190 190 if follow and copied:
191 191 copies.append(copied)
192 192 if slowpath:
193 193 if follow:
194 194 raise util.Abort(_('can only follow copies/renames for explicit '
195 195 'file names'))
196 196
197 197 # The slow path checks files modified in every changeset.
198 198 def changerevgen():
199 199 for i, window in increasing_windows(repo.changelog.count()-1, -1):
200 200 for j in xrange(i - window, i + 1):
201 201 yield j, getchange(j)[3]
202 202
203 203 for rev, changefiles in changerevgen():
204 204 matches = filter(matchfn, changefiles)
205 205 if matches:
206 206 fncache[rev] = matches
207 207 wanted[rev] = 1
208 208
209 209 def iterate():
210 210 class followfilter:
211 211 def __init__(self, onlyfirst=False):
212 212 self.startrev = -1
213 213 self.roots = []
214 214 self.onlyfirst = onlyfirst
215 215
216 216 def match(self, rev):
217 217 def realparents(rev):
218 218 if self.onlyfirst:
219 219 return repo.changelog.parentrevs(rev)[0:1]
220 220 else:
221 221 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
222 222
223 223 if self.startrev == -1:
224 224 self.startrev = rev
225 225 return True
226 226
227 227 if rev > self.startrev:
228 228 # forward: all descendants
229 229 if not self.roots:
230 230 self.roots.append(self.startrev)
231 231 for parent in realparents(rev):
232 232 if parent in self.roots:
233 233 self.roots.append(rev)
234 234 return True
235 235 else:
236 236 # backwards: all parents
237 237 if not self.roots:
238 238 self.roots.extend(realparents(self.startrev))
239 239 if rev in self.roots:
240 240 self.roots.remove(rev)
241 241 self.roots.extend(realparents(rev))
242 242 return True
243 243
244 244 return False
245 245
246 246 if follow and not files:
247 247 ff = followfilter(onlyfirst=opts.get('follow_first'))
248 248 def want(rev):
249 249 if rev not in wanted:
250 250 return False
251 251 return ff.match(rev)
252 252 else:
253 253 def want(rev):
254 254 return rev in wanted
255 255
256 256 for i, window in increasing_windows(0, len(revs)):
257 257 yield 'window', revs[0] < revs[-1], revs[-1]
258 258 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
259 259 srevs = list(nrevs)
260 260 srevs.sort()
261 261 for rev in srevs:
262 262 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
263 263 yield 'add', rev, fns
264 264 for rev in nrevs:
265 265 yield 'iter', rev, None
266 266 return iterate(), getchange, matchfn
267 267
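For reference, a hedged skeleton of how a caller drives the tuple protocol described in the docstring (the grep command further down follows exactly this shape; ui, repo, pats and opts are assumed to be in scope):

changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
for st, rev, data in changeiter:
    if st == 'window':
        pass    # reset any per-window state here
    elif st == 'add':
        pass    # data is the list of matching files changed in rev;
                # gather whatever will later be needed for display
    elif st == 'iter':
        pass    # revs now arrive in the requested order; display them,
                # e.g. using getchange(rev) for the changelog entry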
268 268 revrangesep = ':'
269 269
270 270 def revfix(repo, val, defval):
271 271 '''turn user-level id of changeset into rev number.
272 272 user-level id can be tag, changeset, rev number, or negative rev
273 273 number relative to number of revs (-1 is tip, etc).'''
274 274 if not val:
275 275 return defval
276 276 try:
277 277 num = int(val)
278 278 if str(num) != val:
279 279 raise ValueError
280 280 if num < 0:
281 281 num += repo.changelog.count()
282 282 if num < 0:
283 283 num = 0
284 284 elif num >= repo.changelog.count():
285 285 raise ValueError
286 286 except ValueError:
287 287 try:
288 288 num = repo.changelog.rev(repo.lookup(val))
289 289 except KeyError:
290 290 raise util.Abort(_('invalid revision identifier %s'), val)
291 291 return num
292 292
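A small self-contained sketch of the numeric branch above, with repo.changelog.count() replaced by a plain integer (the function name is hypothetical):

def revfix_numeric(val, count):
    num = int(val)
    if num < 0:
        num += count       # -1 means tip, -2 the changeset before it, ...
        if num < 0:
            num = 0
    elif num >= count:
        raise ValueError(val)
    return num

assert revfix_numeric('-1', 100) == 99   # tip of a repo with 100 changesets
assert revfix_numeric('0', 100) == 0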
293 293 def revpair(ui, repo, revs):
294 294 '''return pair of nodes, given list of revisions. second item can
295 295 be None, meaning use working dir.'''
296 296 if not revs:
297 297 return repo.dirstate.parents()[0], None
298 298 end = None
299 299 if len(revs) == 1:
300 300 start = revs[0]
301 301 if revrangesep in start:
302 302 start, end = start.split(revrangesep, 1)
303 303 start = revfix(repo, start, 0)
304 304 end = revfix(repo, end, repo.changelog.count() - 1)
305 305 else:
306 306 start = revfix(repo, start, None)
307 307 elif len(revs) == 2:
308 308 if revrangesep in revs[0] or revrangesep in revs[1]:
309 309 raise util.Abort(_('too many revisions specified'))
310 310 start = revfix(repo, revs[0], None)
311 311 end = revfix(repo, revs[1], None)
312 312 else:
313 313 raise util.Abort(_('too many revisions specified'))
314 314 if end is not None: end = repo.lookup(str(end))
315 315 return repo.lookup(str(start)), end
316 316
317 317 def revrange(ui, repo, revs):
318 318 """Yield revision as strings from a list of revision specifications."""
319 319 seen = {}
320 320 for spec in revs:
321 321 if revrangesep in spec:
322 322 start, end = spec.split(revrangesep, 1)
323 323 start = revfix(repo, start, 0)
324 324 end = revfix(repo, end, repo.changelog.count() - 1)
325 325 step = start > end and -1 or 1
326 326 for rev in xrange(start, end+step, step):
327 327 if rev in seen:
328 328 continue
329 329 seen[rev] = 1
330 330 yield str(rev)
331 331 else:
332 332 rev = revfix(repo, spec, None)
333 333 if rev in seen:
334 334 continue
335 335 seen[rev] = 1
336 336 yield str(rev)
337 337
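A sketch of just the range-expansion step, with revfix already applied so start and end are plain ints and no repository is needed (names are illustrative):

def expand(start, end):
    step = start > end and -1 or 1
    return [str(rev) for rev in range(start, end + step, step)]

assert expand(1, 4) == ['1', '2', '3', '4']
assert expand(4, 1) == ['4', '3', '2', '1']   # reversed ranges walk backwards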
338 338 def write_bundle(cg, filename=None, compress=True):
339 339 """Write a bundle file and return its filename.
340 340
341 341 Existing files will not be overwritten.
342 342 If no filename is specified, a temporary file is created.
343 343 bz2 compression can be turned off.
344 344 The bundle file will be deleted in case of errors.
345 345 """
346 346 class nocompress(object):
347 347 def compress(self, x):
348 348 return x
349 349 def flush(self):
350 350 return ""
351 351
352 352 fh = None
353 353 cleanup = None
354 354 try:
355 355 if filename:
356 356 if os.path.exists(filename):
357 357 raise util.Abort(_("file '%s' already exists"), filename)
358 358 fh = open(filename, "wb")
359 359 else:
360 360 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
361 361 fh = os.fdopen(fd, "wb")
362 362 cleanup = filename
363 363
364 364 if compress:
365 365 fh.write("HG10")
366 366 z = bz2.BZ2Compressor(9)
367 367 else:
368 368 fh.write("HG10UN")
369 369 z = nocompress()
370 370 # parse the changegroup data, otherwise we will block
371 371 # in case of sshrepo because we don't know the end of the stream
372 372
373 373 # an empty chunkiter is the end of the changegroup
374 374 empty = False
375 375 while not empty:
376 376 empty = True
377 377 for chunk in changegroup.chunkiter(cg):
378 378 empty = False
379 379 fh.write(z.compress(changegroup.genchunk(chunk)))
380 380 fh.write(z.compress(changegroup.closechunk()))
381 381 fh.write(z.flush())
382 382 cleanup = None
383 383 return filename
384 384 finally:
385 385 if fh is not None:
386 386 fh.close()
387 387 if cleanup is not None:
388 388 os.unlink(cleanup)
389 389
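Bundles written here start with either "HG10" (bz2-compressed payload) or "HG10UN" (uncompressed). A rough sketch for telling them apart when inspecting a file by hand (the function name is made up for illustration):

def bundle_compression(path):
    header = open(path, 'rb').read(6)
    if header.startswith('HG10UN'):   # check the longer marker first
        return 'none'
    if header.startswith('HG10'):
        return 'bz2'
    raise ValueError('not an HG10 bundle: %r' % header)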
390 390 def trimuser(ui, name, rev, revcache):
391 391 """trim the name of the user who committed a change"""
392 392 user = revcache.get(rev)
393 393 if user is None:
394 394 user = revcache[rev] = ui.shortuser(name)
395 395 return user
396 396
397 397 class changeset_printer(object):
398 398 '''show changeset information when templating not requested.'''
399 399
400 400 def __init__(self, ui, repo):
401 401 self.ui = ui
402 402 self.repo = repo
403 403
404 404 def show(self, rev=0, changenode=None, brinfo=None):
405 405 '''show a single changeset or file revision'''
406 406 log = self.repo.changelog
407 407 if changenode is None:
408 408 changenode = log.node(rev)
409 409 elif not rev:
410 410 rev = log.rev(changenode)
411 411
412 412 if self.ui.quiet:
413 413 self.ui.write("%d:%s\n" % (rev, short(changenode)))
414 414 return
415 415
416 416 changes = log.read(changenode)
417 417 date = util.datestr(changes[2])
418 418
419 419 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
420 420 for p in log.parents(changenode)
421 421 if self.ui.debugflag or p != nullid]
422 422 if (not self.ui.debugflag and len(parents) == 1 and
423 423 parents[0][0] == rev-1):
424 424 parents = []
425 425
426 426 if self.ui.verbose:
427 427 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
428 428 else:
429 429 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
430 430
431 431 for tag in self.repo.nodetags(changenode):
432 432 self.ui.status(_("tag: %s\n") % tag)
433 433 for parent in parents:
434 434 self.ui.write(_("parent: %d:%s\n") % parent)
435 435
436 436 if brinfo and changenode in brinfo:
437 437 br = brinfo[changenode]
438 438 self.ui.write(_("branch: %s\n") % " ".join(br))
439 439
440 440 self.ui.debug(_("manifest: %d:%s\n") %
441 441 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
442 442 self.ui.status(_("user: %s\n") % changes[1])
443 443 self.ui.status(_("date: %s\n") % date)
444 444
445 445 if self.ui.debugflag:
446 files = self.repo.changes(log.parents(changenode)[0], changenode)
446 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
447 447 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
448 448 files):
449 449 if value:
450 450 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
451 451 else:
452 452 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
453 453
454 454 description = changes[4].strip()
455 455 if description:
456 456 if self.ui.verbose:
457 457 self.ui.status(_("description:\n"))
458 458 self.ui.status(description)
459 459 self.ui.status("\n\n")
460 460 else:
461 461 self.ui.status(_("summary: %s\n") %
462 462 description.splitlines()[0])
463 463 self.ui.status("\n")
464 464
465 465 def show_changeset(ui, repo, opts):
466 466 '''show one changeset. uses template or regular display. caller
467 467 can pass in 'style' and 'template' options in opts.'''
468 468
469 469 tmpl = opts.get('template')
470 470 if tmpl:
471 471 tmpl = templater.parsestring(tmpl, quoted=False)
472 472 else:
473 473 tmpl = ui.config('ui', 'logtemplate')
474 474 if tmpl: tmpl = templater.parsestring(tmpl)
475 475 mapfile = opts.get('style') or ui.config('ui', 'style')
476 476 if tmpl or mapfile:
477 477 if mapfile:
478 478 if not os.path.isfile(mapfile):
479 479 mapname = templater.templatepath('map-cmdline.' + mapfile)
480 480 if not mapname: mapname = templater.templatepath(mapfile)
481 481 if mapname: mapfile = mapname
482 482 try:
483 483 t = templater.changeset_templater(ui, repo, mapfile)
484 484 except SyntaxError, inst:
485 485 raise util.Abort(inst.args[0])
486 486 if tmpl: t.use_template(tmpl)
487 487 return t
488 488 return changeset_printer(ui, repo)
489 489
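As an illustration of the lookup order above, either of these hgrc settings would be picked up (paths and values are placeholders, not recommendations):

[ui]
# a template string used directly ...
logtemplate = <your template here>
# ... or a map file; a bare name is also tried as "map-cmdline.<name>"
# on the template search path
style = /path/to/your/map-file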
490 490 def setremoteconfig(ui, opts):
491 491 "copy remote options to ui tree"
492 492 if opts.get('ssh'):
493 493 ui.setconfig("ui", "ssh", opts['ssh'])
494 494 if opts.get('remotecmd'):
495 495 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
496 496
497 497 def show_version(ui):
498 498 """output version and copyright information"""
499 499 ui.write(_("Mercurial Distributed SCM (version %s)\n")
500 500 % version.get_version())
501 501 ui.status(_(
502 502 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
503 503 "This is free software; see the source for copying conditions. "
504 504 "There is NO\nwarranty; "
505 505 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
506 506 ))
507 507
508 508 def help_(ui, name=None, with_version=False):
509 509 """show help for a command, extension, or list of commands
510 510
511 511 With no arguments, print a list of commands and short help.
512 512
513 513 Given a command name, print help for that command.
514 514
515 515 Given an extension name, print help for that extension, and the
516 516 commands it provides."""
517 517 option_lists = []
518 518
519 519 def helpcmd(name):
520 520 if with_version:
521 521 show_version(ui)
522 522 ui.write('\n')
523 523 aliases, i = findcmd(name)
524 524 # synopsis
525 525 ui.write("%s\n\n" % i[2])
526 526
527 527 # description
528 528 doc = i[0].__doc__
529 529 if not doc:
530 530 doc = _("(No help text available)")
531 531 if ui.quiet:
532 532 doc = doc.splitlines(0)[0]
533 533 ui.write("%s\n" % doc.rstrip())
534 534
535 535 if not ui.quiet:
536 536 # aliases
537 537 if len(aliases) > 1:
538 538 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
539 539
540 540 # options
541 541 if i[1]:
542 542 option_lists.append(("options", i[1]))
543 543
544 544 def helplist(select=None):
545 545 h = {}
546 546 cmds = {}
547 547 for c, e in table.items():
548 548 f = c.split("|", 1)[0]
549 549 if select and not select(f):
550 550 continue
551 551 if name == "shortlist" and not f.startswith("^"):
552 552 continue
553 553 f = f.lstrip("^")
554 554 if not ui.debugflag and f.startswith("debug"):
555 555 continue
556 556 doc = e[0].__doc__
557 557 if not doc:
558 558 doc = _("(No help text available)")
559 559 h[f] = doc.splitlines(0)[0].rstrip()
560 560 cmds[f] = c.lstrip("^")
561 561
562 562 fns = h.keys()
563 563 fns.sort()
564 564 m = max(map(len, fns))
565 565 for f in fns:
566 566 if ui.verbose:
567 567 commands = cmds[f].replace("|",", ")
568 568 ui.write(" %s:\n %s\n"%(commands, h[f]))
569 569 else:
570 570 ui.write(' %-*s %s\n' % (m, f, h[f]))
571 571
572 572 def helpext(name):
573 573 try:
574 574 mod = findext(name)
575 575 except KeyError:
576 576 raise UnknownCommand(name)
577 577
578 578 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
579 579 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
580 580 for d in doc[1:]:
581 581 ui.write(d, '\n')
582 582
583 583 ui.status('\n')
584 584 if ui.verbose:
585 585 ui.status(_('list of commands:\n\n'))
586 586 else:
587 587 ui.status(_('list of commands (use "hg help -v %s" '
588 588 'to show aliases and global options):\n\n') % name)
589 589
590 590 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
591 591 helplist(modcmds.has_key)
592 592
593 593 if name and name != 'shortlist':
594 594 try:
595 595 helpcmd(name)
596 596 except UnknownCommand:
597 597 helpext(name)
598 598
599 599 else:
600 600 # program name
601 601 if ui.verbose or with_version:
602 602 show_version(ui)
603 603 else:
604 604 ui.status(_("Mercurial Distributed SCM\n"))
605 605 ui.status('\n')
606 606
607 607 # list of commands
608 608 if name == "shortlist":
609 609 ui.status(_('basic commands (use "hg help" '
610 610 'for the full list or option "-v" for details):\n\n'))
611 611 elif ui.verbose:
612 612 ui.status(_('list of commands:\n\n'))
613 613 else:
614 614 ui.status(_('list of commands (use "hg help -v" '
615 615 'to show aliases and global options):\n\n'))
616 616
617 617 helplist()
618 618
619 619 # global options
620 620 if ui.verbose:
621 621 option_lists.append(("global options", globalopts))
622 622
623 623 # list all option lists
624 624 opt_output = []
625 625 for title, options in option_lists:
626 626 opt_output.append(("\n%s:\n" % title, None))
627 627 for shortopt, longopt, default, desc in options:
628 628 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
629 629 longopt and " --%s" % longopt),
630 630 "%s%s" % (desc,
631 631 default
632 632 and _(" (default: %s)") % default
633 633 or "")))
634 634
635 635 if opt_output:
636 636 opts_len = max([len(line[0]) for line in opt_output if line[1]])
637 637 for first, second in opt_output:
638 638 if second:
639 639 ui.write(" %-*s %s\n" % (opts_len, first, second))
640 640 else:
641 641 ui.write("%s\n" % first)
642 642
643 643 # Commands start here, listed alphabetically
644 644
645 645 def add(ui, repo, *pats, **opts):
646 646 """add the specified files on the next commit
647 647
648 648 Schedule files to be version controlled and added to the repository.
649 649
650 650 The files will be added to the repository at the next commit.
651 651
652 652 If no names are given, add all files in the repository.
653 653 """
654 654
655 655 names = []
656 656 for src, abs, rel, exact in walk(repo, pats, opts):
657 657 if exact:
658 658 if ui.verbose:
659 659 ui.status(_('adding %s\n') % rel)
660 660 names.append(abs)
661 661 elif repo.dirstate.state(abs) == '?':
662 662 ui.status(_('adding %s\n') % rel)
663 663 names.append(abs)
664 664 if not opts.get('dry_run'):
665 665 repo.add(names)
666 666
667 667 def addremove(ui, repo, *pats, **opts):
668 668 """add all new files, delete all missing files (DEPRECATED)
669 669
670 670 (DEPRECATED)
671 671 Add all new files and remove all missing files from the repository.
672 672
673 673 New files are ignored if they match any of the patterns in .hgignore. As
674 674 with add, these changes take effect at the next commit.
675 675
676 676 This command is now deprecated and will be removed in a future
677 677 release. Please use add and remove --after instead.
678 678 """
679 679 ui.warn(_('(the addremove command is deprecated; use add and remove '
680 680 '--after instead)\n'))
681 681 return addremove_lock(ui, repo, pats, opts)
682 682
683 683 def addremove_lock(ui, repo, pats, opts, wlock=None):
684 684 add, remove = [], []
685 685 for src, abs, rel, exact in walk(repo, pats, opts):
686 686 if src == 'f' and repo.dirstate.state(abs) == '?':
687 687 add.append(abs)
688 688 if ui.verbose or not exact:
689 689 ui.status(_('adding %s\n') % ((pats and rel) or abs))
690 690 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
691 691 remove.append(abs)
692 692 if ui.verbose or not exact:
693 693 ui.status(_('removing %s\n') % ((pats and rel) or abs))
694 694 if not opts.get('dry_run'):
695 695 repo.add(add, wlock=wlock)
696 696 repo.remove(remove, wlock=wlock)
697 697
698 698 def annotate(ui, repo, *pats, **opts):
699 699 """show changeset information per file line
700 700
701 701 List changes in files, showing the revision id responsible for each line
702 702
703 703 This command is useful for discovering who made a change or when a change
704 704 took place.
705 705
706 706 Without the -a option, annotate will avoid processing files it
707 707 detects as binary. With -a, annotate will generate an annotation
708 708 anyway, probably with undesirable results.
709 709 """
710 710 def getnode(rev):
711 711 return short(repo.changelog.node(rev))
712 712
713 713 ucache = {}
714 714 def getname(rev):
715 715 try:
716 716 return ucache[rev]
717 717 except KeyError:
718 718 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
719 719 ucache[rev] = u
720 720 return u
721 721
722 722 dcache = {}
723 723 def getdate(rev):
724 724 datestr = dcache.get(rev)
725 725 if datestr is None:
726 726 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
727 727 return datestr
728 728
729 729 if not pats:
730 730 raise util.Abort(_('at least one file name or pattern required'))
731 731
732 732 opmap = [['user', getname], ['number', str], ['changeset', getnode],
733 733 ['date', getdate]]
734 734 if not opts['user'] and not opts['changeset'] and not opts['date']:
735 735 opts['number'] = 1
736 736
737 737 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
738 738
739 739 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
740 740 fctx = ctx.filectx(abs)
741 741 if not opts['text'] and util.binary(fctx.data()):
742 742 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
743 743 continue
744 744
745 745 lines = fctx.annotate()
746 746 pieces = []
747 747
748 748 for o, f in opmap:
749 749 if opts[o]:
750 750 l = [f(n) for n, dummy in lines]
751 751 if l:
752 752 m = max(map(len, l))
753 753 pieces.append(["%*s" % (m, x) for x in l])
754 754
755 755 if pieces:
756 756 for p, l in zip(zip(*pieces), lines):
757 757 ui.write("%s: %s" % (" ".join(p), l[1]))
758 758
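A hedged command-line illustration of the option mapping above (the file name is hypothetical); with none of -u/-n/-c/-d given, the code defaults to showing revision numbers only:

$ hg annotate -u -n -d src/module.py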
759 759 def archive(ui, repo, dest, **opts):
760 760 '''create unversioned archive of a repository revision
761 761
762 762 By default, the revision used is the parent of the working
763 763 directory; use "-r" to specify a different revision.
764 764
765 765 To specify the type of archive to create, use "-t". Valid
766 766 types are:
767 767
768 768 "files" (default): a directory full of files
769 769 "tar": tar archive, uncompressed
770 770 "tbz2": tar archive, compressed using bzip2
771 771 "tgz": tar archive, compressed using gzip
772 772 "uzip": zip archive, uncompressed
773 773 "zip": zip archive, compressed using deflate
774 774
775 775 The exact name of the destination archive or directory is given
776 776 using a format string; see "hg help export" for details.
777 777
778 778 Each member added to an archive file has a directory prefix
779 779 prepended. Use "-p" to specify a format string for the prefix.
780 780 The default is the basename of the archive, with suffixes removed.
781 781 '''
782 782
783 783 if opts['rev']:
784 784 node = repo.lookup(opts['rev'])
785 785 else:
786 786 node, p2 = repo.dirstate.parents()
787 787 if p2 != nullid:
788 788 raise util.Abort(_('uncommitted merge - please provide a '
789 789 'specific revision'))
790 790
791 791 dest = cmdutil.make_filename(repo, dest, node)
792 792 if os.path.realpath(dest) == repo.root:
793 793 raise util.Abort(_('repository root cannot be destination'))
794 794 dummy, matchfn, dummy = matchpats(repo, [], opts)
795 795 kind = opts.get('type') or 'files'
796 796 prefix = opts['prefix']
797 797 if dest == '-':
798 798 if kind == 'files':
799 799 raise util.Abort(_('cannot archive plain files to stdout'))
800 800 dest = sys.stdout
801 801 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
802 802 prefix = cmdutil.make_filename(repo, prefix, node)
803 803 archival.archive(repo, dest, node, kind, not opts['no_decode'],
804 804 matchfn, prefix)
805 805
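For example (revision, prefix and paths are illustrative), this should produce a gzipped tarball whose members are prefixed with the repository basename and short hash:

$ hg archive -t tgz -r 1.0 -p '%b-%h' ../release.tar.gz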
806 806 def backout(ui, repo, rev, **opts):
807 807 '''reverse effect of earlier changeset
808 808
809 809 Commit the backed out changes as a new changeset. The new
810 810 changeset is a child of the backed out changeset.
811 811
812 812 If you back out a changeset other than the tip, a new head is
813 813 created. This head is the parent of the working directory. If
814 814 you back out an old changeset, your working directory will appear
815 815 old after the backout. You should merge the backout changeset
816 816 with another head.
817 817
818 818 The --merge option remembers the parent of the working directory
819 819 before starting the backout, then merges the new head with that
820 820 changeset afterwards. This saves you from doing the merge by
821 821 hand. The result of this merge is not committed, as for a normal
822 822 merge.'''
823 823
824 824 bail_if_changed(repo)
825 825 op1, op2 = repo.dirstate.parents()
826 826 if op2 != nullid:
827 827 raise util.Abort(_('outstanding uncommitted merge'))
828 828 node = repo.lookup(rev)
829 829 p1, p2 = repo.changelog.parents(node)
830 830 if p1 == nullid:
831 831 raise util.Abort(_('cannot back out a change with no parents'))
832 832 if p2 != nullid:
833 833 if not opts['parent']:
834 834 raise util.Abort(_('cannot back out a merge changeset without '
835 835 '--parent'))
836 836 p = repo.lookup(opts['parent'])
837 837 if p not in (p1, p2):
838 838 raise util.Abort(_('%s is not a parent of %s') %
839 839 (short(p), short(node)))
840 840 parent = p
841 841 else:
842 842 if opts['parent']:
843 843 raise util.Abort(_('cannot use --parent on non-merge changeset'))
844 844 parent = p1
845 845 hg.clean(repo, node, show_stats=False)
846 846 revert_opts = opts.copy()
847 847 revert_opts['rev'] = hex(parent)
848 848 revert(ui, repo, **revert_opts)
849 849 commit_opts = opts.copy()
850 850 commit_opts['addremove'] = False
851 851 if not commit_opts['message'] and not commit_opts['logfile']:
852 852 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
853 853 commit_opts['force_editor'] = True
854 854 commit(ui, repo, **commit_opts)
855 855 def nice(node):
856 856 return '%d:%s' % (repo.changelog.rev(node), short(node))
857 857 ui.status(_('changeset %s backs out changeset %s\n') %
858 858 (nice(repo.changelog.tip()), nice(node)))
859 859 if op1 != node:
860 860 if opts['merge']:
861 861 ui.status(_('merging with changeset %s\n') % nice(op1))
862 862 n = _lookup(repo, hex(op1))
863 863 hg.merge(repo, n)
864 864 else:
865 865 ui.status(_('the backout changeset is a new head - '
866 866 'do not forget to merge\n'))
867 867 ui.status(_('(use "backout --merge" '
868 868 'if you want to auto-merge)\n'))
869 869
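An illustrative invocation (the revision number is made up): "hg backout --merge 12" commits a changeset reversing revision 12 and then merges that new head back with the previous working-directory parent, as described above.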
870 870 def bundle(ui, repo, fname, dest=None, **opts):
871 871 """create a changegroup file
872 872
873 873 Generate a compressed changegroup file collecting all changesets
874 874 not found in the other repository.
875 875
876 876 This file can then be transferred using conventional means and
877 877 applied to another repository with the unbundle command. This is
878 878 useful when native push and pull are not available or when
879 879 exporting an entire repository is undesirable. The standard file
880 880 extension is ".hg".
881 881
882 882 Unlike import/export, this exactly preserves all changeset
883 883 contents including permissions, rename data, and revision history.
884 884 """
885 885 dest = ui.expandpath(dest or 'default-push', dest or 'default')
886 886 other = hg.repository(ui, dest)
887 887 o = repo.findoutgoing(other, force=opts['force'])
888 888 cg = repo.changegroup(o, 'bundle')
889 889 write_bundle(cg, fname)
890 890
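A typical round trip, hedged as an illustration (file name and URL are placeholders):

$ hg bundle outgoing.hg http://hg.example.com/main    # changesets missing remotely
$ hg unbundle outgoing.hg                             # applied on the receiving side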
891 891 def cat(ui, repo, file1, *pats, **opts):
892 892 """output the latest or given revisions of files
893 893
894 894 Print the specified files as they were at the given revision.
895 895 If no revision is given then the tip is used.
896 896
897 897 Output may be to a file, in which case the name of the file is
898 898 given using a format string. The formatting rules are the same as
899 899 for the export command, with the following additions:
900 900
901 901 %s basename of file being printed
902 902 %d dirname of file being printed, or '.' if in repo root
903 903 %p root-relative path name of file being printed
904 904 """
905 905 ctx = repo.changectx(opts['rev'] or "-1")
906 906 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
907 907 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
908 908 fp.write(ctx.filectx(abs).data())
909 909
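A hedged illustration of the extra format keys documented above (revision and path are examples):

$ hg cat -r 0 -o '%d/%s.r%R' src/module.py    # writes src/module.py.r0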
910 910 def clone(ui, source, dest=None, **opts):
911 911 """make a copy of an existing repository
912 912
913 913 Create a copy of an existing repository in a new directory.
914 914
915 915 If no destination directory name is specified, it defaults to the
916 916 basename of the source.
917 917
918 918 The location of the source is added to the new repository's
919 919 .hg/hgrc file, as the default to be used for future pulls.
920 920
921 921 For efficiency, hardlinks are used for cloning whenever the source
922 922 and destination are on the same filesystem (note this applies only
923 923 to the repository data, not to the checked out files). Some
924 924 filesystems, such as AFS, implement hardlinking incorrectly, but
925 925 do not report errors. In these cases, use the --pull option to
926 926 avoid hardlinking.
927 927
928 928 You can safely clone repositories and checked out files using full
929 929 hardlinks with
930 930
931 931 $ cp -al REPO REPOCLONE
932 932
933 933 which is the fastest way to clone. However, the operation is not
934 934 atomic (making sure REPO is not modified during the operation is
935 935 up to you) and you have to make sure your editor breaks hardlinks
936 936 (Emacs and most Linux Kernel tools do so).
937 937
938 938 If you use the -r option to clone up to a specific revision, no
939 939 subsequent revisions will be present in the cloned repository.
940 940 This option implies --pull, even on local repositories.
941 941
942 942 See pull for valid source format details.
943 943
944 944 It is possible to specify an ssh:// URL as the destination, but no
945 945 .hg/hgrc will be created on the remote side. Look at the help text
946 946 for the pull command for important details about ssh:// URLs.
947 947 """
948 948 setremoteconfig(ui, opts)
949 949 hg.clone(ui, ui.expandpath(source), dest,
950 950 pull=opts['pull'],
951 951 stream=opts['uncompressed'],
952 952 rev=opts['rev'],
953 953 update=not opts['noupdate'])
954 954
955 955 def commit(ui, repo, *pats, **opts):
956 956 """commit the specified files or all outstanding changes
957 957
958 958 Commit changes to the given files into the repository.
959 959
960 960 If a list of files is omitted, all changes reported by "hg status"
961 961 will be committed.
962 962
963 963 If no commit message is specified, the editor configured in your hgrc
964 964 or in the EDITOR environment variable is started to enter a message.
965 965 """
966 966 message = logmessage(opts)
967 967
968 968 if opts['addremove']:
969 969 addremove_lock(ui, repo, pats, opts)
970 970 fns, match, anypats = matchpats(repo, pats, opts)
971 971 if pats:
972 modified, added, removed, deleted, unknown = (
973 repo.changes(files=fns, match=match))
972 modified, added, removed = repo.status(files=fns, match=match)[:3]
974 973 files = modified + added + removed
975 974 else:
976 975 files = []
977 976 try:
978 977 repo.commit(files, message, opts['user'], opts['date'], match,
979 978 force_editor=opts.get('force_editor'))
980 979 except ValueError, inst:
981 980 raise util.Abort(str(inst))
982 981
983 982 def docopy(ui, repo, pats, opts, wlock):
984 983 # called with the repo lock held
985 984 cwd = repo.getcwd()
986 985 errors = 0
987 986 copied = []
988 987 targets = {}
989 988
990 989 def okaytocopy(abs, rel, exact):
991 990 reasons = {'?': _('is not managed'),
992 991 'a': _('has been marked for add'),
993 992 'r': _('has been marked for remove')}
994 993 state = repo.dirstate.state(abs)
995 994 reason = reasons.get(state)
996 995 if reason:
997 996 if state == 'a':
998 997 origsrc = repo.dirstate.copied(abs)
999 998 if origsrc is not None:
1000 999 return origsrc
1001 1000 if exact:
1002 1001 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1003 1002 else:
1004 1003 return abs
1005 1004
1006 1005 def copy(origsrc, abssrc, relsrc, target, exact):
1007 1006 abstarget = util.canonpath(repo.root, cwd, target)
1008 1007 reltarget = util.pathto(cwd, abstarget)
1009 1008 prevsrc = targets.get(abstarget)
1010 1009 if prevsrc is not None:
1011 1010 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1012 1011 (reltarget, abssrc, prevsrc))
1013 1012 return
1014 1013 if (not opts['after'] and os.path.exists(reltarget) or
1015 1014 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1016 1015 if not opts['force']:
1017 1016 ui.warn(_('%s: not overwriting - file exists\n') %
1018 1017 reltarget)
1019 1018 return
1020 1019 if not opts['after'] and not opts.get('dry_run'):
1021 1020 os.unlink(reltarget)
1022 1021 if opts['after']:
1023 1022 if not os.path.exists(reltarget):
1024 1023 return
1025 1024 else:
1026 1025 targetdir = os.path.dirname(reltarget) or '.'
1027 1026 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1028 1027 os.makedirs(targetdir)
1029 1028 try:
1030 1029 restore = repo.dirstate.state(abstarget) == 'r'
1031 1030 if restore and not opts.get('dry_run'):
1032 1031 repo.undelete([abstarget], wlock)
1033 1032 try:
1034 1033 if not opts.get('dry_run'):
1035 1034 shutil.copyfile(relsrc, reltarget)
1036 1035 shutil.copymode(relsrc, reltarget)
1037 1036 restore = False
1038 1037 finally:
1039 1038 if restore:
1040 1039 repo.remove([abstarget], wlock)
1041 1040 except shutil.Error, inst:
1042 1041 raise util.Abort(str(inst))
1043 1042 except IOError, inst:
1044 1043 if inst.errno == errno.ENOENT:
1045 1044 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1046 1045 else:
1047 1046 ui.warn(_('%s: cannot copy - %s\n') %
1048 1047 (relsrc, inst.strerror))
1049 1048 errors += 1
1050 1049 return
1051 1050 if ui.verbose or not exact:
1052 1051 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1053 1052 targets[abstarget] = abssrc
1054 1053 if abstarget != origsrc and not opts.get('dry_run'):
1055 1054 repo.copy(origsrc, abstarget, wlock)
1056 1055 copied.append((abssrc, relsrc, exact))
1057 1056
1058 1057 def targetpathfn(pat, dest, srcs):
1059 1058 if os.path.isdir(pat):
1060 1059 abspfx = util.canonpath(repo.root, cwd, pat)
1061 1060 if destdirexists:
1062 1061 striplen = len(os.path.split(abspfx)[0])
1063 1062 else:
1064 1063 striplen = len(abspfx)
1065 1064 if striplen:
1066 1065 striplen += len(os.sep)
1067 1066 res = lambda p: os.path.join(dest, p[striplen:])
1068 1067 elif destdirexists:
1069 1068 res = lambda p: os.path.join(dest, os.path.basename(p))
1070 1069 else:
1071 1070 res = lambda p: dest
1072 1071 return res
1073 1072
1074 1073 def targetpathafterfn(pat, dest, srcs):
1075 1074 if util.patkind(pat, None)[0]:
1076 1075 # a mercurial pattern
1077 1076 res = lambda p: os.path.join(dest, os.path.basename(p))
1078 1077 else:
1079 1078 abspfx = util.canonpath(repo.root, cwd, pat)
1080 1079 if len(abspfx) < len(srcs[0][0]):
1081 1080 # A directory. Either the target path contains the last
1082 1081 # component of the source path or it does not.
1083 1082 def evalpath(striplen):
1084 1083 score = 0
1085 1084 for s in srcs:
1086 1085 t = os.path.join(dest, s[0][striplen:])
1087 1086 if os.path.exists(t):
1088 1087 score += 1
1089 1088 return score
1090 1089
1091 1090 striplen = len(abspfx)
1092 1091 if striplen:
1093 1092 striplen += len(os.sep)
1094 1093 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1095 1094 score = evalpath(striplen)
1096 1095 striplen1 = len(os.path.split(abspfx)[0])
1097 1096 if striplen1:
1098 1097 striplen1 += len(os.sep)
1099 1098 if evalpath(striplen1) > score:
1100 1099 striplen = striplen1
1101 1100 res = lambda p: os.path.join(dest, p[striplen:])
1102 1101 else:
1103 1102 # a file
1104 1103 if destdirexists:
1105 1104 res = lambda p: os.path.join(dest, os.path.basename(p))
1106 1105 else:
1107 1106 res = lambda p: dest
1108 1107 return res
1109 1108
1110 1109
1111 1110 pats = list(pats)
1112 1111 if not pats:
1113 1112 raise util.Abort(_('no source or destination specified'))
1114 1113 if len(pats) == 1:
1115 1114 raise util.Abort(_('no destination specified'))
1116 1115 dest = pats.pop()
1117 1116 destdirexists = os.path.isdir(dest)
1118 1117 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1119 1118 raise util.Abort(_('with multiple sources, destination must be an '
1120 1119 'existing directory'))
1121 1120 if opts['after']:
1122 1121 tfn = targetpathafterfn
1123 1122 else:
1124 1123 tfn = targetpathfn
1125 1124 copylist = []
1126 1125 for pat in pats:
1127 1126 srcs = []
1128 1127 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1129 1128 origsrc = okaytocopy(abssrc, relsrc, exact)
1130 1129 if origsrc:
1131 1130 srcs.append((origsrc, abssrc, relsrc, exact))
1132 1131 if not srcs:
1133 1132 continue
1134 1133 copylist.append((tfn(pat, dest, srcs), srcs))
1135 1134 if not copylist:
1136 1135 raise util.Abort(_('no files to copy'))
1137 1136
1138 1137 for targetpath, srcs in copylist:
1139 1138 for origsrc, abssrc, relsrc, exact in srcs:
1140 1139 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1141 1140
1142 1141 if errors:
1143 1142 ui.warn(_('(consider using --after)\n'))
1144 1143 return errors, copied
1145 1144
1146 1145 def copy(ui, repo, *pats, **opts):
1147 1146 """mark files as copied for the next commit
1148 1147
1149 1148 Mark dest as having copies of source files. If dest is a
1150 1149 directory, copies are put in that directory. If dest is a file,
1151 1150 there can only be one source.
1152 1151
1153 1152 By default, this command copies the contents of files as they
1154 1153 stand in the working directory. If invoked with --after, the
1155 1154 operation is recorded, but no copying is performed.
1156 1155
1157 1156 This command takes effect in the next commit.
1158 1157
1159 1158 NOTE: This command should be treated as experimental. While it
1160 1159 should properly record copied files, this information is not yet
1161 1160 fully used by merge, nor fully reported by log.
1162 1161 """
1163 1162 wlock = repo.wlock(0)
1164 1163 errs, copied = docopy(ui, repo, pats, opts, wlock)
1165 1164 return errs
1166 1165
1167 1166 def debugancestor(ui, index, rev1, rev2):
1168 1167 """find the ancestor revision of two revisions in a given index"""
1169 1168 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1170 1169 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1171 1170 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1172 1171
1173 1172 def debugcomplete(ui, cmd='', **opts):
1174 1173 """returns the completion list associated with the given command"""
1175 1174
1176 1175 if opts['options']:
1177 1176 options = []
1178 1177 otables = [globalopts]
1179 1178 if cmd:
1180 1179 aliases, entry = findcmd(cmd)
1181 1180 otables.append(entry[1])
1182 1181 for t in otables:
1183 1182 for o in t:
1184 1183 if o[0]:
1185 1184 options.append('-%s' % o[0])
1186 1185 options.append('--%s' % o[1])
1187 1186 ui.write("%s\n" % "\n".join(options))
1188 1187 return
1189 1188
1190 1189 clist = findpossible(cmd).keys()
1191 1190 clist.sort()
1192 1191 ui.write("%s\n" % "\n".join(clist))
1193 1192
1194 1193 def debugrebuildstate(ui, repo, rev=None):
1195 1194 """rebuild the dirstate as it would look like for the given revision"""
1196 1195 if not rev:
1197 1196 rev = repo.changelog.tip()
1198 1197 else:
1199 1198 rev = repo.lookup(rev)
1200 1199 change = repo.changelog.read(rev)
1201 1200 n = change[0]
1202 1201 files = repo.manifest.read(n)
1203 1202 wlock = repo.wlock()
1204 1203 repo.dirstate.rebuild(rev, files)
1205 1204
1206 1205 def debugcheckstate(ui, repo):
1207 1206 """validate the correctness of the current dirstate"""
1208 1207 parent1, parent2 = repo.dirstate.parents()
1209 1208 repo.dirstate.read()
1210 1209 dc = repo.dirstate.map
1211 1210 keys = dc.keys()
1212 1211 keys.sort()
1213 1212 m1n = repo.changelog.read(parent1)[0]
1214 1213 m2n = repo.changelog.read(parent2)[0]
1215 1214 m1 = repo.manifest.read(m1n)
1216 1215 m2 = repo.manifest.read(m2n)
1217 1216 errors = 0
1218 1217 for f in dc:
1219 1218 state = repo.dirstate.state(f)
1220 1219 if state in "nr" and f not in m1:
1221 1220 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1222 1221 errors += 1
1223 1222 if state in "a" and f in m1:
1224 1223 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1225 1224 errors += 1
1226 1225 if state in "m" and f not in m1 and f not in m2:
1227 1226 ui.warn(_("%s in state %s, but not in either manifest\n") %
1228 1227 (f, state))
1229 1228 errors += 1
1230 1229 for f in m1:
1231 1230 state = repo.dirstate.state(f)
1232 1231 if state not in "nrm":
1233 1232 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1234 1233 errors += 1
1235 1234 if errors:
1236 1235 error = _(".hg/dirstate inconsistent with current parent's manifest")
1237 1236 raise util.Abort(error)
1238 1237
1239 1238 def debugconfig(ui, repo, *values):
1240 1239 """show combined config settings from all hgrc files
1241 1240
1242 1241 With no args, print names and values of all config items.
1243 1242
1244 1243 With one arg of the form section.name, print just the value of
1245 1244 that config item.
1246 1245
1247 1246 With multiple args, print names and values of all config items
1248 1247 with matching section names."""
1249 1248
1250 1249 if values:
1251 1250 if len([v for v in values if '.' in v]) > 1:
1252 1251 raise util.Abort(_('only one config item permitted'))
1253 1252 for section, name, value in ui.walkconfig():
1254 1253 sectname = section + '.' + name
1255 1254 if values:
1256 1255 for v in values:
1257 1256 if v == section:
1258 1257 ui.write('%s=%s\n' % (sectname, value))
1259 1258 elif v == sectname:
1260 1259 ui.write(value, '\n')
1261 1260 else:
1262 1261 ui.write('%s=%s\n' % (sectname, value))
1263 1262
1264 1263 def debugsetparents(ui, repo, rev1, rev2=None):
1265 1264 """manually set the parents of the current working directory
1266 1265
1267 1266 This is useful for writing repository conversion tools, but should
1268 1267 be used with care.
1269 1268 """
1270 1269
1271 1270 if not rev2:
1272 1271 rev2 = hex(nullid)
1273 1272
1274 1273 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1275 1274
1276 1275 def debugstate(ui, repo):
1277 1276 """show the contents of the current dirstate"""
1278 1277 repo.dirstate.read()
1279 1278 dc = repo.dirstate.map
1280 1279 keys = dc.keys()
1281 1280 keys.sort()
1282 1281 for file_ in keys:
1283 1282 ui.write("%c %3o %10d %s %s\n"
1284 1283 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1285 1284 time.strftime("%x %X",
1286 1285 time.localtime(dc[file_][3])), file_))
1287 1286 for f in repo.dirstate.copies:
1288 1287 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1289 1288
1290 1289 def debugdata(ui, file_, rev):
1291 1290 """dump the contents of an data file revision"""
1292 1291 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1293 1292 file_[:-2] + ".i", file_, 0)
1294 1293 try:
1295 1294 ui.write(r.revision(r.lookup(rev)))
1296 1295 except KeyError:
1297 1296 raise util.Abort(_('invalid revision identifier %s'), rev)
1298 1297
1299 1298 def debugindex(ui, file_):
1300 1299 """dump the contents of an index file"""
1301 1300 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1302 1301 ui.write(" rev offset length base linkrev" +
1303 1302 " nodeid p1 p2\n")
1304 1303 for i in range(r.count()):
1305 1304 node = r.node(i)
1306 1305 pp = r.parents(node)
1307 1306 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1308 1307 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1309 1308 short(node), short(pp[0]), short(pp[1])))
1310 1309
1311 1310 def debugindexdot(ui, file_):
1312 1311 """dump an index DAG as a .dot file"""
1313 1312 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1314 1313 ui.write("digraph G {\n")
1315 1314 for i in range(r.count()):
1316 1315 node = r.node(i)
1317 1316 pp = r.parents(node)
1318 1317 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1319 1318 if pp[1] != nullid:
1320 1319 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1321 1320 ui.write("}\n")
1322 1321
1323 1322 def debugrename(ui, repo, file, rev=None):
1324 1323 """dump rename information"""
1325 1324 r = repo.file(relpath(repo, [file])[0])
1326 1325 if rev:
1327 1326 try:
1328 1327 # assume all revision numbers are for changesets
1329 1328 n = repo.lookup(rev)
1330 1329 change = repo.changelog.read(n)
1331 1330 m = repo.manifest.read(change[0])
1332 1331 n = m[relpath(repo, [file])[0]]
1333 1332 except (hg.RepoError, KeyError):
1334 1333 n = r.lookup(rev)
1335 1334 else:
1336 1335 n = r.tip()
1337 1336 m = r.renamed(n)
1338 1337 if m:
1339 1338 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1340 1339 else:
1341 1340 ui.write(_("not renamed\n"))
1342 1341
1343 1342 def debugwalk(ui, repo, *pats, **opts):
1344 1343 """show how files match on given patterns"""
1345 1344 items = list(walk(repo, pats, opts))
1346 1345 if not items:
1347 1346 return
1348 1347 fmt = '%%s %%-%ds %%-%ds %%s' % (
1349 1348 max([len(abs) for (src, abs, rel, exact) in items]),
1350 1349 max([len(rel) for (src, abs, rel, exact) in items]))
1351 1350 for src, abs, rel, exact in items:
1352 1351 line = fmt % (src, abs, rel, exact and 'exact' or '')
1353 1352 ui.write("%s\n" % line.rstrip())
1354 1353
1355 1354 def diff(ui, repo, *pats, **opts):
1356 1355 """diff repository (or selected files)
1357 1356
1358 1357 Show differences between revisions for the specified files.
1359 1358
1360 1359 Differences between files are shown using the unified diff format.
1361 1360
1362 1361 When two revision arguments are given, then changes are shown
1363 1362 between those revisions. If only one revision is specified then
1364 1363 that revision is compared to the working directory, and, when no
1365 1364 revisions are specified, the working directory files are compared
1366 1365 to its parent.
1367 1366
1368 1367 Without the -a option, diff will avoid generating diffs of files
1369 1368 it detects as binary. With -a, diff will generate a diff anyway,
1370 1369 probably with undesirable results.
1371 1370 """
1372 1371 node1, node2 = revpair(ui, repo, opts['rev'])
1373 1372
1374 1373 fns, matchfn, anypats = matchpats(repo, pats, opts)
1375 1374
1376 1375 patch.diff(repo, node1, node2, fns, match=matchfn,
1377 1376 opts=ui.diffopts(opts))
1378 1377
1379 1378 def export(ui, repo, *changesets, **opts):
1380 1379 """dump the header and diffs for one or more changesets
1381 1380
1382 1381 Print the changeset header and diffs for one or more revisions.
1383 1382
1384 1383 The information shown in the changeset header is: author,
1385 1384 changeset hash, parent and commit comment.
1386 1385
1387 1386 Output may be to a file, in which case the name of the file is
1388 1387 given using a format string. The formatting rules are as follows:
1389 1388
1390 1389 %% literal "%" character
1391 1390 %H changeset hash (40 bytes of hexadecimal)
1392 1391 %N number of patches being generated
1393 1392 %R changeset revision number
1394 1393 %b basename of the exporting repository
1395 1394 %h short-form changeset hash (12 bytes of hexadecimal)
1396 1395 %n zero-padded sequence number, starting at 1
1397 1396 %r zero-padded changeset revision number
1398 1397
1399 1398 Without the -a option, export will avoid generating diffs of files
1400 1399 it detects as binary. With -a, export will generate a diff anyway,
1401 1400 probably with undesirable results.
1402 1401
1403 1402 With the --switch-parent option, the diff will be against the second
1404 1403 parent. It can be useful to review a merge.
1405 1404 """
1406 1405 if not changesets:
1407 1406 raise util.Abort(_("export requires at least one changeset"))
1408 1407 revs = list(revrange(ui, repo, changesets))
1409 1408 if len(revs) > 1:
1410 1409 ui.note(_('exporting patches:\n'))
1411 1410 else:
1412 1411 ui.note(_('exporting patch:\n'))
1413 1412 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1414 1413 switch_parent=opts['switch_parent'], opts=ui.diffopts(opts))
1415 1414
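For instance (revision numbers are illustrative):

$ hg export -o 'patch-%R-of-%N.diff' 100 101
# -> patch-100-of-2.diff and patch-101-of-2.diff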
1416 1415 def forget(ui, repo, *pats, **opts):
1417 1416 """don't add the specified files on the next commit (DEPRECATED)
1418 1417
1419 1418 (DEPRECATED)
1420 1419 Undo an 'hg add' scheduled for the next commit.
1421 1420
1422 1421 This command is now deprecated and will be removed in a future
1423 1422 release. Please use revert instead.
1424 1423 """
1425 1424 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1426 1425 forget = []
1427 1426 for src, abs, rel, exact in walk(repo, pats, opts):
1428 1427 if repo.dirstate.state(abs) == 'a':
1429 1428 forget.append(abs)
1430 1429 if ui.verbose or not exact:
1431 1430 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1432 1431 repo.forget(forget)
1433 1432
1434 1433 def grep(ui, repo, pattern, *pats, **opts):
1435 1434 """search for a pattern in specified files and revisions
1436 1435
1437 1436 Search revisions of files for a regular expression.
1438 1437
1439 1438 This command behaves differently than Unix grep. It only accepts
1440 1439 Python/Perl regexps. It searches repository history, not the
1441 1440 working directory. It always prints the revision number in which
1442 1441 a match appears.
1443 1442
1444 1443 By default, grep only prints output for the first revision of a
1445 1444 file in which it finds a match. To get it to print every revision
1446 1445 that contains a change in match status ("-" for a match that
1447 1446 becomes a non-match, or "+" for a non-match that becomes a match),
1448 1447 use the --all flag.
1449 1448 """
1450 1449 reflags = 0
1451 1450 if opts['ignore_case']:
1452 1451 reflags |= re.I
1453 1452 regexp = re.compile(pattern, reflags)
1454 1453 sep, eol = ':', '\n'
1455 1454 if opts['print0']:
1456 1455 sep = eol = '\0'
1457 1456
1458 1457 fcache = {}
1459 1458 def getfile(fn):
1460 1459 if fn not in fcache:
1461 1460 fcache[fn] = repo.file(fn)
1462 1461 return fcache[fn]
1463 1462
1464 1463 def matchlines(body):
1465 1464 begin = 0
1466 1465 linenum = 0
1467 1466 while True:
1468 1467 match = regexp.search(body, begin)
1469 1468 if not match:
1470 1469 break
1471 1470 mstart, mend = match.span()
1472 1471 linenum += body.count('\n', begin, mstart) + 1
1473 1472 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1474 1473 lend = body.find('\n', mend)
1475 1474 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1476 1475 begin = lend + 1
1477 1476
1478 1477 class linestate(object):
1479 1478 def __init__(self, line, linenum, colstart, colend):
1480 1479 self.line = line
1481 1480 self.linenum = linenum
1482 1481 self.colstart = colstart
1483 1482 self.colend = colend
1484 1483
1485 1484 def __eq__(self, other):
1486 1485 return self.line == other.line
1487 1486
1488 1487 matches = {}
1489 1488 copies = {}
1490 1489 def grepbody(fn, rev, body):
1491 1490 matches[rev].setdefault(fn, [])
1492 1491 m = matches[rev][fn]
1493 1492 for lnum, cstart, cend, line in matchlines(body):
1494 1493 s = linestate(line, lnum, cstart, cend)
1495 1494 m.append(s)
1496 1495
1497 1496 def difflinestates(a, b):
1498 1497 sm = difflib.SequenceMatcher(None, a, b)
1499 1498 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1500 1499 if tag == 'insert':
1501 1500 for i in range(blo, bhi):
1502 1501 yield ('+', b[i])
1503 1502 elif tag == 'delete':
1504 1503 for i in range(alo, ahi):
1505 1504 yield ('-', a[i])
1506 1505 elif tag == 'replace':
1507 1506 for i in range(alo, ahi):
1508 1507 yield ('-', a[i])
1509 1508 for i in range(blo, bhi):
1510 1509 yield ('+', b[i])
1511 1510
1512 1511 prev = {}
1513 1512 ucache = {}
1514 1513 def display(fn, rev, states, prevstates):
1515 1514 counts = {'-': 0, '+': 0}
1516 1515 filerevmatches = {}
1517 1516 if incrementing or not opts['all']:
1518 1517 a, b = prevstates, states
1519 1518 else:
1520 1519 a, b = states, prevstates
1521 1520 for change, l in difflinestates(a, b):
1522 1521 if incrementing or not opts['all']:
1523 1522 r = rev
1524 1523 else:
1525 1524 r = prev[fn]
1526 1525 cols = [fn, str(r)]
1527 1526 if opts['line_number']:
1528 1527 cols.append(str(l.linenum))
1529 1528 if opts['all']:
1530 1529 cols.append(change)
1531 1530 if opts['user']:
1532 1531 cols.append(trimuser(ui, getchange(r)[1], rev,
1533 1532 ucache))
1534 1533 if opts['files_with_matches']:
1535 1534 c = (fn, rev)
1536 1535 if c in filerevmatches:
1537 1536 continue
1538 1537 filerevmatches[c] = 1
1539 1538 else:
1540 1539 cols.append(l.line)
1541 1540 ui.write(sep.join(cols), eol)
1542 1541 counts[change] += 1
1543 1542 return counts['+'], counts['-']
1544 1543
1545 1544 fstate = {}
1546 1545 skip = {}
1547 1546 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1548 1547 count = 0
1549 1548 incrementing = False
1550 1549 follow = opts.get('follow')
1551 1550 for st, rev, fns in changeiter:
1552 1551 if st == 'window':
1553 1552 incrementing = rev
1554 1553 matches.clear()
1555 1554 copies.clear()
1556 1555 elif st == 'add':
1557 1556 change = repo.changelog.read(repo.lookup(str(rev)))
1558 1557 mf = repo.manifest.read(change[0])
1559 1558 matches[rev] = {}
1560 1559 for fn in fns:
1561 1560 if fn in skip:
1562 1561 continue
1563 1562 fstate.setdefault(fn, {})
1564 1563 copies.setdefault(rev, {})
1565 1564 try:
1566 1565 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1567 1566 if follow:
1568 1567 copied = getfile(fn).renamed(mf[fn])
1569 1568 if copied:
1570 1569 copies[rev][fn] = copied[0]
1571 1570 except KeyError:
1572 1571 pass
1573 1572 elif st == 'iter':
1574 1573 states = matches[rev].items()
1575 1574 states.sort()
1576 1575 for fn, m in states:
1577 1576 copy = copies[rev].get(fn)
1578 1577 if fn in skip:
1579 1578 if copy:
1580 1579 skip[copy] = True
1581 1580 continue
1582 1581 if incrementing or not opts['all'] or fstate[fn]:
1583 1582 pos, neg = display(fn, rev, m, fstate[fn])
1584 1583 count += pos + neg
1585 1584 if pos and not opts['all']:
1586 1585 skip[fn] = True
1587 1586 if copy:
1588 1587 skip[copy] = True
1589 1588 fstate[fn] = m
1590 1589 if copy:
1591 1590 fstate[copy] = m
1592 1591 prev[fn] = rev
1593 1592
1594 1593 if not incrementing:
1595 1594 fstate = fstate.items()
1596 1595 fstate.sort()
1597 1596 for fn, state in fstate:
1598 1597 if fn in skip:
1599 1598 continue
1600 1599 if fn not in copies[prev[fn]]:
1601 1600 display(fn, rev, {}, state)
1602 1601 return (count == 0 and 1) or 0
1603 1602
1604 1603 def heads(ui, repo, **opts):
1605 1604 """show current repository heads
1606 1605
1607 1606 Show all repository head changesets.
1608 1607
1609 1608 Repository "heads" are changesets that don't have children
1610 1609 changesets. They are where development generally takes place and
1611 1610 are the usual targets for update and merge operations.
1612 1611 """
1613 1612 if opts['rev']:
1614 1613 heads = repo.heads(repo.lookup(opts['rev']))
1615 1614 else:
1616 1615 heads = repo.heads()
1617 1616 br = None
1618 1617 if opts['branches']:
1619 1618 br = repo.branchlookup(heads)
1620 1619 displayer = show_changeset(ui, repo, opts)
1621 1620 for n in heads:
1622 1621 displayer.show(changenode=n, brinfo=br)
1623 1622
1624 1623 def identify(ui, repo):
1625 1624 """print information about the working copy
1626 1625
1627 1626 Print a short summary of the current state of the repo.
1628 1627
1629 1628 This summary identifies the repository state using one or two parent
1630 1629 hash identifiers, followed by a "+" if there are uncommitted changes
1631 1630 in the working directory, followed by a list of tags for this revision.
1632 1631 """
1633 1632 parents = [p for p in repo.dirstate.parents() if p != nullid]
1634 1633 if not parents:
1635 1634 ui.write(_("unknown\n"))
1636 1635 return
1637 1636
1638 1637 hexfunc = ui.verbose and hex or short
1639 modified, added, removed, deleted, unknown = repo.changes()
1638 modified, added, removed, deleted = repo.status()[:4]
1640 1639 output = ["%s%s" %
1641 1640 ('+'.join([hexfunc(parent) for parent in parents]),
1642 1641 (modified or added or removed or deleted) and "+" or "")]
1643 1642
1644 1643 if not ui.quiet:
1645 1644 # multiple tags for a single parent separated by '/'
1646 1645 parenttags = ['/'.join(tags)
1647 1646 for tags in map(repo.nodetags, parents) if tags]
1648 1647 # tags for multiple parents separated by ' + '
1649 1648 if parenttags:
1650 1649 output.append(' + '.join(parenttags))
1651 1650
1652 1651 ui.write("%s\n" % ' '.join(output))
1653 1652
1654 1653 def import_(ui, repo, patch1, *patches, **opts):
1655 1654 """import an ordered set of patches
1656 1655
1657 1656 Import a list of patches and commit them individually.
1658 1657
1659 1658 If there are outstanding changes in the working directory, import
1660 1659 will abort unless given the -f flag.
1661 1660
1662 1661 You can import a patch straight from a mail message. Even patches
1663 1662 as attachments work (body part must be type text/plain or
1664 1663 text/x-patch to be used). The From and Subject headers of the email
1665 1664 message are used as the default committer and commit message. All
1666 1665 text/plain body parts before the first diff are added to the commit
1667 1666 message.
1668 1667
1669 1668 If the imported patch was generated by hg export, the user and description
1670 1669 from the patch override values from the message headers and body. Values
1671 1670 given on the command line with -m and -u override these.
1672 1671
1673 1672 To read a patch from standard input, use patch name "-".
1674 1673 """
1675 1674 patches = (patch1,) + patches
1676 1675
1677 1676 if not opts['force']:
1678 1677 bail_if_changed(repo)
1679 1678
1680 1679 d = opts["base"]
1681 1680 strip = opts["strip"]
1682 1681
1683 1682 wlock = repo.wlock()
1684 1683 lock = repo.lock()
1685 1684
1686 1685 for p in patches:
1687 1686 pf = os.path.join(d, p)
1688 1687
1689 1688 if pf == '-':
1690 1689 ui.status(_("applying patch from stdin\n"))
1691 1690 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1692 1691 else:
1693 1692 ui.status(_("applying %s\n") % p)
1694 1693 tmpname, message, user, date = patch.extract(ui, file(pf))
1695 1694
1696 1695 if tmpname is None:
1697 1696 raise util.Abort(_('no diffs found'))
1698 1697
1699 1698 try:
1700 1699 if opts['message']:
1701 1700 # pickup the cmdline msg
1702 1701 message = opts['message']
1703 1702 elif message:
1704 1703 # pickup the patch msg
1705 1704 message = message.strip()
1706 1705 else:
1707 1706 # launch the editor
1708 1707 message = None
1709 1708 ui.debug(_('message:\n%s\n') % message)
1710 1709
1711 1710 files = patch.patch(strip, tmpname, ui, cwd=repo.root)
1712 1711 removes = []
1713 1712 if len(files) > 0:
1714 1713 cfiles = files.keys()
1715 1714 copies = []
1716 1715 copts = {'after': False, 'force': False}
1717 1716 cwd = repo.getcwd()
1718 1717 if cwd:
1719 1718 cfiles = [util.pathto(cwd, f) for f in files.keys()]
1720 1719 for f in files:
1721 1720 ctype, gp = files[f]
1722 1721 if ctype == 'RENAME':
1723 1722 copies.append((gp.oldpath, gp.path, gp.copymod))
1724 1723 removes.append(gp.oldpath)
1725 1724 elif ctype == 'COPY':
1726 1725 copies.append((gp.oldpath, gp.path, gp.copymod))
1727 1726 elif ctype == 'DELETE':
1728 1727 removes.append(gp.path)
1729 1728 for src, dst, after in copies:
1730 1729 absdst = os.path.join(repo.root, dst)
1731 1730 if not after and os.path.exists(absdst):
1732 1731 raise util.Abort(_('patch creates existing file %s') % dst)
1733 1732 if cwd:
1734 1733 src, dst = [util.pathto(cwd, f) for f in (src, dst)]
1735 1734 copts['after'] = after
1736 1735 errs, copied = docopy(ui, repo, (src, dst), copts, wlock=wlock)
1737 1736 if errs:
1738 1737 raise util.Abort(errs)
1739 1738 if removes:
1740 1739 repo.remove(removes, True, wlock=wlock)
1741 1740 for f in files:
1742 1741 ctype, gp = files[f]
1743 1742 if gp and gp.mode:
1744 1743 x = gp.mode & 0100 != 0
1745 1744 dst = os.path.join(repo.root, gp.path)
1746 1745 util.set_exec(dst, x)
1747 1746 addremove_lock(ui, repo, cfiles, {}, wlock=wlock)
1748 1747 files = files.keys()
1749 1748 files.extend([r for r in removes if r not in files])
1750 1749 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1751 1750 finally:
1752 1751 os.unlink(tmpname)
1753 1752
1754 1753 def incoming(ui, repo, source="default", **opts):
1755 1754 """show new changesets found in source
1756 1755
1757 1756 Show new changesets found in the specified path/URL or the default
1758 1757 pull location. These are the changesets that would be pulled if a pull
1759 1758 was requested.
1760 1759
1761 1760 For a remote repository, using --bundle avoids downloading the changesets

1762 1761 twice if the incoming is followed by a pull.
1763 1762
1764 1763 See pull for valid source format details.
1765 1764 """
1766 1765 source = ui.expandpath(source)
1767 1766 setremoteconfig(ui, opts)
1768 1767
1769 1768 other = hg.repository(ui, source)
1770 1769 incoming = repo.findincoming(other, force=opts["force"])
1771 1770 if not incoming:
1772 1771 ui.status(_("no changes found\n"))
1773 1772 return
1774 1773
1775 1774 cleanup = None
1776 1775 try:
1777 1776 fname = opts["bundle"]
1778 1777 if fname or not other.local():
1779 1778 # create a bundle (uncompressed if other repo is not local)
1780 1779 cg = other.changegroup(incoming, "incoming")
1781 1780 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1782 1781 # keep written bundle?
1783 1782 if opts["bundle"]:
1784 1783 cleanup = None
1785 1784 if not other.local():
1786 1785 # use the created uncompressed bundlerepo
1787 1786 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1788 1787
1789 1788 revs = None
1790 1789 if opts['rev']:
1791 1790 revs = [other.lookup(rev) for rev in opts['rev']]
1792 1791 o = other.changelog.nodesbetween(incoming, revs)[0]
1793 1792 if opts['newest_first']:
1794 1793 o.reverse()
1795 1794 displayer = show_changeset(ui, other, opts)
1796 1795 for n in o:
1797 1796 parents = [p for p in other.changelog.parents(n) if p != nullid]
1798 1797 if opts['no_merges'] and len(parents) == 2:
1799 1798 continue
1800 1799 displayer.show(changenode=n)
1801 1800 if opts['patch']:
1802 1801 prev = (parents and parents[0]) or nullid
1803 1802 patch.diff(repo, other, prev, n)
1804 1803 ui.write("\n")
1805 1804 finally:
1806 1805 if hasattr(other, 'close'):
1807 1806 other.close()
1808 1807 if cleanup:
1809 1808 os.unlink(cleanup)
1810 1809
1811 1810 def init(ui, dest=".", **opts):
1812 1811 """create a new repository in the given directory
1813 1812
1814 1813 Initialize a new repository in the given directory. If the given
1815 1814 directory does not exist, it is created.
1816 1815
1817 1816 If no directory is given, the current directory is used.
1818 1817
1819 1818 It is possible to specify an ssh:// URL as the destination.
1820 1819 Look at the help text for the pull command for important details
1821 1820 about ssh:// URLs.
1822 1821 """
1823 1822 setremoteconfig(ui, opts)
1824 1823 hg.repository(ui, dest, create=1)
1825 1824
1826 1825 def locate(ui, repo, *pats, **opts):
1827 1826 """locate files matching specific patterns
1828 1827
1829 1828 Print all files under Mercurial control whose names match the
1830 1829 given patterns.
1831 1830
1832 1831 This command searches the current directory and its
1833 1832 subdirectories. To search an entire repository, move to the root
1834 1833 of the repository.
1835 1834
1836 1835 If no patterns are given to match, this command prints all file
1837 1836 names.
1838 1837
1839 1838 If you want to feed the output of this command into the "xargs"
1840 1839 command, use the "-0" option to both this command and "xargs".
1841 1840 This will avoid the problem of "xargs" treating single filenames
1842 1841 that contain white space as multiple filenames.
1843 1842 """
1844 1843 end = opts['print0'] and '\0' or '\n'
1845 1844 rev = opts['rev']
1846 1845 if rev:
1847 1846 node = repo.lookup(rev)
1848 1847 else:
1849 1848 node = None
1850 1849
1851 1850 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1852 1851 head='(?:.*/|)'):
1853 1852 if not node and repo.dirstate.state(abs) == '?':
1854 1853 continue
1855 1854 if opts['fullpath']:
1856 1855 ui.write(os.path.join(repo.root, abs), end)
1857 1856 else:
1858 1857 ui.write(((pats and rel) or abs), end)
1859 1858
1860 1859 def log(ui, repo, *pats, **opts):
1861 1860 """show revision history of entire repository or files
1862 1861
1863 1862 Print the revision history of the specified files or the entire
1864 1863 project.
1865 1864
1866 1865 File history is shown without following rename or copy history of
1867 1866 files. Use -f/--follow with a file name to follow history across
1868 1867 renames and copies. --follow without a file name will only show
1869 1868 ancestors or descendants of the starting revision. --follow-first
1870 1869 only follows the first parent of merge revisions.
1871 1870
1872 1871 If no revision range is specified, the default is tip:0 unless
1873 1872 --follow is set, in which case the working directory parent is
1874 1873 used as the starting revision.
1875 1874
1876 1875 By default this command outputs: changeset id and hash, tags,
1877 1876 non-trivial parents, user, date and time, and a summary for each
1878 1877 commit. When the -v/--verbose switch is used, the list of changed
1879 1878 files and the full commit message are shown.
1880 1879 """
1881 1880 class dui(object):
1882 1881 # Implement and delegate some ui protocol. Save hunks of
1883 1882 # output for later display in the desired order.
1884 1883 def __init__(self, ui):
1885 1884 self.ui = ui
1886 1885 self.hunk = {}
1887 1886 self.header = {}
1888 1887 def bump(self, rev):
1889 1888 self.rev = rev
1890 1889 self.hunk[rev] = []
1891 1890 self.header[rev] = []
1892 1891 def note(self, *args):
1893 1892 if self.verbose:
1894 1893 self.write(*args)
1895 1894 def status(self, *args):
1896 1895 if not self.quiet:
1897 1896 self.write(*args)
1898 1897 def write(self, *args):
1899 1898 self.hunk[self.rev].append(args)
1900 1899 def write_header(self, *args):
1901 1900 self.header[self.rev].append(args)
1902 1901 def debug(self, *args):
1903 1902 if self.debugflag:
1904 1903 self.write(*args)
1905 1904 def __getattr__(self, key):
1906 1905 return getattr(self.ui, key)
1907 1906
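# Editor's sketch of the iteration protocol consumed below (inferred from the
# loop itself): changeiter yields ('window', ...) when a new batch of
# revisions starts, ('add', rev, fns) for each revision to render into the
# buffering dui, and ('iter', rev, ...) when a buffered revision may be
# flushed to the real ui in display order.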
1908 1907 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1909 1908
1910 1909 if opts['limit']:
1911 1910 try:
1912 1911 limit = int(opts['limit'])
1913 1912 except ValueError:
1914 1913 raise util.Abort(_('limit must be a positive integer'))
1915 1914 if limit <= 0: raise util.Abort(_('limit must be positive'))
1916 1915 else:
1917 1916 limit = sys.maxint
1918 1917 count = 0
1919 1918
1920 1919 displayer = show_changeset(ui, repo, opts)
1921 1920 for st, rev, fns in changeiter:
1922 1921 if st == 'window':
1923 1922 du = dui(ui)
1924 1923 displayer.ui = du
1925 1924 elif st == 'add':
1926 1925 du.bump(rev)
1927 1926 changenode = repo.changelog.node(rev)
1928 1927 parents = [p for p in repo.changelog.parents(changenode)
1929 1928 if p != nullid]
1930 1929 if opts['no_merges'] and len(parents) == 2:
1931 1930 continue
1932 1931 if opts['only_merges'] and len(parents) != 2:
1933 1932 continue
1934 1933
1935 1934 if opts['keyword']:
1936 1935 changes = getchange(rev)
1937 1936 miss = 0
1938 1937 for k in [kw.lower() for kw in opts['keyword']]:
1939 1938 if not (k in changes[1].lower() or
1940 1939 k in changes[4].lower() or
1941 1940 k in " ".join(changes[3][:20]).lower()):
1942 1941 miss = 1
1943 1942 break
1944 1943 if miss:
1945 1944 continue
1946 1945
1947 1946 br = None
1948 1947 if opts['branches']:
1949 1948 br = repo.branchlookup([repo.changelog.node(rev)])
1950 1949
1951 1950 displayer.show(rev, brinfo=br)
1952 1951 if opts['patch']:
1953 1952 prev = (parents and parents[0]) or nullid
1954 1953 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1955 1954 du.write("\n\n")
1956 1955 elif st == 'iter':
1957 1956 if count == limit: break
1958 1957 if du.header[rev]:
1959 1958 for args in du.header[rev]:
1960 1959 ui.write_header(*args)
1961 1960 if du.hunk[rev]:
1962 1961 count += 1
1963 1962 for args in du.hunk[rev]:
1964 1963 ui.write(*args)
1965 1964
1966 1965 def manifest(ui, repo, rev=None):
1967 1966 """output the latest or given revision of the project manifest
1968 1967
1969 1968 Print a list of version controlled files for the given revision.
1970 1969
1971 1970 The manifest is the list of files being version controlled. If no revision
1972 1971 is given then the tip is used.
1973 1972 """
1974 1973 if rev:
1975 1974 try:
1976 1975 # assume all revision numbers are for changesets
1977 1976 n = repo.lookup(rev)
1978 1977 change = repo.changelog.read(n)
1979 1978 n = change[0]
1980 1979 except hg.RepoError:
1981 1980 n = repo.manifest.lookup(rev)
1982 1981 else:
1983 1982 n = repo.manifest.tip()
1984 1983 m = repo.manifest.read(n)
1985 1984 files = m.keys()
1986 1985 files.sort()
1987 1986
1988 1987 for f in files:
1989 1988 ui.write("%40s %3s %s\n" % (hex(m[f]),
1990 1989 m.execf(f) and "755" or "644", f))
1991 1990
1992 1991 def merge(ui, repo, node=None, force=None, branch=None):
1993 1992 """Merge working directory with another revision
1994 1993
1995 1994 Merge the contents of the current working directory and the
1996 1995 requested revision. Files that changed between either parent are
1997 1996 marked as changed for the next commit and a commit must be
1998 1997 performed before any further updates are allowed.
1999 1998 """
2000 1999
2001 2000 node = _lookup(repo, node, branch)
2002 2001 return hg.merge(repo, node, force=force)
2003 2002
2004 2003 def outgoing(ui, repo, dest=None, **opts):
2005 2004 """show changesets not found in destination
2006 2005
2007 2006 Show changesets not found in the specified destination repository or
2008 2007 the default push location. These are the changesets that would be pushed
2009 2008 if a push was requested.
2010 2009
2011 2010 See pull for valid destination format details.
2012 2011 """
2013 2012 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2014 2013 setremoteconfig(ui, opts)
2015 2014 revs = None
2016 2015 if opts['rev']:
2017 2016 revs = [repo.lookup(rev) for rev in opts['rev']]
2018 2017
2019 2018 other = hg.repository(ui, dest)
2020 2019 o = repo.findoutgoing(other, force=opts['force'])
2021 2020 if not o:
2022 2021 ui.status(_("no changes found\n"))
2023 2022 return
2024 2023 o = repo.changelog.nodesbetween(o, revs)[0]
2025 2024 if opts['newest_first']:
2026 2025 o.reverse()
2027 2026 displayer = show_changeset(ui, repo, opts)
2028 2027 for n in o:
2029 2028 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2030 2029 if opts['no_merges'] and len(parents) == 2:
2031 2030 continue
2032 2031 displayer.show(changenode=n)
2033 2032 if opts['patch']:
2034 2033 prev = (parents and parents[0]) or nullid
2035 2034 patch.diff(repo, prev, n)
2036 2035 ui.write("\n")
2037 2036
2038 2037 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2039 2038 """show the parents of the working dir or revision
2040 2039
2041 2040 Print the working directory's parent revisions.
2042 2041 """
2043 2042 # legacy
2044 2043 if file_ and not rev:
2045 2044 try:
2046 2045 rev = repo.lookup(file_)
2047 2046 file_ = None
2048 2047 except hg.RepoError:
2049 2048 pass
2050 2049 else:
2051 2050 ui.warn(_("'hg parent REV' is deprecated, "
2052 2051 "please use 'hg parents -r REV instead\n"))
2053 2052
2054 2053 if rev:
2055 2054 if file_:
2056 2055 ctx = repo.filectx(file_, changeid=rev)
2057 2056 else:
2058 2057 ctx = repo.changectx(rev)
2059 2058 p = [cp.node() for cp in ctx.parents()]
2060 2059 else:
2061 2060 p = repo.dirstate.parents()
2062 2061
2063 2062 br = None
2064 2063 if branches is not None:
2065 2064 br = repo.branchlookup(p)
2066 2065 displayer = show_changeset(ui, repo, opts)
2067 2066 for n in p:
2068 2067 if n != nullid:
2069 2068 displayer.show(changenode=n, brinfo=br)
2070 2069
2071 2070 def paths(ui, repo, search=None):
2072 2071 """show definition of symbolic path names
2073 2072
2074 2073 Show definition of symbolic path name NAME. If no name is given, show
2075 2074 definition of available names.
2076 2075
2077 2076 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2078 2077 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2079 2078 """
2080 2079 if search:
2081 2080 for name, path in ui.configitems("paths"):
2082 2081 if name == search:
2083 2082 ui.write("%s\n" % path)
2084 2083 return
2085 2084 ui.warn(_("not found!\n"))
2086 2085 return 1
2087 2086 else:
2088 2087 for name, path in ui.configitems("paths"):
2089 2088 ui.write("%s = %s\n" % (name, path))
2090 2089
2091 2090 def postincoming(ui, repo, modheads, optupdate):
2092 2091 if modheads == 0:
2093 2092 return
2094 2093 if optupdate:
2095 2094 if modheads == 1:
2096 2095 return hg.update(repo, repo.changelog.tip()) # update
2097 2096 else:
2098 2097 ui.status(_("not updating, since new heads added\n"))
2099 2098 if modheads > 1:
2100 2099 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2101 2100 else:
2102 2101 ui.status(_("(run 'hg update' to get a working copy)\n"))
2103 2102
2104 2103 def pull(ui, repo, source="default", **opts):
2105 2104 """pull changes from the specified source
2106 2105
2107 2106 Pull changes from a remote repository to a local one.
2108 2107
2109 2108 This finds all changes from the repository at the specified path
2110 2109 or URL and adds them to the local repository. By default, this
2111 2110 does not update the copy of the project in the working directory.
2112 2111
2113 2112 Valid URLs are of the form:
2114 2113
2115 2114 local/filesystem/path
2116 2115 http://[user@]host[:port]/[path]
2117 2116 https://[user@]host[:port]/[path]
2118 2117 ssh://[user@]host[:port]/[path]
2119 2118
2120 2119 Some notes about using SSH with Mercurial:
2121 2120 - SSH requires an accessible shell account on the destination machine
2122 2121 and a copy of hg in the remote path, or one specified with remotecmd.
2123 2122 - path is relative to the remote user's home directory by default.
2124 2123 Use an extra slash at the start of a path to specify an absolute path:
2125 2124 ssh://example.com//tmp/repository
2126 2125 - Mercurial doesn't use its own compression via SSH; the right thing
2127 2126 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2128 2127 Host *.mylocalnetwork.example.com
2129 2128 Compression off
2130 2129 Host *
2131 2130 Compression on
2132 2131 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2133 2132 with the --ssh command line option.
2134 2133 """
2135 2134 source = ui.expandpath(source)
2136 2135 setremoteconfig(ui, opts)
2137 2136
2138 2137 other = hg.repository(ui, source)
2139 2138 ui.status(_('pulling from %s\n') % (source))
2140 2139 revs = None
2141 2140 if opts['rev'] and not other.local():
2142 2141 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2143 2142 elif opts['rev']:
2144 2143 revs = [other.lookup(rev) for rev in opts['rev']]
2145 2144 modheads = repo.pull(other, heads=revs, force=opts['force'])
2146 2145 return postincoming(ui, repo, modheads, opts['update'])
2147 2146
2148 2147 def push(ui, repo, dest=None, **opts):
2149 2148 """push changes to the specified destination
2150 2149
2151 2150 Push changes from the local repository to the given destination.
2152 2151
2153 2152 This is the symmetrical operation for pull. It helps to move
2154 2153 changes from the current repository to a different one. If the
2155 2154 destination is local this is identical to a pull in that directory
2156 2155 from the current one.
2157 2156
2158 2157 By default, push will refuse to run if it detects the result would
2159 2158 increase the number of remote heads. This generally indicates that
2160 2159 the client has forgotten to sync and merge before pushing.
2161 2160
2162 2161 Valid URLs are of the form:
2163 2162
2164 2163 local/filesystem/path
2165 2164 ssh://[user@]host[:port]/[path]
2166 2165
2167 2166 Look at the help text for the pull command for important details
2168 2167 about ssh:// URLs.
2169 2168
2170 2169 Pushing to http:// and https:// URLs is possible, too, if this
2171 2170 feature is enabled on the remote Mercurial server.
2172 2171 """
2173 2172 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2174 2173 setremoteconfig(ui, opts)
2175 2174
2176 2175 other = hg.repository(ui, dest)
2177 2176 ui.status('pushing to %s\n' % (dest))
2178 2177 revs = None
2179 2178 if opts['rev']:
2180 2179 revs = [repo.lookup(rev) for rev in opts['rev']]
2181 2180 r = repo.push(other, opts['force'], revs=revs)
2182 2181 return r == 0
2183 2182
2184 2183 def rawcommit(ui, repo, *flist, **rc):
2185 2184 """raw commit interface (DEPRECATED)
2186 2185
2187 2186 (DEPRECATED)
2188 2187 Lowlevel commit, for use in helper scripts.
2189 2188
2190 2189 This command is not intended to be used by normal users, as it is
2191 2190 primarily useful for importing from other SCMs.
2192 2191
2193 2192 This command is now deprecated and will be removed in a future
2194 2193 release, please use debugsetparents and commit instead.
2195 2194 """
2196 2195
2197 2196 ui.warn(_("(the rawcommit command is deprecated)\n"))
2198 2197
2199 2198 message = rc['message']
2200 2199 if not message and rc['logfile']:
2201 2200 try:
2202 2201 message = open(rc['logfile']).read()
2203 2202 except IOError:
2204 2203 pass
2205 2204 if not message and not rc['logfile']:
2206 2205 raise util.Abort(_("missing commit message"))
2207 2206
2208 2207 files = relpath(repo, list(flist))
2209 2208 if rc['files']:
2210 2209 files += open(rc['files']).read().splitlines()
2211 2210
2212 2211 rc['parent'] = map(repo.lookup, rc['parent'])
2213 2212
2214 2213 try:
2215 2214 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2216 2215 except ValueError, inst:
2217 2216 raise util.Abort(str(inst))
2218 2217
2219 2218 def recover(ui, repo):
2220 2219 """roll back an interrupted transaction
2221 2220
2222 2221 Recover from an interrupted commit or pull.
2223 2222
2224 2223 This command tries to fix the repository status after an interrupted
2225 2224 operation. It should only be necessary when Mercurial suggests it.
2226 2225 """
2227 2226 if repo.recover():
2228 2227 return hg.verify(repo)
2229 2228 return 1
2230 2229
2231 2230 def remove(ui, repo, *pats, **opts):
2232 2231 """remove the specified files on the next commit
2233 2232
2234 2233 Schedule the indicated files for removal from the repository.
2235 2234
2236 2235 This command schedules the files to be removed at the next commit.
2237 2236 This only removes files from the current branch, not from the
2238 2237 entire project history. If the files still exist in the working
2239 2238 directory, they will be deleted from it. If invoked with --after,
2240 2239 files that have been manually deleted are marked as removed.
2241 2240
2242 2241 Modified files and added files are not removed by default. To
2243 2242 remove them, use the -f/--force option.
2244 2243 """
2245 2244 names = []
2246 2245 if not opts['after'] and not pats:
2247 2246 raise util.Abort(_('no files specified'))
2248 2247 files, matchfn, anypats = matchpats(repo, pats, opts)
2249 2248 exact = dict.fromkeys(files)
2250 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2249 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2251 2250 modified, added, removed, deleted, unknown = mardu
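# Editor's note: repo.status() here returns seven lists (the last two empty
# because list_ignored/list_clean are not requested); mapping dict.fromkeys
# over them and slicing [:5] leaves the five membership dicts unpacked above,
# which drive the checks below.  A rough equivalent, under that assumption:
#
#   status = repo.status(files=files, match=matchfn)
#   modified, added, removed, deleted, unknown = [dict.fromkeys(l)
#                                                 for l in status[:5]]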
2252 2251 remove, forget = [], []
2253 2252 for src, abs, rel, exact in walk(repo, pats, opts):
2254 2253 reason = None
2255 2254 if abs not in deleted and opts['after']:
2256 2255 reason = _('is still present')
2257 2256 elif abs in modified and not opts['force']:
2258 2257 reason = _('is modified (use -f to force removal)')
2259 2258 elif abs in added:
2260 2259 if opts['force']:
2261 2260 forget.append(abs)
2262 2261 continue
2263 2262 reason = _('has been marked for add (use -f to force removal)')
2264 2263 elif abs in unknown:
2265 2264 reason = _('is not managed')
2266 2265 elif abs in removed:
2267 2266 continue
2268 2267 if reason:
2269 2268 if exact:
2270 2269 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2271 2270 else:
2272 2271 if ui.verbose or not exact:
2273 2272 ui.status(_('removing %s\n') % rel)
2274 2273 remove.append(abs)
2275 2274 repo.forget(forget)
2276 2275 repo.remove(remove, unlink=not opts['after'])
2277 2276
2278 2277 def rename(ui, repo, *pats, **opts):
2279 2278 """rename files; equivalent of copy + remove
2280 2279
2281 2280 Mark dest as copies of sources; mark sources for deletion. If
2282 2281 dest is a directory, copies are put in that directory. If dest is
2283 2282 a file, there can only be one source.
2284 2283
2285 2284 By default, this command copies the contents of files as they
2286 2285 stand in the working directory. If invoked with --after, the
2287 2286 operation is recorded, but no copying is performed.
2288 2287
2289 2288 This command takes effect in the next commit.
2290 2289
2291 2290 NOTE: This command should be treated as experimental. While it
2292 2291 should properly record renamed files, this information is not yet
2293 2292 fully used by merge, nor fully reported by log.
2294 2293 """
2295 2294 wlock = repo.wlock(0)
2296 2295 errs, copied = docopy(ui, repo, pats, opts, wlock)
2297 2296 names = []
2298 2297 for abs, rel, exact in copied:
2299 2298 if ui.verbose or not exact:
2300 2299 ui.status(_('removing %s\n') % rel)
2301 2300 names.append(abs)
2302 2301 if not opts.get('dry_run'):
2303 2302 repo.remove(names, True, wlock)
2304 2303 return errs
2305 2304
2306 2305 def revert(ui, repo, *pats, **opts):
2307 2306 """revert files or dirs to their states as of some revision
2308 2307
2309 2308 With no revision specified, revert the named files or directories
2310 2309 to the contents they had in the parent of the working directory.
2311 2310 This restores the contents of the affected files to an unmodified
2312 2311 state. If the working directory has two parents, you must
2313 2312 explicitly specify the revision to revert to.
2314 2313
2315 2314 Modified files are saved with a .orig suffix before reverting.
2316 2315 To disable these backups, use --no-backup.
2317 2316
2318 2317 Using the -r option, revert the given files or directories to
2319 2318 their contents as of a specific revision. This can be helpful to "roll
2320 2319 back" some or all of a change that should not have been committed.
2321 2320
2322 2321 Revert modifies the working directory. It does not commit any
2323 2322 changes, or change the parent of the working directory. If you
2324 2323 revert to a revision other than the parent of the working
2325 2324 directory, the reverted files will thus appear modified
2326 2325 afterwards.
2327 2326
2328 2327 If a file has been deleted, it is recreated. If the executable
2329 2328 mode of a file was changed, it is reset.
2330 2329
2331 2330 If names are given, all files matching the names are reverted.
2332 2331
2333 2332 If no arguments are given, all files in the repository are reverted.
2334 2333 """
2335 2334 parent, p2 = repo.dirstate.parents()
2336 2335 if opts['rev']:
2337 2336 node = repo.lookup(opts['rev'])
2338 2337 elif p2 != nullid:
2339 2338 raise util.Abort(_('working dir has two parents; '
2340 2339 'you must specify the revision to revert to'))
2341 2340 else:
2342 2341 node = parent
2343 2342 mf = repo.manifest.read(repo.changelog.read(node)[0])
2344 2343 if node == parent:
2345 2344 pmf = mf
2346 2345 else:
2347 2346 pmf = None
2348 2347
2349 2348 wlock = repo.wlock()
2350 2349
2351 2350 # need all matching names in dirstate and manifest of target rev,
2352 2351 # so have to walk both. do not print errors if files exist in one
2353 2352 # but not other.
2354 2353
2355 2354 names = {}
2356 2355 target_only = {}
2357 2356
2358 2357 # walk dirstate.
2359 2358
2360 2359 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2361 2360 names[abs] = (rel, exact)
2362 2361 if src == 'b':
2363 2362 target_only[abs] = True
2364 2363
2365 2364 # walk target manifest.
2366 2365
2367 2366 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2368 2367 badmatch=names.has_key):
2369 2368 if abs in names: continue
2370 2369 names[abs] = (rel, exact)
2371 2370 target_only[abs] = True
2372 2371
2373 changes = repo.changes(match=names.has_key, wlock=wlock)
2372 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2374 2373 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
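# Editor's note: same pattern as in remove() above -- the [:5] slice keeps
# modified/added/removed/deleted/unknown, which together with target_only
# feed the dispatch table below.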
2375 2374
2376 2375 revert = ([], _('reverting %s\n'))
2377 2376 add = ([], _('adding %s\n'))
2378 2377 remove = ([], _('removing %s\n'))
2379 2378 forget = ([], _('forgetting %s\n'))
2380 2379 undelete = ([], _('undeleting %s\n'))
2381 2380 update = {}
2382 2381
2383 2382 disptable = (
2384 2383 # dispatch table:
2385 2384 # file state
2386 2385 # action if in target manifest
2387 2386 # action if not in target manifest
2388 2387 # make backup if in target manifest
2389 2388 # make backup if not in target manifest
2390 2389 (modified, revert, remove, True, True),
2391 2390 (added, revert, forget, True, False),
2392 2391 (removed, undelete, None, False, False),
2393 2392 (deleted, revert, remove, False, False),
2394 2393 (unknown, add, None, True, False),
2395 2394 (target_only, add, None, False, False),
2396 2395 )
2397 2396
2398 2397 entries = names.items()
2399 2398 entries.sort()
2400 2399
2401 2400 for abs, (rel, exact) in entries:
2402 2401 mfentry = mf.get(abs)
2403 2402 def handle(xlist, dobackup):
2404 2403 xlist[0].append(abs)
2405 2404 update[abs] = 1
2406 2405 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2407 2406 bakname = "%s.orig" % rel
2408 2407 ui.note(_('saving current version of %s as %s\n') %
2409 2408 (rel, bakname))
2410 2409 if not opts.get('dry_run'):
2411 2410 shutil.copyfile(rel, bakname)
2412 2411 shutil.copymode(rel, bakname)
2413 2412 if ui.verbose or not exact:
2414 2413 ui.status(xlist[1] % rel)
2415 2414 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2416 2415 if abs not in table: continue
2417 2416 # file has changed in dirstate
2418 2417 if mfentry:
2419 2418 handle(hitlist, backuphit)
2420 2419 elif misslist is not None:
2421 2420 handle(misslist, backupmiss)
2422 2421 else:
2423 2422 if exact: ui.warn(_('file not managed: %s\n') % rel)
2424 2423 break
2425 2424 else:
2426 2425 # file has not changed in dirstate
2427 2426 if node == parent:
2428 2427 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2429 2428 continue
2430 2429 if pmf is None:
2431 2430 # only need parent manifest in this unlikely case,
2432 2431 # so do not read by default
2433 2432 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2434 2433 if abs in pmf:
2435 2434 if mfentry:
2436 2435 # if version of file is same in parent and target
2437 2436 # manifests, do nothing
2438 2437 if pmf[abs] != mfentry:
2439 2438 handle(revert, False)
2440 2439 else:
2441 2440 handle(remove, False)
2442 2441
2443 2442 if not opts.get('dry_run'):
2444 2443 repo.dirstate.forget(forget[0])
2445 2444 r = hg.revert(repo, node, update.has_key, wlock)
2446 2445 repo.dirstate.update(add[0], 'a')
2447 2446 repo.dirstate.update(undelete[0], 'n')
2448 2447 repo.dirstate.update(remove[0], 'r')
2449 2448 return r
2450 2449
2451 2450 def rollback(ui, repo):
2452 2451 """roll back the last transaction in this repository
2453 2452
2454 2453 Roll back the last transaction in this repository, restoring the
2455 2454 project to its state prior to the transaction.
2456 2455
2457 2456 Transactions are used to encapsulate the effects of all commands
2458 2457 that create new changesets or propagate existing changesets into a
2459 2458 repository. For example, the following commands are transactional,
2460 2459 and their effects can be rolled back:
2461 2460
2462 2461 commit
2463 2462 import
2464 2463 pull
2465 2464 push (with this repository as destination)
2466 2465 unbundle
2467 2466
2468 2467 This command should be used with care. There is only one level of
2469 2468 rollback, and there is no way to undo a rollback.
2470 2469
2471 2470 This command is not intended for use on public repositories. Once
2472 2471 changes are visible for pull by other users, rolling a transaction
2473 2472 back locally is ineffective (someone else may already have pulled
2474 2473 the changes). Furthermore, a race is possible with readers of the
2475 2474 repository; for example an in-progress pull from the repository
2476 2475 may fail if a rollback is performed.
2477 2476 """
2478 2477 repo.rollback()
2479 2478
2480 2479 def root(ui, repo):
2481 2480 """print the root (top) of the current working dir
2482 2481
2483 2482 Print the root directory of the current repository.
2484 2483 """
2485 2484 ui.write(repo.root + "\n")
2486 2485
2487 2486 def serve(ui, repo, **opts):
2488 2487 """export the repository via HTTP
2489 2488
2490 2489 Start a local HTTP repository browser and pull server.
2491 2490
2492 2491 By default, the server logs accesses to stdout and errors to
2493 2492 stderr. Use the "-A" and "-E" options to log to files.
2494 2493 """
2495 2494
2496 2495 if opts["stdio"]:
2497 2496 if repo is None:
2498 2497 raise hg.RepoError(_('no repo found'))
2499 2498 s = sshserver.sshserver(ui, repo)
2500 2499 s.serve_forever()
2501 2500
2502 2501 optlist = ("name templates style address port ipv6"
2503 2502 " accesslog errorlog webdir_conf")
2504 2503 for o in optlist.split():
2505 2504 if opts[o]:
2506 2505 ui.setconfig("web", o, opts[o])
2507 2506
2508 2507 if repo is None and not ui.config("web", "webdir_conf"):
2509 2508 raise hg.RepoError(_('no repo found'))
2510 2509
2511 2510 if opts['daemon'] and not opts['daemon_pipefds']:
2512 2511 rfd, wfd = os.pipe()
2513 2512 args = sys.argv[:]
2514 2513 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2515 2514 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2516 2515 args[0], args)
2517 2516 os.close(wfd)
2518 2517 os.read(rfd, 1)
2519 2518 os._exit(0)
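# Editor's note: this is the daemonization handshake -- the parent re-execs
# itself with --daemon-pipefds and blocks on os.read(rfd, 1) until the child
# (further below) has created the listening socket and writes 'y' to its end
# of the pipe, so "hg serve -d" only returns once the server is ready.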
2520 2519
2521 2520 try:
2522 2521 httpd = hgweb.server.create_server(ui, repo)
2523 2522 except socket.error, inst:
2524 2523 raise util.Abort(_('cannot start server: ') + inst.args[1])
2525 2524
2526 2525 if ui.verbose:
2527 2526 addr, port = httpd.socket.getsockname()
2528 2527 if addr == '0.0.0.0':
2529 2528 addr = socket.gethostname()
2530 2529 else:
2531 2530 try:
2532 2531 addr = socket.gethostbyaddr(addr)[0]
2533 2532 except socket.error:
2534 2533 pass
2535 2534 if port != 80:
2536 2535 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2537 2536 else:
2538 2537 ui.status(_('listening at http://%s/\n') % addr)
2539 2538
2540 2539 if opts['pid_file']:
2541 2540 fp = open(opts['pid_file'], 'w')
2542 2541 fp.write(str(os.getpid()) + '\n')
2543 2542 fp.close()
2544 2543
2545 2544 if opts['daemon_pipefds']:
2546 2545 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2547 2546 os.close(rfd)
2548 2547 os.write(wfd, 'y')
2549 2548 os.close(wfd)
2550 2549 sys.stdout.flush()
2551 2550 sys.stderr.flush()
2552 2551 fd = os.open(util.nulldev, os.O_RDWR)
2553 2552 if fd != 0: os.dup2(fd, 0)
2554 2553 if fd != 1: os.dup2(fd, 1)
2555 2554 if fd != 2: os.dup2(fd, 2)
2556 2555 if fd not in (0, 1, 2): os.close(fd)
2557 2556
2558 2557 httpd.serve_forever()
2559 2558
2560 2559 def status(ui, repo, *pats, **opts):
2561 2560 """show changed files in the working directory
2562 2561
2563 2562 Show status of files in the repository. If names are given, only
2564 2563 files that match are shown. Files that are clean or ignored, are
2565 2564 not listed unless -c (clean), -i (ignored) or -A is given.
2566 2565
2567 2566 The codes used to show the status of files are:
2568 2567 M = modified
2569 2568 A = added
2570 2569 R = removed
2571 2570 C = clean
2572 2571 ! = deleted, but still tracked
2573 2572 ? = not tracked
2574 2573 I = ignored (not shown by default)
2575 2574 = the previous added file was copied from here
2576 2575 """
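# Illustrative only: with the codes above, typical output might look like
#   M mercurial/commands.py
#   A tests/test-status
#   ? scratch.txt
# (file names are hypothetical).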
2577 2576
2578 2577 all = opts['all']
2579 2578
2580 2579 files, matchfn, anypats = matchpats(repo, pats, opts)
2581 2580 cwd = (pats and repo.getcwd()) or ''
2582 2581 modified, added, removed, deleted, unknown, ignored, clean = [
2583 2582 [util.pathto(cwd, x) for x in n]
2584 2583 for n in repo.status(files=files, match=matchfn,
2585 2584 list_ignored=all or opts['ignored'],
2586 2585 list_clean=all or opts['clean'])]
2587 2586
2588 2587 changetypes = (('modified', 'M', modified),
2589 2588 ('added', 'A', added),
2590 2589 ('removed', 'R', removed),
2591 2590 ('deleted', '!', deleted),
2592 2591 ('unknown', '?', unknown),
2593 2592 ('ignored', 'I', ignored))
2594 2593
2595 2594 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2596 2595
2597 2596 end = opts['print0'] and '\0' or '\n'
2598 2597
2599 2598 for opt, char, changes in ([ct for ct in explicit_changetypes
2600 2599 if all or opts[ct[0]]]
2601 2600 or changetypes):
2602 2601 if opts['no_status']:
2603 2602 format = "%%s%s" % end
2604 2603 else:
2605 2604 format = "%s %%s%s" % (char, end)
2606 2605
2607 2606 for f in changes:
2608 2607 ui.write(format % f)
2609 2608 if ((all or opts.get('copies')) and not opts.get('no_status')
2610 2609 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2611 2610 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2612 2611
2613 2612 def tag(ui, repo, name, rev_=None, **opts):
2614 2613 """add a tag for the current tip or a given revision
2615 2614
2616 2615 Name a particular revision using <name>.
2617 2616
2618 2617 Tags are used to name particular revisions of the repository and are
2619 2618 very useful to compare different revision, to go back to significant
2620 2619 earlier versions or to mark branch points as releases, etc.
2621 2620
2622 2621 If no revision is given, the parent of the working directory is used.
2623 2622
2624 2623 To facilitate version control, distribution, and merging of tags,
2625 2624 they are stored as a file named ".hgtags" which is managed
2626 2625 similarly to other project files and can be hand-edited if
2627 2626 necessary. The file '.hg/localtags' is used for local tags (not
2628 2627 shared among repositories).
2629 2628 """
2630 2629 if name in ['tip', '.']:
2631 2630 raise util.Abort(_("the name '%s' is reserved") % name)
2632 2631 if rev_ is not None:
2633 2632 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2634 2633 "please use 'hg tag [-r REV] NAME' instead\n"))
2635 2634 if opts['rev']:
2636 2635 raise util.Abort(_("use only one form to specify the revision"))
2637 2636 if opts['rev']:
2638 2637 rev_ = opts['rev']
2639 2638 if rev_:
2640 2639 r = hex(repo.lookup(rev_))
2641 2640 else:
2642 2641 p1, p2 = repo.dirstate.parents()
2643 2642 if p1 == nullid:
2644 2643 raise util.Abort(_('no revision to tag'))
2645 2644 if p2 != nullid:
2646 2645 raise util.Abort(_('outstanding uncommitted merges'))
2647 2646 r = hex(p1)
2648 2647
2649 2648 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2650 2649 opts['date'])
2651 2650
2652 2651 def tags(ui, repo):
2653 2652 """list repository tags
2654 2653
2655 2654 List the repository tags.
2656 2655
2657 2656 This lists both regular and local tags.
2658 2657 """
2659 2658
2660 2659 l = repo.tagslist()
2661 2660 l.reverse()
2662 2661 for t, n in l:
2663 2662 try:
2664 2663 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2665 2664 except KeyError:
2666 2665 r = " ?:?"
2667 2666 if ui.quiet:
2668 2667 ui.write("%s\n" % t)
2669 2668 else:
2670 2669 ui.write("%-30s %s\n" % (t, r))
2671 2670
2672 2671 def tip(ui, repo, **opts):
2673 2672 """show the tip revision
2674 2673
2675 2674 Show the tip revision.
2676 2675 """
2677 2676 n = repo.changelog.tip()
2678 2677 br = None
2679 2678 if opts['branches']:
2680 2679 br = repo.branchlookup([n])
2681 2680 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2682 2681 if opts['patch']:
2683 2682 patch.diff(repo, repo.changelog.parents(n)[0], n)
2684 2683
2685 2684 def unbundle(ui, repo, fname, **opts):
2686 2685 """apply a changegroup file
2687 2686
2688 2687 Apply a compressed changegroup file generated by the bundle
2689 2688 command.
2690 2689 """
2691 2690 f = urllib.urlopen(fname)
2692 2691
2693 2692 header = f.read(6)
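# Editor's note: the six-byte header checked below is "HG10" plus a two-byte
# compression tag -- "BZ" for bzip2 bundles, "UN" for uncompressed ones --
# which is why an unrecognized "HG10??" value falls through to the
# "unknown bundle compression type" abort.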
2694 2693 if not header.startswith("HG"):
2695 2694 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2696 2695 elif not header.startswith("HG10"):
2697 2696 raise util.Abort(_("%s: unknown bundle version") % fname)
2698 2697 elif header == "HG10BZ":
2699 2698 def generator(f):
2700 2699 zd = bz2.BZ2Decompressor()
2701 2700 zd.decompress("BZ")
2702 2701 for chunk in f:
2703 2702 yield zd.decompress(chunk)
2704 2703 elif header == "HG10UN":
2705 2704 def generator(f):
2706 2705 for chunk in f:
2707 2706 yield chunk
2708 2707 else:
2709 2708 raise util.Abort(_("%s: unknown bundle compression type")
2710 2709 % fname)
2711 2710 gen = generator(util.filechunkiter(f, 4096))
2712 2711 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2713 2712 'bundle:' + fname)
2714 2713 return postincoming(ui, repo, modheads, opts['update'])
2715 2714
2716 2715 def undo(ui, repo):
2717 2716 """undo the last commit or pull (DEPRECATED)
2718 2717
2719 2718 (DEPRECATED)
2720 2719 This command is now deprecated and will be removed in a future
2721 2720 release. Please use the rollback command instead. For usage
2722 2721 instructions, see the rollback command.
2723 2722 """
2724 2723 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2725 2724 repo.rollback()
2726 2725
2727 2726 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2728 2727 branch=None):
2729 2728 """update or merge working directory
2730 2729
2731 2730 Update the working directory to the specified revision.
2732 2731
2733 2732 If there are no outstanding changes in the working directory and
2734 2733 there is a linear relationship between the current version and the
2735 2734 requested version, the result is the requested version.
2736 2735
2737 2736 To merge the working directory with another revision, use the
2738 2737 merge command.
2739 2738
2740 2739 By default, update will refuse to run if doing so would require
2741 2740 merging or discarding local changes.
2742 2741 """
2743 2742 node = _lookup(repo, node, branch)
2744 2743 if merge:
2745 2744 ui.warn(_('(the -m/--merge option is deprecated; '
2746 2745 'use the merge command instead)\n'))
2747 2746 return hg.merge(repo, node, force=force)
2748 2747 elif clean:
2749 2748 return hg.clean(repo, node)
2750 2749 else:
2751 2750 return hg.update(repo, node)
2752 2751
2753 2752 def _lookup(repo, node, branch=None):
2754 2753 if branch:
2755 2754 br = repo.branchlookup(branch=branch)
2756 2755 found = []
2757 2756 for x in br:
2758 2757 if branch in br[x]:
2759 2758 found.append(x)
2760 2759 if len(found) > 1:
2761 2760 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2762 2761 for x in found:
2763 2762 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2764 2763 raise util.Abort("")
2765 2764 if len(found) == 1:
2766 2765 node = found[0]
2767 2766 repo.ui.warn(_("Using head %s for branch %s\n")
2768 2767 % (short(node), branch))
2769 2768 else:
2770 2769 raise util.Abort(_("branch %s not found\n") % (branch))
2771 2770 else:
2772 2771 node = node and repo.lookup(node) or repo.changelog.tip()
2773 2772 return node
2774 2773
2775 2774 def verify(ui, repo):
2776 2775 """verify the integrity of the repository
2777 2776
2778 2777 Verify the integrity of the current repository.
2779 2778
2780 2779 This will perform an extensive check of the repository's
2781 2780 integrity, validating the hashes and checksums of each entry in
2782 2781 the changelog, manifest, and tracked files, as well as the
2783 2782 integrity of their crosslinks and indices.
2784 2783 """
2785 2784 return hg.verify(repo)
2786 2785
2787 2786 # Command options and aliases are listed here, alphabetically
2788 2787
2789 2788 table = {
2790 2789 "^add":
2791 2790 (add,
2792 2791 [('I', 'include', [], _('include names matching the given patterns')),
2793 2792 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2794 2793 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2795 2794 _('hg add [OPTION]... [FILE]...')),
2796 2795 "debugaddremove|addremove":
2797 2796 (addremove,
2798 2797 [('I', 'include', [], _('include names matching the given patterns')),
2799 2798 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2800 2799 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2801 2800 _('hg addremove [OPTION]... [FILE]...')),
2802 2801 "^annotate":
2803 2802 (annotate,
2804 2803 [('r', 'rev', '', _('annotate the specified revision')),
2805 2804 ('a', 'text', None, _('treat all files as text')),
2806 2805 ('u', 'user', None, _('list the author')),
2807 2806 ('d', 'date', None, _('list the date')),
2808 2807 ('n', 'number', None, _('list the revision number (default)')),
2809 2808 ('c', 'changeset', None, _('list the changeset')),
2810 2809 ('I', 'include', [], _('include names matching the given patterns')),
2811 2810 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2812 2811 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2813 2812 "archive":
2814 2813 (archive,
2815 2814 [('', 'no-decode', None, _('do not pass files through decoders')),
2816 2815 ('p', 'prefix', '', _('directory prefix for files in archive')),
2817 2816 ('r', 'rev', '', _('revision to distribute')),
2818 2817 ('t', 'type', '', _('type of distribution to create')),
2819 2818 ('I', 'include', [], _('include names matching the given patterns')),
2820 2819 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2821 2820 _('hg archive [OPTION]... DEST')),
2822 2821 "backout":
2823 2822 (backout,
2824 2823 [('', 'merge', None,
2825 2824 _('merge with old dirstate parent after backout')),
2826 2825 ('m', 'message', '', _('use <text> as commit message')),
2827 2826 ('l', 'logfile', '', _('read commit message from <file>')),
2828 2827 ('d', 'date', '', _('record datecode as commit date')),
2829 2828 ('', 'parent', '', _('parent to choose when backing out merge')),
2830 2829 ('u', 'user', '', _('record user as committer')),
2831 2830 ('I', 'include', [], _('include names matching the given patterns')),
2832 2831 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2833 2832 _('hg backout [OPTION]... REV')),
2834 2833 "bundle":
2835 2834 (bundle,
2836 2835 [('f', 'force', None,
2837 2836 _('run even when remote repository is unrelated'))],
2838 2837 _('hg bundle FILE DEST')),
2839 2838 "cat":
2840 2839 (cat,
2841 2840 [('o', 'output', '', _('print output to file with formatted name')),
2842 2841 ('r', 'rev', '', _('print the given revision')),
2843 2842 ('I', 'include', [], _('include names matching the given patterns')),
2844 2843 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2845 2844 _('hg cat [OPTION]... FILE...')),
2846 2845 "^clone":
2847 2846 (clone,
2848 2847 [('U', 'noupdate', None, _('do not update the new working directory')),
2849 2848 ('r', 'rev', [],
2850 2849 _('a changeset you would like to have after cloning')),
2851 2850 ('', 'pull', None, _('use pull protocol to copy metadata')),
2852 2851 ('', 'uncompressed', None,
2853 2852 _('use uncompressed transfer (fast over LAN)')),
2854 2853 ('e', 'ssh', '', _('specify ssh command to use')),
2855 2854 ('', 'remotecmd', '',
2856 2855 _('specify hg command to run on the remote side'))],
2857 2856 _('hg clone [OPTION]... SOURCE [DEST]')),
2858 2857 "^commit|ci":
2859 2858 (commit,
2860 2859 [('A', 'addremove', None,
2861 2860 _('mark new/missing files as added/removed before committing')),
2862 2861 ('m', 'message', '', _('use <text> as commit message')),
2863 2862 ('l', 'logfile', '', _('read the commit message from <file>')),
2864 2863 ('d', 'date', '', _('record datecode as commit date')),
2865 2864 ('u', 'user', '', _('record user as committer')),
2866 2865 ('I', 'include', [], _('include names matching the given patterns')),
2867 2866 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2868 2867 _('hg commit [OPTION]... [FILE]...')),
2869 2868 "copy|cp":
2870 2869 (copy,
2871 2870 [('A', 'after', None, _('record a copy that has already occurred')),
2872 2871 ('f', 'force', None,
2873 2872 _('forcibly copy over an existing managed file')),
2874 2873 ('I', 'include', [], _('include names matching the given patterns')),
2875 2874 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2876 2875 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2877 2876 _('hg copy [OPTION]... [SOURCE]... DEST')),
2878 2877 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2879 2878 "debugcomplete":
2880 2879 (debugcomplete,
2881 2880 [('o', 'options', None, _('show the command options'))],
2882 2881 _('debugcomplete [-o] CMD')),
2883 2882 "debugrebuildstate":
2884 2883 (debugrebuildstate,
2885 2884 [('r', 'rev', '', _('revision to rebuild to'))],
2886 2885 _('debugrebuildstate [-r REV] [REV]')),
2887 2886 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2888 2887 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2889 2888 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2890 2889 "debugstate": (debugstate, [], _('debugstate')),
2891 2890 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2892 2891 "debugindex": (debugindex, [], _('debugindex FILE')),
2893 2892 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2894 2893 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2895 2894 "debugwalk":
2896 2895 (debugwalk,
2897 2896 [('I', 'include', [], _('include names matching the given patterns')),
2898 2897 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2899 2898 _('debugwalk [OPTION]... [FILE]...')),
2900 2899 "^diff":
2901 2900 (diff,
2902 2901 [('r', 'rev', [], _('revision')),
2903 2902 ('a', 'text', None, _('treat all files as text')),
2904 2903 ('p', 'show-function', None,
2905 2904 _('show which function each change is in')),
2906 2905 ('w', 'ignore-all-space', None,
2907 2906 _('ignore white space when comparing lines')),
2908 2907 ('b', 'ignore-space-change', None,
2909 2908 _('ignore changes in the amount of white space')),
2910 2909 ('B', 'ignore-blank-lines', None,
2911 2910 _('ignore changes whose lines are all blank')),
2912 2911 ('I', 'include', [], _('include names matching the given patterns')),
2913 2912 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2914 2913 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2915 2914 "^export":
2916 2915 (export,
2917 2916 [('o', 'output', '', _('print output to file with formatted name')),
2918 2917 ('a', 'text', None, _('treat all files as text')),
2919 2918 ('', 'switch-parent', None, _('diff against the second parent'))],
2920 2919 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2921 2920 "debugforget|forget":
2922 2921 (forget,
2923 2922 [('I', 'include', [], _('include names matching the given patterns')),
2924 2923 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2925 2924 _('hg forget [OPTION]... FILE...')),
2926 2925 "grep":
2927 2926 (grep,
2928 2927 [('0', 'print0', None, _('end fields with NUL')),
2929 2928 ('', 'all', None, _('print all revisions that match')),
2930 2929 ('f', 'follow', None,
2931 2930 _('follow changeset history, or file history across copies and renames')),
2932 2931 ('i', 'ignore-case', None, _('ignore case when matching')),
2933 2932 ('l', 'files-with-matches', None,
2934 2933 _('print only filenames and revs that match')),
2935 2934 ('n', 'line-number', None, _('print matching line numbers')),
2936 2935 ('r', 'rev', [], _('search in given revision range')),
2937 2936 ('u', 'user', None, _('print user who committed change')),
2938 2937 ('I', 'include', [], _('include names matching the given patterns')),
2939 2938 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2940 2939 _('hg grep [OPTION]... PATTERN [FILE]...')),
2941 2940 "heads":
2942 2941 (heads,
2943 2942 [('b', 'branches', None, _('show branches')),
2944 2943 ('', 'style', '', _('display using template map file')),
2945 2944 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2946 2945 ('', 'template', '', _('display with template'))],
2947 2946 _('hg heads [-b] [-r <rev>]')),
2948 2947 "help": (help_, [], _('hg help [COMMAND]')),
2949 2948 "identify|id": (identify, [], _('hg identify')),
2950 2949 "import|patch":
2951 2950 (import_,
2952 2951 [('p', 'strip', 1,
2953 2952 _('directory strip option for patch. This has the same\n'
2954 2953 'meaning as the corresponding patch option')),
2955 2954 ('m', 'message', '', _('use <text> as commit message')),
2956 2955 ('b', 'base', '', _('base path')),
2957 2956 ('f', 'force', None,
2958 2957 _('skip check for outstanding uncommitted changes'))],
2959 2958 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2960 2959 "incoming|in": (incoming,
2961 2960 [('M', 'no-merges', None, _('do not show merges')),
2962 2961 ('f', 'force', None,
2963 2962 _('run even when remote repository is unrelated')),
2964 2963 ('', 'style', '', _('display using template map file')),
2965 2964 ('n', 'newest-first', None, _('show newest record first')),
2966 2965 ('', 'bundle', '', _('file to store the bundles into')),
2967 2966 ('p', 'patch', None, _('show patch')),
2968 2967 ('r', 'rev', [], _('a specific revision you would like to pull')),
2969 2968 ('', 'template', '', _('display with template')),
2970 2969 ('e', 'ssh', '', _('specify ssh command to use')),
2971 2970 ('', 'remotecmd', '',
2972 2971 _('specify hg command to run on the remote side'))],
2973 2972 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2974 2973 ' [--bundle FILENAME] [SOURCE]')),
2975 2974 "^init":
2976 2975 (init,
2977 2976 [('e', 'ssh', '', _('specify ssh command to use')),
2978 2977 ('', 'remotecmd', '',
2979 2978 _('specify hg command to run on the remote side'))],
2980 2979 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2981 2980 "locate":
2982 2981 (locate,
2983 2982 [('r', 'rev', '', _('search the repository as it stood at rev')),
2984 2983 ('0', 'print0', None,
2985 2984 _('end filenames with NUL, for use with xargs')),
2986 2985 ('f', 'fullpath', None,
2987 2986 _('print complete paths from the filesystem root')),
2988 2987 ('I', 'include', [], _('include names matching the given patterns')),
2989 2988 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2990 2989 _('hg locate [OPTION]... [PATTERN]...')),
2991 2990 "^log|history":
2992 2991 (log,
2993 2992 [('b', 'branches', None, _('show branches')),
2994 2993 ('f', 'follow', None,
2995 2994 _('follow changeset history, or file history across copies and renames')),
2996 2995 ('', 'follow-first', None,
2997 2996 _('only follow the first parent of merge changesets')),
2998 2997 ('k', 'keyword', [], _('search for a keyword')),
2999 2998 ('l', 'limit', '', _('limit number of changes displayed')),
3000 2999 ('r', 'rev', [], _('show the specified revision or range')),
3001 3000 ('M', 'no-merges', None, _('do not show merges')),
3002 3001 ('', 'style', '', _('display using template map file')),
3003 3002 ('m', 'only-merges', None, _('show only merges')),
3004 3003 ('p', 'patch', None, _('show patch')),
3005 3004 ('', 'template', '', _('display with template')),
3006 3005 ('I', 'include', [], _('include names matching the given patterns')),
3007 3006 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3008 3007 _('hg log [OPTION]... [FILE]')),
3009 3008 "manifest": (manifest, [], _('hg manifest [REV]')),
3010 3009 "merge":
3011 3010 (merge,
3012 3011 [('b', 'branch', '', _('merge with head of a specific branch')),
3013 3012 ('f', 'force', None, _('force a merge with outstanding changes'))],
3014 3013 _('hg merge [-b TAG] [-f] [REV]')),
3015 3014 "outgoing|out": (outgoing,
3016 3015 [('M', 'no-merges', None, _('do not show merges')),
3017 3016 ('f', 'force', None,
3018 3017 _('run even when remote repository is unrelated')),
3019 3018 ('p', 'patch', None, _('show patch')),
3020 3019 ('', 'style', '', _('display using template map file')),
3021 3020 ('r', 'rev', [], _('a specific revision you would like to push')),
3022 3021 ('n', 'newest-first', None, _('show newest record first')),
3023 3022 ('', 'template', '', _('display with template')),
3024 3023 ('e', 'ssh', '', _('specify ssh command to use')),
3025 3024 ('', 'remotecmd', '',
3026 3025 _('specify hg command to run on the remote side'))],
3027 3026 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3028 3027 "^parents":
3029 3028 (parents,
3030 3029 [('b', 'branches', None, _('show branches')),
3031 3030 ('r', 'rev', '', _('show parents from the specified rev')),
3032 3031 ('', 'style', '', _('display using template map file')),
3033 3032 ('', 'template', '', _('display with template'))],
3034 3033 _('hg parents [-b] [-r REV] [FILE]')),
3035 3034 "paths": (paths, [], _('hg paths [NAME]')),
3036 3035 "^pull":
3037 3036 (pull,
3038 3037 [('u', 'update', None,
3039 3038 _('update the working directory to tip after pull')),
3040 3039 ('e', 'ssh', '', _('specify ssh command to use')),
3041 3040 ('f', 'force', None,
3042 3041 _('run even when remote repository is unrelated')),
3043 3042 ('r', 'rev', [], _('a specific revision you would like to pull')),
3044 3043 ('', 'remotecmd', '',
3045 3044 _('specify hg command to run on the remote side'))],
3046 3045 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3047 3046 "^push":
3048 3047 (push,
3049 3048 [('f', 'force', None, _('force push')),
3050 3049 ('e', 'ssh', '', _('specify ssh command to use')),
3051 3050 ('r', 'rev', [], _('a specific revision you would like to push')),
3052 3051 ('', 'remotecmd', '',
3053 3052 _('specify hg command to run on the remote side'))],
3054 3053 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3055 3054 "debugrawcommit|rawcommit":
3056 3055 (rawcommit,
3057 3056 [('p', 'parent', [], _('parent')),
3058 3057 ('d', 'date', '', _('date code')),
3059 3058 ('u', 'user', '', _('user')),
3060 3059 ('F', 'files', '', _('file list')),
3061 3060 ('m', 'message', '', _('commit message')),
3062 3061 ('l', 'logfile', '', _('commit message file'))],
3063 3062 _('hg debugrawcommit [OPTION]... [FILE]...')),
3064 3063 "recover": (recover, [], _('hg recover')),
3065 3064 "^remove|rm":
3066 3065 (remove,
3067 3066 [('A', 'after', None, _('record remove that has already occurred')),
3068 3067 ('f', 'force', None, _('remove file even if modified')),
3069 3068 ('I', 'include', [], _('include names matching the given patterns')),
3070 3069 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3071 3070 _('hg remove [OPTION]... FILE...')),
3072 3071 "rename|mv":
3073 3072 (rename,
3074 3073 [('A', 'after', None, _('record a rename that has already occurred')),
3075 3074 ('f', 'force', None,
3076 3075 _('forcibly copy over an existing managed file')),
3077 3076 ('I', 'include', [], _('include names matching the given patterns')),
3078 3077 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3079 3078 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3080 3079 _('hg rename [OPTION]... SOURCE... DEST')),
3081 3080 "^revert":
3082 3081 (revert,
3083 3082 [('r', 'rev', '', _('revision to revert to')),
3084 3083 ('', 'no-backup', None, _('do not save backup copies of files')),
3085 3084 ('I', 'include', [], _('include names matching given patterns')),
3086 3085 ('X', 'exclude', [], _('exclude names matching given patterns')),
3087 3086 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3088 3087 _('hg revert [-r REV] [NAME]...')),
3089 3088 "rollback": (rollback, [], _('hg rollback')),
3090 3089 "root": (root, [], _('hg root')),
3091 3090 "^serve":
3092 3091 (serve,
3093 3092 [('A', 'accesslog', '', _('name of access log file to write to')),
3094 3093 ('d', 'daemon', None, _('run server in background')),
3095 3094 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3096 3095 ('E', 'errorlog', '', _('name of error log file to write to')),
3097 3096 ('p', 'port', 0, _('port to use (default: 8000)')),
3098 3097 ('a', 'address', '', _('address to use')),
3099 3098 ('n', 'name', '',
3100 3099 _('name to show in web pages (default: working dir)')),
3101 3100 ('', 'webdir-conf', '', _('name of the webdir config file'
3102 3101 ' (serve more than one repo)')),
3103 3102 ('', 'pid-file', '', _('name of file to write process ID to')),
3104 3103 ('', 'stdio', None, _('for remote clients')),
3105 3104 ('t', 'templates', '', _('web templates to use')),
3106 3105 ('', 'style', '', _('template style to use')),
3107 3106 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3108 3107 _('hg serve [OPTION]...')),
3109 3108 "^status|st":
3110 3109 (status,
3111 3110 [('A', 'all', None, _('show status of all files')),
3112 3111 ('m', 'modified', None, _('show only modified files')),
3113 3112 ('a', 'added', None, _('show only added files')),
3114 3113 ('r', 'removed', None, _('show only removed files')),
3115 3114 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3116 3115 ('c', 'clean', None, _('show only files without changes')),
3117 3116 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3118 3117 ('i', 'ignored', None, _('show ignored files')),
3119 3118 ('n', 'no-status', None, _('hide status prefix')),
3120 3119 ('C', 'copies', None, _('show source of copied files')),
3121 3120 ('0', 'print0', None,
3122 3121 _('end filenames with NUL, for use with xargs')),
3123 3122 ('I', 'include', [], _('include names matching the given patterns')),
3124 3123 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3125 3124 _('hg status [OPTION]... [FILE]...')),
3126 3125 "tag":
3127 3126 (tag,
3128 3127 [('l', 'local', None, _('make the tag local')),
3129 3128 ('m', 'message', '', _('message for tag commit log entry')),
3130 3129 ('d', 'date', '', _('record datecode as commit date')),
3131 3130 ('u', 'user', '', _('record user as committer')),
3132 3131 ('r', 'rev', '', _('revision to tag'))],
3133 3132 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3134 3133 "tags": (tags, [], _('hg tags')),
3135 3134 "tip":
3136 3135 (tip,
3137 3136 [('b', 'branches', None, _('show branches')),
3138 3137 ('', 'style', '', _('display using template map file')),
3139 3138 ('p', 'patch', None, _('show patch')),
3140 3139 ('', 'template', '', _('display with template'))],
3141 3140 _('hg tip [-b] [-p]')),
3142 3141 "unbundle":
3143 3142 (unbundle,
3144 3143 [('u', 'update', None,
3145 3144 _('update the working directory to tip after unbundle'))],
3146 3145 _('hg unbundle [-u] FILE')),
3147 3146 "debugundo|undo": (undo, [], _('hg undo')),
3148 3147 "^update|up|checkout|co":
3149 3148 (update,
3150 3149 [('b', 'branch', '', _('checkout the head of a specific branch')),
3151 3150 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3152 3151 ('C', 'clean', None, _('overwrite locally modified files')),
3153 3152 ('f', 'force', None, _('force a merge with outstanding changes'))],
3154 3153 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3155 3154 "verify": (verify, [], _('hg verify')),
3156 3155 "version": (show_version, [], _('hg version')),
3157 3156 }
3158 3157
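# Illustrative sketch (not part of the diff): each entry in the table above
# maps "name|alias..." (a leading "^" puts the command on the short list) to
# a (function, options, synopsis) tuple, where every option is a
# (shortopt, longopt, default, help) 4-tuple.  A hypothetical entry:
def hello(ui, repo, **opts):
    """say hello (hypothetical command, not part of the real table)"""
    ui.write("hello from %s\n" % repo.root)

example_entry = {
    "^hello|hi":
        (hello,
         [('l', 'loud', None, 'greet loudly')],
         'hg hello [-l]'),
}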
3159 3158 globalopts = [
3160 3159 ('R', 'repository', '',
3161 3160 _('repository root directory or symbolic path name')),
3162 3161 ('', 'cwd', '', _('change working directory')),
3163 3162 ('y', 'noninteractive', None,
3164 3163 _('do not prompt, assume \'yes\' for any required answers')),
3165 3164 ('q', 'quiet', None, _('suppress output')),
3166 3165 ('v', 'verbose', None, _('enable additional output')),
3167 3166 ('', 'config', [], _('set/override config option')),
3168 3167 ('', 'debug', None, _('enable debugging output')),
3169 3168 ('', 'debugger', None, _('start debugger')),
3170 3169 ('', 'lsprof', None, _('print improved command execution profile')),
3171 3170 ('', 'traceback', None, _('print traceback on exception')),
3172 3171 ('', 'time', None, _('time how long the command takes')),
3173 3172 ('', 'profile', None, _('print command execution profile')),
3174 3173 ('', 'version', None, _('output version information and exit')),
3175 3174 ('h', 'help', None, _('display help and exit')),
3176 3175 ]
3177 3176
3178 3177 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3179 3178 " debugindex debugindexdot")
3180 3179 optionalrepo = ("paths serve debugconfig")
3181 3180
3182 3181 def findpossible(cmd):
3183 3182 """
3184 3183 Return cmd -> (aliases, command table entry)
3185 3184 for each matching command.
3186 3185 Return debug commands (or their aliases) only if no normal command matches.
3187 3186 """
3188 3187 choice = {}
3189 3188 debugchoice = {}
3190 3189 for e in table.keys():
3191 3190 aliases = e.lstrip("^").split("|")
3192 3191 found = None
3193 3192 if cmd in aliases:
3194 3193 found = cmd
3195 3194 else:
3196 3195 for a in aliases:
3197 3196 if a.startswith(cmd):
3198 3197 found = a
3199 3198 break
3200 3199 if found is not None:
3201 3200 if aliases[0].startswith("debug"):
3202 3201 debugchoice[found] = (aliases, table[e])
3203 3202 else:
3204 3203 choice[found] = (aliases, table[e])
3205 3204
3206 3205 if not choice and debugchoice:
3207 3206 choice = debugchoice
3208 3207
3209 3208 return choice
3210 3209
3211 3210 def findcmd(cmd):
3212 3211 """Return (aliases, command table entry) for command string."""
3213 3212 choice = findpossible(cmd)
3214 3213
3215 3214 if choice.has_key(cmd):
3216 3215 return choice[cmd]
3217 3216
3218 3217 if len(choice) > 1:
3219 3218 clist = choice.keys()
3220 3219 clist.sort()
3221 3220 raise AmbiguousCommand(cmd, clist)
3222 3221
3223 3222 if choice:
3224 3223 return choice.values()[0]
3225 3224
3226 3225 raise UnknownCommand(cmd)
3227 3226
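# Rough usage sketch of findcmd() (illustrative only): an unambiguous prefix
# such as "stat" resolves to the status entry, an ambiguous prefix such as
# "pu" raises AmbiguousCommand, and anything else raises UnknownCommand.
def resolve(cmdstring):
    try:
        aliases, entry = findcmd(cmdstring)
        return aliases[0]                  # canonical name, e.g. 'status'
    except AmbiguousCommand, inst:
        return 'ambiguous between: %s' % ' '.join(inst.args[1])
    except UnknownCommand:
        return None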
3228 3227 def catchterm(*args):
3229 3228 raise util.SignalInterrupt
3230 3229
3231 3230 def run():
3232 3231 sys.exit(dispatch(sys.argv[1:]))
3233 3232
3234 3233 class ParseError(Exception):
3235 3234 """Exception raised on errors in parsing the command line."""
3236 3235
3237 3236 def parse(ui, args):
3238 3237 options = {}
3239 3238 cmdoptions = {}
3240 3239
3241 3240 try:
3242 3241 args = fancyopts.fancyopts(args, globalopts, options)
3243 3242 except fancyopts.getopt.GetoptError, inst:
3244 3243 raise ParseError(None, inst)
3245 3244
3246 3245 if args:
3247 3246 cmd, args = args[0], args[1:]
3248 3247 aliases, i = findcmd(cmd)
3249 3248 cmd = aliases[0]
3250 3249 defaults = ui.config("defaults", cmd)
3251 3250 if defaults:
3252 3251 args = defaults.split() + args
3253 3252 c = list(i[1])
3254 3253 else:
3255 3254 cmd = None
3256 3255 c = []
3257 3256
3258 3257 # combine global options into local
3259 3258 for o in globalopts:
3260 3259 c.append((o[0], o[1], options[o[1]], o[3]))
3261 3260
3262 3261 try:
3263 3262 args = fancyopts.fancyopts(args, c, cmdoptions)
3264 3263 except fancyopts.getopt.GetoptError, inst:
3265 3264 raise ParseError(cmd, inst)
3266 3265
3267 3266 # separate global options back out
3268 3267 for o in globalopts:
3269 3268 n = o[1]
3270 3269 options[n] = cmdoptions[n]
3271 3270 del cmdoptions[n]
3272 3271
3273 3272 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3274 3273
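# Illustrative sketch of what parse() hands back to dispatch() (hypothetical
# command line; global options are separated back out of cmdoptions):
#
#   cmd, func, args, options, cmdoptions = parse(u, ['-v', 'log', '-p', 'a.c'])
#   # cmd        == 'log'                 (expanded from any prefix or alias)
#   # func       == the log function from the table
#   # args       == ['a.c']
#   # options['verbose']  is set          (global option)
#   # cmdoptions['patch'] is set          (command option)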
3275 3274 external = {}
3276 3275
3277 3276 def findext(name):
3278 3277 '''return module with given extension name'''
3279 3278 try:
3280 3279 return sys.modules[external[name]]
3281 3280 except KeyError:
3282 3281 for k, v in external.iteritems():
3283 3282 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3284 3283 return sys.modules[v]
3285 3284 raise KeyError(name)
3286 3285
3287 3286 def dispatch(args):
3288 3287 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3289 3288 num = getattr(signal, name, None)
3290 3289 if num: signal.signal(num, catchterm)
3291 3290
3292 3291 try:
3293 3292 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3294 3293 except util.Abort, inst:
3295 3294 sys.stderr.write(_("abort: %s\n") % inst)
3296 3295 return -1
3297 3296
3298 3297 for ext_name, load_from_name in u.extensions():
3299 3298 try:
3300 3299 if load_from_name:
3301 3300 # the module will be loaded in sys.modules
3302 3301 # choose a unique name so that it doesn't
3303 3302 # conflict with other modules
3304 3303 module_name = "hgext_%s" % ext_name.replace('.', '_')
3305 3304 mod = imp.load_source(module_name, load_from_name)
3306 3305 else:
3307 3306 def importh(name):
3308 3307 mod = __import__(name)
3309 3308 components = name.split('.')
3310 3309 for comp in components[1:]:
3311 3310 mod = getattr(mod, comp)
3312 3311 return mod
3313 3312 try:
3314 3313 mod = importh("hgext.%s" % ext_name)
3315 3314 except ImportError:
3316 3315 mod = importh(ext_name)
3317 3316 external[ext_name] = mod.__name__
3318 3317 except (util.SignalInterrupt, KeyboardInterrupt):
3319 3318 raise
3320 3319 except Exception, inst:
3321 3320 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3322 3321 if u.print_exc():
3323 3322 return 1
3324 3323
3325 3324 for name in external.itervalues():
3326 3325 mod = sys.modules[name]
3327 3326 uisetup = getattr(mod, 'uisetup', None)
3328 3327 if uisetup:
3329 3328 uisetup(u)
3330 3329 cmdtable = getattr(mod, 'cmdtable', {})
3331 3330 for t in cmdtable:
3332 3331 if t in table:
3333 3332 u.warn(_("module %s overrides %s\n") % (name, t))
3334 3333 table.update(cmdtable)
3335 3334
3336 3335 try:
3337 3336 cmd, func, args, options, cmdoptions = parse(u, args)
3338 3337 if options["time"]:
3339 3338 def get_times():
3340 3339 t = os.times()
3341 3340 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3342 3341 t = (t[0], t[1], t[2], t[3], time.clock())
3343 3342 return t
3344 3343 s = get_times()
3345 3344 def print_time():
3346 3345 t = get_times()
3347 3346 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3348 3347 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3349 3348 atexit.register(print_time)
3350 3349
3351 3350 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3352 3351 not options["noninteractive"], options["traceback"],
3353 3352 options["config"])
3354 3353
3355 3354 # enter the debugger before command execution
3356 3355 if options['debugger']:
3357 3356 pdb.set_trace()
3358 3357
3359 3358 try:
3360 3359 if options['cwd']:
3361 3360 try:
3362 3361 os.chdir(options['cwd'])
3363 3362 except OSError, inst:
3364 3363 raise util.Abort('%s: %s' %
3365 3364 (options['cwd'], inst.strerror))
3366 3365
3367 3366 path = u.expandpath(options["repository"]) or ""
3368 3367 repo = path and hg.repository(u, path=path) or None
3369 3368
3370 3369 if options['help']:
3371 3370 return help_(u, cmd, options['version'])
3372 3371 elif options['version']:
3373 3372 return show_version(u)
3374 3373 elif not cmd:
3375 3374 return help_(u, 'shortlist')
3376 3375
3377 3376 if cmd not in norepo.split():
3378 3377 try:
3379 3378 if not repo:
3380 3379 repo = hg.repository(u, path=path)
3381 3380 u = repo.ui
3382 3381 for name in external.itervalues():
3383 3382 mod = sys.modules[name]
3384 3383 if hasattr(mod, 'reposetup'):
3385 3384 mod.reposetup(u, repo)
3386 3385 hg.repo_setup_hooks.append(mod.reposetup)
3387 3386 except hg.RepoError:
3388 3387 if cmd not in optionalrepo.split():
3389 3388 raise
3390 3389 d = lambda: func(u, repo, *args, **cmdoptions)
3391 3390 else:
3392 3391 d = lambda: func(u, *args, **cmdoptions)
3393 3392
3394 3393 # reupdate the options, repo/.hg/hgrc may have changed them
3395 3394 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3396 3395 not options["noninteractive"], options["traceback"],
3397 3396 options["config"])
3398 3397
3399 3398 try:
3400 3399 if options['profile']:
3401 3400 import hotshot, hotshot.stats
3402 3401 prof = hotshot.Profile("hg.prof")
3403 3402 try:
3404 3403 try:
3405 3404 return prof.runcall(d)
3406 3405 except:
3407 3406 try:
3408 3407 u.warn(_('exception raised - generating '
3409 3408 'profile anyway\n'))
3410 3409 except:
3411 3410 pass
3412 3411 raise
3413 3412 finally:
3414 3413 prof.close()
3415 3414 stats = hotshot.stats.load("hg.prof")
3416 3415 stats.strip_dirs()
3417 3416 stats.sort_stats('time', 'calls')
3418 3417 stats.print_stats(40)
3419 3418 elif options['lsprof']:
3420 3419 try:
3421 3420 from mercurial import lsprof
3422 3421 except ImportError:
3423 3422 raise util.Abort(_(
3424 3423 'lsprof not available - install from '
3425 3424 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3426 3425 p = lsprof.Profiler()
3427 3426 p.enable(subcalls=True)
3428 3427 try:
3429 3428 return d()
3430 3429 finally:
3431 3430 p.disable()
3432 3431 stats = lsprof.Stats(p.getstats())
3433 3432 stats.sort()
3434 3433 stats.pprint(top=10, file=sys.stderr, climit=5)
3435 3434 else:
3436 3435 return d()
3437 3436 finally:
3438 3437 u.flush()
3439 3438 except:
3440 3439 # enter the debugger when we hit an exception
3441 3440 if options['debugger']:
3442 3441 pdb.post_mortem(sys.exc_info()[2])
3443 3442 u.print_exc()
3444 3443 raise
3445 3444 except ParseError, inst:
3446 3445 if inst.args[0]:
3447 3446 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3448 3447 help_(u, inst.args[0])
3449 3448 else:
3450 3449 u.warn(_("hg: %s\n") % inst.args[1])
3451 3450 help_(u, 'shortlist')
3452 3451 except AmbiguousCommand, inst:
3453 3452 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3454 3453 (inst.args[0], " ".join(inst.args[1])))
3455 3454 except UnknownCommand, inst:
3456 3455 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3457 3456 help_(u, 'shortlist')
3458 3457 except hg.RepoError, inst:
3459 3458 u.warn(_("abort: %s!\n") % inst)
3460 3459 except lock.LockHeld, inst:
3461 3460 if inst.errno == errno.ETIMEDOUT:
3462 3461 reason = _('timed out waiting for lock held by %s') % inst.locker
3463 3462 else:
3464 3463 reason = _('lock held by %s') % inst.locker
3465 3464 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3466 3465 except lock.LockUnavailable, inst:
3467 3466 u.warn(_("abort: could not lock %s: %s\n") %
3468 3467 (inst.desc or inst.filename, inst.strerror))
3469 3468 except revlog.RevlogError, inst:
3470 3469 u.warn(_("abort: "), inst, "!\n")
3471 3470 except util.SignalInterrupt:
3472 3471 u.warn(_("killed!\n"))
3473 3472 except KeyboardInterrupt:
3474 3473 try:
3475 3474 u.warn(_("interrupted!\n"))
3476 3475 except IOError, inst:
3477 3476 if inst.errno == errno.EPIPE:
3478 3477 if u.debugflag:
3479 3478 u.warn(_("\nbroken pipe\n"))
3480 3479 else:
3481 3480 raise
3482 3481 except IOError, inst:
3483 3482 if hasattr(inst, "code"):
3484 3483 u.warn(_("abort: %s\n") % inst)
3485 3484 elif hasattr(inst, "reason"):
3486 3485 u.warn(_("abort: error: %s\n") % inst.reason[1])
3487 3486 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3488 3487 if u.debugflag:
3489 3488 u.warn(_("broken pipe\n"))
3490 3489 elif getattr(inst, "strerror", None):
3491 3490 if getattr(inst, "filename", None):
3492 3491 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3493 3492 else:
3494 3493 u.warn(_("abort: %s\n") % inst.strerror)
3495 3494 else:
3496 3495 raise
3497 3496 except OSError, inst:
3498 3497 if hasattr(inst, "filename"):
3499 3498 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3500 3499 else:
3501 3500 u.warn(_("abort: %s\n") % inst.strerror)
3502 3501 except util.Abort, inst:
3503 3502 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3504 3503 except TypeError, inst:
3505 3504 # was this an argument error?
3506 3505 tb = traceback.extract_tb(sys.exc_info()[2])
3507 3506 if len(tb) > 2: # no
3508 3507 raise
3509 3508 u.debug(inst, "\n")
3510 3509 u.warn(_("%s: invalid arguments\n") % cmd)
3511 3510 help_(u, cmd)
3512 3511 except SystemExit, inst:
3513 3512 # Commands shouldn't sys.exit directly, but give a return code.
3514 3513 # Just in case, catch this and pass exit code to caller.
3515 3514 return inst.code
3516 3515 except:
3517 3516 u.warn(_("** unknown exception encountered, details follow\n"))
3518 3517 u.warn(_("** report bug details to "
3519 3518 "http://www.selenic.com/mercurial/bts\n"))
3520 3519 u.warn(_("** or mercurial@selenic.com\n"))
3521 3520 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3522 3521 % version.get_version())
3523 3522 raise
3524 3523
3525 3524 return -1
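# Illustrative extension skeleton (assumed module name "myext"): dispatch()
# above calls uisetup(u) for every loaded extension before the command line
# is parsed, merges any cmdtable into the command table, and calls
# reposetup(u, repo) once a local repository object exists.
def uisetup(ui):
    # runs once per invocation, before command dispatch
    ui.note("myext: uisetup\n")

def reposetup(ui, repo):
    # runs when a repository is opened; also added to hg.repo_setup_hooks
    ui.note("myext: reposetup for %s\n" % repo.root)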
@@ -1,1757 +1,1747 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 import repo
12 12 demandload(globals(), "appendfile changegroup")
13 13 demandload(globals(), "changelog dirstate filelog manifest context")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "os revlog time util")
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = ()
19 19
20 20 def __del__(self):
21 21 self.transhandle = None
22 22 def __init__(self, parentui, path=None, create=0):
23 23 repo.repository.__init__(self)
24 24 if not path:
25 25 p = os.getcwd()
26 26 while not os.path.isdir(os.path.join(p, ".hg")):
27 27 oldp = p
28 28 p = os.path.dirname(p)
29 29 if p == oldp:
30 30 raise repo.RepoError(_("no repo found"))
31 31 path = p
32 32 self.path = os.path.join(path, ".hg")
33 33
34 34 if not create and not os.path.isdir(self.path):
35 35 raise repo.RepoError(_("repository %s not found") % path)
36 36
37 37 self.root = os.path.abspath(path)
38 38 self.origroot = path
39 39 self.ui = ui.ui(parentui=parentui)
40 40 self.opener = util.opener(self.path)
41 41 self.wopener = util.opener(self.root)
42 42
43 43 try:
44 44 self.ui.readconfig(self.join("hgrc"), self.root)
45 45 except IOError:
46 46 pass
47 47
48 48 v = self.ui.revlogopts
49 49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 51 fl = v.get('flags', None)
52 52 flags = 0
53 53 if fl != None:
54 54 for x in fl.split():
55 55 flags |= revlog.flagstr(x)
56 56 elif self.revlogv1:
57 57 flags = revlog.REVLOG_DEFAULT_FLAGS
58 58
59 59 v = self.revlogversion | flags
60 60 self.manifest = manifest.manifest(self.opener, v)
61 61 self.changelog = changelog.changelog(self.opener, v)
62 62
63 63 # the changelog might not have the inline index flag
64 64 # on. If the format of the changelog is the same as found in
65 65 # .hgrc, apply any flags found in the .hgrc as well.
66 66 # Otherwise, just version from the changelog
67 67 v = self.changelog.version
68 68 if v == self.revlogversion:
69 69 v |= flags
70 70 self.revlogversion = v
71 71
72 72 self.tagscache = None
73 73 self.nodetagscache = None
74 74 self.encodepats = None
75 75 self.decodepats = None
76 76 self.transhandle = None
77 77
78 78 if create:
79 79 if not os.path.exists(path):
80 80 os.mkdir(path)
81 81 os.mkdir(self.path)
82 82 os.mkdir(self.join("data"))
83 83
84 84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85 85
86 86 def url(self):
87 87 return 'file:' + self.root
88 88
89 89 def hook(self, name, throw=False, **args):
90 90 def callhook(hname, funcname):
91 91 '''call python hook. hook is callable object, looked up as
92 92 name in python module. if callable returns "true", hook
93 93 fails, else passes. if hook raises exception, treated as
94 94 hook failure. exception propagates if throw is "true".
95 95
96 96 reason for "true" meaning "hook failed" is so that
97 97 unmodified commands (e.g. mercurial.commands.update) can
98 98 be run as hooks without wrappers to convert return values.'''
99 99
100 100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
101 101 d = funcname.rfind('.')
102 102 if d == -1:
103 103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
104 104 % (hname, funcname))
105 105 modname = funcname[:d]
106 106 try:
107 107 obj = __import__(modname)
108 108 except ImportError:
109 109 try:
110 110 # extensions are loaded with hgext_ prefix
111 111 obj = __import__("hgext_%s" % modname)
112 112 except ImportError:
113 113 raise util.Abort(_('%s hook is invalid '
114 114 '(import of "%s" failed)') %
115 115 (hname, modname))
116 116 try:
117 117 for p in funcname.split('.')[1:]:
118 118 obj = getattr(obj, p)
119 119 except AttributeError, err:
120 120 raise util.Abort(_('%s hook is invalid '
121 121 '("%s" is not defined)') %
122 122 (hname, funcname))
123 123 if not callable(obj):
124 124 raise util.Abort(_('%s hook is invalid '
125 125 '("%s" is not callable)') %
126 126 (hname, funcname))
127 127 try:
128 128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
129 129 except (KeyboardInterrupt, util.SignalInterrupt):
130 130 raise
131 131 except Exception, exc:
132 132 if isinstance(exc, util.Abort):
133 133 self.ui.warn(_('error: %s hook failed: %s\n') %
134 134 (hname, exc.args[0] % exc.args[1:]))
135 135 else:
136 136 self.ui.warn(_('error: %s hook raised an exception: '
137 137 '%s\n') % (hname, exc))
138 138 if throw:
139 139 raise
140 140 self.ui.print_exc()
141 141 return True
142 142 if r:
143 143 if throw:
144 144 raise util.Abort(_('%s hook failed') % hname)
145 145 self.ui.warn(_('warning: %s hook failed\n') % hname)
146 146 return r
147 147
148 148 def runhook(name, cmd):
149 149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
150 150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
151 151 r = util.system(cmd, environ=env, cwd=self.root)
152 152 if r:
153 153 desc, r = util.explain_exit(r)
154 154 if throw:
155 155 raise util.Abort(_('%s hook %s') % (name, desc))
156 156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
157 157 return r
158 158
159 159 r = False
160 160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
161 161 if hname.split(".", 1)[0] == name and cmd]
162 162 hooks.sort()
163 163 for hname, cmd in hooks:
164 164 if cmd.startswith('python:'):
165 165 r = callhook(hname, cmd[7:].strip()) or r
166 166 else:
167 167 r = runhook(hname, cmd) or r
168 168 return r
169 169
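# Illustrative python hook (hypothetical module/function names): callhook()
# above imports the module named before the last '.', resolves the attribute
# after it, and calls it with ui, repo, hooktype and the hook's keyword
# arguments; a true return value means the hook failed.
#
#   [hooks]
#   pretag.check = python:myhooks.forbid_spaces
def forbid_spaces(ui, repo, hooktype, node=None, tag=None, local=None, **kwargs):
    if tag and ' ' in tag:
        ui.warn("tag names may not contain spaces\n")
        return True        # failure; aborts the operation when throw=True
    return False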
170 170 tag_disallowed = ':\r\n'
171 171
172 172 def tag(self, name, node, local=False, message=None, user=None, date=None):
173 173 '''tag a revision with a symbolic name.
174 174
175 175 if local is True, the tag is stored in a per-repository file.
176 176 otherwise, it is stored in the .hgtags file, and a new
177 177 changeset is committed with the change.
178 178
179 179 keyword arguments:
180 180
181 181 local: whether to store tag in non-version-controlled file
182 182 (default False)
183 183
184 184 message: commit message to use if committing
185 185
186 186 user: name of user to use if committing
187 187
188 188 date: date tuple to use if committing'''
189 189
190 190 for c in self.tag_disallowed:
191 191 if c in name:
192 192 raise util.Abort(_('%r cannot be used in a tag name') % c)
193 193
194 194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
195 195
196 196 if local:
197 197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
198 198 self.hook('tag', node=node, tag=name, local=local)
199 199 return
200 200
201 for x in self.changes():
201 for x in self.status()[:5]:
202 202 if '.hgtags' in x:
203 203 raise util.Abort(_('working copy of .hgtags is changed '
204 204 '(please commit .hgtags manually)'))
205 205
206 206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
207 207 if self.dirstate.state('.hgtags') == '?':
208 208 self.add(['.hgtags'])
209 209
210 210 if not message:
211 211 message = _('Added tag %s for changeset %s') % (name, node)
212 212
213 213 self.commit(['.hgtags'], message, user, date)
214 214 self.hook('tag', node=node, tag=name, local=local)
215 215
216 216 def tags(self):
217 217 '''return a mapping of tag to node'''
218 218 if not self.tagscache:
219 219 self.tagscache = {}
220 220
221 221 def parsetag(line, context):
222 222 if not line:
223 223 return
224 224 s = l.split(" ", 1)
225 225 if len(s) != 2:
226 226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 227 return
228 228 node, key = s
229 229 key = key.strip()
230 230 try:
231 231 bin_n = bin(node)
232 232 except TypeError:
233 233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 234 (context, node))
235 235 return
236 236 if bin_n not in self.changelog.nodemap:
237 237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 238 (context, key))
239 239 return
240 240 self.tagscache[key] = bin_n
241 241
242 242 # read the tags file from each head, ending with the tip,
243 243 # and add each tag found to the map, with "newer" ones
244 244 # taking precedence
245 245 heads = self.heads()
246 246 heads.reverse()
247 247 fl = self.file(".hgtags")
248 248 for node in heads:
249 249 change = self.changelog.read(node)
250 250 rev = self.changelog.rev(node)
251 251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 252 if fn is None: continue
253 253 count = 0
254 254 for l in fl.read(fn).splitlines():
255 255 count += 1
256 256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 257 (rev, short(node), count))
258 258 try:
259 259 f = self.opener("localtags")
260 260 count = 0
261 261 for l in f:
262 262 count += 1
263 263 parsetag(l, _("localtags, line %d") % count)
264 264 except IOError:
265 265 pass
266 266
267 267 self.tagscache['tip'] = self.changelog.tip()
268 268
269 269 return self.tagscache
270 270
271 271 def tagslist(self):
272 272 '''return a list of tags ordered by revision'''
273 273 l = []
274 274 for t, n in self.tags().items():
275 275 try:
276 276 r = self.changelog.rev(n)
277 277 except:
278 278 r = -2 # sort to the beginning of the list if unknown
279 279 l.append((r, t, n))
280 280 l.sort()
281 281 return [(t, n) for r, t, n in l]
282 282
283 283 def nodetags(self, node):
284 284 '''return the tags associated with a node'''
285 285 if not self.nodetagscache:
286 286 self.nodetagscache = {}
287 287 for t, n in self.tags().items():
288 288 self.nodetagscache.setdefault(n, []).append(t)
289 289 return self.nodetagscache.get(node, [])
290 290
291 291 def lookup(self, key):
292 292 try:
293 293 return self.tags()[key]
294 294 except KeyError:
295 295 if key == '.':
296 296 key = self.dirstate.parents()[0]
297 297 if key == nullid:
298 298 raise repo.RepoError(_("no revision checked out"))
299 299 try:
300 300 return self.changelog.lookup(key)
301 301 except:
302 302 raise repo.RepoError(_("unknown revision '%s'") % key)
303 303
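# Usage sketch (assumes an open localrepository `repo`): tags() maps each tag
# name to a binary node and always contains 'tip'; lookup() accepts a tag
# name, '.', or anything the changelog's own lookup() understands.
tipnode = repo.lookup('tip')
for name, n in repo.tagslist():            # ordered from oldest to newest
    print name, repo.changelog.rev(n)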
304 304 def dev(self):
305 305 return os.lstat(self.path).st_dev
306 306
307 307 def local(self):
308 308 return True
309 309
310 310 def join(self, f):
311 311 return os.path.join(self.path, f)
312 312
313 313 def wjoin(self, f):
314 314 return os.path.join(self.root, f)
315 315
316 316 def file(self, f):
317 317 if f[0] == '/':
318 318 f = f[1:]
319 319 return filelog.filelog(self.opener, f, self.revlogversion)
320 320
321 321 def changectx(self, changeid):
322 322 return context.changectx(self, changeid)
323 323
324 324 def filectx(self, path, changeid=None, fileid=None):
325 325 """changeid can be a changeset revision, node, or tag.
326 326 fileid can be a file revision or node."""
327 327 return context.filectx(self, path, changeid, fileid)
328 328
329 329 def getcwd(self):
330 330 return self.dirstate.getcwd()
331 331
332 332 def wfile(self, f, mode='r'):
333 333 return self.wopener(f, mode)
334 334
335 335 def wread(self, filename):
336 336 if self.encodepats == None:
337 337 l = []
338 338 for pat, cmd in self.ui.configitems("encode"):
339 339 mf = util.matcher(self.root, "", [pat], [], [])[1]
340 340 l.append((mf, cmd))
341 341 self.encodepats = l
342 342
343 343 data = self.wopener(filename, 'r').read()
344 344
345 345 for mf, cmd in self.encodepats:
346 346 if mf(filename):
347 347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
348 348 data = util.filter(data, cmd)
349 349 break
350 350
351 351 return data
352 352
353 353 def wwrite(self, filename, data, fd=None):
354 354 if self.decodepats == None:
355 355 l = []
356 356 for pat, cmd in self.ui.configitems("decode"):
357 357 mf = util.matcher(self.root, "", [pat], [], [])[1]
358 358 l.append((mf, cmd))
359 359 self.decodepats = l
360 360
361 361 for mf, cmd in self.decodepats:
362 362 if mf(filename):
363 363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
364 364 data = util.filter(data, cmd)
365 365 break
366 366
367 367 if fd:
368 368 return fd.write(data)
369 369 return self.wopener(filename, 'w').write(data)
370 370
371 371 def transaction(self):
372 372 tr = self.transhandle
373 373 if tr != None and tr.running():
374 374 return tr.nest()
375 375
376 376 # save dirstate for rollback
377 377 try:
378 378 ds = self.opener("dirstate").read()
379 379 except IOError:
380 380 ds = ""
381 381 self.opener("journal.dirstate", "w").write(ds)
382 382
383 383 tr = transaction.transaction(self.ui.warn, self.opener,
384 384 self.join("journal"),
385 385 aftertrans(self.path))
386 386 self.transhandle = tr
387 387 return tr
388 388
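# Usage sketch (illustrative): transaction() nests if one is already running;
# otherwise it journals the dirstate and returns a transaction that must be
# close()d once all the revlog writes have succeeded, as commit() does below.
tr = repo.transaction()
# ... filelog/manifest/changelog .add(..., tr, linkrev, ...) calls go here ...
tr.close()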
389 389 def recover(self):
390 390 l = self.lock()
391 391 if os.path.exists(self.join("journal")):
392 392 self.ui.status(_("rolling back interrupted transaction\n"))
393 393 transaction.rollback(self.opener, self.join("journal"))
394 394 self.reload()
395 395 return True
396 396 else:
397 397 self.ui.warn(_("no interrupted transaction available\n"))
398 398 return False
399 399
400 400 def rollback(self, wlock=None):
401 401 if not wlock:
402 402 wlock = self.wlock()
403 403 l = self.lock()
404 404 if os.path.exists(self.join("undo")):
405 405 self.ui.status(_("rolling back last transaction\n"))
406 406 transaction.rollback(self.opener, self.join("undo"))
407 407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
408 408 self.reload()
409 409 self.wreload()
410 410 else:
411 411 self.ui.warn(_("no rollback information available\n"))
412 412
413 413 def wreload(self):
414 414 self.dirstate.read()
415 415
416 416 def reload(self):
417 417 self.changelog.load()
418 418 self.manifest.load()
419 419 self.tagscache = None
420 420 self.nodetagscache = None
421 421
422 422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
423 423 desc=None):
424 424 try:
425 425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
426 426 except lock.LockHeld, inst:
427 427 if not wait:
428 428 raise
429 429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
430 430 (desc, inst.args[0]))
431 431 # default to 600 seconds timeout
432 432 l = lock.lock(self.join(lockname),
433 433 int(self.ui.config("ui", "timeout") or 600),
434 434 releasefn, desc=desc)
435 435 if acquirefn:
436 436 acquirefn()
437 437 return l
438 438
439 439 def lock(self, wait=1):
440 440 return self.do_lock("lock", wait, acquirefn=self.reload,
441 441 desc=_('repository %s') % self.origroot)
442 442
443 443 def wlock(self, wait=1):
444 444 return self.do_lock("wlock", wait, self.dirstate.write,
445 445 self.wreload,
446 446 desc=_('working directory of %s') % self.origroot)
447 447
448 448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
449 449 "determine whether a new filenode is needed"
450 450 fp1 = manifest1.get(filename, nullid)
451 451 fp2 = manifest2.get(filename, nullid)
452 452
453 453 if fp2 != nullid:
454 454 # is one parent an ancestor of the other?
455 455 fpa = filelog.ancestor(fp1, fp2)
456 456 if fpa == fp1:
457 457 fp1, fp2 = fp2, nullid
458 458 elif fpa == fp2:
459 459 fp2 = nullid
460 460
461 461 # is the file unmodified from the parent? report existing entry
462 462 if fp2 == nullid and text == filelog.read(fp1):
463 463 return (fp1, None, None)
464 464
465 465 return (None, fp1, fp2)
466 466
467 467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
468 468 orig_parent = self.dirstate.parents()[0] or nullid
469 469 p1 = p1 or self.dirstate.parents()[0] or nullid
470 470 p2 = p2 or self.dirstate.parents()[1] or nullid
471 471 c1 = self.changelog.read(p1)
472 472 c2 = self.changelog.read(p2)
473 473 m1 = self.manifest.read(c1[0]).copy()
474 474 m2 = self.manifest.read(c2[0])
475 475 changed = []
476 476
477 477 if orig_parent == p1:
478 478 update_dirstate = 1
479 479 else:
480 480 update_dirstate = 0
481 481
482 482 if not wlock:
483 483 wlock = self.wlock()
484 484 l = self.lock()
485 485 tr = self.transaction()
486 486 linkrev = self.changelog.count()
487 487 for f in files:
488 488 try:
489 489 t = self.wread(f)
490 490 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
491 491 r = self.file(f)
492 492
493 493 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
494 494 if entry:
495 495 m1[f] = entry
496 496 continue
497 497
498 498 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
499 499 changed.append(f)
500 500 if update_dirstate:
501 501 self.dirstate.update([f], "n")
502 502 except IOError:
503 503 try:
504 504 del m1[f]
505 505 if update_dirstate:
506 506 self.dirstate.forget([f])
507 507 except:
508 508 # deleted from p2?
509 509 pass
510 510
511 511 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
512 512 user = user or self.ui.username()
513 513 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
514 514 tr.close()
515 515 if update_dirstate:
516 516 self.dirstate.setparents(n, nullid)
517 517
518 518 def commit(self, files=None, text="", user=None, date=None,
519 519 match=util.always, force=False, lock=None, wlock=None,
520 520 force_editor=False):
521 521 commit = []
522 522 remove = []
523 523 changed = []
524 524
525 525 if files:
526 526 for f in files:
527 527 s = self.dirstate.state(f)
528 528 if s in 'nmai':
529 529 commit.append(f)
530 530 elif s == 'r':
531 531 remove.append(f)
532 532 else:
533 533 self.ui.warn(_("%s not tracked!\n") % f)
534 534 else:
535 modified, added, removed, deleted, unknown = self.changes(match=match)
535 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
536 536 commit = modified + added
537 537 remove = removed
538 538
539 539 p1, p2 = self.dirstate.parents()
540 540 c1 = self.changelog.read(p1)
541 541 c2 = self.changelog.read(p2)
542 542 m1 = self.manifest.read(c1[0]).copy()
543 543 m2 = self.manifest.read(c2[0])
544 544
545 545 if not commit and not remove and not force and p2 == nullid:
546 546 self.ui.status(_("nothing changed\n"))
547 547 return None
548 548
549 549 xp1 = hex(p1)
550 550 if p2 == nullid: xp2 = ''
551 551 else: xp2 = hex(p2)
552 552
553 553 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
554 554
555 555 if not wlock:
556 556 wlock = self.wlock()
557 557 if not lock:
558 558 lock = self.lock()
559 559 tr = self.transaction()
560 560
561 561 # check in files
562 562 new = {}
563 563 linkrev = self.changelog.count()
564 564 commit.sort()
565 565 for f in commit:
566 566 self.ui.note(f + "\n")
567 567 try:
568 568 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
569 569 t = self.wread(f)
570 570 except IOError:
571 571 self.ui.warn(_("trouble committing %s!\n") % f)
572 572 raise
573 573
574 574 r = self.file(f)
575 575
576 576 meta = {}
577 577 cp = self.dirstate.copied(f)
578 578 if cp:
579 579 meta["copy"] = cp
580 580 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
581 581 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
582 582 fp1, fp2 = nullid, nullid
583 583 else:
584 584 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
585 585 if entry:
586 586 new[f] = entry
587 587 continue
588 588
589 589 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
590 590 # remember what we've added so that we can later calculate
591 591 # the files to pull from a set of changesets
592 592 changed.append(f)
593 593
594 594 # update manifest
595 595 m1.update(new)
596 596 for f in remove:
597 597 if f in m1:
598 598 del m1[f]
599 599 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
600 600 (new, remove))
601 601
602 602 # add changeset
603 603 new = new.keys()
604 604 new.sort()
605 605
606 606 user = user or self.ui.username()
607 607 if not text or force_editor:
608 608 edittext = []
609 609 if text:
610 610 edittext.append(text)
611 611 edittext.append("")
612 612 if p2 != nullid:
613 613 edittext.append("HG: branch merge")
614 614 edittext.extend(["HG: changed %s" % f for f in changed])
615 615 edittext.extend(["HG: removed %s" % f for f in remove])
616 616 if not changed and not remove:
617 617 edittext.append("HG: no files changed")
618 618 edittext.append("")
619 619 # run editor in the repository root
620 620 olddir = os.getcwd()
621 621 os.chdir(self.root)
622 622 text = self.ui.edit("\n".join(edittext), user)
623 623 os.chdir(olddir)
624 624
625 625 lines = [line.rstrip() for line in text.rstrip().splitlines()]
626 626 while lines and not lines[0]:
627 627 del lines[0]
628 628 if not lines:
629 629 return None
630 630 text = '\n'.join(lines)
631 631 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
632 632 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
633 633 parent2=xp2)
634 634 tr.close()
635 635
636 636 self.dirstate.setparents(n)
637 637 self.dirstate.update(new, "n")
638 638 self.dirstate.forget(remove)
639 639
640 640 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
641 641 return n
642 642
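# Usage sketch (assumes an open repository `repo`): commit an explicit file
# list, or pass files=None and let commit() fall back on status() to find the
# modified and added files; None is returned when there is nothing to commit.
n = repo.commit(['.hgtags'], text='Added tag 1.0 for changeset abcdef123456',
                user='someone <someone@example.com>')
if n is None:
    print 'nothing changed'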
643 643 def walk(self, node=None, files=[], match=util.always, badmatch=None):
644 644 if node:
645 645 fdict = dict.fromkeys(files)
646 646 for fn in self.manifest.read(self.changelog.read(node)[0]):
647 647 fdict.pop(fn, None)
648 648 if match(fn):
649 649 yield 'm', fn
650 650 for fn in fdict:
651 651 if badmatch and badmatch(fn):
652 652 if match(fn):
653 653 yield 'b', fn
654 654 else:
655 655 self.ui.warn(_('%s: No such file in rev %s\n') % (
656 656 util.pathto(self.getcwd(), fn), short(node)))
657 657 else:
658 658 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
659 659 yield src, fn
660 660
661 661 def status(self, node1=None, node2=None, files=[], match=util.always,
662 662 wlock=None, list_ignored=False, list_clean=False):
663 663 """return status of files between two nodes or node and working directory
664 664
665 665 If node1 is None, use the first dirstate parent instead.
666 666 If node2 is None, compare node1 with working directory.
667 667 """
668 668
669 669 def fcmp(fn, mf):
670 670 t1 = self.wread(fn)
671 671 t2 = self.file(fn).read(mf.get(fn, nullid))
672 672 return cmp(t1, t2)
673 673
674 674 def mfmatches(node):
675 675 change = self.changelog.read(node)
676 676 mf = dict(self.manifest.read(change[0]))
677 677 for fn in mf.keys():
678 678 if not match(fn):
679 679 del mf[fn]
680 680 return mf
681 681
682 682 modified, added, removed, deleted, unknown = [], [], [], [], []
683 683 ignored, clean = [], []
684 684
685 685 compareworking = False
686 686 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
687 687 compareworking = True
688 688
689 689 if not compareworking:
690 690 # read the manifest from node1 before the manifest from node2,
691 691 # so that we'll hit the manifest cache if we're going through
692 692 # all the revisions in parent->child order.
693 693 mf1 = mfmatches(node1)
694 694
695 695 # are we comparing the working directory?
696 696 if not node2:
697 697 if not wlock:
698 698 try:
699 699 wlock = self.wlock(wait=0)
700 700 except lock.LockException:
701 701 wlock = None
702 702 (lookup, modified, added, removed, deleted, unknown,
703 703 ignored, clean) = self.dirstate.status(files, match,
704 704 list_ignored, list_clean)
705 705
706 706 # are we comparing working dir against its parent?
707 707 if compareworking:
708 708 if lookup:
709 709 # do a full compare of any files that might have changed
710 710 mf2 = mfmatches(self.dirstate.parents()[0])
711 711 for f in lookup:
712 712 if fcmp(f, mf2):
713 713 modified.append(f)
714 714 elif wlock is not None:
715 715 self.dirstate.update([f], "n")
716 716 else:
717 717 # we are comparing working dir against non-parent
718 718 # generate a pseudo-manifest for the working dir
719 719 mf2 = mfmatches(self.dirstate.parents()[0])
720 720 for f in lookup + modified + added:
721 721 mf2[f] = ""
722 722 for f in removed:
723 723 if f in mf2:
724 724 del mf2[f]
725 725 else:
726 726 # we are comparing two revisions
727 727 mf2 = mfmatches(node2)
728 728
729 729 if not compareworking:
730 730 # flush lists from dirstate before comparing manifests
731 731 modified, added, clean = [], [], []
732 732
733 733 # make sure to sort the files so we talk to the disk in a
734 734 # reasonable order
735 735 mf2keys = mf2.keys()
736 736 mf2keys.sort()
737 737 for fn in mf2keys:
738 738 if mf1.has_key(fn):
739 739 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
740 740 modified.append(fn)
741 741 elif list_clean:
742 742 clean.append(fn)
743 743 del mf1[fn]
744 744 else:
745 745 added.append(fn)
746 746
747 747 removed = mf1.keys()
748 748
749 749 # sort and return results:
750 750 for l in modified, added, removed, deleted, unknown, ignored, clean:
751 751 l.sort()
752 752 return (modified, added, removed, deleted, unknown, ignored, clean)
753 753
754 def changes(self, node1=None, node2=None, files=[], match=util.always,
755 wlock=None, list_ignored=False, list_clean=False):
756 '''DEPRECATED - use status instead'''
757 marduit = self.status(node1, node2, files, match, wlock,
758 list_ignored, list_clean)
759 if list_ignored:
760 return marduit[:-1]
761 else:
762 return marduit[:-2]
763
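# Migration sketch (illustrative): with the deprecated changes() wrapper gone,
# callers unpack the first five lists returned by status(), which yields
# (modified, added, removed, deleted, unknown, ignored, clean).
modified, added, removed, deleted, unknown = repo.status()[:5]
# or, when the optional lists are wanted as well:
st = repo.status(list_ignored=True, list_clean=True)
modified, added, removed, deleted, unknown, ignored, clean = st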
764 754 def add(self, list, wlock=None):
765 755 if not wlock:
766 756 wlock = self.wlock()
767 757 for f in list:
768 758 p = self.wjoin(f)
769 759 if not os.path.exists(p):
770 760 self.ui.warn(_("%s does not exist!\n") % f)
771 761 elif not os.path.isfile(p):
772 762 self.ui.warn(_("%s not added: only files supported currently\n")
773 763 % f)
774 764 elif self.dirstate.state(f) in 'an':
775 765 self.ui.warn(_("%s already tracked!\n") % f)
776 766 else:
777 767 self.dirstate.update([f], "a")
778 768
779 769 def forget(self, list, wlock=None):
780 770 if not wlock:
781 771 wlock = self.wlock()
782 772 for f in list:
783 773 if self.dirstate.state(f) not in 'ai':
784 774 self.ui.warn(_("%s not added!\n") % f)
785 775 else:
786 776 self.dirstate.forget([f])
787 777
788 778 def remove(self, list, unlink=False, wlock=None):
789 779 if unlink:
790 780 for f in list:
791 781 try:
792 782 util.unlink(self.wjoin(f))
793 783 except OSError, inst:
794 784 if inst.errno != errno.ENOENT:
795 785 raise
796 786 if not wlock:
797 787 wlock = self.wlock()
798 788 for f in list:
799 789 p = self.wjoin(f)
800 790 if os.path.exists(p):
801 791 self.ui.warn(_("%s still exists!\n") % f)
802 792 elif self.dirstate.state(f) == 'a':
803 793 self.dirstate.forget([f])
804 794 elif f not in self.dirstate:
805 795 self.ui.warn(_("%s not tracked!\n") % f)
806 796 else:
807 797 self.dirstate.update([f], "r")
808 798
809 799 def undelete(self, list, wlock=None):
810 800 p = self.dirstate.parents()[0]
811 801 mn = self.changelog.read(p)[0]
812 802 m = self.manifest.read(mn)
813 803 if not wlock:
814 804 wlock = self.wlock()
815 805 for f in list:
816 806 if self.dirstate.state(f) not in "r":
817 807 self.ui.warn("%s not removed!\n" % f)
818 808 else:
819 809 t = self.file(f).read(m[f])
820 810 self.wwrite(f, t)
821 811 util.set_exec(self.wjoin(f), m.execf(f))
822 812 self.dirstate.update([f], "n")
823 813
824 814 def copy(self, source, dest, wlock=None):
825 815 p = self.wjoin(dest)
826 816 if not os.path.exists(p):
827 817 self.ui.warn(_("%s does not exist!\n") % dest)
828 818 elif not os.path.isfile(p):
829 819 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
830 820 else:
831 821 if not wlock:
832 822 wlock = self.wlock()
833 823 if self.dirstate.state(dest) == '?':
834 824 self.dirstate.update([dest], "a")
835 825 self.dirstate.copy(source, dest)
836 826
837 827 def heads(self, start=None):
838 828 heads = self.changelog.heads(start)
839 829 # sort the output in rev descending order
840 830 heads = [(-self.changelog.rev(h), h) for h in heads]
841 831 heads.sort()
842 832 return [n for (r, n) in heads]
843 833
844 834 # branchlookup returns a dict giving a list of branches for
845 835 # each head. A branch is defined as the tag of a node or
846 836 # the branch of the node's parents. If a node has multiple
847 837 # branch tags, tags are eliminated if they are visible from other
848 838 # branch tags.
849 839 #
850 840 # So, for this graph: a->b->c->d->e
851 841 # \ /
852 842 # aa -----/
853 843 # a has tag 2.6.12
854 844 # d has tag 2.6.13
855 845 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
856 846 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
857 847 # from the list.
858 848 #
859 849 # It is possible that more than one head will have the same branch tag.
860 850 # callers need to check the result for multiple heads under the same
861 851 # branch tag if that is a problem for them (ie checkout of a specific
862 852 # branch).
863 853 #
864 854 # passing in a specific branch will limit the depth of the search
865 855 # through the parents. It won't limit the branches returned in the
866 856 # result though.
867 857 def branchlookup(self, heads=None, branch=None):
868 858 if not heads:
869 859 heads = self.heads()
870 860 headt = [ h for h in heads ]
871 861 chlog = self.changelog
872 862 branches = {}
873 863 merges = []
874 864 seenmerge = {}
875 865
876 866 # traverse the tree once for each head, recording in the branches
877 867 # dict which tags are visible from this head. The branches
878 868 # dict also records which tags are visible from each tag
879 869 # while we traverse.
880 870 while headt or merges:
881 871 if merges:
882 872 n, found = merges.pop()
883 873 visit = [n]
884 874 else:
885 875 h = headt.pop()
886 876 visit = [h]
887 877 found = [h]
888 878 seen = {}
889 879 while visit:
890 880 n = visit.pop()
891 881 if n in seen:
892 882 continue
893 883 pp = chlog.parents(n)
894 884 tags = self.nodetags(n)
895 885 if tags:
896 886 for x in tags:
897 887 if x == 'tip':
898 888 continue
899 889 for f in found:
900 890 branches.setdefault(f, {})[n] = 1
901 891 branches.setdefault(n, {})[n] = 1
902 892 break
903 893 if n not in found:
904 894 found.append(n)
905 895 if branch in tags:
906 896 continue
907 897 seen[n] = 1
908 898 if pp[1] != nullid and n not in seenmerge:
909 899 merges.append((pp[1], [x for x in found]))
910 900 seenmerge[n] = 1
911 901 if pp[0] != nullid:
912 902 visit.append(pp[0])
913 903 # traverse the branches dict, eliminating branch tags from each
914 904 # head that are visible from another branch tag for that head.
915 905 out = {}
916 906 viscache = {}
917 907 for h in heads:
918 908 def visible(node):
919 909 if node in viscache:
920 910 return viscache[node]
921 911 ret = {}
922 912 visit = [node]
923 913 while visit:
924 914 x = visit.pop()
925 915 if x in viscache:
926 916 ret.update(viscache[x])
927 917 elif x not in ret:
928 918 ret[x] = 1
929 919 if x in branches:
930 920 visit[len(visit):] = branches[x].keys()
931 921 viscache[node] = ret
932 922 return ret
933 923 if h not in branches:
934 924 continue
935 925 # O(n^2), but somewhat limited. This only searches the
936 926 # tags visible from a specific head, not all the tags in the
937 927 # whole repo.
938 928 for b in branches[h]:
939 929 vis = False
940 930 for bb in branches[h].keys():
941 931 if b != bb:
942 932 if b in visible(bb):
943 933 vis = True
944 934 break
945 935 if not vis:
946 936 l = out.setdefault(h, [])
947 937 l[len(l):] = self.nodetags(b)
948 938 return out
949 939
950 940 def branches(self, nodes):
951 941 if not nodes:
952 942 nodes = [self.changelog.tip()]
953 943 b = []
954 944 for n in nodes:
955 945 t = n
956 946 while 1:
957 947 p = self.changelog.parents(n)
958 948 if p[1] != nullid or p[0] == nullid:
959 949 b.append((t, n, p[0], p[1]))
960 950 break
961 951 n = p[0]
962 952 return b
963 953
964 954 def between(self, pairs):
965 955 r = []
966 956
967 957 for top, bottom in pairs:
968 958 n, l, i = top, [], 0
969 959 f = 1
970 960
971 961 while n != bottom:
972 962 p = self.changelog.parents(n)[0]
973 963 if i == f:
974 964 l.append(n)
975 965 f = f * 2
976 966 n = p
977 967 i += 1
978 968
979 969 r.append(l)
980 970
981 971 return r
982 972
983 973 def findincoming(self, remote, base=None, heads=None, force=False):
984 974 """Return list of roots of the subsets of missing nodes from remote
985 975
986 976 If base dict is specified, assume that these nodes and their parents
987 977 exist on the remote side and that no child of a node of base exists
988 978 in both remote and self.
990 980 Furthermore base will be updated to include the nodes that exist
991 981 in self and remote but whose children do not exist in self and remote.
991 981 If a list of heads is specified, return only nodes which are heads
992 982 or ancestors of these heads.
993 983
994 984 All the ancestors of base are in self and in remote.
995 985 All the descendants of the list returned are missing in self.
996 986 (and so we know that the rest of the nodes are missing in remote, see
997 987 outgoing)
998 988 """
999 989 m = self.changelog.nodemap
1000 990 search = []
1001 991 fetch = {}
1002 992 seen = {}
1003 993 seenbranch = {}
1004 994 if base == None:
1005 995 base = {}
1006 996
1007 997 if not heads:
1008 998 heads = remote.heads()
1009 999
1010 1000 if self.changelog.tip() == nullid:
1011 1001 base[nullid] = 1
1012 1002 if heads != [nullid]:
1013 1003 return [nullid]
1014 1004 return []
1015 1005
1016 1006 # assume we're closer to the tip than the root
1017 1007 # and start by examining the heads
1018 1008 self.ui.status(_("searching for changes\n"))
1019 1009
1020 1010 unknown = []
1021 1011 for h in heads:
1022 1012 if h not in m:
1023 1013 unknown.append(h)
1024 1014 else:
1025 1015 base[h] = 1
1026 1016
1027 1017 if not unknown:
1028 1018 return []
1029 1019
1030 1020 req = dict.fromkeys(unknown)
1031 1021 reqcnt = 0
1032 1022
1033 1023 # search through remote branches
1034 1024 # a 'branch' here is a linear segment of history, with four parts:
1035 1025 # head, root, first parent, second parent
1036 1026 # (a branch always has two parents (or none) by definition)
1037 1027 unknown = remote.branches(unknown)
1038 1028 while unknown:
1039 1029 r = []
1040 1030 while unknown:
1041 1031 n = unknown.pop(0)
1042 1032 if n[0] in seen:
1043 1033 continue
1044 1034
1045 1035 self.ui.debug(_("examining %s:%s\n")
1046 1036 % (short(n[0]), short(n[1])))
1047 1037 if n[0] == nullid: # found the end of the branch
1048 1038 pass
1049 1039 elif n in seenbranch:
1050 1040 self.ui.debug(_("branch already found\n"))
1051 1041 continue
1052 1042 elif n[1] and n[1] in m: # do we know the base?
1053 1043 self.ui.debug(_("found incomplete branch %s:%s\n")
1054 1044 % (short(n[0]), short(n[1])))
1055 1045 search.append(n) # schedule branch range for scanning
1056 1046 seenbranch[n] = 1
1057 1047 else:
1058 1048 if n[1] not in seen and n[1] not in fetch:
1059 1049 if n[2] in m and n[3] in m:
1060 1050 self.ui.debug(_("found new changeset %s\n") %
1061 1051 short(n[1]))
1062 1052 fetch[n[1]] = 1 # earliest unknown
1063 1053 for p in n[2:4]:
1064 1054 if p in m:
1065 1055 base[p] = 1 # latest known
1066 1056
1067 1057 for p in n[2:4]:
1068 1058 if p not in req and p not in m:
1069 1059 r.append(p)
1070 1060 req[p] = 1
1071 1061 seen[n[0]] = 1
1072 1062
1073 1063 if r:
1074 1064 reqcnt += 1
1075 1065 self.ui.debug(_("request %d: %s\n") %
1076 1066 (reqcnt, " ".join(map(short, r))))
1077 1067 for p in range(0, len(r), 10):
1078 1068 for b in remote.branches(r[p:p+10]):
1079 1069 self.ui.debug(_("received %s:%s\n") %
1080 1070 (short(b[0]), short(b[1])))
1081 1071 unknown.append(b)
1082 1072
1083 1073 # do binary search on the branches we found
1084 1074 while search:
1085 1075 n = search.pop(0)
1086 1076 reqcnt += 1
1087 1077 l = remote.between([(n[0], n[1])])[0]
1088 1078 l.append(n[1])
1089 1079 p = n[0]
1090 1080 f = 1
1091 1081 for i in l:
1092 1082 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1093 1083 if i in m:
1094 1084 if f <= 2:
1095 1085 self.ui.debug(_("found new branch changeset %s\n") %
1096 1086 short(p))
1097 1087 fetch[p] = 1
1098 1088 base[i] = 1
1099 1089 else:
1100 1090 self.ui.debug(_("narrowed branch search to %s:%s\n")
1101 1091 % (short(p), short(i)))
1102 1092 search.append((p, i))
1103 1093 break
1104 1094 p, f = i, f * 2
1105 1095
1106 1096 # sanity check our fetch list
1107 1097 for f in fetch.keys():
1108 1098 if f in m:
1109 1099 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1110 1100
1111 1101 if base.keys() == [nullid]:
1112 1102 if force:
1113 1103 self.ui.warn(_("warning: repository is unrelated\n"))
1114 1104 else:
1115 1105 raise util.Abort(_("repository is unrelated"))
1116 1106
1117 1107 self.ui.note(_("found new changesets starting at ") +
1118 1108 " ".join([short(f) for f in fetch]) + "\n")
1119 1109
1120 1110 self.ui.debug(_("%d total queries\n") % reqcnt)
1121 1111
1122 1112 return fetch.keys()
1123 1113
1124 1114 def findoutgoing(self, remote, base=None, heads=None, force=False):
1125 1115 """Return list of nodes that are roots of subsets not in remote
1126 1116
1127 1117 If base dict is specified, assume that these nodes and their parents
1128 1118 exist on the remote side.
1129 1119 If a list of heads is specified, return only nodes which are heads
1130 1120 or ancestors of these heads, and return a second element which
1131 1121 contains all remote heads which get new children.
1132 1122 """
1133 1123 if base == None:
1134 1124 base = {}
1135 1125 self.findincoming(remote, base, heads, force=force)
1136 1126
1137 1127 self.ui.debug(_("common changesets up to ")
1138 1128 + " ".join(map(short, base.keys())) + "\n")
1139 1129
1140 1130 remain = dict.fromkeys(self.changelog.nodemap)
1141 1131
1142 1132 # prune everything remote has from the tree
1143 1133 del remain[nullid]
1144 1134 remove = base.keys()
1145 1135 while remove:
1146 1136 n = remove.pop(0)
1147 1137 if n in remain:
1148 1138 del remain[n]
1149 1139 for p in self.changelog.parents(n):
1150 1140 remove.append(p)
1151 1141
1152 1142 # find every node whose parents have been pruned
1153 1143 subset = []
1154 1144 # find every remote head that will get new children
1155 1145 updated_heads = {}
1156 1146 for n in remain:
1157 1147 p1, p2 = self.changelog.parents(n)
1158 1148 if p1 not in remain and p2 not in remain:
1159 1149 subset.append(n)
1160 1150 if heads:
1161 1151 if p1 in heads:
1162 1152 updated_heads[p1] = True
1163 1153 if p2 in heads:
1164 1154 updated_heads[p2] = True
1165 1155
1166 1156 # this is the set of all roots we have to push
1167 1157 if heads:
1168 1158 return subset, updated_heads.keys()
1169 1159 else:
1170 1160 return subset
1171 1161
1172 1162 def pull(self, remote, heads=None, force=False, lock=None):
1173 1163 mylock = False
1174 1164 if not lock:
1175 1165 lock = self.lock()
1176 1166 mylock = True
1177 1167
1178 1168 try:
1179 1169 fetch = self.findincoming(remote, force=force)
1180 1170 if fetch == [nullid]:
1181 1171 self.ui.status(_("requesting all changes\n"))
1182 1172
1183 1173 if not fetch:
1184 1174 self.ui.status(_("no changes found\n"))
1185 1175 return 0
1186 1176
1187 1177 if heads is None:
1188 1178 cg = remote.changegroup(fetch, 'pull')
1189 1179 else:
1190 1180 cg = remote.changegroupsubset(fetch, heads, 'pull')
1191 1181 return self.addchangegroup(cg, 'pull', remote.url())
1192 1182 finally:
1193 1183 if mylock:
1194 1184 lock.release()
1195 1185
1196 1186 def push(self, remote, force=False, revs=None):
1197 1187 # there are two ways to push to remote repo:
1198 1188 #
1199 1189 # addchangegroup assumes local user can lock remote
1200 1190 # repo (local filesystem, old ssh servers).
1201 1191 #
1202 1192 # unbundle assumes local user cannot lock remote repo (new ssh
1203 1193 # servers, http servers).
1204 1194
1205 1195 if remote.capable('unbundle'):
1206 1196 return self.push_unbundle(remote, force, revs)
1207 1197 return self.push_addchangegroup(remote, force, revs)
1208 1198
1209 1199 def prepush(self, remote, force, revs):
1210 1200 base = {}
1211 1201 remote_heads = remote.heads()
1212 1202 inc = self.findincoming(remote, base, remote_heads, force=force)
1213 1203 if not force and inc:
1214 1204 self.ui.warn(_("abort: unsynced remote changes!\n"))
1215 1205 self.ui.status(_("(did you forget to sync?"
1216 1206 " use push -f to force)\n"))
1217 1207 return None, 1
1218 1208
1219 1209 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1220 1210 if revs is not None:
1221 1211 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1222 1212 else:
1223 1213 bases, heads = update, self.changelog.heads()
1224 1214
1225 1215 if not bases:
1226 1216 self.ui.status(_("no changes found\n"))
1227 1217 return None, 1
1228 1218 elif not force:
1229 1219 # FIXME we don't properly detect creation of new heads
1230 1220 # in the push -r case, assume the user knows what he's doing
1231 1221 if not revs and len(remote_heads) < len(heads) \
1232 1222 and remote_heads != [nullid]:
1233 1223 self.ui.warn(_("abort: push creates new remote branches!\n"))
1234 1224 self.ui.status(_("(did you forget to merge?"
1235 1225 " use push -f to force)\n"))
1236 1226 return None, 1
1237 1227
1238 1228 if revs is None:
1239 1229 cg = self.changegroup(update, 'push')
1240 1230 else:
1241 1231 cg = self.changegroupsubset(update, revs, 'push')
1242 1232 return cg, remote_heads
1243 1233
1244 1234 def push_addchangegroup(self, remote, force, revs):
1245 1235 lock = remote.lock()
1246 1236
1247 1237 ret = self.prepush(remote, force, revs)
1248 1238 if ret[0] is not None:
1249 1239 cg, remote_heads = ret
1250 1240 return remote.addchangegroup(cg, 'push', self.url())
1251 1241 return ret[1]
1252 1242
1253 1243 def push_unbundle(self, remote, force, revs):
1254 1244 # local repo finds heads on server, finds out what revs it
1255 1245 # must push. once revs transferred, if server finds it has
1256 1246 # different heads (someone else won commit/push race), server
1257 1247 # aborts.
1258 1248
1259 1249 ret = self.prepush(remote, force, revs)
1260 1250 if ret[0] is not None:
1261 1251 cg, remote_heads = ret
1262 1252 if force: remote_heads = ['force']
1263 1253 return remote.unbundle(cg, remote_heads, 'push')
1264 1254 return ret[1]
1265 1255
1266 1256 def changegroupsubset(self, bases, heads, source):
1267 1257 """This function generates a changegroup consisting of all the nodes
1268 1258 that are descendants of any of the bases, and ancestors of any of
1269 1259 the heads.
1270 1260
1271 1261 It is fairly complex as determining which filenodes and which
1272 1262 manifest nodes need to be included for the changeset to be complete
1273 1263 is non-trivial.
1274 1264
1275 1265 Another wrinkle is doing the reverse, figuring out which changeset in
1276 1266 the changegroup a particular filenode or manifestnode belongs to."""
1277 1267
1278 1268 self.hook('preoutgoing', throw=True, source=source)
1279 1269
1280 1270 # Set up some initial variables
1281 1271 # Make it easy to refer to self.changelog
1282 1272 cl = self.changelog
1283 1273 # msng is short for missing - compute the list of changesets in this
1284 1274 # changegroup.
1285 1275 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1286 1276 # Some bases may turn out to be superfluous, and some heads may be
1287 1277 # too. nodesbetween will return the minimal set of bases and heads
1288 1278 # necessary to re-create the changegroup.
1289 1279
1290 1280 # Known heads are the list of heads that it is assumed the recipient
1291 1281 # of this changegroup will know about.
1292 1282 knownheads = {}
1293 1283 # We assume that all parents of bases are known heads.
1294 1284 for n in bases:
1295 1285 for p in cl.parents(n):
1296 1286 if p != nullid:
1297 1287 knownheads[p] = 1
1298 1288 knownheads = knownheads.keys()
1299 1289 if knownheads:
1300 1290 # Now that we know what heads are known, we can compute which
1301 1291 # changesets are known. The recipient must know about all
1302 1292 # changesets required to reach the known heads from the null
1303 1293 # changeset.
1304 1294 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1305 1295 junk = None
1306 1296 # Transform the list into an ersatz set.
1307 1297 has_cl_set = dict.fromkeys(has_cl_set)
1308 1298 else:
1309 1299 # If there were no known heads, the recipient cannot be assumed to
1310 1300 # know about any changesets.
1311 1301 has_cl_set = {}
1312 1302
1313 1303 # Make it easy to refer to self.manifest
1314 1304 mnfst = self.manifest
1315 1305 # We don't know which manifests are missing yet
1316 1306 msng_mnfst_set = {}
1317 1307 # Nor do we know which filenodes are missing.
1318 1308 msng_filenode_set = {}
1319 1309
1320 1310 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1321 1311 junk = None
1322 1312
1323 1313 # A changeset always belongs to itself, so the changenode lookup
1324 1314 # function for a changenode is identity.
1325 1315 def identity(x):
1326 1316 return x
1327 1317
1328 1318 # A function generating function. Sets up an environment for the
1329 1319 # inner function.
1330 1320 def cmp_by_rev_func(revlog):
1331 1321 # Compare two nodes by their revision number in the environment's
1332 1322 # revision history. Since the revision number represents both the
1333 1323 # most efficient order in which to read the nodes and a topological
1334 1324 # sorting of the nodes, this function is often useful.
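# (the manifest and per-file node lists below are sorted with exactly this
# comparator, e.g. msng_mnfst_lst.sort(cmp_by_rev_func(mnfst)), so each
# revlog is read in storage order)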
1335 1325 def cmp_by_rev(a, b):
1336 1326 return cmp(revlog.rev(a), revlog.rev(b))
1337 1327 return cmp_by_rev
1338 1328
1339 1329 # If we determine that a particular file or manifest node must be a
1340 1330 # node that the recipient of the changegroup will already have, we can
1341 1331 # also assume the recipient will have all the parents. This function
1342 1332 # prunes them from the set of missing nodes.
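# For example, if the recipient is known to have manifest X, then X's
# parents, grandparents and so on can all be dropped from the missing set
# as well.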
1343 1333 def prune_parents(revlog, hasset, msngset):
1344 1334 haslst = hasset.keys()
1345 1335 haslst.sort(cmp_by_rev_func(revlog))
1346 1336 for node in haslst:
1347 1337 parentlst = [p for p in revlog.parents(node) if p != nullid]
1348 1338 while parentlst:
1349 1339 n = parentlst.pop()
1350 1340 if n not in hasset:
1351 1341 hasset[n] = 1
1352 1342 p = [p for p in revlog.parents(n) if p != nullid]
1353 1343 parentlst.extend(p)
1354 1344 for n in hasset:
1355 1345 msngset.pop(n, None)
1356 1346
1357 1347 # This is a function generating function used to set up an environment
1358 1348 # for the inner function to execute in.
1359 1349 def manifest_and_file_collector(changedfileset):
1360 1350 # This is an information gathering function that gathers
1361 1351 # information from each changeset node that goes out as part of
1362 1352 # the changegroup. The information gathered is a list of which
1363 1353 # manifest nodes are potentially required (the recipient may
1364 1354 # already have them) and total list of all files which were
1365 1355 # changed in any changeset in the changegroup.
1366 1356 #
1367 1357 # We also remember the first changenode we saw any manifest
1368 1358 # referenced by so we can later determine which changenode 'owns'
1369 1359 # the manifest.
1370 1360 def collect_manifests_and_files(clnode):
1371 1361 c = cl.read(clnode)
1372 1362 for f in c[3]:
1373 1363 # This is to make sure we only have one instance of each
1374 1364 # filename string for each filename.
1375 1365 changedfileset.setdefault(f, f)
1376 1366 msng_mnfst_set.setdefault(c[0], clnode)
1377 1367 return collect_manifests_and_files
1378 1368
1379 1369 # Figure out which manifest nodes (of the ones we think might be part
1380 1370 # of the changegroup) the recipient must know about and remove them
1381 1371 # from the changegroup.
1382 1372 def prune_manifests():
1383 1373 has_mnfst_set = {}
1384 1374 for n in msng_mnfst_set:
1385 1375 # If a 'missing' manifest thinks it belongs to a changenode
1386 1376 # the recipient is assumed to have, obviously the recipient
1387 1377 # must have that manifest.
1388 1378 linknode = cl.node(mnfst.linkrev(n))
1389 1379 if linknode in has_cl_set:
1390 1380 has_mnfst_set[n] = 1
1391 1381 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1392 1382
1393 1383 # Use the information collected in collect_manifests_and_files to say
1394 1384 # which changenode any manifestnode belongs to.
1395 1385 def lookup_manifest_link(mnfstnode):
1396 1386 return msng_mnfst_set[mnfstnode]
1397 1387
1398 1388 # A function generating function that sets up the initial environment
1399 1389 # for the inner function.
1400 1390 def filenode_collector(changedfiles):
1401 1391 next_rev = [0]
1402 1392 # This gathers information from each manifestnode included in the
1403 1393 # changegroup about which filenodes the manifest node references
1404 1394 # so we can include those in the changegroup too.
1405 1395 #
1406 1396 # It also remembers which changenode each filenode belongs to. It
1407 1397 # does this by assuming that a filenode belongs to the same changenode
1408 1398 # as the first manifest that references it.
1409 1399 def collect_msng_filenodes(mnfstnode):
1410 1400 r = mnfst.rev(mnfstnode)
1411 1401 if r == next_rev[0]:
1412 1402 # If the last rev we looked at was the one just previous,
1413 1403 # we only need to see a diff.
1414 1404 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1415 1405 # For each line in the delta
1416 1406 for dline in delta.splitlines():
1417 1407 # get the filename and filenode for that line
1418 1408 f, fnode = dline.split('\0')
1419 1409 fnode = bin(fnode[:40])
1420 1410 f = changedfiles.get(f, None)
1421 1411 # And if the file is in the list of files we care
1422 1412 # about.
1423 1413 if f is not None:
1424 1414 # Get the changenode this manifest belongs to
1425 1415 clnode = msng_mnfst_set[mnfstnode]
1426 1416 # Create the set of filenodes for the file if
1427 1417 # there isn't one already.
1428 1418 ndset = msng_filenode_set.setdefault(f, {})
1429 1419 # And set the filenode's changelog node to the
1430 1420 # manifest's if it hasn't been set already.
1431 1421 ndset.setdefault(fnode, clnode)
1432 1422 else:
1433 1423 # Otherwise we need a full manifest.
1434 1424 m = mnfst.read(mnfstnode)
1435 1425 # For every file we care about.
1436 1426 for f in changedfiles:
1437 1427 fnode = m.get(f, None)
1438 1428 # If it's in the manifest
1439 1429 if fnode is not None:
1440 1430 # See comments above.
1441 1431 clnode = msng_mnfst_set[mnfstnode]
1442 1432 ndset = msng_filenode_set.setdefault(f, {})
1443 1433 ndset.setdefault(fnode, clnode)
1444 1434 # Remember the revision we hope to see next.
1445 1435 next_rev[0] = r + 1
1446 1436 return collect_msng_filenodes
1447 1437
1448 1438 # We have a list of filenodes we think we need for a file; let's remove
1449 1439 # all those we know the recipient must have.
1450 1440 def prune_filenodes(f, filerevlog):
1451 1441 msngset = msng_filenode_set[f]
1452 1442 hasset = {}
1453 1443 # If a 'missing' filenode thinks it belongs to a changenode we
1454 1444 # assume the recipient must have, then the recipient must have
1455 1445 # that filenode.
1456 1446 for n in msngset:
1457 1447 clnode = cl.node(filerevlog.linkrev(n))
1458 1448 if clnode in has_cl_set:
1459 1449 hasset[n] = 1
1460 1450 prune_parents(filerevlog, hasset, msngset)
1461 1451
1462 1452 # A function generating function that sets up a context for the
1463 1453 # inner function.
1464 1454 def lookup_filenode_link_func(fname):
1465 1455 msngset = msng_filenode_set[fname]
1466 1456 # Lookup the changenode the filenode belongs to.
1467 1457 def lookup_filenode_link(fnode):
1468 1458 return msngset[fnode]
1469 1459 return lookup_filenode_link
1470 1460
1471 1461 # Now that we have all these utility functions to help out and
1472 1462 # logically divide up the task, generate the group.
1473 1463 def gengroup():
1474 1464 # The set of changed files starts empty.
1475 1465 changedfiles = {}
1476 1466 # Create a changenode group generator that will call our functions
1477 1467 # back to lookup the owning changenode and collect information.
1478 1468 group = cl.group(msng_cl_lst, identity,
1479 1469 manifest_and_file_collector(changedfiles))
1480 1470 for chnk in group:
1481 1471 yield chnk
1482 1472
1483 1473 # The list of manifests has been collected by the generator
1484 1474 # calling our functions back.
1485 1475 prune_manifests()
1486 1476 msng_mnfst_lst = msng_mnfst_set.keys()
1487 1477 # Sort the manifestnodes by revision number.
1488 1478 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1489 1479 # Create a generator for the manifestnodes that calls our lookup
1490 1480 # and data collection functions back.
1491 1481 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1492 1482 filenode_collector(changedfiles))
1493 1483 for chnk in group:
1494 1484 yield chnk
1495 1485
1496 1486 # These are no longer needed, dereference and toss the memory for
1497 1487 # them.
1498 1488 msng_mnfst_lst = None
1499 1489 msng_mnfst_set.clear()
1500 1490
1501 1491 changedfiles = changedfiles.keys()
1502 1492 changedfiles.sort()
1503 1493 # Go through all our files in order sorted by name.
1504 1494 for fname in changedfiles:
1505 1495 filerevlog = self.file(fname)
1506 1496 # Toss out the filenodes that the recipient isn't really
1507 1497 # missing.
1508 1498 if msng_filenode_set.has_key(fname):
1509 1499 prune_filenodes(fname, filerevlog)
1510 1500 msng_filenode_lst = msng_filenode_set[fname].keys()
1511 1501 else:
1512 1502 msng_filenode_lst = []
1513 1503 # If any filenodes are left, generate the group for them,
1514 1504 # otherwise don't bother.
1515 1505 if len(msng_filenode_lst) > 0:
1516 1506 yield changegroup.genchunk(fname)
1517 1507 # Sort the filenodes by their revision #
1518 1508 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1519 1509 # Create a group generator and only pass in a changenode
1520 1510 # lookup function as we need to collect no information
1521 1511 # from filenodes.
1522 1512 group = filerevlog.group(msng_filenode_lst,
1523 1513 lookup_filenode_link_func(fname))
1524 1514 for chnk in group:
1525 1515 yield chnk
1526 1516 if msng_filenode_set.has_key(fname):
1527 1517 # Don't need this anymore, toss it to free memory.
1528 1518 del msng_filenode_set[fname]
1529 1519 # Signal that no more groups are left.
1530 1520 yield changegroup.closechunk()
1531 1521
1532 1522 if msng_cl_lst:
1533 1523 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1534 1524
1535 1525 return util.chunkbuffer(gengroup())
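For orientation, the chunks yielded by gengroup() arrive in a fixed order, and addchangegroup() further down consumes them in exactly this order. A rough reader sketch (the helper name is made up, error handling omitted):

def walk_changegroup(source):
    for chunk in changegroup.chunkiter(source):   # 1. the changelog group
        pass
    for chunk in changegroup.chunkiter(source):   # 2. the manifest group
        pass
    while True:                                   # 3. one group per changed file
        fname = changegroup.getchunk(source)
        if not fname:                             # empty chunk: end of stream
            break
        for chunk in changegroup.chunkiter(source):
            pass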
1536 1526
1537 1527 def changegroup(self, basenodes, source):
1538 1528 """Generate a changegroup of all nodes that we have that a recipient
1539 1529 doesn't.
1540 1530
1541 1531 This is much easier than the previous function as we can assume that
1542 1532 the recipient has any changenode we aren't sending them."""
1543 1533
1544 1534 self.hook('preoutgoing', throw=True, source=source)
1545 1535
1546 1536 cl = self.changelog
1547 1537 nodes = cl.nodesbetween(basenodes, None)[0]
1548 1538 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1549 1539
1550 1540 def identity(x):
1551 1541 return x
1552 1542
1553 1543 def gennodelst(revlog):
1554 1544 for r in xrange(0, revlog.count()):
1555 1545 n = revlog.node(r)
1556 1546 if revlog.linkrev(n) in revset:
1557 1547 yield n
1558 1548
1559 1549 def changed_file_collector(changedfileset):
1560 1550 def collect_changed_files(clnode):
1561 1551 c = cl.read(clnode)
1562 1552 for fname in c[3]:
1563 1553 changedfileset[fname] = 1
1564 1554 return collect_changed_files
1565 1555
1566 1556 def lookuprevlink_func(revlog):
1567 1557 def lookuprevlink(n):
1568 1558 return cl.node(revlog.linkrev(n))
1569 1559 return lookuprevlink
1570 1560
1571 1561 def gengroup():
1572 1562 # construct a list of all changed files
1573 1563 changedfiles = {}
1574 1564
1575 1565 for chnk in cl.group(nodes, identity,
1576 1566 changed_file_collector(changedfiles)):
1577 1567 yield chnk
1578 1568 changedfiles = changedfiles.keys()
1579 1569 changedfiles.sort()
1580 1570
1581 1571 mnfst = self.manifest
1582 1572 nodeiter = gennodelst(mnfst)
1583 1573 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1584 1574 yield chnk
1585 1575
1586 1576 for fname in changedfiles:
1587 1577 filerevlog = self.file(fname)
1588 1578 nodeiter = gennodelst(filerevlog)
1589 1579 nodeiter = list(nodeiter)
1590 1580 if nodeiter:
1591 1581 yield changegroup.genchunk(fname)
1592 1582 lookup = lookuprevlink_func(filerevlog)
1593 1583 for chnk in filerevlog.group(nodeiter, lookup):
1594 1584 yield chnk
1595 1585
1596 1586 yield changegroup.closechunk()
1597 1587
1598 1588 if nodes:
1599 1589 self.hook('outgoing', node=hex(nodes[0]), source=source)
1600 1590
1601 1591 return util.chunkbuffer(gengroup())
1602 1592
1603 1593 def addchangegroup(self, source, srctype, url):
1604 1594 """add changegroup to repo.
1605 1595 returns number of heads modified or added + 1."""
1606 1596
1607 1597 def csmap(x):
1608 1598 self.ui.debug(_("add changeset %s\n") % short(x))
1609 1599 return cl.count()
1610 1600
1611 1601 def revmap(x):
1612 1602 return cl.rev(x)
1613 1603
1614 1604 if not source:
1615 1605 return 0
1616 1606
1617 1607 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1618 1608
1619 1609 changesets = files = revisions = 0
1620 1610
1621 1611 tr = self.transaction()
1622 1612
1623 1613 # write changelog data to temp files so concurrent readers will not see
1624 1614 # an inconsistent view
1625 1615 cl = None
1626 1616 try:
1627 1617 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1628 1618
1629 1619 oldheads = len(cl.heads())
1630 1620
1631 1621 # pull off the changeset group
1632 1622 self.ui.status(_("adding changesets\n"))
1633 1623 cor = cl.count() - 1
1634 1624 chunkiter = changegroup.chunkiter(source)
1635 1625 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1636 1626 raise util.Abort(_("received changelog group is empty"))
1637 1627 cnr = cl.count() - 1
1638 1628 changesets = cnr - cor
1639 1629
1640 1630 # pull off the manifest group
1641 1631 self.ui.status(_("adding manifests\n"))
1642 1632 chunkiter = changegroup.chunkiter(source)
1643 1633 # no need to check for empty manifest group here:
1644 1634 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1645 1635 # no new manifest will be created and the manifest group will
1646 1636 # be empty during the pull
1647 1637 self.manifest.addgroup(chunkiter, revmap, tr)
1648 1638
1649 1639 # process the files
1650 1640 self.ui.status(_("adding file changes\n"))
1651 1641 while 1:
1652 1642 f = changegroup.getchunk(source)
1653 1643 if not f:
1654 1644 break
1655 1645 self.ui.debug(_("adding %s revisions\n") % f)
1656 1646 fl = self.file(f)
1657 1647 o = fl.count()
1658 1648 chunkiter = changegroup.chunkiter(source)
1659 1649 if fl.addgroup(chunkiter, revmap, tr) is None:
1660 1650 raise util.Abort(_("received file revlog group is empty"))
1661 1651 revisions += fl.count() - o
1662 1652 files += 1
1663 1653
1664 1654 cl.writedata()
1665 1655 finally:
1666 1656 if cl:
1667 1657 cl.cleanup()
1668 1658
1669 1659 # make changelog see real files again
1670 1660 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1671 1661 self.changelog.checkinlinesize(tr)
1672 1662
1673 1663 newheads = len(self.changelog.heads())
1674 1664 heads = ""
1675 1665 if oldheads and newheads != oldheads:
1676 1666 heads = _(" (%+d heads)") % (newheads - oldheads)
1677 1667
1678 1668 self.ui.status(_("added %d changesets"
1679 1669 " with %d changes to %d files%s\n")
1680 1670 % (changesets, revisions, files, heads))
1681 1671
1682 1672 if changesets > 0:
1683 1673 self.hook('pretxnchangegroup', throw=True,
1684 1674 node=hex(self.changelog.node(cor+1)), source=srctype,
1685 1675 url=url)
1686 1676
1687 1677 tr.close()
1688 1678
1689 1679 if changesets > 0:
1690 1680 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1691 1681 source=srctype, url=url)
1692 1682
1693 1683 for i in range(cor + 1, cnr + 1):
1694 1684 self.hook("incoming", node=hex(self.changelog.node(i)),
1695 1685 source=srctype, url=url)
1696 1686
1697 1687 return newheads - oldheads + 1
1698 1688
1699 1689
1700 1690 def stream_in(self, remote):
1701 1691 fp = remote.stream_out()
1702 1692 resp = int(fp.readline())
1703 1693 if resp != 0:
1704 1694 raise util.Abort(_('operation forbidden by server'))
1705 1695 self.ui.status(_('streaming all changes\n'))
1706 1696 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1707 1697 self.ui.status(_('%d files to transfer, %s of data\n') %
1708 1698 (total_files, util.bytecount(total_bytes)))
1709 1699 start = time.time()
1710 1700 for i in xrange(total_files):
1711 1701 name, size = fp.readline().split('\0', 1)
1712 1702 size = int(size)
1713 1703 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1714 1704 ofp = self.opener(name, 'w')
1715 1705 for chunk in util.filechunkiter(fp, limit=size):
1716 1706 ofp.write(chunk)
1717 1707 ofp.close()
1718 1708 elapsed = time.time() - start
1719 1709 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1720 1710 (util.bytecount(total_bytes), elapsed,
1721 1711 util.bytecount(total_bytes / elapsed)))
1722 1712 self.reload()
1723 1713 return len(self.heads()) + 1
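The wire format read above is simple enough to restate from the sending side. A minimal sketch of what a 'stream'-capable server writes (illustrative only, not the real server code): a status line, a "files bytes" line, then for each store file a "name\0size" header followed by exactly size bytes of raw data.

def stream_out_sketch(fp, entries):
    # entries is a list of (store filename, raw file contents) pairs
    fp.write('0\n')                               # 0 means streaming is allowed
    total = sum(len(data) for name, data in entries)
    fp.write('%d %d\n' % (len(entries), total))   # "<files> <bytes>"
    for name, data in entries:
        fp.write('%s\0%d\n' % (name, len(data)))  # "<name>\0<size>"
        fp.write(data)                            # followed by size bytes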
1724 1714
1725 1715 def clone(self, remote, heads=[], stream=False):
1726 1716 '''clone remote repository.
1727 1717
1728 1718 keyword arguments:
1729 1719 heads: list of revs to clone (forces use of pull)
1730 1720 stream: use streaming clone if possible'''
1731 1721
1732 1722 # now, all clients that can request uncompressed clones can
1733 1723 # read repo formats supported by all servers that can serve
1734 1724 # them.
1735 1725
1736 1726 # if revlog format changes, client will have to check version
1737 1727 # and format flags on "stream" capability, and use
1738 1728 # uncompressed only if compatible.
1739 1729
1740 1730 if stream and not heads and remote.capable('stream'):
1741 1731 return self.stream_in(remote)
1742 1732 return self.pull(remote, heads)
1743 1733
1744 1734 # used to avoid circular references so destructors work
1745 1735 def aftertrans(base):
1746 1736 p = base
1747 1737 def a():
1748 1738 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1749 1739 util.rename(os.path.join(p, "journal.dirstate"),
1750 1740 os.path.join(p, "undo.dirstate"))
1751 1741 return a
1752 1742
1753 1743 def instance(ui, path, create):
1754 1744 return localrepository(ui, util.drop_scheme('file', path), create)
1755 1745
1756 1746 def islocal(path):
1757 1747 return True
@@ -1,334 +1,334 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "util os tempfile")
12 12
13 13 def merge3(repo, fn, my, other, p1, p2):
14 14 """perform a 3-way merge in the working directory"""
15 15
16 16 def temp(prefix, node):
17 17 pre = "%s~%s." % (os.path.basename(fn), prefix)
18 18 (fd, name) = tempfile.mkstemp(prefix=pre)
19 19 f = os.fdopen(fd, "wb")
20 20 repo.wwrite(fn, fl.read(node), f)
21 21 f.close()
22 22 return name
23 23
24 24 fl = repo.file(fn)
25 25 base = fl.ancestor(my, other)
26 26 a = repo.wjoin(fn)
27 27 b = temp("base", base)
28 28 c = temp("other", other)
29 29
30 30 repo.ui.note(_("resolving %s\n") % fn)
31 31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
32 32 (fn, short(my), short(other), short(base)))
33 33
34 34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
35 35 or "hgmerge")
36 36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
37 37 environ={'HG_FILE': fn,
38 38 'HG_MY_NODE': p1,
39 39 'HG_OTHER_NODE': p2,
40 40 'HG_FILE_MY_NODE': hex(my),
41 41 'HG_FILE_OTHER_NODE': hex(other),
42 42 'HG_FILE_BASE_NODE': hex(base)})
43 43 if r:
44 44 repo.ui.warn(_("merging %s failed!\n") % fn)
45 45
46 46 os.unlink(b)
47 47 os.unlink(c)
48 48 return r
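merge3() resolves the program to run from $HGMERGE, then the ui.merge config value, then the default 'hgmerge' script, and passes it the working file, the ancestor snapshot and the other snapshot as arguments, plus the HG_* variables set above. A minimal stand-in for such a program could look like this (hypothetical script; it delegates to RCS merge(1) here as an assumption, but any three-way merge tool with the same exit convention works):

# hypothetical ~/bin/simplemerge.py, usable as HGMERGE=~/bin/simplemerge.py
import os
import subprocess
import sys

def main():
    local, base, other = sys.argv[1:4]   # paths handed over by merge3()
    sys.stderr.write('merging %s (my %s, other %s)\n' %
                     (os.environ.get('HG_FILE'),
                      os.environ.get('HG_FILE_MY_NODE'),
                      os.environ.get('HG_FILE_OTHER_NODE')))
    # a non-zero exit status is reported by merge3() as "merging ... failed!"
    return subprocess.call(['merge', local, base, other])

if __name__ == '__main__':
    sys.exit(main())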
49 49
50 50 def update(repo, node, branchmerge=False, force=False, partial=None,
51 51 wlock=None, show_stats=True, remind=True):
52 52
53 53 overwrite = force and not branchmerge
54 54 forcemerge = force and branchmerge
55 55
56 56 if not wlock:
57 57 wlock = repo.wlock()
58 58
59 59 ### check phase
60 60
61 61 pl = repo.dirstate.parents()
62 62 if not overwrite and pl[1] != nullid:
63 63 raise util.Abort(_("outstanding uncommitted merges"))
64 64
65 65 p1, p2 = pl[0], node
66 66 pa = repo.changelog.ancestor(p1, p2)
67 67
68 68 # is there a linear path from p1 to p2?
69 69 linear_path = (pa == p1 or pa == p2)
70 70 if branchmerge and linear_path:
71 71 raise util.Abort(_("there is nothing to merge, just use "
72 72 "'hg update' or look at 'hg heads'"))
73 73
74 74 if not overwrite and not linear_path and not branchmerge:
75 75 raise util.Abort(_("update spans branches, use 'hg merge' "
76 76 "or 'hg update -C' to lose changes"))
77 77
78 modified, added, removed, deleted, unknown = repo.changes()
78 modified, added, removed, deleted, unknown = repo.status()[:5]
79 79 if branchmerge and not forcemerge:
80 80 if modified or added or removed:
81 81 raise util.Abort(_("outstanding uncommitted changes"))
82 82
83 83 m1n = repo.changelog.read(p1)[0]
84 84 m2n = repo.changelog.read(p2)[0]
85 85 man = repo.manifest.ancestor(m1n, m2n)
86 86 m1 = repo.manifest.read(m1n)
87 87 m2 = repo.manifest.read(m2n).copy()
88 88 ma = repo.manifest.read(man)
89 89
90 90 if not forcemerge and not overwrite:
91 91 for f in unknown:
92 92 if f in m2:
93 93 t1 = repo.wread(f)
94 94 t2 = repo.file(f).read(m2[f])
95 95 if cmp(t1, t2) != 0:
96 96 raise util.Abort(_("'%s' already exists in the working"
97 97 " dir and differs from remote") % f)
98 98
99 99 # resolve the manifest to determine which files
100 100 # we care about merging
101 101 repo.ui.note(_("resolving manifests\n"))
102 102 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
103 103 (overwrite, branchmerge, partial and True or False, linear_path))
104 104 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
105 105 (short(man), short(m1n), short(m2n)))
106 106
107 107 merge = {}
108 108 get = {}
109 109 remove = []
110 110
111 111 # construct a working dir manifest
112 112 mw = m1.copy()
113 113 umap = dict.fromkeys(unknown)
114 114
115 115 for f in added + modified + unknown:
116 116 mw[f] = ""
117 117 mw.set(f, util.is_exec(repo.wjoin(f), mw.execf(f)))
118 118
119 119 for f in deleted + removed:
120 120 if f in mw:
121 121 del mw[f]
122 122
123 123 # If we're jumping between revisions (as opposed to merging),
124 124 # and if neither the working directory nor the target rev has
125 125 # the file, then we need to remove it from the dirstate, to
126 126 # prevent the dirstate from listing the file when it is no
127 127 # longer in the manifest.
128 128 if not partial and linear_path and f not in m2:
129 129 repo.dirstate.forget((f,))
130 130
131 131 # Compare manifests
132 132 for f, n in mw.iteritems():
133 133 if partial and not partial(f):
134 134 continue
135 135 if f in m2:
136 136 s = 0
137 137
138 138 # is the wfile new since m1, and does it match m2?
139 139 if f not in m1:
140 140 t1 = repo.wread(f)
141 141 t2 = repo.file(f).read(m2[f])
142 142 if cmp(t1, t2) == 0:
143 143 n = m2[f]
144 144 del t1, t2
145 145
146 146 # are files different?
147 147 if n != m2[f]:
148 148 a = ma.get(f, nullid)
149 149 # are both different from the ancestor?
150 150 if n != a and m2[f] != a:
151 151 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
152 152 # merge executable bits
153 153 # "if we changed or they changed, change in merge"
154 154 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
155 155 mode = ((a^b) | (a^c)) ^ a
156 156 merge[f] = (mode, m1.get(f, nullid), m2[f])
157 157 s = 1
158 158 # are we clobbering?
159 159 # is remote's version newer?
160 160 # or are we going back in time?
161 161 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
162 162 repo.ui.debug(_(" remote %s is newer, get\n") % f)
163 163 get[f] = (m2.execf(f), m2[f])
164 164 s = 1
165 165 elif f in umap or f in added:
166 166 # this unknown file is the same as the checkout
167 167 # we need to reset the dirstate if the file was added
168 168 get[f] = (m2.execf(f), m2[f])
169 169
170 170 if not s and mw.execf(f) != m2.execf(f):
171 171 if overwrite:
172 172 repo.ui.debug(_(" updating permissions for %s\n") % f)
173 173 util.set_exec(repo.wjoin(f), m2.execf(f))
174 174 else:
175 175 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
176 176 mode = ((a^b) | (a^c)) ^ a
177 177 if mode != b:
178 178 repo.ui.debug(_(" updating permissions for %s\n")
179 179 % f)
180 180 util.set_exec(repo.wjoin(f), mode)
181 181 del m2[f]
182 182 elif f in ma:
183 183 if n != ma[f]:
184 184 r = _("d")
185 185 if not overwrite and (linear_path or branchmerge):
186 186 r = repo.ui.prompt(
187 187 (_(" local changed %s which remote deleted\n") % f) +
188 188 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
189 189 if r == _("d"):
190 190 remove.append(f)
191 191 else:
192 192 repo.ui.debug(_("other deleted %s\n") % f)
193 193 remove.append(f) # other deleted it
194 194 else:
195 195 # file is created on branch or in working directory
196 196 if overwrite and f not in umap:
197 197 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
198 198 remove.append(f)
199 199 elif n == m1.get(f, nullid): # same as parent
200 200 if p2 == pa: # going backwards?
201 201 repo.ui.debug(_("remote deleted %s\n") % f)
202 202 remove.append(f)
203 203 else:
204 204 repo.ui.debug(_("local modified %s, keeping\n") % f)
205 205 else:
206 206 repo.ui.debug(_("working dir created %s, keeping\n") % f)
207 207
208 208 for f, n in m2.iteritems():
209 209 if partial and not partial(f):
210 210 continue
211 211 if f[0] == "/":
212 212 continue
213 213 if f in ma and n != ma[f]:
214 214 r = _("k")
215 215 if not overwrite and (linear_path or branchmerge):
216 216 r = repo.ui.prompt(
217 217 (_("remote changed %s which local deleted\n") % f) +
218 218 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
219 219 if r == _("k"):
220 220 get[f] = (m2.execf(f), n)
221 221 elif f not in ma:
222 222 repo.ui.debug(_("remote created %s\n") % f)
223 223 get[f] = (m2.execf(f), n)
224 224 else:
225 225 if overwrite or p2 == pa: # going backwards?
226 226 repo.ui.debug(_("local deleted %s, recreating\n") % f)
227 227 get[f] = (m2.execf(f), n)
228 228 else:
229 229 repo.ui.debug(_("local deleted %s\n") % f)
230 230
231 231 del mw, m1, m2, ma
232 232
233 233 if overwrite:
234 234 for f in merge:
235 235 get[f] = merge[f][:2]
236 236 merge = {}
237 237
238 238 if linear_path or overwrite:
239 239 # we don't need to do any magic, just jump to the new rev
240 240 p1, p2 = p2, nullid
241 241
242 242 xp1 = hex(p1)
243 243 xp2 = hex(p2)
244 244 if p2 == nullid: xxp2 = ''
245 245 else: xxp2 = xp2
246 246
247 247 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
248 248
249 249 # get the files we don't need to change
250 250 files = get.keys()
251 251 files.sort()
252 252 for f in files:
253 253 flag, node = get[f]
254 254 if f[0] == "/":
255 255 continue
256 256 repo.ui.note(_("getting %s\n") % f)
257 257 t = repo.file(f).read(node)
258 258 repo.wwrite(f, t)
259 259 util.set_exec(repo.wjoin(f), flag)
260 260 if not partial:
261 261 if branchmerge:
262 262 repo.dirstate.update([f], 'n', st_mtime=-1)
263 263 else:
264 264 repo.dirstate.update([f], 'n')
265 265
266 266 # merge the tricky bits
267 267 unresolved = []
268 268 files = merge.keys()
269 269 files.sort()
270 270 for f in files:
271 271 repo.ui.status(_("merging %s\n") % f)
272 272 flag, my, other = merge[f]
273 273 ret = merge3(repo, f, my, other, xp1, xp2)
274 274 if ret:
275 275 unresolved.append(f)
276 276 util.set_exec(repo.wjoin(f), flag)
277 277 if not partial:
278 278 if branchmerge:
279 279 # We've done a branch merge, mark this file as merged
280 280 # so that we properly record the merge later
281 281 repo.dirstate.update([f], 'm')
282 282 else:
283 283 # We've update-merged a locally modified file, so
284 284 # we set the dirstate to emulate a normal checkout
285 285 # of that file some time in the past. Thus our
286 286 # merge will appear as a normal local file
287 287 # modification.
288 288 f_len = len(repo.file(f).read(other))
289 289 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
290 290
291 291 remove.sort()
292 292 for f in remove:
293 293 repo.ui.note(_("removing %s\n") % f)
294 294 util.audit_path(f)
295 295 try:
296 296 util.unlink(repo.wjoin(f))
297 297 except OSError, inst:
298 298 if inst.errno != errno.ENOENT:
299 299 repo.ui.warn(_("update failed to remove %s: %s!\n") %
300 300 (f, inst.strerror))
301 301 if not partial:
302 302 if branchmerge:
303 303 repo.dirstate.update(remove, 'r')
304 304 else:
305 305 repo.dirstate.forget(remove)
306 306
307 307 if not partial:
308 308 repo.dirstate.setparents(p1, p2)
309 309
310 310 if show_stats:
311 311 stats = ((len(get), _("updated")),
312 312 (len(merge) - len(unresolved), _("merged")),
313 313 (len(remove), _("removed")),
314 314 (len(unresolved), _("unresolved")))
315 315 note = ", ".join([_("%d files %s") % s for s in stats])
316 316 repo.ui.status("%s\n" % note)
317 317 if not partial:
318 318 if branchmerge:
319 319 if unresolved:
320 320 repo.ui.status(_("There are unresolved merges,"
321 321 " you can redo the full merge using:\n"
322 322 " hg update -C %s\n"
323 323 " hg merge %s\n"
324 324 % (repo.changelog.rev(p1),
325 325 repo.changelog.rev(p2))))
326 326 elif remind:
327 327 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
328 328 elif unresolved:
329 329 repo.ui.status(_("There are unresolved merges with"
330 330 " locally modified files.\n"))
331 331
332 332 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
333 333 return len(unresolved)
334 334
@@ -1,366 +1,366 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from i18n import gettext as _
10 10 from node import *
11 11 demandload(globals(), "cmdutil mdiff util")
12 12 demandload(globals(), "cStringIO email.Parser os re shutil sys tempfile")
13 13
14 14 def extract(ui, fileobj):
15 15 '''extract patch from data read from fileobj.
16 16
17 17 patch can be normal patch or contained in email message.
18 18
19 19 return tuple (filename, message, user, date). any item in returned
20 20 tuple can be None. if filename is None, fileobj did not contain
21 21 patch. caller must unlink filename when done.'''
22 22
23 23 # attempt to detect the start of a patch
24 24 # (this heuristic is borrowed from quilt)
25 25 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
26 26 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
27 27 '(---|\*\*\*)[ \t])', re.MULTILINE)
28 28
29 29 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
30 30 tmpfp = os.fdopen(fd, 'w')
31 31 try:
32 32 hgpatch = False
33 33
34 34 msg = email.Parser.Parser().parse(fileobj)
35 35
36 36 message = msg['Subject']
37 37 user = msg['From']
38 38 # should try to parse msg['Date']
39 39 date = None
40 40
41 41 if message:
42 42 message = message.replace('\n\t', ' ')
43 43 ui.debug('Subject: %s\n' % message)
44 44 if user:
45 45 ui.debug('From: %s\n' % user)
46 46 diffs_seen = 0
47 47 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
48 48
49 49 for part in msg.walk():
50 50 content_type = part.get_content_type()
51 51 ui.debug('Content-Type: %s\n' % content_type)
52 52 if content_type not in ok_types:
53 53 continue
54 54 payload = part.get_payload(decode=True)
55 55 m = diffre.search(payload)
56 56 if m:
57 57 ui.debug(_('found patch at byte %d\n') % m.start(0))
58 58 diffs_seen += 1
59 59 cfp = cStringIO.StringIO()
60 60 if message:
61 61 cfp.write(message)
62 62 cfp.write('\n')
63 63 for line in payload[:m.start(0)].splitlines():
64 64 if line.startswith('# HG changeset patch'):
65 65 ui.debug(_('patch generated by hg export\n'))
66 66 hgpatch = True
67 67 # drop earlier commit message content
68 68 cfp.seek(0)
69 69 cfp.truncate()
70 70 elif hgpatch:
71 71 if line.startswith('# User '):
72 72 user = line[7:]
73 73 ui.debug('From: %s\n' % user)
74 74 elif line.startswith("# Date "):
75 75 date = line[7:]
76 76 if not line.startswith('# '):
77 77 cfp.write(line)
78 78 cfp.write('\n')
79 79 message = cfp.getvalue()
80 80 if tmpfp:
81 81 tmpfp.write(payload)
82 82 if not payload.endswith('\n'):
83 83 tmpfp.write('\n')
84 84 elif not diffs_seen and message and content_type == 'text/plain':
85 85 message += '\n' + payload
86 86 except:
87 87 tmpfp.close()
88 88 os.unlink(tmpname)
89 89 raise
90 90
91 91 tmpfp.close()
92 92 if not diffs_seen:
93 93 os.unlink(tmpname)
94 94 return None, message, user, date
95 95 return tmpname, message, user, date
96 96
97 97 def readgitpatch(patchname):
98 98 """extract git-style metadata about patches from <patchname>"""
99 99 class gitpatch:
100 100 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
101 101 def __init__(self, path):
102 102 self.path = path
103 103 self.oldpath = None
104 104 self.mode = None
105 105 self.op = 'MODIFY'
106 106 self.copymod = False
107 107 self.lineno = 0
108 108
109 109 # Filter patch for git information
110 110 gitre = re.compile('diff --git a/(.*) b/(.*)')
111 111 pf = file(patchname)
112 112 gp = None
113 113 gitpatches = []
114 114 # Can have a git patch with only metadata, causing patch to complain
115 115 dopatch = False
116 116
117 117 lineno = 0
118 118 for line in pf:
119 119 lineno += 1
120 120 if line.startswith('diff --git'):
121 121 m = gitre.match(line)
122 122 if m:
123 123 if gp:
124 124 gitpatches.append(gp)
125 125 src, dst = m.group(1,2)
126 126 gp = gitpatch(dst)
127 127 gp.lineno = lineno
128 128 elif gp:
129 129 if line.startswith('--- '):
130 130 if gp.op in ('COPY', 'RENAME'):
131 131 gp.copymod = True
132 132 dopatch = 'filter'
133 133 gitpatches.append(gp)
134 134 gp = None
135 135 if not dopatch:
136 136 dopatch = True
137 137 continue
138 138 if line.startswith('rename from '):
139 139 gp.op = 'RENAME'
140 140 gp.oldpath = line[12:].rstrip()
141 141 elif line.startswith('rename to '):
142 142 gp.path = line[10:].rstrip()
143 143 elif line.startswith('copy from '):
144 144 gp.op = 'COPY'
145 145 gp.oldpath = line[10:].rstrip()
146 146 elif line.startswith('copy to '):
147 147 gp.path = line[8:].rstrip()
148 148 elif line.startswith('deleted file'):
149 149 gp.op = 'DELETE'
150 150 elif line.startswith('new file mode '):
151 151 gp.op = 'ADD'
152 152 gp.mode = int(line.rstrip()[-3:], 8)
153 153 elif line.startswith('new mode '):
154 154 gp.mode = int(line.rstrip()[-3:], 8)
155 155 if gp:
156 156 gitpatches.append(gp)
157 157
158 158 if not gitpatches:
159 159 dopatch = True
160 160
161 161 return (dopatch, gitpatches)
162 162
163 163 def dogitpatch(patchname, gitpatches):
164 164 """Preprocess git patch so that vanilla patch can handle it"""
165 165 pf = file(patchname)
166 166 pfline = 1
167 167
168 168 fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
169 169 tmpfp = os.fdopen(fd, 'w')
170 170
171 171 try:
172 172 for i in range(len(gitpatches)):
173 173 p = gitpatches[i]
174 174 if not p.copymod:
175 175 continue
176 176
177 177 if os.path.exists(p.path):
178 178 raise util.Abort(_("cannot create %s: destination already exists") %
179 179 p.path)
180 180
181 181 (src, dst) = [os.path.join(os.getcwd(), n)
182 182 for n in (p.oldpath, p.path)]
183 183
184 184 targetdir = os.path.dirname(dst)
185 185 if not os.path.isdir(targetdir):
186 186 os.makedirs(targetdir)
187 187 try:
188 188 shutil.copyfile(src, dst)
189 189 shutil.copymode(src, dst)
190 190 except shutil.Error, inst:
191 191 raise util.Abort(str(inst))
192 192
193 193 # rewrite patch hunk
194 194 while pfline < p.lineno:
195 195 tmpfp.write(pf.readline())
196 196 pfline += 1
197 197 tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
198 198 line = pf.readline()
199 199 pfline += 1
200 200 while not line.startswith('--- a/'):
201 201 tmpfp.write(line)
202 202 line = pf.readline()
203 203 pfline += 1
204 204 tmpfp.write('--- a/%s\n' % p.path)
205 205
206 206 line = pf.readline()
207 207 while line:
208 208 tmpfp.write(line)
209 209 line = pf.readline()
210 210 except:
211 211 tmpfp.close()
212 212 os.unlink(patchname)
213 213 raise
214 214
215 215 tmpfp.close()
216 216 return patchname
217 217
218 218 def patch(strip, patchname, ui, cwd=None):
219 219 """apply the patch <patchname> to the working directory.
220 220 a list of patched files is returned"""
221 221
222 222 (dopatch, gitpatches) = readgitpatch(patchname)
223 223
224 224 files = {}
225 225 if dopatch:
226 226 if dopatch == 'filter':
227 227 patchname = dogitpatch(patchname, gitpatches)
228 228 patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
229 229 args = []
230 230 if cwd:
231 231 args.append('-d %s' % util.shellquote(cwd))
232 232 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
233 233 util.shellquote(patchname)))
234 234
235 235 if dopatch == 'filter':
236 236 False and os.unlink(patchname)
237 237
238 238 for line in fp:
239 239 line = line.rstrip()
240 240 ui.status("%s\n" % line)
241 241 if line.startswith('patching file '):
242 242 pf = util.parse_patch_output(line)
243 243 files.setdefault(pf, (None, None))
244 244 code = fp.close()
245 245 if code:
246 246 raise util.Abort(_("patch command failed: %s") %
247 247 util.explain_exit(code)[0])
248 248
249 249 for gp in gitpatches:
250 250 files[gp.path] = (gp.op, gp)
251 251
252 252 return files
253 253
254 254 def diff(repo, node1=None, node2=None, files=None, match=util.always,
255 255 fp=None, changes=None, opts=None):
256 256 '''print diff of changes to files between two nodes, or node and
257 257 working directory.
258 258
259 259 if node1 is None, use first dirstate parent instead.
260 260 if node2 is None, compare node1 with working directory.'''
261 261
262 262 if opts is None:
263 263 opts = mdiff.defaultopts
264 264 if fp is None:
265 265 fp = repo.ui
266 266
267 267 if not node1:
268 268 node1 = repo.dirstate.parents()[0]
269 269 # reading the data for node1 early allows it to play nicely
270 # with repo.changes and the revlog cache.
270 # with repo.status and the revlog cache.
271 271 change = repo.changelog.read(node1)
272 272 mmap = repo.manifest.read(change[0])
273 273 date1 = util.datestr(change[2])
274 274
275 275 if not changes:
276 changes = repo.changes(node1, node2, files, match=match)
276 changes = repo.status(node1, node2, files, match=match)[:5]
277 277 modified, added, removed, deleted, unknown = changes
278 278 if files:
279 279 def filterfiles(filters):
280 280 l = [x for x in files if x in filters]
281 281
282 282 for t in filters:
283 283 if t and t[-1] != "/":
284 284 t += "/"
285 285 l += [x for x in files if x.startswith(t)]
286 286 return l
287 287
288 288 modified, added, removed = map(lambda x: filterfiles(x),
289 289 (modified, added, removed))
290 290
291 291 if not modified and not added and not removed:
292 292 return
293 293
294 294 if node2:
295 295 change = repo.changelog.read(node2)
296 296 mmap2 = repo.manifest.read(change[0])
297 297 _date2 = util.datestr(change[2])
298 298 def date2(f):
299 299 return _date2
300 300 def read(f):
301 301 return repo.file(f).read(mmap2[f])
302 302 else:
303 303 tz = util.makedate()[1]
304 304 _date2 = util.datestr()
305 305 def date2(f):
306 306 try:
307 307 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
308 308 except OSError, err:
309 309 if err.errno != errno.ENOENT: raise
310 310 return _date2
311 311 def read(f):
312 312 return repo.wread(f)
313 313
314 314 if repo.ui.quiet:
315 315 r = None
316 316 else:
317 317 hexfunc = repo.ui.verbose and hex or short
318 318 r = [hexfunc(node) for node in [node1, node2] if node]
319 319
320 320 all = modified + added + removed
321 321 all.sort()
322 322 for f in all:
323 323 to = None
324 324 tn = None
325 325 if f in mmap:
326 326 to = repo.file(f).read(mmap[f])
327 327 if f not in removed:
328 328 tn = read(f)
329 329 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts))
330 330
331 331 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
332 332 opts=None):
333 333 '''export changesets as hg patches.'''
334 334
335 335 total = len(revs)
336 336 revwidth = max(map(len, revs))
337 337
338 338 def single(node, seqno, fp):
339 339 parents = [p for p in repo.changelog.parents(node) if p != nullid]
340 340 if switch_parent:
341 341 parents.reverse()
342 342 prev = (parents and parents[0]) or nullid
343 343 change = repo.changelog.read(node)
344 344
345 345 if not fp:
346 346 fp = cmdutil.make_file(repo, template, node, total=total,
347 347 seqno=seqno, revwidth=revwidth)
348 348 if fp not in (sys.stdout, repo.ui):
349 349 repo.ui.note("%s\n" % fp.name)
350 350
351 351 fp.write("# HG changeset patch\n")
352 352 fp.write("# User %s\n" % change[1])
353 353 fp.write("# Date %d %d\n" % change[2])
354 354 fp.write("# Node ID %s\n" % hex(node))
355 355 fp.write("# Parent %s\n" % hex(prev))
356 356 if len(parents) > 1:
357 357 fp.write("# Parent %s\n" % hex(parents[1]))
358 358 fp.write(change[4].rstrip())
359 359 fp.write("\n\n")
360 360
361 361 diff(repo, prev, node, fp=fp, opts=opts)
362 362 if fp not in (sys.stdout, repo.ui):
363 363 fp.close()
364 364
365 365 for seqno, cset in enumerate(revs):
366 366 single(cset, seqno, fp)
@@ -1,532 +1,532 b''
1 1 # templater.py - template expansion for output
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from i18n import gettext as _
10 10 from node import *
11 11 demandload(globals(), "cStringIO cgi re sys os time urllib util textwrap")
12 12
13 13 esctable = {
14 14 '\\': '\\',
15 15 'r': '\r',
16 16 't': '\t',
17 17 'n': '\n',
18 18 'v': '\v',
19 19 }
20 20
21 21 def parsestring(s, quoted=True):
22 22 '''parse a string using simple c-like syntax.
23 23 string must be in quotes if quoted is True.'''
24 24 fp = cStringIO.StringIO()
25 25 if quoted:
26 26 first = s[0]
27 27 if len(s) < 2: raise SyntaxError(_('string too short'))
28 28 if first not in "'\"": raise SyntaxError(_('invalid quote'))
29 29 if s[-1] != first: raise SyntaxError(_('unmatched quotes'))
30 30 s = s[1:-1]
31 31 escape = False
32 32 for c in s:
33 33 if escape:
34 34 fp.write(esctable.get(c, c))
35 35 escape = False
36 36 elif c == '\\': escape = True
37 37 elif quoted and c == first: raise SyntaxError(_('string ends early'))
38 38 else: fp.write(c)
39 39 if escape: raise SyntaxError(_('unterminated escape'))
40 40 return fp.getvalue()
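A few illustrative expansions of the parser above (worked out by hand, not part of the module):

# parsestring(r"'a\tb\nc'")              -> 'a\tb\nc'  (quotes stripped, backslash
#                                            escapes decoded to a real tab/newline)
# parsestring('no quotes', quoted=False) -> 'no quotes'
# parsestring("'unterminated")           -> raises SyntaxError('unmatched quotes')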
41 41
42 42 class templater(object):
43 43 '''template expansion engine.
44 44
45 45 template expansion works like this. a map file contains key=value
46 46 pairs. if value is quoted, it is treated as string. otherwise, it
47 47 is treated as name of template file.
48 48
49 49 templater is asked to expand a key in map. it looks up key, and
50 50 looks for strings like this: {foo}. it expands {foo} by looking up
51 51 foo in map, and substituting it. expansion is recursive: it stops
52 52 when there is no more {foo} to replace.
53 53
54 54 expansion also allows formatting and filtering.
55 55
56 56 format uses key to expand each item in list. syntax is
57 57 {key%format}.
58 58
59 59 filter uses function to transform value. syntax is
60 60 {key|filter1|filter2|...}.'''
61 61
62 62 def __init__(self, mapfile, filters={}, defaults={}, cache={}):
63 63 '''set up template engine.
64 64 mapfile is name of file to read map definitions from.
65 65 filters is dict of functions. each transforms a value into another.
66 66 defaults is dict of default map definitions.'''
67 67 self.mapfile = mapfile or 'template'
68 68 self.cache = cache.copy()
69 69 self.map = {}
70 70 self.base = (mapfile and os.path.dirname(mapfile)) or ''
71 71 self.filters = filters
72 72 self.defaults = defaults
73 73
74 74 if not mapfile:
75 75 return
76 76 i = 0
77 77 for l in file(mapfile):
78 78 l = l.strip()
79 79 i += 1
80 80 if not l or l[0] in '#;': continue
81 81 m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l)
82 82 if m:
83 83 key, val = m.groups()
84 84 if val[0] in "'\"":
85 85 try:
86 86 self.cache[key] = parsestring(val)
87 87 except SyntaxError, inst:
88 88 raise SyntaxError('%s:%s: %s' %
89 89 (mapfile, i, inst.args[0]))
90 90 else:
91 91 self.map[key] = os.path.join(self.base, val)
92 92 else:
93 93 raise SyntaxError(_("%s:%s: parse error") % (mapfile, i))
94 94
95 95 def __contains__(self, key):
96 96 return key in self.cache
97 97
98 98 def __call__(self, t, **map):
99 99 '''perform expansion.
100 100 t is name of map element to expand.
101 101 map is added elements to use during expansion.'''
102 102 m = self.defaults.copy()
103 103 m.update(map)
104 104 try:
105 105 tmpl = self.cache[t]
106 106 except KeyError:
107 107 try:
108 108 tmpl = self.cache[t] = file(self.map[t]).read()
109 109 except IOError, inst:
110 110 raise IOError(inst.args[0], _('template file %s: %s') %
111 111 (self.map[t], inst.args[1]))
112 112 return self.template(tmpl, self.filters, **m)
113 113
114 114 template_re = re.compile(r"[#{]([a-zA-Z_][a-zA-Z0-9_]*)"
115 115 r"((%[a-zA-Z_][a-zA-Z0-9_]*)*)"
116 116 r"((\|[a-zA-Z_][a-zA-Z0-9_]*)*)[#}]")
117 117
118 118 def template(self, tmpl, filters={}, **map):
119 119 lm = map.copy()
120 120 while tmpl:
121 121 m = self.template_re.search(tmpl)
122 122 if m:
123 123 start, end = m.span(0)
124 124 s, e = tmpl[start], tmpl[end - 1]
125 125 key = m.group(1)
126 126 if ((s == '#' and e != '#') or (s == '{' and e != '}')):
127 127 raise SyntaxError(_("'%s'/'%s' mismatch expanding '%s'") %
128 128 (s, e, key))
129 129 if start:
130 130 yield tmpl[:start]
131 131 v = map.get(key, "")
132 132 v = callable(v) and v(**map) or v
133 133
134 134 format = m.group(2)
135 135 fl = m.group(4)
136 136
137 137 if format:
138 138 q = v.__iter__
139 139 for i in q():
140 140 lm.update(i)
141 141 yield self(format[1:], **lm)
142 142
143 143 v = ""
144 144
145 145 elif fl:
146 146 for f in fl.split("|")[1:]:
147 147 v = filters[f](v)
148 148
149 149 yield v
150 150 tmpl = tmpl[end:]
151 151 else:
152 152 yield tmpl
153 153 break
154 154
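A small usage sketch of the engine described in the class docstring, relying on the stringify helper and the common_filters dict defined further down (the map file path and the values are made up):

# contents of a hypothetical map file, say /tmp/map:
#   changeset = 'rev {rev}: {desc|firstline} by {author|person}\n'
t = templater('/tmp/map', common_filters)
text = stringify(t('changeset', rev='2875',
                   desc='remove localrepository.changes.\n\n(details)',
                   author='Vadim Gelfer <vadim.gelfer@gmail.com>'))
# text == 'rev 2875: remove localrepository.changes. by Vadim Gelfer\n'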
155 155 agescales = [("second", 1),
156 156 ("minute", 60),
157 157 ("hour", 3600),
158 158 ("day", 3600 * 24),
159 159 ("week", 3600 * 24 * 7),
160 160 ("month", 3600 * 24 * 30),
161 161 ("year", 3600 * 24 * 365)]
162 162
163 163 agescales.reverse()
164 164
165 165 def age(date):
166 166 '''turn a (timestamp, tzoff) tuple into an age string.'''
167 167
168 168 def plural(t, c):
169 169 if c == 1:
170 170 return t
171 171 return t + "s"
172 172 def fmt(t, c):
173 173 return "%d %s" % (c, plural(t, c))
174 174
175 175 now = time.time()
176 176 then = date[0]
177 177 delta = max(1, int(now - then))
178 178
179 179 for t, s in agescales:
180 180 n = delta / s
181 181 if n >= 2 or s == 1:
182 182 return fmt(t, n)
183 183
184 184 def stringify(thing):
185 185 '''turn nested template iterator into string.'''
186 186 cs = cStringIO.StringIO()
187 187 def walk(things):
188 188 for t in things:
189 189 if hasattr(t, '__iter__'):
190 190 walk(t)
191 191 else:
192 192 cs.write(t)
193 193 walk(thing)
194 194 return cs.getvalue()
195 195
196 196 para_re = None
197 197 space_re = None
198 198
199 199 def fill(text, width):
200 200 '''fill many paragraphs.'''
201 201 global para_re, space_re
202 202 if para_re is None:
203 203 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
204 204 space_re = re.compile(r' +')
205 205
206 206 def findparas():
207 207 start = 0
208 208 while True:
209 209 m = para_re.search(text, start)
210 210 if not m:
211 211 w = len(text)
212 212 while w > start and text[w-1].isspace(): w -= 1
213 213 yield text[start:w], text[w:]
214 214 break
215 215 yield text[start:m.start(0)], m.group(1)
216 216 start = m.end(1)
217 217
218 218 fp = cStringIO.StringIO()
219 219 for para, rest in findparas():
220 220 fp.write(space_re.sub(' ', textwrap.fill(para, width)))
221 221 fp.write(rest)
222 222 return fp.getvalue()
223 223
224 224 def firstline(text):
225 225 '''return the first line of text'''
226 226 try:
227 227 return text.splitlines(1)[0].rstrip('\r\n')
228 228 except IndexError:
229 229 return ''
230 230
231 231 def isodate(date):
232 232 '''turn a (timestamp, tzoff) tuple into an iso 8601 date and time.'''
233 233 return util.datestr(date, format='%Y-%m-%d %H:%M')
234 234
235 235 def hgdate(date):
236 236 '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.'''
237 237 return "%d %d" % date
238 238
239 239 def nl2br(text):
240 240 '''replace raw newlines with xhtml line breaks.'''
241 241 return text.replace('\n', '<br/>\n')
242 242
243 243 def obfuscate(text):
244 244 text = unicode(text, 'utf-8', 'replace')
245 245 return ''.join(['&#%d;' % ord(c) for c in text])
246 246
247 247 def domain(author):
248 248 '''get domain of author, or empty string if none.'''
249 249 f = author.find('@')
250 250 if f == -1: return ''
251 251 author = author[f+1:]
252 252 f = author.find('>')
253 253 if f >= 0: author = author[:f]
254 254 return author
255 255
256 256 def email(author):
257 257 '''get email of author.'''
258 258 r = author.find('>')
259 259 if r == -1: r = None
260 260 return author[author.find('<')+1:r]
261 261
262 262 def person(author):
263 263 '''get name of author, or else username.'''
264 264 f = author.find('<')
265 265 if f == -1: return util.shortuser(author)
266 266 return author[:f].rstrip()
267 267
268 268 def shortdate(date):
269 269 '''turn (timestamp, tzoff) tuple into iso 8601 date.'''
270 270 return util.datestr(date, format='%Y-%m-%d', timezone=False)
271 271
272 272 def indent(text, prefix):
273 273 '''indent each non-empty line of text after first with prefix.'''
274 274 fp = cStringIO.StringIO()
275 275 lines = text.splitlines()
276 276 num_lines = len(lines)
277 277 for i in xrange(num_lines):
278 278 l = lines[i]
279 279 if i and l.strip(): fp.write(prefix)
280 280 fp.write(l)
281 281 if i < num_lines - 1 or text.endswith('\n'):
282 282 fp.write('\n')
283 283 return fp.getvalue()
284 284
285 285 common_filters = {
286 286 "addbreaks": nl2br,
287 287 "basename": os.path.basename,
288 288 "age": age,
289 289 "date": lambda x: util.datestr(x),
290 290 "domain": domain,
291 291 "email": email,
292 292 "escape": lambda x: cgi.escape(x, True),
293 293 "fill68": lambda x: fill(x, width=68),
294 294 "fill76": lambda x: fill(x, width=76),
295 295 "firstline": firstline,
296 296 "tabindent": lambda x: indent(x, '\t'),
297 297 "hgdate": hgdate,
298 298 "isodate": isodate,
299 299 "obfuscate": obfuscate,
300 300 "permissions": lambda x: x and "-rwxr-xr-x" or "-rw-r--r--",
301 301 "person": person,
302 302 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
303 303 "short": lambda x: x[:12],
304 304 "shortdate": shortdate,
305 305 "stringify": stringify,
306 306 "strip": lambda x: x.strip(),
307 307 "urlescape": lambda x: urllib.quote(x),
308 308 "user": lambda x: util.shortuser(x),
309 309 }
310 310
311 311 def templatepath(name=None):
312 312 '''return location of template file or directory (if no name).
313 313 returns None if not found.'''
314 314
315 315 # executable version (py2exe) doesn't support __file__
316 316 if hasattr(sys, 'frozen'):
317 317 module = sys.executable
318 318 else:
319 319 module = __file__
320 320 for f in 'templates', '../templates':
321 321 fl = f.split('/')
322 322 if name: fl.append(name)
323 323 p = os.path.join(os.path.dirname(module), *fl)
324 324 if (name and os.path.exists(p)) or os.path.isdir(p):
325 325 return os.path.normpath(p)
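# Sketch of expected results (paths and names are examples only):
#
#   templatepath()      -> the 'templates' directory installed next to this
#                          module, e.g. '.../mercurial/templates'
#   templatepath('map') -> the full path of a file called 'map' in that
#                          directory, or None if no such file exists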
326 326
327 327 class changeset_templater(object):
328 328 '''format changeset information.'''
329 329
330 330 def __init__(self, ui, repo, mapfile, dest=None):
331 331 self.t = templater(mapfile, common_filters,
332 332 cache={'parent': '{rev}:{node|short} ',
333 333 'manifest': '{rev}:{node|short}'})
334 334 self.ui = ui
335 335 self.dest = dest
336 336 self.repo = repo
337 337
338 338 def use_template(self, t):
339 339 '''set template string to use'''
340 340 self.t.cache['changeset'] = t
341 341
342 342 def write(self, thing, header=False):
343 343 '''write expanded template.
344 344 uses an in-order recursive traversal of iterators.'''
345 345 dest = self.dest or self.ui
346 346 for t in thing:
347 347 if hasattr(t, '__iter__'):
348 348 self.write(t, header=header)
349 349 elif header:
350 350 dest.write_header(t)
351 351 else:
352 352 dest.write(t)
353 353
354 354 def write_header(self, thing):
355 355 self.write(thing, header=True)
356 356
357 357 def show(self, rev=0, changenode=None, brinfo=None, changes=None,
358 358 **props):
359 359 '''show a single changeset or file revision'''
360 360 log = self.repo.changelog
361 361 if changenode is None:
362 362 changenode = log.node(rev)
363 363 elif not rev:
364 364 rev = log.rev(changenode)
365 365 if changes is None:
366 366 changes = log.read(changenode)
367 367
368 368 def showlist(name, values, plural=None, **args):
369 369 '''expand set of values.
370 370 name is name of key in template map.
371 371 values is list of strings or dicts.
372 372 plural is plural of name, if not simply name + 's'.
373 373
374 374 expansion works like this, given name 'foo'.
375 375
376 376 if values is empty, expand 'no_foos'.
377 377
378 378 if 'foo' not in template map, return values as a string,
379 379 joined by space.
380 380
381 381 expand 'start_foos'.
382 382
383 383 for each value, expand 'foo'. if 'last_foo' in template
384 384 map, expand it instead of 'foo' for last key.
385 385
386 386 expand 'end_foos'.
387 387 '''
388 388 if plural: names = plural
389 389 else: names = name + 's'
390 390 if not values:
391 391 noname = 'no_' + names
392 392 if noname in self.t:
393 393 yield self.t(noname, **args)
394 394 return
395 395 if name not in self.t:
396 396 if isinstance(values[0], str):
397 397 yield ' '.join(values)
398 398 else:
399 399 for v in values:
400 400 yield dict(v, **args)
401 401 return
402 402 startname = 'start_' + names
403 403 if startname in self.t:
404 404 yield self.t(startname, **args)
405 405 vargs = args.copy()
406 406 def one(v, tag=name):
407 407 try:
408 408 vargs.update(v)
409 409 except (AttributeError, ValueError):
410 410 try:
411 411 for a, b in v:
412 412 vargs[a] = b
413 413 except ValueError:
414 414 vargs[name] = v
415 415 return self.t(tag, **vargs)
416 416 lastname = 'last_' + name
417 417 if lastname in self.t:
418 418 last = values.pop()
419 419 else:
420 420 last = None
421 421 for v in values:
422 422 yield one(v)
423 423 if last is not None:
424 424 yield one(last, tag=lastname)
425 425 endname = 'end_' + names
426 426 if endname in self.t:
427 427 yield self.t(endname, **args)
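# Illustrative expansion (map entries invented): given name 'file',
# values ['a', 'b'] and a template map containing
#
#   start_files = 'files: '
#   file        = '{file} '
#   end_files   = '\n'
#
# showlist('file', ['a', 'b']) yields 'files: ', 'a ', 'b ' and '\n' in turn,
# i.e. 'files: a b \n' once written out.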
428 428
429 429 if brinfo:
430 430 def showbranches(**args):
431 431 if changenode in brinfo:
432 432 for x in showlist('branch', brinfo[changenode],
433 433 plural='branches', **args):
434 434 yield x
435 435 else:
436 436 showbranches = ''
437 437
438 438 if self.ui.debugflag:
439 439 def showmanifest(**args):
440 440 args = args.copy()
441 441 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
442 442 node=hex(changes[0])))
443 443 yield self.t('manifest', **args)
444 444 else:
445 445 showmanifest = ''
446 446
447 447 def showparents(**args):
448 448 parents = [[('rev', log.rev(p)), ('node', hex(p))]
449 449 for p in log.parents(changenode)
450 450 if self.ui.debugflag or p != nullid]
451 451 if (not self.ui.debugflag and len(parents) == 1 and
452 452 parents[0][0][1] == rev - 1):
453 453 return
454 454 for x in showlist('parent', parents, **args):
455 455 yield x
456 456
457 457 def showtags(**args):
458 458 for x in showlist('tag', self.repo.nodetags(changenode), **args):
459 459 yield x
460 460
461 461 if self.ui.debugflag:
462 files = self.repo.changes(log.parents(changenode)[0], changenode)
462 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
463 463 def showfiles(**args):
464 464 for x in showlist('file', files[0], **args): yield x
465 465 def showadds(**args):
466 466 for x in showlist('file_add', files[1], **args): yield x
467 467 def showdels(**args):
468 468 for x in showlist('file_del', files[2], **args): yield x
469 469 else:
470 470 def showfiles(**args):
471 471 for x in showlist('file', changes[3], **args): yield x
472 472 showadds = ''
473 473 showdels = ''
474 474
475 475 defprops = {
476 476 'author': changes[1],
477 477 'branches': showbranches,
478 478 'date': changes[2],
479 479 'desc': changes[4],
480 480 'file_adds': showadds,
481 481 'file_dels': showdels,
482 482 'files': showfiles,
483 483 'manifest': showmanifest,
484 484 'node': hex(changenode),
485 485 'parents': showparents,
486 486 'rev': rev,
487 487 'tags': showtags,
488 488 }
489 489 props = props.copy()
490 490 props.update(defprops)
491 491
492 492 try:
493 493 if self.ui.debugflag and 'header_debug' in self.t:
494 494 key = 'header_debug'
495 495 elif self.ui.quiet and 'header_quiet' in self.t:
496 496 key = 'header_quiet'
497 497 elif self.ui.verbose and 'header_verbose' in self.t:
498 498 key = 'header_verbose'
499 499 elif 'header' in self.t:
500 500 key = 'header'
501 501 else:
502 502 key = ''
503 503 if key:
504 504 self.write_header(self.t(key, **props))
505 505 if self.ui.debugflag and 'changeset_debug' in self.t:
506 506 key = 'changeset_debug'
507 507 elif self.ui.quiet and 'changeset_quiet' in self.t:
508 508 key = 'changeset_quiet'
509 509 elif self.ui.verbose and 'changeset_verbose' in self.t:
510 510 key = 'changeset_verbose'
511 511 else:
512 512 key = 'changeset'
513 513 self.write(self.t(key, **props))
514 514 except KeyError, inst:
515 515 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
516 516 inst.args[0]))
517 517 except SyntaxError, inst:
518 518 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
519 519
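# Rough usage sketch (template string and revision invented): a caller with a
# ui and repo object might drive the class like this:
#
#   t = changeset_templater(ui, repo, mapfile)
#   t.use_template('{rev}:{node|short} {author|person}: {desc|firstline}\n')
#   t.show(rev=0)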
520 520 class stringio(object):
521 521 '''wrap cStringIO for use by changeset_templater.'''
522 522 def __init__(self):
523 523 self.fp = cStringIO.StringIO()
524 524
525 525 def write(self, *args):
526 526 for a in args:
527 527 self.fp.write(a)
528 528
529 529 write_header = write
530 530
531 531 def __getattr__(self, key):
532 532 return getattr(self.fp, key)
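# Sketch (illustrative only): a dest object like stringio lets a caller collect
# the expanded template as a string instead of writing it to the ui:
#
#   out = stringio()
#   changeset_templater(ui, repo, mapfile, dest=out).show(changenode=node)
#   text = out.getvalue()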