Merge with stable
Martin Geisler
r12070:fddacca3 merge default

The requested changes are too big and content was truncated.

hgext/churn.py
@@ -1,191 +1,197 @@
1 1 # churn.py - create a graph of revisions count grouped by template
2 2 #
3 3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''command to display statistics about repository history'''
10 10
11 11 from mercurial.i18n import _
12 12 from mercurial import patch, cmdutil, util, templater, commands
13 13 import os
14 14 import time, datetime
15 15
16 16 def maketemplater(ui, repo, tmpl):
17 17 tmpl = templater.parsestring(tmpl, quoted=False)
18 18 try:
19 19 t = cmdutil.changeset_templater(ui, repo, False, None, None, False)
20 20 except SyntaxError, inst:
21 21 raise util.Abort(inst.args[0])
22 22 t.use_template(tmpl)
23 23 return t
24 24
25 25 def changedlines(ui, repo, ctx1, ctx2, fns):
26 26 added, removed = 0, 0
27 27 fmatch = cmdutil.matchfiles(repo, fns)
28 28 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
29 29 for l in diff.split('\n'):
30 30 if l.startswith("+") and not l.startswith("+++ "):
31 31 added += 1
32 32 elif l.startswith("-") and not l.startswith("--- "):
33 33 removed += 1
34 34 return (added, removed)
35 35
36 36 def countrate(ui, repo, amap, *pats, **opts):
37 37 """Calculate stats"""
38 38 if opts.get('dateformat'):
39 39 def getkey(ctx):
40 40 t, tz = ctx.date()
41 41 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
42 42 return date.strftime(opts['dateformat'])
43 43 else:
44 44 tmpl = opts.get('template', '{author|email}')
45 45 tmpl = maketemplater(ui, repo, tmpl)
46 46 def getkey(ctx):
47 47 ui.pushbuffer()
48 48 tmpl.show(ctx)
49 49 return ui.popbuffer()
50 50
51 51 state = {'count': 0}
52 52 rate = {}
53 53 df = False
54 54 if opts.get('date'):
55 55 df = util.matchdate(opts['date'])
56 56
57 57 m = cmdutil.match(repo, pats, opts)
58 58 def prep(ctx, fns):
59 59 rev = ctx.rev()
60 60 if df and not df(ctx.date()[0]): # doesn't match date format
61 61 return
62 62
63 63 key = getkey(ctx)
64 64 key = amap.get(key, key) # alias remap
65 65 if opts.get('changesets'):
66 66 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
67 67 else:
68 68 parents = ctx.parents()
69 69 if len(parents) > 1:
70 70 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
71 71 return
72 72
73 73 ctx1 = parents[0]
74 74 lines = changedlines(ui, repo, ctx1, ctx, fns)
75 75 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
76 76
77 77 state['count'] += 1
78 78 ui.progress(_('analyzing'), state['count'], total=len(repo))
79 79
80 80 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
81 81 continue
82 82
83 83 ui.progress(_('analyzing'), None)
84 84
85 85 return rate
86 86
87 87
88 88 def churn(ui, repo, *pats, **opts):
89 89 '''histogram of changes to the repository
90 90
91 91 This command will display a histogram representing the number
92 92 of changed lines or revisions, grouped according to the given
93 93 template. The default template will group changes by author.
94 94 The --dateformat option may be used to group the results by
95 95 date instead.
96 96
97 97 Statistics are based on the number of changed lines, or
98 98 alternatively the number of matching revisions if the
99 99 --changesets option is specified.
100 100
101 101 Examples::
102 102
103 103 # display count of changed lines for every committer
104 104 hg churn -t '{author|email}'
105 105
106 106 # display daily activity graph
107 107 hg churn -f '%H' -s -c
108 108
109 109 # display activity of developers by month
110 110 hg churn -f '%Y-%m' -s -c
111 111
112 112 # display count of lines changed in every year
113 113 hg churn -f '%Y' -s
114 114
115 115 It is possible to map alternate email addresses to a main address
116 116 by providing a file using the following format::
117 117
118 118 <alias email> = <actual email>
119 119
120 120 Such a file may be specified with the --aliases option, otherwise
121 121 a .hgchurn file will be looked for in the working directory root.
122 122 '''
123 123 def pad(s, l):
124 124 return (s + " " * l)[:l]
125 125
126 126 amap = {}
127 127 aliases = opts.get('aliases')
128 128 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
129 129 aliases = repo.wjoin('.hgchurn')
130 130 if aliases:
131 131 for l in open(aliases, "r"):
132 alias, actual = l.split('=' in l and '=' or None, 1)
133 amap[alias.strip()] = actual.strip()
132 try:
133 alias, actual = l.split('=' in l and '=' or None, 1)
134 amap[alias.strip()] = actual.strip()
135 except ValueError:
136 l = l.strip()
137 if l:
138 ui.warn(_("skipping malformed alias: %s\n" % l))
139 continue
134 140
135 141 rate = countrate(ui, repo, amap, *pats, **opts).items()
136 142 if not rate:
137 143 return
138 144
139 145 sortkey = ((not opts.get('sort')) and (lambda x: -sum(x[1])) or None)
140 146 rate.sort(key=sortkey)
141 147
142 148 # Be careful not to have a zero maxcount (issue833)
143 149 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
144 150 maxname = max(len(k) for k, v in rate)
145 151
146 152 ttywidth = util.termwidth()
147 153 ui.debug("assuming %i character terminal\n" % ttywidth)
148 154 width = ttywidth - maxname - 2 - 2 - 2
149 155
150 156 if opts.get('diffstat'):
151 157 width -= 15
152 158 def format(name, diffstat):
153 159 added, removed = diffstat
154 160 return "%s %15s %s%s\n" % (pad(name, maxname),
155 161 '+%d/-%d' % (added, removed),
156 162 ui.label('+' * charnum(added),
157 163 'diffstat.inserted'),
158 164 ui.label('-' * charnum(removed),
159 165 'diffstat.deleted'))
160 166 else:
161 167 width -= 6
162 168 def format(name, count):
163 169 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
164 170 '*' * charnum(sum(count)))
165 171
166 172 def charnum(count):
167 173 return int(round(count * width / maxcount))
168 174
169 175 for name, count in rate:
170 176 ui.write(format(name, count))
171 177
172 178
173 179 cmdtable = {
174 180 "churn":
175 181 (churn,
176 182 [('r', 'rev', [],
177 183 _('count rate for the specified revision or range'), _('REV')),
178 184 ('d', 'date', '',
179 185 _('count rate for revisions matching date spec'), _('DATE')),
180 186 ('t', 'template', '{author|email}',
181 187 _('template to group changesets'), _('TEMPLATE')),
182 188 ('f', 'dateformat', '',
183 189 _('strftime-compatible format for grouping by date'), _('FORMAT')),
184 190 ('c', 'changesets', False, _('count rate by number of changesets')),
185 191 ('s', 'sort', False, _('sort by key (default: sort by count)')),
186 192 ('', 'diffstat', False, _('display added/removed lines separately')),
187 193 ('', 'aliases', '',
188 194 _('file with email aliases'), _('FILE')),
189 195 ] + commands.walkopts,
190 196 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]")),
191 197 }
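
The churn.py hunk above adds error handling around the alias-file parsing. A minimal, self-contained sketch of that same parsing logic follows; the sample lines and the helper name are invented for illustration, and a plain print stands in for ui.warn:

def parsealiases(lines):
    # An alias line maps "<alias email> = <actual email>"; when no '=' is
    # present the line is split on whitespace instead, and lines that do not
    # yield two fields are skipped with a warning (the new behaviour above).
    amap = {}
    for l in lines:
        try:
            alias, actual = l.split('=' in l and '=' or None, 1)
            amap[alias.strip()] = actual.strip()
        except ValueError:
            l = l.strip()
            if l:
                print("skipping malformed alias: %s" % l)
            continue
    return amap

print(parsealiases([
    "bob@work.example = bob@example.com",
    "bob@laptop.example bob@example.com",
    "malformed-line-without-a-separator",
]))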
hgext/gpg.py
@@ -1,288 +1,288 @@
1 1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 2 #
3 3 # This software may be used and distributed according to the terms of the
4 4 # GNU General Public License version 2 or any later version.
5 5
6 6 '''commands to sign and verify changesets'''
7 7
8 8 import os, tempfile, binascii
9 9 from mercurial import util, commands, match
10 10 from mercurial import node as hgnode
11 11 from mercurial.i18n import _
12 12
13 13 class gpg(object):
14 14 def __init__(self, path, key=None):
15 15 self.path = path
16 16 self.key = (key and " --local-user \"%s\"" % key) or ""
17 17
18 18 def sign(self, data):
19 19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 20 return util.filter(data, gpgcmd)
21 21
22 22 def verify(self, data, sig):
23 23 """ returns of the good and bad signatures"""
24 24 sigfile = datafile = None
25 25 try:
26 26 # create temporary files
27 27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 28 fp = os.fdopen(fd, 'wb')
29 29 fp.write(sig)
30 30 fp.close()
31 31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 32 fp = os.fdopen(fd, 'wb')
33 33 fp.write(data)
34 34 fp.close()
35 35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 37 ret = util.filter("", gpgcmd)
38 38 finally:
39 39 for f in (sigfile, datafile):
40 40 try:
41 41 if f:
42 42 os.unlink(f)
43 43 except:
44 44 pass
45 45 keys = []
46 46 key, fingerprint = None, None
47 47 err = ""
48 48 for l in ret.splitlines():
49 49 # see DETAILS in the gnupg documentation
50 50 # filter the logger output
51 51 if not l.startswith("[GNUPG:]"):
52 52 continue
53 53 l = l[9:]
54 54 if l.startswith("ERRSIG"):
55 55 err = _("error while verifying signature")
56 56 break
57 57 elif l.startswith("VALIDSIG"):
58 58 # fingerprint of the primary key
59 59 fingerprint = l.split()[10]
60 60 elif (l.startswith("GOODSIG") or
61 61 l.startswith("EXPSIG") or
62 62 l.startswith("EXPKEYSIG") or
63 63 l.startswith("BADSIG")):
64 64 if key is not None:
65 65 keys.append(key + [fingerprint])
66 66 key = l.split(" ", 2)
67 67 fingerprint = None
68 68 if err:
69 69 return err, []
70 70 if key is not None:
71 71 keys.append(key + [fingerprint])
72 72 return err, keys
73 73
74 74 def newgpg(ui, **opts):
75 75 """create a new gpg instance"""
76 76 gpgpath = ui.config("gpg", "cmd", "gpg")
77 77 gpgkey = opts.get('key')
78 78 if not gpgkey:
79 79 gpgkey = ui.config("gpg", "key", None)
80 80 return gpg(gpgpath, gpgkey)
81 81
82 82 def sigwalk(repo):
83 83 """
84 84 walk over every sig, yielding a pair
85 85 ((node, version, sig), (filename, linenumber))
86 86 """
87 87 def parsefile(fileiter, context):
88 88 ln = 1
89 89 for l in fileiter:
90 90 if not l:
91 91 continue
92 92 yield (l.split(" ", 2), (context, ln))
93 93 ln += 1
94 94
95 95 # read the heads
96 96 fl = repo.file(".hgsigs")
97 97 for r in reversed(fl.heads()):
98 98 fn = ".hgsigs|%s" % hgnode.short(r)
99 99 for item in parsefile(fl.read(r).splitlines(), fn):
100 100 yield item
101 101 try:
102 102 # read local signatures
103 103 fn = "localsigs"
104 104 for item in parsefile(repo.opener(fn), fn):
105 105 yield item
106 106 except IOError:
107 107 pass
108 108
109 109 def getkeys(ui, repo, mygpg, sigdata, context):
110 110 """get the keys who signed a data"""
111 111 fn, ln = context
112 112 node, version, sig = sigdata
113 113 prefix = "%s:%d" % (fn, ln)
114 114 node = hgnode.bin(node)
115 115
116 116 data = node2txt(repo, node, version)
117 117 sig = binascii.a2b_base64(sig)
118 118 err, keys = mygpg.verify(data, sig)
119 119 if err:
120 120 ui.warn("%s:%d %s\n" % (fn, ln , err))
121 121 return None
122 122
123 123 validkeys = []
124 124 # warn for expired key and/or sigs
125 125 for key in keys:
126 126 if key[0] == "BADSIG":
127 127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
128 128 continue
129 129 if key[0] == "EXPSIG":
130 130 ui.write(_("%s Note: Signature has expired"
131 131 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 132 elif key[0] == "EXPKEYSIG":
133 133 ui.write(_("%s Note: This key has expired"
134 134 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 135 validkeys.append((key[1], key[2], key[3]))
136 136 return validkeys
137 137
138 138 def sigs(ui, repo):
139 139 """list signed changesets"""
140 140 mygpg = newgpg(ui)
141 141 revs = {}
142 142
143 143 for data, context in sigwalk(repo):
144 144 node, version, sig = data
145 145 fn, ln = context
146 146 try:
147 147 n = repo.lookup(node)
148 148 except KeyError:
149 149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
150 150 continue
151 151 r = repo.changelog.rev(n)
152 152 keys = getkeys(ui, repo, mygpg, data, context)
153 153 if not keys:
154 154 continue
155 155 revs.setdefault(r, [])
156 156 revs[r].extend(keys)
157 157 for rev in sorted(revs, reverse=True):
158 158 for k in revs[rev]:
159 159 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
160 160 ui.write("%-30s %s\n" % (keystr(ui, k), r))
161 161
162 162 def check(ui, repo, rev):
163 163 """verify all the signatures there may be for a particular revision"""
164 164 mygpg = newgpg(ui)
165 165 rev = repo.lookup(rev)
166 166 hexrev = hgnode.hex(rev)
167 167 keys = []
168 168
169 169 for data, context in sigwalk(repo):
170 170 node, version, sig = data
171 171 if node == hexrev:
172 172 k = getkeys(ui, repo, mygpg, data, context)
173 173 if k:
174 174 keys.extend(k)
175 175
176 176 if not keys:
177 177 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
178 178 return
179 179
180 180 # print summary
181 181 ui.write("%s is signed by:\n" % hgnode.short(rev))
182 182 for key in keys:
183 183 ui.write(" %s\n" % keystr(ui, key))
184 184
185 185 def keystr(ui, key):
186 186 """associate a string to a key (username, comment)"""
187 187 keyid, user, fingerprint = key
188 188 comment = ui.config("gpg", fingerprint, None)
189 189 if comment:
190 190 return "%s (%s)" % (user, comment)
191 191 else:
192 192 return user
193 193
194 194 def sign(ui, repo, *revs, **opts):
195 195 """add a signature for the current or given revision
196 196
197 197 If no revision is given, the parent of the working directory is used,
198 198 or tip if no revision is checked out.
199 199
200 200 See :hg:`help dates` for a list of formats valid for -d/--date.
201 201 """
202 202
203 203 mygpg = newgpg(ui, **opts)
204 204 sigver = "0"
205 205 sigmessage = ""
206 206
207 207 date = opts.get('date')
208 208 if date:
209 209 opts['date'] = util.parsedate(date)
210 210
211 211 if revs:
212 212 nodes = [repo.lookup(n) for n in revs]
213 213 else:
214 214 nodes = [node for node in repo.dirstate.parents()
215 215 if node != hgnode.nullid]
216 216 if len(nodes) > 1:
217 217 raise util.Abort(_('uncommitted merge - please provide a '
218 218 'specific revision'))
219 219 if not nodes:
220 220 nodes = [repo.changelog.tip()]
221 221
222 222 for n in nodes:
223 223 hexnode = hgnode.hex(n)
224 224 ui.write(_("Signing %d:%s\n") % (repo.changelog.rev(n),
225 225 hgnode.short(n)))
226 226 # build data
227 227 data = node2txt(repo, n, sigver)
228 228 sig = mygpg.sign(data)
229 229 if not sig:
230 raise util.Abort(_("Error while signing"))
230 raise util.Abort(_("error while signing"))
231 231 sig = binascii.b2a_base64(sig)
232 232 sig = sig.replace("\n", "")
233 233 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
234 234
235 235 # write it
236 236 if opts['local']:
237 237 repo.opener("localsigs", "ab").write(sigmessage)
238 238 return
239 239
240 240 msigs = match.exact(repo.root, '', ['.hgsigs'])
241 241 s = repo.status(match=msigs, unknown=True, ignored=True)[:6]
242 242 if util.any(s) and not opts["force"]:
243 243 raise util.Abort(_("working copy of .hgsigs is changed "
244 244 "(please commit .hgsigs manually "
245 245 "or use --force)"))
246 246
247 247 repo.wfile(".hgsigs", "ab").write(sigmessage)
248 248
249 249 if '.hgsigs' not in repo.dirstate:
250 250 repo[None].add([".hgsigs"])
251 251
252 252 if opts["no_commit"]:
253 253 return
254 254
255 255 message = opts['message']
256 256 if not message:
257 257 # we don't translate commit messages
258 258 message = "\n".join(["Added signature for changeset %s"
259 259 % hgnode.short(n)
260 260 for n in nodes])
261 261 try:
262 262 repo.commit(message, opts['user'], opts['date'], match=msigs)
263 263 except ValueError, inst:
264 264 raise util.Abort(str(inst))
265 265
266 266 def node2txt(repo, node, ver):
267 267 """map a manifest into some text"""
268 268 if ver == "0":
269 269 return "%s\n" % hgnode.hex(node)
270 270 else:
271 271 raise util.Abort(_("unknown signature version"))
272 272
273 273 cmdtable = {
274 274 "sign":
275 275 (sign,
276 276 [('l', 'local', None, _('make the signature local')),
277 277 ('f', 'force', None, _('sign even if the sigfile is modified')),
278 278 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
279 279 ('k', 'key', '',
280 280 _('the key id to sign with'), _('ID')),
281 281 ('m', 'message', '',
282 282 _('commit message'), _('TEXT')),
283 283 ] + commands.commitopts2,
284 284 _('hg sign [OPTION]... [REVISION]...')),
285 285 "sigcheck": (check, [], _('hg sigcheck REVISION')),
286 286 "sigs": (sigs, [], _('hg sigs')),
287 287 }
288 288
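As a rough illustration of the .hgsigs format the gpg extension above reads and writes: sign() appends lines of the form "<hex node> <version> <base64 signature>", sigwalk() splits them back apart, and node2txt() defines what is actually signed for version "0". The node hash and signature in this sketch are fabricated placeholders:

# Illustrative only: mirrors sigwalk()/node2txt() from the hunk above.
line = ("1234567890abcdef1234567890abcdef12345678 0 "
        "iQEcBAABAgAGBQJ=")                      # fake node and fake signature
node, version, sig = line.split(" ", 2)          # same split as sigwalk()
assert version == "0"                            # only version node2txt() knows
data = "%s\n" % node                             # the text that was signed
print("verify %r against signature %r" % (data, sig))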
hgext/mq.py
@@ -1,3119 +1,3119 @@
1 1 # mq.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''manage a stack of patches
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use :hg:`help command` for more details)::
18 18
19 19 create new patch qnew
20 20 import existing patch qimport
21 21
22 22 print patch series qseries
23 23 print applied patches qapplied
24 24
25 25 add known patch to applied stack qpush
26 26 remove patch from applied stack qpop
27 27 refresh contents of top applied patch qrefresh
28 28
29 29 By default, mq will automatically use git patches when required to
30 30 avoid losing file mode changes, copy records, binary files or empty
31 31 files creations or deletions. This behaviour can be configured with::
32 32
33 33 [mq]
34 34 git = auto/keep/yes/no
35 35
36 36 If set to 'keep', mq will obey the [diff] section configuration while
37 37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 38 'no', mq will override the [diff] section and always generate git or
39 39 regular patches, possibly losing data in the second case.
40 40
41 41 You will by default be managing a patch queue named "patches". You can
42 42 create other, independent patch queues with the :hg:`qqueue` command.
43 43 '''
44 44
45 45 from mercurial.i18n import _
46 46 from mercurial.node import bin, hex, short, nullid, nullrev
47 47 from mercurial.lock import release
48 48 from mercurial import commands, cmdutil, hg, patch, util
49 49 from mercurial import repair, extensions, url, error
50 50 import os, sys, re, errno, shutil
51 51
52 52 commands.norepo += " qclone"
53 53
54 54 # Patch names look like unix file names.
55 55 # They must be joinable with the queue directory and result in the patch path.
56 56 normname = util.normpath
57 57
58 58 class statusentry(object):
59 59 def __init__(self, node, name):
60 60 self.node, self.name = node, name
61 61 def __repr__(self):
62 62 return hex(self.node) + ':' + self.name
63 63
64 64 class patchheader(object):
65 65 def __init__(self, pf, plainmode=False):
66 66 def eatdiff(lines):
67 67 while lines:
68 68 l = lines[-1]
69 69 if (l.startswith("diff -") or
70 70 l.startswith("Index:") or
71 71 l.startswith("===========")):
72 72 del lines[-1]
73 73 else:
74 74 break
75 75 def eatempty(lines):
76 76 while lines:
77 77 if not lines[-1].strip():
78 78 del lines[-1]
79 79 else:
80 80 break
81 81
82 82 message = []
83 83 comments = []
84 84 user = None
85 85 date = None
86 86 parent = None
87 87 format = None
88 88 subject = None
89 89 diffstart = 0
90 90
91 91 for line in file(pf):
92 92 line = line.rstrip()
93 93 if (line.startswith('diff --git')
94 94 or (diffstart and line.startswith('+++ '))):
95 95 diffstart = 2
96 96 break
97 97 diffstart = 0 # reset
98 98 if line.startswith("--- "):
99 99 diffstart = 1
100 100 continue
101 101 elif format == "hgpatch":
102 102 # parse values when importing the result of an hg export
103 103 if line.startswith("# User "):
104 104 user = line[7:]
105 105 elif line.startswith("# Date "):
106 106 date = line[7:]
107 107 elif line.startswith("# Parent "):
108 108 parent = line[9:]
109 109 elif not line.startswith("# ") and line:
110 110 message.append(line)
111 111 format = None
112 112 elif line == '# HG changeset patch':
113 113 message = []
114 114 format = "hgpatch"
115 115 elif (format != "tagdone" and (line.startswith("Subject: ") or
116 116 line.startswith("subject: "))):
117 117 subject = line[9:]
118 118 format = "tag"
119 119 elif (format != "tagdone" and (line.startswith("From: ") or
120 120 line.startswith("from: "))):
121 121 user = line[6:]
122 122 format = "tag"
123 123 elif (format != "tagdone" and (line.startswith("Date: ") or
124 124 line.startswith("date: "))):
125 125 date = line[6:]
126 126 format = "tag"
127 127 elif format == "tag" and line == "":
128 128 # when looking for tags (subject: from: etc) they
129 129 # end once you find a blank line in the source
130 130 format = "tagdone"
131 131 elif message or line:
132 132 message.append(line)
133 133 comments.append(line)
134 134
135 135 eatdiff(message)
136 136 eatdiff(comments)
137 137 eatempty(message)
138 138 eatempty(comments)
139 139
140 140 # make sure message isn't empty
141 141 if format and format.startswith("tag") and subject:
142 142 message.insert(0, "")
143 143 message.insert(0, subject)
144 144
145 145 self.message = message
146 146 self.comments = comments
147 147 self.user = user
148 148 self.date = date
149 149 self.parent = parent
150 150 self.haspatch = diffstart > 1
151 151 self.plainmode = plainmode
152 152
153 153 def setuser(self, user):
154 154 if not self.updateheader(['From: ', '# User '], user):
155 155 try:
156 156 patchheaderat = self.comments.index('# HG changeset patch')
157 157 self.comments.insert(patchheaderat + 1, '# User ' + user)
158 158 except ValueError:
159 159 if self.plainmode or self._hasheader(['Date: ']):
160 160 self.comments = ['From: ' + user] + self.comments
161 161 else:
162 162 tmp = ['# HG changeset patch', '# User ' + user, '']
163 163 self.comments = tmp + self.comments
164 164 self.user = user
165 165
166 166 def setdate(self, date):
167 167 if not self.updateheader(['Date: ', '# Date '], date):
168 168 try:
169 169 patchheaderat = self.comments.index('# HG changeset patch')
170 170 self.comments.insert(patchheaderat + 1, '# Date ' + date)
171 171 except ValueError:
172 172 if self.plainmode or self._hasheader(['From: ']):
173 173 self.comments = ['Date: ' + date] + self.comments
174 174 else:
175 175 tmp = ['# HG changeset patch', '# Date ' + date, '']
176 176 self.comments = tmp + self.comments
177 177 self.date = date
178 178
179 179 def setparent(self, parent):
180 180 if not self.updateheader(['# Parent '], parent):
181 181 try:
182 182 patchheaderat = self.comments.index('# HG changeset patch')
183 183 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
184 184 except ValueError:
185 185 pass
186 186 self.parent = parent
187 187
188 188 def setmessage(self, message):
189 189 if self.comments:
190 190 self._delmsg()
191 191 self.message = [message]
192 192 self.comments += self.message
193 193
194 194 def updateheader(self, prefixes, new):
195 195 '''Update all references to a field in the patch header.
196 196 Return whether the field is present.'''
197 197 res = False
198 198 for prefix in prefixes:
199 199 for i in xrange(len(self.comments)):
200 200 if self.comments[i].startswith(prefix):
201 201 self.comments[i] = prefix + new
202 202 res = True
203 203 break
204 204 return res
205 205
206 206 def _hasheader(self, prefixes):
207 207 '''Check if a header starts with any of the given prefixes.'''
208 208 for prefix in prefixes:
209 209 for comment in self.comments:
210 210 if comment.startswith(prefix):
211 211 return True
212 212 return False
213 213
214 214 def __str__(self):
215 215 if not self.comments:
216 216 return ''
217 217 return '\n'.join(self.comments) + '\n\n'
218 218
219 219 def _delmsg(self):
220 220 '''Remove existing message, keeping the rest of the comments fields.
221 221 If comments contains 'subject: ', message will prepend
222 222 the field and a blank line.'''
223 223 if self.message:
224 224 subj = 'subject: ' + self.message[0].lower()
225 225 for i in xrange(len(self.comments)):
226 226 if subj == self.comments[i].lower():
227 227 del self.comments[i]
228 228 self.message = self.message[2:]
229 229 break
230 230 ci = 0
231 231 for mi in self.message:
232 232 while mi != self.comments[ci]:
233 233 ci += 1
234 234 del self.comments[ci]
235 235
236 236 class queue(object):
237 237 def __init__(self, ui, path, patchdir=None):
238 238 self.basepath = path
239 239 try:
240 240 fh = open(os.path.join(path, 'patches.queue'))
241 241 cur = fh.read().rstrip()
242 242 if not cur:
243 243 curpath = os.path.join(path, 'patches')
244 244 else:
245 245 curpath = os.path.join(path, 'patches-' + cur)
246 246 except IOError:
247 247 curpath = os.path.join(path, 'patches')
248 248 self.path = patchdir or curpath
249 249 self.opener = util.opener(self.path)
250 250 self.ui = ui
251 251 self.applied_dirty = 0
252 252 self.series_dirty = 0
253 253 self.added = []
254 254 self.series_path = "series"
255 255 self.status_path = "status"
256 256 self.guards_path = "guards"
257 257 self.active_guards = None
258 258 self.guards_dirty = False
259 259 # Handle mq.git as a bool with extended values
260 260 try:
261 261 gitmode = ui.configbool('mq', 'git', None)
262 262 if gitmode is None:
263 263 raise error.ConfigError()
264 264 self.gitmode = gitmode and 'yes' or 'no'
265 265 except error.ConfigError:
266 266 self.gitmode = ui.config('mq', 'git', 'auto').lower()
267 267 self.plainmode = ui.configbool('mq', 'plain', False)
268 268
269 269 @util.propertycache
270 270 def applied(self):
271 271 if os.path.exists(self.join(self.status_path)):
272 272 def parse(l):
273 273 n, name = l.split(':', 1)
274 274 return statusentry(bin(n), name)
275 275 lines = self.opener(self.status_path).read().splitlines()
276 276 return [parse(l) for l in lines]
277 277 return []
278 278
279 279 @util.propertycache
280 280 def full_series(self):
281 281 if os.path.exists(self.join(self.series_path)):
282 282 return self.opener(self.series_path).read().splitlines()
283 283 return []
284 284
285 285 @util.propertycache
286 286 def series(self):
287 287 self.parse_series()
288 288 return self.series
289 289
290 290 @util.propertycache
291 291 def series_guards(self):
292 292 self.parse_series()
293 293 return self.series_guards
294 294
295 295 def invalidate(self):
296 296 for a in 'applied full_series series series_guards'.split():
297 297 if a in self.__dict__:
298 298 delattr(self, a)
299 299 self.applied_dirty = 0
300 300 self.series_dirty = 0
301 301 self.guards_dirty = False
302 302 self.active_guards = None
303 303
304 304 def diffopts(self, opts={}, patchfn=None):
305 305 diffopts = patch.diffopts(self.ui, opts)
306 306 if self.gitmode == 'auto':
307 307 diffopts.upgrade = True
308 308 elif self.gitmode == 'keep':
309 309 pass
310 310 elif self.gitmode in ('yes', 'no'):
311 311 diffopts.git = self.gitmode == 'yes'
312 312 else:
313 313 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
314 314 ' got %s') % self.gitmode)
315 315 if patchfn:
316 316 diffopts = self.patchopts(diffopts, patchfn)
317 317 return diffopts
318 318
319 319 def patchopts(self, diffopts, *patches):
320 320 """Return a copy of input diff options with git set to true if
321 321 referenced patch is a git patch and should be preserved as such.
322 322 """
323 323 diffopts = diffopts.copy()
324 324 if not diffopts.git and self.gitmode == 'keep':
325 325 for patchfn in patches:
326 326 patchf = self.opener(patchfn, 'r')
327 327 # if the patch was a git patch, refresh it as a git patch
328 328 for line in patchf:
329 329 if line.startswith('diff --git'):
330 330 diffopts.git = True
331 331 break
332 332 patchf.close()
333 333 return diffopts
334 334
335 335 def join(self, *p):
336 336 return os.path.join(self.path, *p)
337 337
338 338 def find_series(self, patch):
339 339 def matchpatch(l):
340 340 l = l.split('#', 1)[0]
341 341 return l.strip() == patch
342 342 for index, l in enumerate(self.full_series):
343 343 if matchpatch(l):
344 344 return index
345 345 return None
346 346
347 347 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
348 348
349 349 def parse_series(self):
350 350 self.series = []
351 351 self.series_guards = []
352 352 for l in self.full_series:
353 353 h = l.find('#')
354 354 if h == -1:
355 355 patch = l
356 356 comment = ''
357 357 elif h == 0:
358 358 continue
359 359 else:
360 360 patch = l[:h]
361 361 comment = l[h:]
362 362 patch = patch.strip()
363 363 if patch:
364 364 if patch in self.series:
365 365 raise util.Abort(_('%s appears more than once in %s') %
366 366 (patch, self.join(self.series_path)))
367 367 self.series.append(patch)
368 368 self.series_guards.append(self.guard_re.findall(comment))
369 369
370 370 def check_guard(self, guard):
371 371 if not guard:
372 372 return _('guard cannot be an empty string')
373 373 bad_chars = '# \t\r\n\f'
374 374 first = guard[0]
375 375 if first in '-+':
376 376 return (_('guard %r starts with invalid character: %r') %
377 377 (guard, first))
378 378 for c in bad_chars:
379 379 if c in guard:
380 380 return _('invalid character in guard %r: %r') % (guard, c)
381 381
382 382 def set_active(self, guards):
383 383 for guard in guards:
384 384 bad = self.check_guard(guard)
385 385 if bad:
386 386 raise util.Abort(bad)
387 387 guards = sorted(set(guards))
388 388 self.ui.debug('active guards: %s\n' % ' '.join(guards))
389 389 self.active_guards = guards
390 390 self.guards_dirty = True
391 391
392 392 def active(self):
393 393 if self.active_guards is None:
394 394 self.active_guards = []
395 395 try:
396 396 guards = self.opener(self.guards_path).read().split()
397 397 except IOError, err:
398 398 if err.errno != errno.ENOENT:
399 399 raise
400 400 guards = []
401 401 for i, guard in enumerate(guards):
402 402 bad = self.check_guard(guard)
403 403 if bad:
404 404 self.ui.warn('%s:%d: %s\n' %
405 405 (self.join(self.guards_path), i + 1, bad))
406 406 else:
407 407 self.active_guards.append(guard)
408 408 return self.active_guards
409 409
410 410 def set_guards(self, idx, guards):
411 411 for g in guards:
412 412 if len(g) < 2:
413 413 raise util.Abort(_('guard %r too short') % g)
414 414 if g[0] not in '-+':
415 415 raise util.Abort(_('guard %r starts with invalid char') % g)
416 416 bad = self.check_guard(g[1:])
417 417 if bad:
418 418 raise util.Abort(bad)
419 419 drop = self.guard_re.sub('', self.full_series[idx])
420 420 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
421 421 self.parse_series()
422 422 self.series_dirty = True
423 423
424 424 def pushable(self, idx):
425 425 if isinstance(idx, str):
426 426 idx = self.series.index(idx)
427 427 patchguards = self.series_guards[idx]
428 428 if not patchguards:
429 429 return True, None
430 430 guards = self.active()
431 431 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
432 432 if exactneg:
433 433 return False, exactneg[0]
434 434 pos = [g for g in patchguards if g[0] == '+']
435 435 exactpos = [g for g in pos if g[1:] in guards]
436 436 if pos:
437 437 if exactpos:
438 438 return True, exactpos[0]
439 439 return False, pos
440 440 return True, ''
441 441
442 442 def explain_pushable(self, idx, all_patches=False):
443 443 write = all_patches and self.ui.write or self.ui.warn
444 444 if all_patches or self.ui.verbose:
445 445 if isinstance(idx, str):
446 446 idx = self.series.index(idx)
447 447 pushable, why = self.pushable(idx)
448 448 if all_patches and pushable:
449 449 if why is None:
450 450 write(_('allowing %s - no guards in effect\n') %
451 451 self.series[idx])
452 452 else:
453 453 if not why:
454 454 write(_('allowing %s - no matching negative guards\n') %
455 455 self.series[idx])
456 456 else:
457 457 write(_('allowing %s - guarded by %r\n') %
458 458 (self.series[idx], why))
459 459 if not pushable:
460 460 if why:
461 461 write(_('skipping %s - guarded by %r\n') %
462 462 (self.series[idx], why))
463 463 else:
464 464 write(_('skipping %s - no matching guards\n') %
465 465 self.series[idx])
466 466
467 467 def save_dirty(self):
468 468 def write_list(items, path):
469 469 fp = self.opener(path, 'w')
470 470 for i in items:
471 471 fp.write("%s\n" % i)
472 472 fp.close()
473 473 if self.applied_dirty:
474 474 write_list(map(str, self.applied), self.status_path)
475 475 if self.series_dirty:
476 476 write_list(self.full_series, self.series_path)
477 477 if self.guards_dirty:
478 478 write_list(self.active_guards, self.guards_path)
479 479 if self.added:
480 480 qrepo = self.qrepo()
481 481 if qrepo:
482 482 qrepo[None].add(self.added)
483 483 self.added = []
484 484
485 485 def removeundo(self, repo):
486 486 undo = repo.sjoin('undo')
487 487 if not os.path.exists(undo):
488 488 return
489 489 try:
490 490 os.unlink(undo)
491 491 except OSError, inst:
492 492 self.ui.warn(_('error removing undo: %s\n') % str(inst))
493 493
494 494 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
495 495 fp=None, changes=None, opts={}):
496 496 stat = opts.get('stat')
497 497 m = cmdutil.match(repo, files, opts)
498 498 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
499 499 changes, stat, fp)
500 500
501 501 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
502 502 # first try just applying the patch
503 503 (err, n) = self.apply(repo, [patch], update_status=False,
504 504 strict=True, merge=rev)
505 505
506 506 if err == 0:
507 507 return (err, n)
508 508
509 509 if n is None:
510 510 raise util.Abort(_("apply failed for patch %s") % patch)
511 511
512 512 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
513 513
514 514 # apply failed, strip away that rev and merge.
515 515 hg.clean(repo, head)
516 516 self.strip(repo, [n], update=False, backup='strip')
517 517
518 518 ctx = repo[rev]
519 519 ret = hg.merge(repo, rev)
520 520 if ret:
521 521 raise util.Abort(_("update returned %d") % ret)
522 522 n = repo.commit(ctx.description(), ctx.user(), force=True)
523 523 if n is None:
524 524 raise util.Abort(_("repo commit failed"))
525 525 try:
526 526 ph = patchheader(mergeq.join(patch), self.plainmode)
527 527 except:
528 528 raise util.Abort(_("unable to read %s") % patch)
529 529
530 530 diffopts = self.patchopts(diffopts, patch)
531 531 patchf = self.opener(patch, "w")
532 532 comments = str(ph)
533 533 if comments:
534 534 patchf.write(comments)
535 535 self.printdiff(repo, diffopts, head, n, fp=patchf)
536 536 patchf.close()
537 537 self.removeundo(repo)
538 538 return (0, n)
539 539
540 540 def qparents(self, repo, rev=None):
541 541 if rev is None:
542 542 (p1, p2) = repo.dirstate.parents()
543 543 if p2 == nullid:
544 544 return p1
545 545 if not self.applied:
546 546 return None
547 547 return self.applied[-1].node
548 548 p1, p2 = repo.changelog.parents(rev)
549 549 if p2 != nullid and p2 in [x.node for x in self.applied]:
550 550 return p2
551 551 return p1
552 552
553 553 def mergepatch(self, repo, mergeq, series, diffopts):
554 554 if not self.applied:
555 555 # each of the patches merged in will have two parents. This
556 556 # can confuse the qrefresh, qdiff, and strip code because it
557 557 # needs to know which parent is actually in the patch queue.
558 558 # so, we insert a merge marker with only one parent. This way
559 559 # the first patch in the queue is never a merge patch
560 560 #
561 561 pname = ".hg.patches.merge.marker"
562 562 n = repo.commit('[mq]: merge marker', force=True)
563 563 self.removeundo(repo)
564 564 self.applied.append(statusentry(n, pname))
565 565 self.applied_dirty = 1
566 566
567 567 head = self.qparents(repo)
568 568
569 569 for patch in series:
570 570 patch = mergeq.lookup(patch, strict=True)
571 571 if not patch:
572 572 self.ui.warn(_("patch %s does not exist\n") % patch)
573 573 return (1, None)
574 574 pushable, reason = self.pushable(patch)
575 575 if not pushable:
576 576 self.explain_pushable(patch, all_patches=True)
577 577 continue
578 578 info = mergeq.isapplied(patch)
579 579 if not info:
580 580 self.ui.warn(_("patch %s is not applied\n") % patch)
581 581 return (1, None)
582 582 rev = info[1]
583 583 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
584 584 if head:
585 585 self.applied.append(statusentry(head, patch))
586 586 self.applied_dirty = 1
587 587 if err:
588 588 return (err, head)
589 589 self.save_dirty()
590 590 return (0, head)
591 591
592 592 def patch(self, repo, patchfile):
593 593 '''Apply patchfile to the working directory.
594 594 patchfile: name of patch file'''
595 595 files = {}
596 596 try:
597 597 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
598 598 files=files, eolmode=None)
599 599 except Exception, inst:
600 600 self.ui.note(str(inst) + '\n')
601 601 if not self.ui.verbose:
602 602 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
603 603 return (False, files, False)
604 604
605 605 return (True, files, fuzz)
606 606
607 607 def apply(self, repo, series, list=False, update_status=True,
608 608 strict=False, patchdir=None, merge=None, all_files=None):
609 609 wlock = lock = tr = None
610 610 try:
611 611 wlock = repo.wlock()
612 612 lock = repo.lock()
613 613 tr = repo.transaction("qpush")
614 614 try:
615 615 ret = self._apply(repo, series, list, update_status,
616 616 strict, patchdir, merge, all_files=all_files)
617 617 tr.close()
618 618 self.save_dirty()
619 619 return ret
620 620 except:
621 621 try:
622 622 tr.abort()
623 623 finally:
624 624 repo.invalidate()
625 625 repo.dirstate.invalidate()
626 626 raise
627 627 finally:
628 628 release(tr, lock, wlock)
629 629 self.removeundo(repo)
630 630
631 631 def _apply(self, repo, series, list=False, update_status=True,
632 632 strict=False, patchdir=None, merge=None, all_files=None):
633 633 '''returns (error, hash)
634 634 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
635 635 # TODO unify with commands.py
636 636 if not patchdir:
637 637 patchdir = self.path
638 638 err = 0
639 639 n = None
640 640 for patchname in series:
641 641 pushable, reason = self.pushable(patchname)
642 642 if not pushable:
643 643 self.explain_pushable(patchname, all_patches=True)
644 644 continue
645 645 self.ui.status(_("applying %s\n") % patchname)
646 646 pf = os.path.join(patchdir, patchname)
647 647
648 648 try:
649 649 ph = patchheader(self.join(patchname), self.plainmode)
650 650 except:
651 651 self.ui.warn(_("unable to read %s\n") % patchname)
652 652 err = 1
653 653 break
654 654
655 655 message = ph.message
656 656 if not message:
657 657 message = "imported patch %s\n" % patchname
658 658 else:
659 659 if list:
660 660 message.append("\nimported patch %s" % patchname)
661 661 message = '\n'.join(message)
662 662
663 663 if ph.haspatch:
664 664 (patcherr, files, fuzz) = self.patch(repo, pf)
665 665 if all_files is not None:
666 666 all_files.update(files)
667 667 patcherr = not patcherr
668 668 else:
669 669 self.ui.warn(_("patch %s is empty\n") % patchname)
670 670 patcherr, files, fuzz = 0, [], 0
671 671
672 672 if merge and files:
673 673 # Mark as removed/merged and update dirstate parent info
674 674 removed = []
675 675 merged = []
676 676 for f in files:
677 677 if os.path.exists(repo.wjoin(f)):
678 678 merged.append(f)
679 679 else:
680 680 removed.append(f)
681 681 for f in removed:
682 682 repo.dirstate.remove(f)
683 683 for f in merged:
684 684 repo.dirstate.merge(f)
685 685 p1, p2 = repo.dirstate.parents()
686 686 repo.dirstate.setparents(p1, merge)
687 687
688 688 files = patch.updatedir(self.ui, repo, files)
689 689 match = cmdutil.matchfiles(repo, files or [])
690 690 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
691 691
692 692 if n is None:
693 693 raise util.Abort(_("repo commit failed"))
694 694
695 695 if update_status:
696 696 self.applied.append(statusentry(n, patchname))
697 697
698 698 if patcherr:
699 699 self.ui.warn(_("patch failed, rejects left in working dir\n"))
700 700 err = 2
701 701 break
702 702
703 703 if fuzz and strict:
704 704 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
705 705 err = 3
706 706 break
707 707 return (err, n)
708 708
709 709 def _cleanup(self, patches, numrevs, keep=False):
710 710 if not keep:
711 711 r = self.qrepo()
712 712 if r:
713 713 r[None].remove(patches, True)
714 714 else:
715 715 for p in patches:
716 716 os.unlink(self.join(p))
717 717
718 718 if numrevs:
719 719 del self.applied[:numrevs]
720 720 self.applied_dirty = 1
721 721
722 722 for i in sorted([self.find_series(p) for p in patches], reverse=True):
723 723 del self.full_series[i]
724 724 self.parse_series()
725 725 self.series_dirty = 1
726 726
727 727 def _revpatches(self, repo, revs):
728 728 firstrev = repo[self.applied[0].node].rev()
729 729 patches = []
730 730 for i, rev in enumerate(revs):
731 731
732 732 if rev < firstrev:
733 733 raise util.Abort(_('revision %d is not managed') % rev)
734 734
735 735 ctx = repo[rev]
736 736 base = self.applied[i].node
737 737 if ctx.node() != base:
738 738 msg = _('cannot delete revision %d above applied patches')
739 739 raise util.Abort(msg % rev)
740 740
741 741 patch = self.applied[i].name
742 742 for fmt in ('[mq]: %s', 'imported patch %s'):
743 743 if ctx.description() == fmt % patch:
744 744 msg = _('patch %s finalized without changeset message\n')
745 745 repo.ui.status(msg % patch)
746 746 break
747 747
748 748 patches.append(patch)
749 749 return patches
750 750
751 751 def finish(self, repo, revs):
752 752 patches = self._revpatches(repo, sorted(revs))
753 753 self._cleanup(patches, len(patches))
754 754
755 755 def delete(self, repo, patches, opts):
756 756 if not patches and not opts.get('rev'):
757 757 raise util.Abort(_('qdelete requires at least one revision or '
758 758 'patch name'))
759 759
760 760 realpatches = []
761 761 for patch in patches:
762 762 patch = self.lookup(patch, strict=True)
763 763 info = self.isapplied(patch)
764 764 if info:
765 765 raise util.Abort(_("cannot delete applied patch %s") % patch)
766 766 if patch not in self.series:
767 767 raise util.Abort(_("patch %s not in series file") % patch)
768 768 realpatches.append(patch)
769 769
770 770 numrevs = 0
771 771 if opts.get('rev'):
772 772 if not self.applied:
773 773 raise util.Abort(_('no patches applied'))
774 774 revs = cmdutil.revrange(repo, opts['rev'])
775 775 if len(revs) > 1 and revs[0] > revs[1]:
776 776 revs.reverse()
777 777 revpatches = self._revpatches(repo, revs)
778 778 realpatches += revpatches
779 779 numrevs = len(revpatches)
780 780
781 781 self._cleanup(realpatches, numrevs, opts.get('keep'))
782 782
783 783 def check_toppatch(self, repo):
784 784 if self.applied:
785 785 top = self.applied[-1].node
786 786 patch = self.applied[-1].name
787 787 pp = repo.dirstate.parents()
788 788 if top not in pp:
789 789 raise util.Abort(_("working directory revision is not qtip"))
790 790 return top, patch
791 791 return None, None
792 792
793 793 def check_localchanges(self, repo, force=False, refresh=True):
794 794 m, a, r, d = repo.status()[:4]
795 795 if (m or a or r or d) and not force:
796 796 if refresh:
797 797 raise util.Abort(_("local changes found, refresh first"))
798 798 else:
799 799 raise util.Abort(_("local changes found"))
800 800 return m, a, r, d
801 801
802 802 _reserved = ('series', 'status', 'guards')
803 803 def check_reserved_name(self, name):
804 804 if (name in self._reserved or name.startswith('.hg')
805 805 or name.startswith('.mq') or '#' in name or ':' in name):
806 806 raise util.Abort(_('"%s" cannot be used as the name of a patch')
807 807 % name)
808 808
809 809 def new(self, repo, patchfn, *pats, **opts):
810 810 """options:
811 811 msg: a string or a no-argument function returning a string
812 812 """
813 813 msg = opts.get('msg')
814 814 user = opts.get('user')
815 815 date = opts.get('date')
816 816 if date:
817 817 date = util.parsedate(date)
818 818 diffopts = self.diffopts({'git': opts.get('git')})
819 819 self.check_reserved_name(patchfn)
820 820 if os.path.exists(self.join(patchfn)):
821 821 raise util.Abort(_('patch "%s" already exists') % patchfn)
822 822 if opts.get('include') or opts.get('exclude') or pats:
823 823 match = cmdutil.match(repo, pats, opts)
824 824 # detect missing files in pats
825 825 def badfn(f, msg):
826 826 raise util.Abort('%s: %s' % (f, msg))
827 827 match.bad = badfn
828 828 m, a, r, d = repo.status(match=match)[:4]
829 829 else:
830 830 m, a, r, d = self.check_localchanges(repo, force=True)
831 831 match = cmdutil.matchfiles(repo, m + a + r)
832 832 if len(repo[None].parents()) > 1:
833 833 raise util.Abort(_('cannot manage merge changesets'))
834 834 commitfiles = m + a + r
835 835 self.check_toppatch(repo)
836 836 insert = self.full_series_end()
837 837 wlock = repo.wlock()
838 838 try:
839 839 # if patch file write fails, abort early
840 840 p = self.opener(patchfn, "w")
841 841 try:
842 842 if self.plainmode:
843 843 if user:
844 844 p.write("From: " + user + "\n")
845 845 if not date:
846 846 p.write("\n")
847 847 if date:
848 848 p.write("Date: %d %d\n\n" % date)
849 849 else:
850 850 p.write("# HG changeset patch\n")
851 851 p.write("# Parent "
852 852 + hex(repo[None].parents()[0].node()) + "\n")
853 853 if user:
854 854 p.write("# User " + user + "\n")
855 855 if date:
856 856 p.write("# Date %s %s\n\n" % date)
857 857 if hasattr(msg, '__call__'):
858 858 msg = msg()
859 859 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
860 860 n = repo.commit(commitmsg, user, date, match=match, force=True)
861 861 if n is None:
862 862 raise util.Abort(_("repo commit failed"))
863 863 try:
864 864 self.full_series[insert:insert] = [patchfn]
865 865 self.applied.append(statusentry(n, patchfn))
866 866 self.parse_series()
867 867 self.series_dirty = 1
868 868 self.applied_dirty = 1
869 869 if msg:
870 870 msg = msg + "\n\n"
871 871 p.write(msg)
872 872 if commitfiles:
873 873 parent = self.qparents(repo, n)
874 874 chunks = patch.diff(repo, node1=parent, node2=n,
875 875 match=match, opts=diffopts)
876 876 for chunk in chunks:
877 877 p.write(chunk)
878 878 p.close()
879 879 wlock.release()
880 880 wlock = None
881 881 r = self.qrepo()
882 882 if r:
883 883 r[None].add([patchfn])
884 884 except:
885 885 repo.rollback()
886 886 raise
887 887 except Exception:
888 888 patchpath = self.join(patchfn)
889 889 try:
890 890 os.unlink(patchpath)
891 891 except:
892 892 self.ui.warn(_('error unlinking %s\n') % patchpath)
893 893 raise
894 894 self.removeundo(repo)
895 895 finally:
896 896 release(wlock)
897 897
898 898 def strip(self, repo, revs, update=True, backup="all", force=None):
899 899 wlock = lock = None
900 900 try:
901 901 wlock = repo.wlock()
902 902 lock = repo.lock()
903 903
904 904 if update:
905 905 self.check_localchanges(repo, force=force, refresh=False)
906 906 urev = self.qparents(repo, revs[0])
907 907 hg.clean(repo, urev)
908 908 repo.dirstate.write()
909 909
910 910 self.removeundo(repo)
911 911 for rev in revs:
912 912 repair.strip(self.ui, repo, rev, backup)
913 913 # strip may have unbundled a set of backed up revisions after
914 914 # the actual strip
915 915 self.removeundo(repo)
916 916 finally:
917 917 release(lock, wlock)
918 918
919 919 def isapplied(self, patch):
920 920 """returns (index, rev, patch)"""
921 921 for i, a in enumerate(self.applied):
922 922 if a.name == patch:
923 923 return (i, a.node, a.name)
924 924 return None
925 925
926 926 # if the exact patch name does not exist, we try a few
927 927 # variations. If strict is passed, we try only #1
928 928 #
929 929 # 1) a number to indicate an offset in the series file
930 930 # 2) a unique substring of the patch name was given
931 931 # 3) patchname[-+]num to indicate an offset in the series file
932 932 def lookup(self, patch, strict=False):
933 933 patch = patch and str(patch)
934 934
935 935 def partial_name(s):
936 936 if s in self.series:
937 937 return s
938 938 matches = [x for x in self.series if s in x]
939 939 if len(matches) > 1:
940 940 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
941 941 for m in matches:
942 942 self.ui.warn(' %s\n' % m)
943 943 return None
944 944 if matches:
945 945 return matches[0]
946 946 if self.series and self.applied:
947 947 if s == 'qtip':
948 948 return self.series[self.series_end(True)-1]
949 949 if s == 'qbase':
950 950 return self.series[0]
951 951 return None
952 952
953 953 if patch is None:
954 954 return None
955 955 if patch in self.series:
956 956 return patch
957 957
958 958 if not os.path.isfile(self.join(patch)):
959 959 try:
960 960 sno = int(patch)
961 961 except (ValueError, OverflowError):
962 962 pass
963 963 else:
964 964 if -len(self.series) <= sno < len(self.series):
965 965 return self.series[sno]
966 966
967 967 if not strict:
968 968 res = partial_name(patch)
969 969 if res:
970 970 return res
971 971 minus = patch.rfind('-')
972 972 if minus >= 0:
973 973 res = partial_name(patch[:minus])
974 974 if res:
975 975 i = self.series.index(res)
976 976 try:
977 977 off = int(patch[minus + 1:] or 1)
978 978 except (ValueError, OverflowError):
979 979 pass
980 980 else:
981 981 if i - off >= 0:
982 982 return self.series[i - off]
983 983 plus = patch.rfind('+')
984 984 if plus >= 0:
985 985 res = partial_name(patch[:plus])
986 986 if res:
987 987 i = self.series.index(res)
988 988 try:
989 989 off = int(patch[plus + 1:] or 1)
990 990 except (ValueError, OverflowError):
991 991 pass
992 992 else:
993 993 if i + off < len(self.series):
994 994 return self.series[i + off]
995 995 raise util.Abort(_("patch %s not in series") % patch)
996 996
997 997 def push(self, repo, patch=None, force=False, list=False,
998 998 mergeq=None, all=False, move=False):
999 999 diffopts = self.diffopts()
1000 1000 wlock = repo.wlock()
1001 1001 try:
1002 1002 heads = []
1003 1003 for b, ls in repo.branchmap().iteritems():
1004 1004 heads += ls
1005 1005 if not heads:
1006 1006 heads = [nullid]
1007 1007 if repo.dirstate.parents()[0] not in heads:
1008 1008 self.ui.status(_("(working directory not at a head)\n"))
1009 1009
1010 1010 if not self.series:
1011 1011 self.ui.warn(_('no patches in series\n'))
1012 1012 return 0
1013 1013
1014 1014 patch = self.lookup(patch)
1015 1015 # Suppose our series file is: A B C and the current 'top'
1016 1016 # patch is B. qpush C should be performed (moving forward)
1017 1017 # qpush B is a NOP (no change) qpush A is an error (can't
1018 1018 # go backwards with qpush)
1019 1019 if patch:
1020 1020 info = self.isapplied(patch)
1021 1021 if info:
1022 1022 if info[0] < len(self.applied) - 1:
1023 1023 raise util.Abort(
1024 1024 _("cannot push to a previous patch: %s") % patch)
1025 1025 self.ui.warn(
1026 1026 _('qpush: %s is already at the top\n') % patch)
1027 1027 return 0
1028 1028 pushable, reason = self.pushable(patch)
1029 1029 if not pushable:
1030 1030 if reason:
1031 1031 reason = _('guarded by %r') % reason
1032 1032 else:
1033 1033 reason = _('no matching guards')
1034 1034 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1035 1035 return 1
1036 1036 elif all:
1037 1037 patch = self.series[-1]
1038 1038 if self.isapplied(patch):
1039 1039 self.ui.warn(_('all patches are currently applied\n'))
1040 1040 return 0
1041 1041
1042 1042 # Following the above example, starting at 'top' of B:
1043 1043 # qpush should be performed (pushes C), but a subsequent
1044 1044 # qpush without an argument is an error (nothing to
1045 1045 # apply). This allows a loop of "...while hg qpush..." to
1046 1046 # work as it detects an error when done
1047 1047 start = self.series_end()
1048 1048 if start == len(self.series):
1049 1049 self.ui.warn(_('patch series already fully applied\n'))
1050 1050 return 1
1051 1051 if not force:
1052 1052 self.check_localchanges(repo)
1053 1053
1054 1054 if move:
1055 1055 if not patch:
1056 1056 raise util.Abort(_("please specify the patch to move"))
1057 1057 for i, rpn in enumerate(self.full_series[start:]):
1058 1058 # strip markers for patch guards
1059 1059 if self.guard_re.split(rpn, 1)[0] == patch:
1060 1060 break
1061 1061 index = start + i
1062 1062 assert index < len(self.full_series)
1063 1063 fullpatch = self.full_series[index]
1064 1064 del self.full_series[index]
1065 1065 self.full_series.insert(start, fullpatch)
1066 1066 self.parse_series()
1067 1067 self.series_dirty = 1
1068 1068
1069 1069 self.applied_dirty = 1
1070 1070 if start > 0:
1071 1071 self.check_toppatch(repo)
1072 1072 if not patch:
1073 1073 patch = self.series[start]
1074 1074 end = start + 1
1075 1075 else:
1076 1076 end = self.series.index(patch, start) + 1
1077 1077
1078 1078 s = self.series[start:end]
1079 1079 all_files = set()
1080 1080 try:
1081 1081 if mergeq:
1082 1082 ret = self.mergepatch(repo, mergeq, s, diffopts)
1083 1083 else:
1084 1084 ret = self.apply(repo, s, list, all_files=all_files)
1085 1085 except:
1086 1086 self.ui.warn(_('cleaning up working directory...'))
1087 1087 node = repo.dirstate.parents()[0]
1088 1088 hg.revert(repo, node, None)
1089 1089 # only remove unknown files that we know we touched or
1090 1090 # created while patching
1091 1091 for f in all_files:
1092 1092 if f not in repo.dirstate:
1093 1093 try:
1094 1094 util.unlink(repo.wjoin(f))
1095 1095 except OSError, inst:
1096 1096 if inst.errno != errno.ENOENT:
1097 1097 raise
1098 1098 self.ui.warn(_('done\n'))
1099 1099 raise
1100 1100
1101 1101 if not self.applied:
1102 1102 return ret[0]
1103 1103 top = self.applied[-1].name
1104 1104 if ret[0] and ret[0] > 1:
1105 1105 msg = _("errors during apply, please fix and refresh %s\n")
1106 1106 self.ui.write(msg % top)
1107 1107 else:
1108 1108 self.ui.write(_("now at: %s\n") % top)
1109 1109 return ret[0]
1110 1110
1111 1111 finally:
1112 1112 wlock.release()
1113 1113
1114 1114 def pop(self, repo, patch=None, force=False, update=True, all=False):
1115 1115 wlock = repo.wlock()
1116 1116 try:
1117 1117 if patch:
1118 1118 # index, rev, patch
1119 1119 info = self.isapplied(patch)
1120 1120 if not info:
1121 1121 patch = self.lookup(patch)
1122 1122 info = self.isapplied(patch)
1123 1123 if not info:
1124 1124 raise util.Abort(_("patch %s is not applied") % patch)
1125 1125
1126 1126 if not self.applied:
1127 1127 # Allow qpop -a to work repeatedly,
1128 1128 # but not qpop without an argument
1129 1129 self.ui.warn(_("no patches applied\n"))
1130 1130 return not all
1131 1131
1132 1132 if all:
1133 1133 start = 0
1134 1134 elif patch:
1135 1135 start = info[0] + 1
1136 1136 else:
1137 1137 start = len(self.applied) - 1
1138 1138
1139 1139 if start >= len(self.applied):
1140 1140 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1141 1141 return
1142 1142
1143 1143 if not update:
1144 1144 parents = repo.dirstate.parents()
1145 1145 rr = [x.node for x in self.applied]
1146 1146 for p in parents:
1147 1147 if p in rr:
1148 1148 self.ui.warn(_("qpop: forcing dirstate update\n"))
1149 1149 update = True
1150 1150 else:
1151 1151 parents = [p.node() for p in repo[None].parents()]
1152 1152 needupdate = False
1153 1153 for entry in self.applied[start:]:
1154 1154 if entry.node in parents:
1155 1155 needupdate = True
1156 1156 break
1157 1157 update = needupdate
1158 1158
1159 1159 if not force and update:
1160 1160 self.check_localchanges(repo)
1161 1161
1162 1162 self.applied_dirty = 1
1163 1163 end = len(self.applied)
1164 1164 rev = self.applied[start].node
1165 1165 if update:
1166 1166 top = self.check_toppatch(repo)[0]
1167 1167
1168 1168 try:
1169 1169 heads = repo.changelog.heads(rev)
1170 1170 except error.LookupError:
1171 1171 node = short(rev)
1172 1172 raise util.Abort(_('trying to pop unknown node %s') % node)
1173 1173
1174 1174 if heads != [self.applied[-1].node]:
1175 1175 raise util.Abort(_("popping would remove a revision not "
1176 1176 "managed by this patch queue"))
1177 1177
1178 1178 # we know there are no local changes, so we can make a simplified
1179 1179 # form of hg.update.
1180 1180 if update:
1181 1181 qp = self.qparents(repo, rev)
1182 1182 ctx = repo[qp]
1183 1183 m, a, r, d = repo.status(qp, top)[:4]
1184 1184 if d:
1185 1185 raise util.Abort(_("deletions found between repo revs"))
1186 1186 for f in a:
1187 1187 try:
1188 1188 util.unlink(repo.wjoin(f))
1189 1189 except OSError, e:
1190 1190 if e.errno != errno.ENOENT:
1191 1191 raise
1192 1192 repo.dirstate.forget(f)
1193 1193 for f in m + r:
1194 1194 fctx = ctx[f]
1195 1195 repo.wwrite(f, fctx.data(), fctx.flags())
1196 1196 repo.dirstate.normal(f)
1197 1197 repo.dirstate.setparents(qp, nullid)
1198 1198 for patch in reversed(self.applied[start:end]):
1199 1199 self.ui.status(_("popping %s\n") % patch.name)
1200 1200 del self.applied[start:end]
1201 1201 self.strip(repo, [rev], update=False, backup='strip')
1202 1202 if self.applied:
1203 1203 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1204 1204 else:
1205 1205 self.ui.write(_("patch queue now empty\n"))
1206 1206 finally:
1207 1207 wlock.release()
1208 1208
1209 1209 def diff(self, repo, pats, opts):
1210 1210 top, patch = self.check_toppatch(repo)
1211 1211 if not top:
1212 1212 self.ui.write(_("no patches applied\n"))
1213 1213 return
1214 1214 qp = self.qparents(repo, top)
1215 1215 if opts.get('reverse'):
1216 1216 node1, node2 = None, qp
1217 1217 else:
1218 1218 node1, node2 = qp, None
1219 1219 diffopts = self.diffopts(opts, patch)
1220 1220 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1221 1221
1222 1222 def refresh(self, repo, pats=None, **opts):
1223 1223 if not self.applied:
1224 1224 self.ui.write(_("no patches applied\n"))
1225 1225 return 1
1226 1226 msg = opts.get('msg', '').rstrip()
1227 1227 newuser = opts.get('user')
1228 1228 newdate = opts.get('date')
1229 1229 if newdate:
1230 1230 newdate = '%d %d' % util.parsedate(newdate)
1231 1231 wlock = repo.wlock()
1232 1232
1233 1233 try:
1234 1234 self.check_toppatch(repo)
1235 1235 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1236 1236 if repo.changelog.heads(top) != [top]:
1237 1237 raise util.Abort(_("cannot refresh a revision with children"))
1238 1238
1239 1239 cparents = repo.changelog.parents(top)
1240 1240 patchparent = self.qparents(repo, top)
1241 1241 ph = patchheader(self.join(patchfn), self.plainmode)
1242 1242 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1243 1243 if msg:
1244 1244 ph.setmessage(msg)
1245 1245 if newuser:
1246 1246 ph.setuser(newuser)
1247 1247 if newdate:
1248 1248 ph.setdate(newdate)
1249 1249 ph.setparent(hex(patchparent))
1250 1250
1251 1251 # only commit new patch when write is complete
1252 1252 patchf = self.opener(patchfn, 'w', atomictemp=True)
1253 1253
1254 1254 comments = str(ph)
1255 1255 if comments:
1256 1256 patchf.write(comments)
1257 1257
1258 1258 # update the dirstate in place, strip off the qtip commit
1259 1259 # and then commit.
1260 1260 #
1261 1261 # this should really read:
1262 1262 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1263 1263 # but we do it backwards to take advantage of manifest/chlog
1264 1264 # caching against the next repo.status call
1265 1265 mm, aa, dd, aa2 = repo.status(patchparent, top)[:4]
1266 1266 changes = repo.changelog.read(top)
1267 1267 man = repo.manifest.read(changes[0])
1268 1268 aaa = aa[:]
1269 1269 matchfn = cmdutil.match(repo, pats, opts)
1270 1270 # in short mode, we only diff the files included in the
1271 1271 # patch already plus specified files
1272 1272 if opts.get('short'):
1273 1273 # if amending a patch, we start with existing
1274 1274 # files plus specified files - unfiltered
1275 1275 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1276 1276                 # filter with include/exclude options
1277 1277 matchfn = cmdutil.match(repo, opts=opts)
1278 1278 else:
1279 1279 match = cmdutil.matchall(repo)
1280 1280 m, a, r, d = repo.status(match=match)[:4]
1281 1281
1282 1282 # we might end up with files that were added between
1283 1283 # qtip and the dirstate parent, but then changed in the
1284 1284 # local dirstate. in this case, we want them to only
1285 1285 # show up in the added section
1286 1286 for x in m:
1287 1287 if x not in aa:
1288 1288 mm.append(x)
1289 1289 # we might end up with files added by the local dirstate that
1290 1290 # were deleted by the patch. In this case, they should only
1291 1291 # show up in the changed section.
1292 1292 for x in a:
1293 1293 if x in dd:
1294 1294 del dd[dd.index(x)]
1295 1295 mm.append(x)
1296 1296 else:
1297 1297 aa.append(x)
1298 1298 # make sure any files deleted in the local dirstate
1299 1299 # are not in the add or change column of the patch
1300 1300 forget = []
1301 1301 for x in d + r:
1302 1302 if x in aa:
1303 1303 del aa[aa.index(x)]
1304 1304 forget.append(x)
1305 1305 continue
1306 1306 elif x in mm:
1307 1307 del mm[mm.index(x)]
1308 1308 dd.append(x)
1309 1309
1310 1310 m = list(set(mm))
1311 1311 r = list(set(dd))
1312 1312 a = list(set(aa))
1313 1313 c = [filter(matchfn, l) for l in (m, a, r)]
1314 1314 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1315 1315 chunks = patch.diff(repo, patchparent, match=match,
1316 1316 changes=c, opts=diffopts)
1317 1317 for chunk in chunks:
1318 1318 patchf.write(chunk)
1319 1319
1320 1320 try:
1321 1321 if diffopts.git or diffopts.upgrade:
1322 1322 copies = {}
1323 1323 for dst in a:
1324 1324 src = repo.dirstate.copied(dst)
1325 1325 # during qfold, the source file for copies may
1326 1326 # be removed. Treat this as a simple add.
1327 1327 if src is not None and src in repo.dirstate:
1328 1328 copies.setdefault(src, []).append(dst)
1329 1329 repo.dirstate.add(dst)
1330 1330 # remember the copies between patchparent and qtip
1331 1331 for dst in aaa:
1332 1332 f = repo.file(dst)
1333 1333 src = f.renamed(man[dst])
1334 1334 if src:
1335 1335 copies.setdefault(src[0], []).extend(
1336 1336 copies.get(dst, []))
1337 1337 if dst in a:
1338 1338 copies[src[0]].append(dst)
1339 1339 # we can't copy a file created by the patch itself
1340 1340 if dst in copies:
1341 1341 del copies[dst]
1342 1342 for src, dsts in copies.iteritems():
1343 1343 for dst in dsts:
1344 1344 repo.dirstate.copy(src, dst)
1345 1345 else:
1346 1346 for dst in a:
1347 1347 repo.dirstate.add(dst)
1348 1348 # Drop useless copy information
1349 1349 for f in list(repo.dirstate.copies()):
1350 1350 repo.dirstate.copy(None, f)
1351 1351 for f in r:
1352 1352 repo.dirstate.remove(f)
1353 1353 # if the patch excludes a modified file, mark that
1354 1354 # file with mtime=0 so status can see it.
1355 1355 mm = []
1356 1356 for i in xrange(len(m)-1, -1, -1):
1357 1357 if not matchfn(m[i]):
1358 1358 mm.append(m[i])
1359 1359 del m[i]
1360 1360 for f in m:
1361 1361 repo.dirstate.normal(f)
1362 1362 for f in mm:
1363 1363 repo.dirstate.normallookup(f)
1364 1364 for f in forget:
1365 1365 repo.dirstate.forget(f)
1366 1366
1367 1367 if not msg:
1368 1368 if not ph.message:
1369 1369 message = "[mq]: %s\n" % patchfn
1370 1370 else:
1371 1371 message = "\n".join(ph.message)
1372 1372 else:
1373 1373 message = msg
1374 1374
1375 1375 user = ph.user or changes[1]
1376 1376
1377 1377 # assumes strip can roll itself back if interrupted
1378 1378 repo.dirstate.setparents(*cparents)
1379 1379 self.applied.pop()
1380 1380 self.applied_dirty = 1
1381 1381 self.strip(repo, [top], update=False,
1382 1382 backup='strip')
1383 1383 except:
1384 1384 repo.dirstate.invalidate()
1385 1385 raise
1386 1386
1387 1387 try:
1388 1388 # might be nice to attempt to roll back strip after this
1389 1389 patchf.rename()
1390 1390 n = repo.commit(message, user, ph.date, match=match,
1391 1391 force=True)
1392 1392 self.applied.append(statusentry(n, patchfn))
1393 1393 except:
1394 1394 ctx = repo[cparents[0]]
1395 1395 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1396 1396 self.save_dirty()
1397 1397 self.ui.warn(_('refresh interrupted while patch was popped! '
1398 1398 '(revert --all, qpush to recover)\n'))
1399 1399 raise
1400 1400 finally:
1401 1401 wlock.release()
1402 1402 self.removeundo(repo)
1403 1403
1404 1404 def init(self, repo, create=False):
1405 1405 if not create and os.path.isdir(self.path):
1406 1406 raise util.Abort(_("patch queue directory already exists"))
1407 1407 try:
1408 1408 os.mkdir(self.path)
1409 1409 except OSError, inst:
1410 1410 if inst.errno != errno.EEXIST or not create:
1411 1411 raise
1412 1412 if create:
1413 1413 return self.qrepo(create=True)
1414 1414
1415 1415 def unapplied(self, repo, patch=None):
1416 1416 if patch and patch not in self.series:
1417 1417 raise util.Abort(_("patch %s is not in series file") % patch)
1418 1418 if not patch:
1419 1419 start = self.series_end()
1420 1420 else:
1421 1421 start = self.series.index(patch) + 1
1422 1422 unapplied = []
1423 1423 for i in xrange(start, len(self.series)):
1424 1424 pushable, reason = self.pushable(i)
1425 1425 if pushable:
1426 1426 unapplied.append((i, self.series[i]))
1427 1427 self.explain_pushable(i)
1428 1428 return unapplied
1429 1429
1430 1430 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1431 1431 summary=False):
1432 1432 def displayname(pfx, patchname, state):
1433 1433 if pfx:
1434 1434 self.ui.write(pfx)
1435 1435 if summary:
1436 1436 ph = patchheader(self.join(patchname), self.plainmode)
1437 1437 msg = ph.message and ph.message[0] or ''
1438 1438 if self.ui.formatted():
1439 1439 width = util.termwidth() - len(pfx) - len(patchname) - 2
1440 1440 if width > 0:
1441 1441 msg = util.ellipsis(msg, width)
1442 1442 else:
1443 1443 msg = ''
1444 1444 self.ui.write(patchname, label='qseries.' + state)
1445 1445 self.ui.write(': ')
1446 1446 self.ui.write(msg, label='qseries.message.' + state)
1447 1447 else:
1448 1448 self.ui.write(patchname, label='qseries.' + state)
1449 1449 self.ui.write('\n')
1450 1450
1451 1451 applied = set([p.name for p in self.applied])
1452 1452 if length is None:
1453 1453 length = len(self.series) - start
1454 1454 if not missing:
1455 1455 if self.ui.verbose:
1456 1456 idxwidth = len(str(start + length - 1))
1457 1457 for i in xrange(start, start + length):
1458 1458 patch = self.series[i]
1459 1459 if patch in applied:
1460 1460 char, state = 'A', 'applied'
1461 1461 elif self.pushable(i)[0]:
1462 1462 char, state = 'U', 'unapplied'
1463 1463 else:
1464 1464 char, state = 'G', 'guarded'
1465 1465 pfx = ''
1466 1466 if self.ui.verbose:
1467 1467 pfx = '%*d %s ' % (idxwidth, i, char)
1468 1468 elif status and status != char:
1469 1469 continue
1470 1470 displayname(pfx, patch, state)
1471 1471 else:
1472 1472 msng_list = []
1473 1473 for root, dirs, files in os.walk(self.path):
1474 1474 d = root[len(self.path) + 1:]
1475 1475 for f in files:
1476 1476 fl = os.path.join(d, f)
1477 1477 if (fl not in self.series and
1478 1478 fl not in (self.status_path, self.series_path,
1479 1479 self.guards_path)
1480 1480 and not fl.startswith('.')):
1481 1481 msng_list.append(fl)
1482 1482 for x in sorted(msng_list):
1483 1483 pfx = self.ui.verbose and ('D ') or ''
1484 1484 displayname(pfx, x, 'missing')
1485 1485
1486 1486 def issaveline(self, l):
1487 1487 if l.name == '.hg.patches.save.line':
1488 1488 return True
1489 1489
1490 1490 def qrepo(self, create=False):
1491 1491 ui = self.ui.copy()
1492 1492 ui.setconfig('paths', 'default', '', overlay=False)
1493 1493 ui.setconfig('paths', 'default-push', '', overlay=False)
1494 1494 if create or os.path.isdir(self.join(".hg")):
1495 1495 return hg.repository(ui, path=self.path, create=create)
1496 1496
1497 1497 def restore(self, repo, rev, delete=None, qupdate=None):
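        # Illustrative note (not part of this changeset): the description of a
        # save changeset, as written by save() further below, looks roughly like:
        #   hg patches saved state
        #   Dirstate: <hex parent1> <hex parent2>
        #
        #   Patch Data:
        #   <node hex>:<patch name>   (one line per applied patch)
        #   :<patch name>             (one line per remaining series entry)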
1498 1498 desc = repo[rev].description().strip()
1499 1499 lines = desc.splitlines()
1500 1500 i = 0
1501 1501 datastart = None
1502 1502 series = []
1503 1503 applied = []
1504 1504 qpp = None
1505 1505 for i, line in enumerate(lines):
1506 1506 if line == 'Patch Data:':
1507 1507 datastart = i + 1
1508 1508 elif line.startswith('Dirstate:'):
1509 1509 l = line.rstrip()
1510 1510 l = l[10:].split(' ')
1511 1511 qpp = [bin(x) for x in l]
1512 1512             elif datastart is not None:
1513 1513 l = line.rstrip()
1514 1514 n, name = l.split(':', 1)
1515 1515 if n:
1516 1516 applied.append(statusentry(bin(n), name))
1517 1517 else:
1518 1518 series.append(l)
1519 1519 if datastart is None:
1520 1520             self.ui.warn(_("no saved patch data found\n"))
1521 1521 return 1
1522 1522 self.ui.warn(_("restoring status: %s\n") % lines[0])
1523 1523 self.full_series = series
1524 1524 self.applied = applied
1525 1525 self.parse_series()
1526 1526 self.series_dirty = 1
1527 1527 self.applied_dirty = 1
1528 1528 heads = repo.changelog.heads()
1529 1529 if delete:
1530 1530 if rev not in heads:
1531 1531 self.ui.warn(_("save entry has children, leaving it alone\n"))
1532 1532 else:
1533 1533 self.ui.warn(_("removing save entry %s\n") % short(rev))
1534 1534 pp = repo.dirstate.parents()
1535 1535 if rev in pp:
1536 1536 update = True
1537 1537 else:
1538 1538 update = False
1539 1539 self.strip(repo, [rev], update=update, backup='strip')
1540 1540 if qpp:
1541 1541 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1542 1542 (short(qpp[0]), short(qpp[1])))
1543 1543 if qupdate:
1544 1544 self.ui.status(_("queue directory updating\n"))
1545 1545 r = self.qrepo()
1546 1546 if not r:
1547 1547                 self.ui.warn(_("unable to load queue repository\n"))
1548 1548 return 1
1549 1549 hg.clean(r, qpp[0])
1550 1550
1551 1551 def save(self, repo, msg=None):
1552 1552 if not self.applied:
1553 1553 self.ui.warn(_("save: no patches applied, exiting\n"))
1554 1554 return 1
1555 1555 if self.issaveline(self.applied[-1]):
1556 1556 self.ui.warn(_("status is already saved\n"))
1557 1557 return 1
1558 1558
1559 1559 if not msg:
1560 1560 msg = _("hg patches saved state")
1561 1561 else:
1562 1562 msg = "hg patches: " + msg.rstrip('\r\n')
1563 1563 r = self.qrepo()
1564 1564 if r:
1565 1565 pp = r.dirstate.parents()
1566 1566 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1567 1567 msg += "\n\nPatch Data:\n"
1568 1568 msg += ''.join('%s\n' % x for x in self.applied)
1569 1569 msg += ''.join(':%s\n' % x for x in self.full_series)
1570 1570 n = repo.commit(msg, force=True)
1571 1571 if not n:
1572 1572 self.ui.warn(_("repo commit failed\n"))
1573 1573 return 1
1574 1574 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1575 1575 self.applied_dirty = 1
1576 1576 self.removeundo(repo)
1577 1577
1578 1578 def full_series_end(self):
1579 1579 if self.applied:
1580 1580 p = self.applied[-1].name
1581 1581 end = self.find_series(p)
1582 1582 if end is None:
1583 1583 return len(self.full_series)
1584 1584 return end + 1
1585 1585 return 0
1586 1586
1587 1587 def series_end(self, all_patches=False):
1588 1588 """If all_patches is False, return the index of the next pushable patch
1589 1589 in the series, or the series length. If all_patches is True, return the
1590 1590 index of the first patch past the last applied one.
1591 1591 """
1592 1592 end = 0
1593 1593 def next(start):
1594 1594 if all_patches or start >= len(self.series):
1595 1595 return start
1596 1596 for i in xrange(start, len(self.series)):
1597 1597 p, reason = self.pushable(i)
1598 1598 if p:
1599 1599 break
1600 1600 self.explain_pushable(i)
1601 1601 return i
1602 1602 if self.applied:
1603 1603 p = self.applied[-1].name
1604 1604 try:
1605 1605 end = self.series.index(p)
1606 1606 except ValueError:
1607 1607 return 0
1608 1608 return next(end + 1)
1609 1609 return next(end)
1610 1610
1611 1611 def appliedname(self, index):
1612 1612 pname = self.applied[index].name
1613 1613 if not self.ui.verbose:
1614 1614 p = pname
1615 1615 else:
1616 1616 p = str(self.series.index(pname)) + " " + pname
1617 1617 return p
1618 1618
1619 1619 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1620 1620 force=None, git=False):
1621 1621 def checkseries(patchname):
1622 1622 if patchname in self.series:
1623 1623 raise util.Abort(_('patch %s is already in the series file')
1624 1624 % patchname)
1625 1625 def checkfile(patchname):
1626 1626 if not force and os.path.exists(self.join(patchname)):
1627 1627 raise util.Abort(_('patch "%s" already exists')
1628 1628 % patchname)
1629 1629
1630 1630 if rev:
1631 1631 if files:
1632 1632 raise util.Abort(_('option "-r" not valid when importing '
1633 1633 'files'))
1634 1634 rev = cmdutil.revrange(repo, rev)
1635 1635 rev.sort(reverse=True)
1636 1636 if (len(files) > 1 or len(rev) > 1) and patchname:
1637 1637 raise util.Abort(_('option "-n" not valid when importing multiple '
1638 1638 'patches'))
1639 1639 if rev:
1640 1640 # If mq patches are applied, we can only import revisions
1641 1641 # that form a linear path to qbase.
1642 1642 # Otherwise, they should form a linear path to a head.
1643 1643 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1644 1644 if len(heads) > 1:
1645 1645 raise util.Abort(_('revision %d is the root of more than one '
1646 1646 'branch') % rev[-1])
1647 1647 if self.applied:
1648 1648 base = repo.changelog.node(rev[0])
1649 1649 if base in [n.node for n in self.applied]:
1650 1650 raise util.Abort(_('revision %d is already managed')
1651 1651 % rev[0])
1652 1652 if heads != [self.applied[-1].node]:
1653 1653 raise util.Abort(_('revision %d is not the parent of '
1654 1654 'the queue') % rev[0])
1655 1655 base = repo.changelog.rev(self.applied[0].node)
1656 1656 lastparent = repo.changelog.parentrevs(base)[0]
1657 1657 else:
1658 1658 if heads != [repo.changelog.node(rev[0])]:
1659 1659 raise util.Abort(_('revision %d has unmanaged children')
1660 1660 % rev[0])
1661 1661 lastparent = None
1662 1662
1663 1663 diffopts = self.diffopts({'git': git})
1664 1664 for r in rev:
1665 1665 p1, p2 = repo.changelog.parentrevs(r)
1666 1666 n = repo.changelog.node(r)
1667 1667 if p2 != nullrev:
1668 1668 raise util.Abort(_('cannot import merge revision %d') % r)
1669 1669 if lastparent and lastparent != r:
1670 1670 raise util.Abort(_('revision %d is not the parent of %d')
1671 1671 % (r, lastparent))
1672 1672 lastparent = p1
1673 1673
1674 1674 if not patchname:
1675 1675 patchname = normname('%d.diff' % r)
1676 1676 self.check_reserved_name(patchname)
1677 1677 checkseries(patchname)
1678 1678 checkfile(patchname)
1679 1679 self.full_series.insert(0, patchname)
1680 1680
1681 1681 patchf = self.opener(patchname, "w")
1682 1682 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
1683 1683 patchf.close()
1684 1684
1685 1685 se = statusentry(n, patchname)
1686 1686 self.applied.insert(0, se)
1687 1687
1688 1688 self.added.append(patchname)
1689 1689 patchname = None
1690 1690 self.parse_series()
1691 1691 self.applied_dirty = 1
1692 1692 self.series_dirty = True
1693 1693
1694 1694 for i, filename in enumerate(files):
1695 1695 if existing:
1696 1696 if filename == '-':
1697 1697 raise util.Abort(_('-e is incompatible with import from -'))
1698 1698 filename = normname(filename)
1699 1699 self.check_reserved_name(filename)
1700 1700 originpath = self.join(filename)
1701 1701 if not os.path.isfile(originpath):
1702 1702 raise util.Abort(_("patch %s does not exist") % filename)
1703 1703
1704 1704 if patchname:
1705 1705 self.check_reserved_name(patchname)
1706 1706 checkfile(patchname)
1707 1707
1708 1708 self.ui.write(_('renaming %s to %s\n')
1709 1709 % (filename, patchname))
1710 1710 util.rename(originpath, self.join(patchname))
1711 1711 else:
1712 1712 patchname = filename
1713 1713
1714 1714 else:
1715 1715 try:
1716 1716 if filename == '-':
1717 1717 if not patchname:
1718 1718 raise util.Abort(
1719 1719 _('need --name to import a patch from -'))
1720 1720 text = sys.stdin.read()
1721 1721 else:
1722 1722 text = url.open(self.ui, filename).read()
1723 1723 except (OSError, IOError):
1724 1724 raise util.Abort(_("unable to read file %s") % filename)
1725 1725 if not patchname:
1726 1726 patchname = normname(os.path.basename(filename))
1727 1727 self.check_reserved_name(patchname)
1728 1728 checkfile(patchname)
1729 1729 patchf = self.opener(patchname, "w")
1730 1730 patchf.write(text)
1731 1731 if not force:
1732 1732 checkseries(patchname)
1733 1733 if patchname not in self.series:
1734 1734 index = self.full_series_end() + i
1735 1735 self.full_series[index:index] = [patchname]
1736 1736 self.parse_series()
1737 1737 self.series_dirty = True
1738 1738 self.ui.warn(_("adding %s to series file\n") % patchname)
1739 1739 self.added.append(patchname)
1740 1740 patchname = None
1741 1741
1742 1742 def delete(ui, repo, *patches, **opts):
1743 1743 """remove patches from queue
1744 1744
1745 1745 The patches must not be applied, and at least one patch is required. With
1746 1746 -k/--keep, the patch files are preserved in the patch directory.
1747 1747
1748 1748 To stop managing a patch and move it into permanent history,
1749 1749 use the :hg:`qfinish` command."""
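    # Illustrative usage (hypothetical patch name, not part of this changeset):
    #   hg qdelete -k obsolete.patch   # drop it from the series, keep the file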
1750 1750 q = repo.mq
1751 1751 q.delete(repo, patches, opts)
1752 1752 q.save_dirty()
1753 1753 return 0
1754 1754
1755 1755 def applied(ui, repo, patch=None, **opts):
1756 1756 """print the patches already applied"""
1757 1757
1758 1758 q = repo.mq
1759 1759
1760 1760 if patch:
1761 1761 if patch not in q.series:
1762 1762 raise util.Abort(_("patch %s is not in series file") % patch)
1763 1763 end = q.series.index(patch) + 1
1764 1764 else:
1765 1765 end = q.series_end(True)
1766 1766
1767 1767 if opts.get('last') and not end:
1768 1768 ui.write(_("no patches applied\n"))
1769 1769 return 1
1770 1770 elif opts.get('last') and end == 1:
1771 1771 ui.write(_("only one patch applied\n"))
1772 1772 return 1
1773 1773 elif opts.get('last'):
1774 1774 start = end - 2
1775 1775 end = 1
1776 1776 else:
1777 1777 start = 0
1778 1778
1779 1779 return q.qseries(repo, length=end, start=start, status='A',
1780 1780 summary=opts.get('summary'))
1781 1781
1782 1782 def unapplied(ui, repo, patch=None, **opts):
1783 1783 """print the patches not yet applied"""
1784 1784
1785 1785 q = repo.mq
1786 1786 if patch:
1787 1787 if patch not in q.series:
1788 1788 raise util.Abort(_("patch %s is not in series file") % patch)
1789 1789 start = q.series.index(patch) + 1
1790 1790 else:
1791 1791 start = q.series_end(True)
1792 1792
1793 1793 if start == len(q.series) and opts.get('first'):
1794 1794 ui.write(_("all patches applied\n"))
1795 1795 return 1
1796 1796
1797 1797 length = opts.get('first') and 1 or None
1798 1798 return q.qseries(repo, start=start, length=length, status='U',
1799 1799 summary=opts.get('summary'))
1800 1800
1801 1801 def qimport(ui, repo, *filename, **opts):
1802 1802 """import a patch
1803 1803
1804 1804 The patch is inserted into the series after the last applied
1805 1805 patch. If no patches have been applied, qimport prepends the patch
1806 1806 to the series.
1807 1807
1808 1808 The patch will have the same name as its source file unless you
1809 1809 give it a new one with -n/--name.
1810 1810
1811 1811 You can register an existing patch inside the patch directory with
1812 1812 the -e/--existing flag.
1813 1813
1814 1814 With -f/--force, an existing patch of the same name will be
1815 1815 overwritten.
1816 1816
1817 1817 An existing changeset may be placed under mq control with -r/--rev
1818 1818 (e.g. qimport --rev tip -n patch will place tip under mq control).
1819 1819 With -g/--git, patches imported with --rev will use the git diff
1820 1820 format. See the diffs help topic for information on why this is
1821 1821 important for preserving rename/copy information and permission
1822 1822 changes.
1823 1823
1824 1824 To import a patch from standard input, pass - as the patch file.
1825 1825 When importing from standard input, a patch name must be specified
1826 1826 using the --name flag.
1827 1827
1828 1828 To import an existing patch while renaming it::
1829 1829
1830 1830 hg qimport -e existing-patch -n new-name
1831 1831 """
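    # Illustrative usage (hypothetical names, not part of this changeset):
    #   hg qimport --rev tip -n current-work.patch -g   # take tip under mq control
    #   hg diff | hg qimport --name from-stdin -        # read a patch from stdin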
1832 1832 q = repo.mq
1833 1833 try:
1834 1834 q.qimport(repo, filename, patchname=opts['name'],
1835 1835 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1836 1836 git=opts['git'])
1837 1837 finally:
1838 1838 q.save_dirty()
1839 1839
1840 1840 if opts.get('push') and not opts.get('rev'):
1841 1841 return q.push(repo, None)
1842 1842 return 0
1843 1843
1844 1844 def qinit(ui, repo, create):
1845 1845 """initialize a new queue repository
1846 1846
1847 1847 This command also creates a series file for ordering patches, and
1848 1848 an mq-specific .hgignore file in the queue repository, to exclude
1849 1849 the status and guards files (these contain mostly transient state)."""
1850 1850 q = repo.mq
1851 1851 r = q.init(repo, create)
1852 1852 q.save_dirty()
1853 1853 if r:
1854 1854 if not os.path.exists(r.wjoin('.hgignore')):
1855 1855 fp = r.wopener('.hgignore', 'w')
1856 1856 fp.write('^\\.hg\n')
1857 1857 fp.write('^\\.mq\n')
1858 1858 fp.write('syntax: glob\n')
1859 1859 fp.write('status\n')
1860 1860 fp.write('guards\n')
1861 1861 fp.close()
1862 1862 if not os.path.exists(r.wjoin('series')):
1863 1863 r.wopener('series', 'w').close()
1864 1864 r[None].add(['.hgignore', 'series'])
1865 1865 commands.add(ui, r)
1866 1866 return 0
1867 1867
1868 1868 def init(ui, repo, **opts):
1869 1869 """init a new queue repository (DEPRECATED)
1870 1870
1871 1871 The queue repository is unversioned by default. If
1872 1872 -c/--create-repo is specified, qinit will create a separate nested
1873 1873 repository for patches (qinit -c may also be run later to convert
1874 1874 an unversioned patch repository into a versioned one). You can use
1875 1875 qcommit to commit changes to this queue repository.
1876 1876
1877 1877 This command is deprecated. Without -c, it's implied by other relevant
1878 1878 commands. With -c, use :hg:`init --mq` instead."""
1879 1879 return qinit(ui, repo, create=opts['create_repo'])
1880 1880
1881 1881 def clone(ui, source, dest=None, **opts):
1882 1882     '''clone main and patch repository at the same time
1883 1883
1884 1884     If the source is local, the destination will have no patches
1885 1885     applied. If the source is remote, this command cannot check
1886 1886     whether patches are applied in the source, so it cannot guarantee
1887 1887     that patches are not applied in the destination. If you clone a
1888 1888     remote repository, first make sure it has no patches applied.
1889 1889
1890 1890     The source patch repository is looked for in <src>/.hg/patches by
1891 1891     default. Use -p <url> to change it.
1892 1892
1893 1893 The patch directory must be a nested Mercurial repository, as
1894 1894 would be created by :hg:`init --mq`.
1895 1895 '''
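    # Illustrative usage (hypothetical URLs, not part of this changeset):
    #   hg qclone http://example.com/repo              # patches from <src>/.hg/patches
    #   hg qclone -p http://example.com/patches-repo http://example.com/repo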
1896 1896 def patchdir(repo):
1897 1897 url = repo.url()
1898 1898 if url.endswith('/'):
1899 1899 url = url[:-1]
1900 1900 return url + '/.hg/patches'
1901 1901 if dest is None:
1902 1902 dest = hg.defaultdest(source)
1903 1903 sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source))
1904 1904 if opts['patches']:
1905 1905 patchespath = ui.expandpath(opts['patches'])
1906 1906 else:
1907 1907 patchespath = patchdir(sr)
1908 1908 try:
1909 1909 hg.repository(ui, patchespath)
1910 1910 except error.RepoError:
1911 1911 raise util.Abort(_('versioned patch repository not found'
1912 1912 ' (see init --mq)'))
1913 1913 qbase, destrev = None, None
1914 1914 if sr.local():
1915 1915 if sr.mq.applied:
1916 1916 qbase = sr.mq.applied[0].node
1917 1917 if not hg.islocal(dest):
1918 1918 heads = set(sr.heads())
1919 1919 destrev = list(heads.difference(sr.heads(qbase)))
1920 1920 destrev.append(sr.changelog.parents(qbase)[0])
1921 1921 elif sr.capable('lookup'):
1922 1922 try:
1923 1923 qbase = sr.lookup('qbase')
1924 1924 except error.RepoError:
1925 1925 pass
1926 1926 ui.note(_('cloning main repository\n'))
1927 1927 sr, dr = hg.clone(ui, sr.url(), dest,
1928 1928 pull=opts['pull'],
1929 1929 rev=destrev,
1930 1930 update=False,
1931 1931 stream=opts['uncompressed'])
1932 1932 ui.note(_('cloning patch repository\n'))
1933 1933 hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1934 1934 pull=opts['pull'], update=not opts['noupdate'],
1935 1935 stream=opts['uncompressed'])
1936 1936 if dr.local():
1937 1937 if qbase:
1938 1938 ui.note(_('stripping applied patches from destination '
1939 1939 'repository\n'))
1940 1940 dr.mq.strip(dr, [qbase], update=False, backup=None)
1941 1941 if not opts['noupdate']:
1942 1942 ui.note(_('updating destination repository\n'))
1943 1943 hg.update(dr, dr.changelog.tip())
1944 1944
1945 1945 def commit(ui, repo, *pats, **opts):
1946 1946 """commit changes in the queue repository (DEPRECATED)
1947 1947
1948 1948 This command is deprecated; use :hg:`commit --mq` instead."""
1949 1949 q = repo.mq
1950 1950 r = q.qrepo()
1951 1951 if not r:
1952 1952         raise util.Abort(_('no queue repository'))
1953 1953 commands.commit(r.ui, r, *pats, **opts)
1954 1954
1955 1955 def series(ui, repo, **opts):
1956 1956 """print the entire series file"""
1957 1957 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1958 1958 return 0
1959 1959
1960 1960 def top(ui, repo, **opts):
1961 1961 """print the name of the current patch"""
1962 1962 q = repo.mq
1963 1963 t = q.applied and q.series_end(True) or 0
1964 1964 if t:
1965 1965 return q.qseries(repo, start=t - 1, length=1, status='A',
1966 1966 summary=opts.get('summary'))
1967 1967 else:
1968 1968 ui.write(_("no patches applied\n"))
1969 1969 return 1
1970 1970
1971 1971 def next(ui, repo, **opts):
1972 1972 """print the name of the next patch"""
1973 1973 q = repo.mq
1974 1974 end = q.series_end()
1975 1975 if end == len(q.series):
1976 1976 ui.write(_("all patches applied\n"))
1977 1977 return 1
1978 1978 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1979 1979
1980 1980 def prev(ui, repo, **opts):
1981 1981 """print the name of the previous patch"""
1982 1982 q = repo.mq
1983 1983 l = len(q.applied)
1984 1984 if l == 1:
1985 1985 ui.write(_("only one patch applied\n"))
1986 1986 return 1
1987 1987 if not l:
1988 1988 ui.write(_("no patches applied\n"))
1989 1989 return 1
1990 1990 return q.qseries(repo, start=l - 2, length=1, status='A',
1991 1991 summary=opts.get('summary'))
1992 1992
1993 1993 def setupheaderopts(ui, opts):
1994 1994 if not opts.get('user') and opts.get('currentuser'):
1995 1995 opts['user'] = ui.username()
1996 1996 if not opts.get('date') and opts.get('currentdate'):
1997 1997 opts['date'] = "%d %d" % util.makedate()
1998 1998
1999 1999 def new(ui, repo, patch, *args, **opts):
2000 2000 """create a new patch
2001 2001
2002 2002 qnew creates a new patch on top of the currently-applied patch (if
2003 2003 any). The patch will be initialized with any outstanding changes
2004 2004 in the working directory. You may also use -I/--include,
2005 2005 -X/--exclude, and/or a list of files after the patch name to add
2006 2006 only changes to matching files to the new patch, leaving the rest
2007 2007 as uncommitted modifications.
2008 2008
2009 2009 -u/--user and -d/--date can be used to set the (given) user and
2010 2010 date, respectively. -U/--currentuser and -D/--currentdate set user
2011 2011 to current user and date to current date.
2012 2012
2013 2013 -e/--edit, -m/--message or -l/--logfile set the patch header as
2014 2014 well as the commit message. If none is specified, the header is
2015 2015 empty and the commit message is '[mq]: PATCH'.
2016 2016
2017 2017 Use the -g/--git option to keep the patch in the git extended diff
2018 2018 format. Read the diffs help topic for more information on why this
2019 2019 is important for preserving permission changes and copy/rename
2020 2020 information.
2021 2021 """
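    # Illustrative usage (hypothetical patch and file names, not part of this
    # changeset): start a patch carrying only the changes to one file, stamped
    # with the current user and date:
    #   hg qnew -U -D -m "fix null check" fix-null-check.patch src/foo.c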
2022 2022 msg = cmdutil.logmessage(opts)
2023 2023 def getmsg():
2024 2024 return ui.edit(msg, opts['user'] or ui.username())
2025 2025 q = repo.mq
2026 2026 opts['msg'] = msg
2027 2027 if opts.get('edit'):
2028 2028 opts['msg'] = getmsg
2029 2029 else:
2030 2030 opts['msg'] = msg
2031 2031 setupheaderopts(ui, opts)
2032 2032 q.new(repo, patch, *args, **opts)
2033 2033 q.save_dirty()
2034 2034 return 0
2035 2035
2036 2036 def refresh(ui, repo, *pats, **opts):
2037 2037 """update the current patch
2038 2038
2039 2039 If any file patterns are provided, the refreshed patch will
2040 2040 contain only the modifications that match those patterns; the
2041 2041 remaining modifications will remain in the working directory.
2042 2042
2043 2043 If -s/--short is specified, files currently included in the patch
2044 2044 will be refreshed just like matched files and remain in the patch.
2045 2045
2046 2046 If -e/--edit is specified, Mercurial will start your configured editor for
2047 2047 you to enter a message. In case qrefresh fails, you will find a backup of
2048 2048 your message in ``.hg/last-message.txt``.
2049 2049
2050 2050 hg add/remove/copy/rename work as usual, though you might want to
2051 2051 use git-style patches (-g/--git or [diff] git=1) to track copies
2052 2052 and renames. See the diffs help topic for more information on the
2053 2053 git diff format.
2054 2054 """
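    # Illustrative usage (hypothetical pattern, not part of this changeset;
    # assumes the standard -I/--include file pattern option): limit the
    # refreshed patch to matching files and edit its message:
    #   hg qrefresh -e -I 'src/**'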
2055 2055 q = repo.mq
2056 2056 message = cmdutil.logmessage(opts)
2057 2057 if opts['edit']:
2058 2058 if not q.applied:
2059 2059 ui.write(_("no patches applied\n"))
2060 2060 return 1
2061 2061 if message:
2062 2062 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
2063 2063 patch = q.applied[-1].name
2064 2064 ph = patchheader(q.join(patch), q.plainmode)
2065 2065 message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
2066 2066 # We don't want to lose the patch message if qrefresh fails (issue2062)
2067 2067 msgfile = repo.opener('last-message.txt', 'wb')
2068 2068 msgfile.write(message)
2069 2069 msgfile.close()
2070 2070 setupheaderopts(ui, opts)
2071 2071 ret = q.refresh(repo, pats, msg=message, **opts)
2072 2072 q.save_dirty()
2073 2073 return ret
2074 2074
2075 2075 def diff(ui, repo, *pats, **opts):
2076 2076 """diff of the current patch and subsequent modifications
2077 2077
2078 2078 Shows a diff which includes the current patch as well as any
2079 2079 changes which have been made in the working directory since the
2080 2080 last refresh (thus showing what the current patch would become
2081 2081 after a qrefresh).
2082 2082
2083 2083 Use :hg:`diff` if you only want to see the changes made since the
2084 2084 last qrefresh, or :hg:`export qtip` if you want to see changes
2085 2085 made by the current patch without including changes made since the
2086 2086 qrefresh.
2087 2087 """
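    # Illustrative comparison (commands taken from the docstring above, not
    # part of this changeset):
    #   hg qdiff         # current patch plus any changes since the last qrefresh
    #   hg diff          # only the changes made since the last qrefresh
    #   hg export qtip   # only what the current patch already records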
2088 2088 repo.mq.diff(repo, pats, opts)
2089 2089 return 0
2090 2090
2091 2091 def fold(ui, repo, *files, **opts):
2092 2092 """fold the named patches into the current patch
2093 2093
2094 2094 Patches must not yet be applied. Each patch will be successively
2095 2095 applied to the current patch in the order given. If all the
2096 2096 patches apply successfully, the current patch will be refreshed
2097 2097 with the new cumulative patch, and the folded patches will be
2098 2098 deleted. With -k/--keep, the folded patch files will not be
2099 2099 removed afterwards.
2100 2100
2101 2101 The header for each folded patch will be concatenated with the
2102 2102 current patch header, separated by a line of '* * *'."""
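    # Illustrative usage (hypothetical patch names, not part of this
    # changeset): fold two unapplied patches into the current one, keeping
    # their patch files:
    #   hg qfold -k more-tests.patch doc-tweaks.patch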
2103 2103
2104 2104 q = repo.mq
2105 2105
2106 2106 if not files:
2107 2107 raise util.Abort(_('qfold requires at least one patch name'))
2108 2108 if not q.check_toppatch(repo)[0]:
2109 raise util.Abort(_('No patches applied'))
2109 raise util.Abort(_('no patches applied'))
2110 2110 q.check_localchanges(repo)
2111 2111
2112 2112 message = cmdutil.logmessage(opts)
2113 2113 if opts['edit']:
2114 2114 if message:
2115 2115 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
2116 2116
2117 2117 parent = q.lookup('qtip')
2118 2118 patches = []
2119 2119 messages = []
2120 2120 for f in files:
2121 2121 p = q.lookup(f)
2122 2122 if p in patches or p == parent:
2123 2123             ui.warn(_('skipping already folded patch %s\n') % p)
2124 2124 if q.isapplied(p):
2125 2125 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
2126 2126 patches.append(p)
2127 2127
2128 2128 for p in patches:
2129 2129 if not message:
2130 2130 ph = patchheader(q.join(p), q.plainmode)
2131 2131 if ph.message:
2132 2132 messages.append(ph.message)
2133 2133 pf = q.join(p)
2134 2134 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2135 2135 if not patchsuccess:
2136 raise util.Abort(_('Error folding patch %s') % p)
2136 raise util.Abort(_('error folding patch %s') % p)
2137 2137 patch.updatedir(ui, repo, files)
2138 2138
2139 2139 if not message:
2140 2140 ph = patchheader(q.join(parent), q.plainmode)
2141 2141 message, user = ph.message, ph.user
2142 2142 for msg in messages:
2143 2143 message.append('* * *')
2144 2144 message.extend(msg)
2145 2145 message = '\n'.join(message)
2146 2146
2147 2147 if opts['edit']:
2148 2148 message = ui.edit(message, user or ui.username())
2149 2149
2150 2150 diffopts = q.patchopts(q.diffopts(), *patches)
2151 2151 q.refresh(repo, msg=message, git=diffopts.git)
2152 2152 q.delete(repo, patches, opts)
2153 2153 q.save_dirty()
2154 2154
2155 2155 def goto(ui, repo, patch, **opts):
2156 2156 '''push or pop patches until named patch is at top of stack'''
2157 2157 q = repo.mq
2158 2158 patch = q.lookup(patch)
2159 2159 if q.isapplied(patch):
2160 2160 ret = q.pop(repo, patch, force=opts['force'])
2161 2161 else:
2162 2162 ret = q.push(repo, patch, force=opts['force'])
2163 2163 q.save_dirty()
2164 2164 return ret
2165 2165
2166 2166 def guard(ui, repo, *args, **opts):
2167 2167 '''set or print guards for a patch
2168 2168
2169 2169 Guards control whether a patch can be pushed. A patch with no
2170 2170 guards is always pushed. A patch with a positive guard ("+foo") is
2171 2171 pushed only if the :hg:`qselect` command has activated it. A patch with
2172 2172 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2173 2173 has activated it.
2174 2174
2175 2175 With no arguments, print the currently active guards.
2176 2176 With arguments, set guards for the named patch.
2177 2177 NOTE: Specifying negative guards now requires '--'.
2178 2178
2179 2179 To set guards on another patch::
2180 2180
2181 2181 hg qguard other.patch -- +2.6.17 -stable
2182 2182 '''
2183 2183 def status(idx):
2184 2184 guards = q.series_guards[idx] or ['unguarded']
2185 2185 if q.series[idx] in applied:
2186 2186 state = 'applied'
2187 2187 elif q.pushable(idx)[0]:
2188 2188 state = 'unapplied'
2189 2189 else:
2190 2190 state = 'guarded'
2191 2191 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2192 2192 ui.write('%s: ' % ui.label(q.series[idx], label))
2193 2193
2194 2194 for i, guard in enumerate(guards):
2195 2195 if guard.startswith('+'):
2196 2196 ui.write(guard, label='qguard.positive')
2197 2197 elif guard.startswith('-'):
2198 2198 ui.write(guard, label='qguard.negative')
2199 2199 else:
2200 2200 ui.write(guard, label='qguard.unguarded')
2201 2201 if i != len(guards) - 1:
2202 2202 ui.write(' ')
2203 2203 ui.write('\n')
2204 2204 q = repo.mq
2205 2205 applied = set(p.name for p in q.applied)
2206 2206 patch = None
2207 2207 args = list(args)
2208 2208 if opts['list']:
2209 2209 if args or opts['none']:
2210 2210 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
2211 2211 for i in xrange(len(q.series)):
2212 2212 status(i)
2213 2213 return
2214 2214 if not args or args[0][0:1] in '-+':
2215 2215 if not q.applied:
2216 2216 raise util.Abort(_('no patches applied'))
2217 2217 patch = q.applied[-1].name
2218 2218 if patch is None and args[0][0:1] not in '-+':
2219 2219 patch = args.pop(0)
2220 2220 if patch is None:
2221 2221 raise util.Abort(_('no patch to work with'))
2222 2222 if args or opts['none']:
2223 2223 idx = q.find_series(patch)
2224 2224 if idx is None:
2225 2225 raise util.Abort(_('no patch named %s') % patch)
2226 2226 q.set_guards(idx, args)
2227 2227 q.save_dirty()
2228 2228 else:
2229 2229 status(q.series.index(q.lookup(patch)))
2230 2230
2231 2231 def header(ui, repo, patch=None):
2232 2232 """print the header of the topmost or specified patch"""
2233 2233 q = repo.mq
2234 2234
2235 2235 if patch:
2236 2236 patch = q.lookup(patch)
2237 2237 else:
2238 2238 if not q.applied:
2239 2239 ui.write(_('no patches applied\n'))
2240 2240 return 1
2241 2241 patch = q.lookup('qtip')
2242 2242 ph = patchheader(q.join(patch), q.plainmode)
2243 2243
2244 2244 ui.write('\n'.join(ph.message) + '\n')
2245 2245
2246 2246 def lastsavename(path):
2247 2247 (directory, base) = os.path.split(path)
2248 2248 names = os.listdir(directory)
2249 2249 namere = re.compile("%s.([0-9]+)" % base)
2250 2250 maxindex = None
2251 2251 maxname = None
2252 2252 for f in names:
2253 2253 m = namere.match(f)
2254 2254 if m:
2255 2255 index = int(m.group(1))
2256 2256 if maxindex is None or index > maxindex:
2257 2257 maxindex = index
2258 2258 maxname = f
2259 2259 if maxname:
2260 2260 return (os.path.join(directory, maxname), maxindex)
2261 2261 return (None, None)
2262 2262
2263 2263 def savename(path):
2264 2264 (last, index) = lastsavename(path)
2265 2265 if last is None:
2266 2266 index = 0
2267 2267 newpath = path + ".%d" % (index + 1)
2268 2268 return newpath
2269 2269
2270 2270 def push(ui, repo, patch=None, **opts):
2271 2271 """push the next patch onto the stack
2272 2272
2273 2273 When -f/--force is applied, all local changes in patched files
2274 2274 will be lost.
2275 2275 """
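    # Illustrative usage (hypothetical patch name, not part of this changeset):
    #   hg qpush                       # apply the next patch in the series
    #   hg qpush --all                 # apply every remaining patch
    #   hg qpush --move delayed.patch  # apply a named patch out of series order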
2276 2276 q = repo.mq
2277 2277 mergeq = None
2278 2278
2279 2279 if opts['merge']:
2280 2280 if opts['name']:
2281 2281 newpath = repo.join(opts['name'])
2282 2282 else:
2283 2283 newpath, i = lastsavename(q.path)
2284 2284 if not newpath:
2285 2285 ui.warn(_("no saved queues found, please use -n\n"))
2286 2286 return 1
2287 2287 mergeq = queue(ui, repo.join(""), newpath)
2288 2288 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2289 2289 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
2290 2290 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'))
2291 2291 return ret
2292 2292
2293 2293 def pop(ui, repo, patch=None, **opts):
2294 2294 """pop the current patch off the stack
2295 2295
2296 2296 By default, pops off the top of the patch stack. If given a patch
2297 2297 name, keeps popping off patches until the named patch is at the
2298 2298 top of the stack.
2299 2299 """
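    # Illustrative usage (hypothetical patch name, not part of this changeset):
    #   hg qpop                 # pop the topmost patch
    #   hg qpop --all           # pop the entire applied stack
    #   hg qpop earlier.patch   # keep popping until earlier.patch is on top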
2300 2300 localupdate = True
2301 2301 if opts['name']:
2302 2302 q = queue(ui, repo.join(""), repo.join(opts['name']))
2303 2303 ui.warn(_('using patch queue: %s\n') % q.path)
2304 2304 localupdate = False
2305 2305 else:
2306 2306 q = repo.mq
2307 2307 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
2308 2308 all=opts['all'])
2309 2309 q.save_dirty()
2310 2310 return ret
2311 2311
2312 2312 def rename(ui, repo, patch, name=None, **opts):
2313 2313 """rename a patch
2314 2314
2315 2315 With one argument, renames the current patch to PATCH1.
2316 2316 With two arguments, renames PATCH1 to PATCH2."""
2317 2317
2318 2318 q = repo.mq
2319 2319
2320 2320 if not name:
2321 2321 name = patch
2322 2322 patch = None
2323 2323
2324 2324 if patch:
2325 2325 patch = q.lookup(patch)
2326 2326 else:
2327 2327 if not q.applied:
2328 2328 ui.write(_('no patches applied\n'))
2329 2329 return
2330 2330 patch = q.lookup('qtip')
2331 2331 absdest = q.join(name)
2332 2332 if os.path.isdir(absdest):
2333 2333 name = normname(os.path.join(name, os.path.basename(patch)))
2334 2334 absdest = q.join(name)
2335 2335 if os.path.exists(absdest):
2336 2336 raise util.Abort(_('%s already exists') % absdest)
2337 2337
2338 2338 if name in q.series:
2339 2339 raise util.Abort(
2340 2340 _('A patch named %s already exists in the series file') % name)
2341 2341
2342 2342 ui.note(_('renaming %s to %s\n') % (patch, name))
2343 2343 i = q.find_series(patch)
2344 2344 guards = q.guard_re.findall(q.full_series[i])
2345 2345 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2346 2346 q.parse_series()
2347 2347 q.series_dirty = 1
2348 2348
2349 2349 info = q.isapplied(patch)
2350 2350 if info:
2351 2351 q.applied[info[0]] = statusentry(info[1], name)
2352 2352 q.applied_dirty = 1
2353 2353
2354 2354 destdir = os.path.dirname(absdest)
2355 2355 if not os.path.isdir(destdir):
2356 2356 os.makedirs(destdir)
2357 2357 util.rename(q.join(patch), absdest)
2358 2358 r = q.qrepo()
2359 2359 if r:
2360 2360 wctx = r[None]
2361 2361 wlock = r.wlock()
2362 2362 try:
2363 2363 if r.dirstate[patch] == 'a':
2364 2364 r.dirstate.forget(patch)
2365 2365 r.dirstate.add(name)
2366 2366 else:
2367 2367 if r.dirstate[name] == 'r':
2368 2368 wctx.undelete([name])
2369 2369 wctx.copy(patch, name)
2370 2370 wctx.remove([patch], False)
2371 2371 finally:
2372 2372 wlock.release()
2373 2373
2374 2374 q.save_dirty()
2375 2375
2376 2376 def restore(ui, repo, rev, **opts):
2377 2377 """restore the queue state saved by a revision (DEPRECATED)
2378 2378
2379 2379 This command is deprecated, use rebase --mq instead."""
2380 2380 rev = repo.lookup(rev)
2381 2381 q = repo.mq
2382 2382 q.restore(repo, rev, delete=opts['delete'],
2383 2383 qupdate=opts['update'])
2384 2384 q.save_dirty()
2385 2385 return 0
2386 2386
2387 2387 def save(ui, repo, **opts):
2388 2388 """save current queue state (DEPRECATED)
2389 2389
2390 2390 This command is deprecated, use rebase --mq instead."""
2391 2391 q = repo.mq
2392 2392 message = cmdutil.logmessage(opts)
2393 2393 ret = q.save(repo, msg=message)
2394 2394 if ret:
2395 2395 return ret
2396 2396 q.save_dirty()
2397 2397 if opts['copy']:
2398 2398 path = q.path
2399 2399 if opts['name']:
2400 2400 newpath = os.path.join(q.basepath, opts['name'])
2401 2401 if os.path.exists(newpath):
2402 2402 if not os.path.isdir(newpath):
2403 2403 raise util.Abort(_('destination %s exists and is not '
2404 2404 'a directory') % newpath)
2405 2405 if not opts['force']:
2406 2406 raise util.Abort(_('destination %s exists, '
2407 2407 'use -f to force') % newpath)
2408 2408 else:
2409 2409 newpath = savename(path)
2410 2410 ui.warn(_("copy %s to %s\n") % (path, newpath))
2411 2411 util.copyfiles(path, newpath)
2412 2412 if opts['empty']:
2413 2413 try:
2414 2414 os.unlink(q.join(q.status_path))
2415 2415         except OSError:
2416 2416 pass
2417 2417 return 0
2418 2418
2419 2419 def strip(ui, repo, *revs, **opts):
2420 2420 """strip changesets and all their descendants from the repository
2421 2421
2422 2422 The strip command removes the specified changesets and all their
2423 2423 descendants. If the working directory has uncommitted changes,
2424 2424 the operation is aborted unless the --force flag is supplied.
2425 2425
2426 2426 If a parent of the working directory is stripped, then the working
2427 2427 directory will automatically be updated to the most recent
2428 2428 available ancestor of the stripped parent after the operation
2429 2429 completes.
2430 2430
2431 2431 Any stripped changesets are stored in ``.hg/strip-backup`` as a
2432 2432 bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
2433 2433 be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
2434 2434 where BUNDLE is the bundle file created by the strip. Note that
2435 2435 the local revision numbers will in general be different after the
2436 2436 restore.
2437 2437
2438 2438 Use the --nobackup option to discard the backup bundle once the
2439 2439 operation completes.
2440 2440 """
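    # Illustrative usage (hypothetical revision number, not part of this
    # changeset):
    #   hg strip --nobackup 1234               # no backup bundle is kept
    #   hg unbundle .hg/strip-backup/BUNDLE    # restore a stripped changeset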
2441 2441 backup = 'all'
2442 2442 if opts['backup']:
2443 2443 backup = 'strip'
2444 2444 elif opts['nobackup']:
2445 2445 backup = 'none'
2446 2446
2447 2447 cl = repo.changelog
2448 2448 revs = set(cl.rev(repo.lookup(r)) for r in revs)
2449 2449
2450 2450 descendants = set(cl.descendants(*revs))
2451 2451 strippedrevs = revs.union(descendants)
2452 2452 roots = revs.difference(descendants)
2453 2453
2454 2454 update = False
2455 2455     # if one of the wdir parents is stripped we'll need
2456 2456 # to update away to an earlier revision
2457 2457 for p in repo.dirstate.parents():
2458 2458 if p != nullid and cl.rev(p) in strippedrevs:
2459 2459 update = True
2460 2460 break
2461 2461
2462 2462 rootnodes = set(cl.node(r) for r in roots)
2463 2463
2464 2464 q = repo.mq
2465 2465 if q.applied:
2466 2466 # refresh queue state if we're about to strip
2467 2467 # applied patches
2468 2468 if cl.rev(repo.lookup('qtip')) in strippedrevs:
2469 2469 q.applied_dirty = True
2470 2470 start = 0
2471 2471 end = len(q.applied)
2472 2472 for i, statusentry in enumerate(q.applied):
2473 2473 if statusentry.node in rootnodes:
2474 2474 # if one of the stripped roots is an applied
2475 2475 # patch, only part of the queue is stripped
2476 2476 start = i
2477 2477 break
2478 2478 del q.applied[start:end]
2479 2479 q.save_dirty()
2480 2480
2481 2481 repo.mq.strip(repo, list(rootnodes), backup=backup, update=update,
2482 2482 force=opts['force'])
2483 2483 return 0
2484 2484
2485 2485 def select(ui, repo, *args, **opts):
2486 2486 '''set or print guarded patches to push
2487 2487
2488 2488     Use the :hg:`qguard` command to set or print guards on a patch, then use
2489 2489 qselect to tell mq which guards to use. A patch will be pushed if
2490 2490 it has no guards or any positive guards match the currently
2491 2491 selected guard, but will not be pushed if any negative guards
2492 2492 match the current guard. For example::
2493 2493
2494 2494 qguard foo.patch -stable (negative guard)
2495 2495 qguard bar.patch +stable (positive guard)
2496 2496 qselect stable
2497 2497
2498 2498 This activates the "stable" guard. mq will skip foo.patch (because
2499 2499 it has a negative match) but push bar.patch (because it has a
2500 2500 positive match).
2501 2501
2502 2502 With no arguments, prints the currently active guards.
2503 2503 With one argument, sets the active guard.
2504 2504
2505 2505 Use -n/--none to deactivate guards (no other arguments needed).
2506 2506 When no guards are active, patches with positive guards are
2507 2507 skipped and patches with negative guards are pushed.
2508 2508
2509 2509 qselect can change the guards on applied patches. It does not pop
2510 2510 guarded patches by default. Use --pop to pop back to the last
2511 2511 applied patch that is not guarded. Use --reapply (which implies
2512 2512 --pop) to push back to the current patch afterwards, but skip
2513 2513 guarded patches.
2514 2514
2515 2515 Use -s/--series to print a list of all guards in the series file
2516 2516 (no other arguments needed). Use -v for more information.'''
2517 2517
2518 2518 q = repo.mq
2519 2519 guards = q.active()
2520 2520 if args or opts['none']:
2521 2521 old_unapplied = q.unapplied(repo)
2522 2522 old_guarded = [i for i in xrange(len(q.applied)) if
2523 2523 not q.pushable(i)[0]]
2524 2524 q.set_active(args)
2525 2525 q.save_dirty()
2526 2526 if not args:
2527 2527 ui.status(_('guards deactivated\n'))
2528 2528 if not opts['pop'] and not opts['reapply']:
2529 2529 unapplied = q.unapplied(repo)
2530 2530 guarded = [i for i in xrange(len(q.applied))
2531 2531 if not q.pushable(i)[0]]
2532 2532 if len(unapplied) != len(old_unapplied):
2533 2533 ui.status(_('number of unguarded, unapplied patches has '
2534 2534 'changed from %d to %d\n') %
2535 2535 (len(old_unapplied), len(unapplied)))
2536 2536 if len(guarded) != len(old_guarded):
2537 2537 ui.status(_('number of guarded, applied patches has changed '
2538 2538 'from %d to %d\n') %
2539 2539 (len(old_guarded), len(guarded)))
2540 2540 elif opts['series']:
2541 2541 guards = {}
2542 2542 noguards = 0
2543 2543 for gs in q.series_guards:
2544 2544 if not gs:
2545 2545 noguards += 1
2546 2546 for g in gs:
2547 2547 guards.setdefault(g, 0)
2548 2548 guards[g] += 1
2549 2549 if ui.verbose:
2550 2550 guards['NONE'] = noguards
2551 2551 guards = guards.items()
2552 2552 guards.sort(key=lambda x: x[0][1:])
2553 2553 if guards:
2554 2554 ui.note(_('guards in series file:\n'))
2555 2555 for guard, count in guards:
2556 2556 ui.note('%2d ' % count)
2557 2557 ui.write(guard, '\n')
2558 2558 else:
2559 2559 ui.note(_('no guards in series file\n'))
2560 2560 else:
2561 2561 if guards:
2562 2562 ui.note(_('active guards:\n'))
2563 2563 for g in guards:
2564 2564 ui.write(g, '\n')
2565 2565 else:
2566 2566 ui.write(_('no active guards\n'))
2567 2567 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2568 2568 popped = False
2569 2569 if opts['pop'] or opts['reapply']:
2570 2570 for i in xrange(len(q.applied)):
2571 2571 pushable, reason = q.pushable(i)
2572 2572 if not pushable:
2573 2573 ui.status(_('popping guarded patches\n'))
2574 2574 popped = True
2575 2575 if i == 0:
2576 2576 q.pop(repo, all=True)
2577 2577 else:
2578 2578 q.pop(repo, i - 1)
2579 2579 break
2580 2580 if popped:
2581 2581 try:
2582 2582 if reapply:
2583 2583 ui.status(_('reapplying unguarded patches\n'))
2584 2584 q.push(repo, reapply)
2585 2585 finally:
2586 2586 q.save_dirty()
2587 2587
2588 2588 def finish(ui, repo, *revrange, **opts):
2589 2589 """move applied patches into repository history
2590 2590
2591 2591 Finishes the specified revisions (corresponding to applied
2592 2592 patches) by moving them out of mq control into regular repository
2593 2593 history.
2594 2594
2595 2595 Accepts a revision range or the -a/--applied option. If --applied
2596 2596 is specified, all applied mq revisions are removed from mq
2597 2597 control. Otherwise, the given revisions must be at the base of the
2598 2598 stack of applied patches.
2599 2599
2600 2600 This can be especially useful if your changes have been applied to
2601 2601 an upstream repository, or if you are about to push your changes
2602 2602 to upstream.
2603 2603 """
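    # Illustrative usage (not part of this changeset):
    #   hg qfinish -a            # move every applied patch into regular history
    #   hg qfinish qbase::qtip   # the equivalent explicit revision range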
2604 2604 if not opts['applied'] and not revrange:
2605 2605 raise util.Abort(_('no revisions specified'))
2606 2606 elif opts['applied']:
2607 2607 revrange = ('qbase::qtip',) + revrange
2608 2608
2609 2609 q = repo.mq
2610 2610 if not q.applied:
2611 2611 ui.status(_('no patches applied\n'))
2612 2612 return 0
2613 2613
2614 2614 revs = cmdutil.revrange(repo, revrange)
2615 2615 q.finish(repo, revs)
2616 2616 q.save_dirty()
2617 2617 return 0
2618 2618
2619 2619 def qqueue(ui, repo, name=None, **opts):
2620 2620 '''manage multiple patch queues
2621 2621
2622 2622 Supports switching between different patch queues, as well as creating
2623 2623 new patch queues and deleting existing ones.
2624 2624
2625 2625 Omitting a queue name or specifying -l/--list will show you the registered
2626 2626 queues - by default the "normal" patches queue is registered. The currently
2627 2627 active queue will be marked with "(active)".
2628 2628
2629 2629     To create a new queue, use -c/--create. The queue is automatically made
2630 2630     active, except when there are applied patches from the currently
2631 2631     active queue in the repository; in that case the new queue is only
2632 2632     created, and switching to it fails.
2633 2633
2634 2634 To delete an existing queue, use --delete. You cannot delete the currently
2635 2635 active queue.
2636 2636 '''
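    # Illustrative usage (hypothetical queue name, not part of this changeset):
    #   hg qqueue --create experimental   # create a new queue and switch to it
    #   hg qqueue --list                  # list queues; the active one is marked
    #   hg qqueue patches                 # switch back to the default queue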
2637 2637
2638 2638 q = repo.mq
2639 2639
2640 2640 _defaultqueue = 'patches'
2641 2641 _allqueues = 'patches.queues'
2642 2642 _activequeue = 'patches.queue'
2643 2643
2644 2644 def _getcurrent():
2645 2645 cur = os.path.basename(q.path)
2646 2646 if cur.startswith('patches-'):
2647 2647 cur = cur[8:]
2648 2648 return cur
2649 2649
2650 2650 def _noqueues():
2651 2651 try:
2652 2652 fh = repo.opener(_allqueues, 'r')
2653 2653 fh.close()
2654 2654 except IOError:
2655 2655 return True
2656 2656
2657 2657 return False
2658 2658
2659 2659 def _getqueues():
2660 2660 current = _getcurrent()
2661 2661
2662 2662 try:
2663 2663 fh = repo.opener(_allqueues, 'r')
2664 2664 queues = [queue.strip() for queue in fh if queue.strip()]
2665 2665 if current not in queues:
2666 2666 queues.append(current)
2667 2667 except IOError:
2668 2668 queues = [_defaultqueue]
2669 2669
2670 2670 return sorted(queues)
2671 2671
2672 2672 def _setactive(name):
2673 2673 if q.applied:
2674 2674 raise util.Abort(_('patches applied - cannot set new queue active'))
2675 2675 _setactivenocheck(name)
2676 2676
2677 2677 def _setactivenocheck(name):
2678 2678 fh = repo.opener(_activequeue, 'w')
2679 2679 if name != 'patches':
2680 2680 fh.write(name)
2681 2681 fh.close()
2682 2682
2683 2683 def _addqueue(name):
2684 2684 fh = repo.opener(_allqueues, 'a')
2685 2685 fh.write('%s\n' % (name,))
2686 2686 fh.close()
2687 2687
2688 2688 def _queuedir(name):
2689 2689 if name == 'patches':
2690 2690 return repo.join('patches')
2691 2691 else:
2692 2692 return repo.join('patches-' + name)
2693 2693
2694 2694 def _validname(name):
2695 2695 for n in name:
2696 2696 if n in ':\\/.':
2697 2697 return False
2698 2698 return True
2699 2699
2700 2700 def _delete(name):
2701 2701 if name not in existing:
2702 2702 raise util.Abort(_('cannot delete queue that does not exist'))
2703 2703
2704 2704 current = _getcurrent()
2705 2705
2706 2706 if name == current:
2707 2707 raise util.Abort(_('cannot delete currently active queue'))
2708 2708
2709 2709 fh = repo.opener('patches.queues.new', 'w')
2710 2710 for queue in existing:
2711 2711 if queue == name:
2712 2712 continue
2713 2713 fh.write('%s\n' % (queue,))
2714 2714 fh.close()
2715 2715 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
2716 2716
2717 2717 if not name or opts.get('list'):
2718 2718 current = _getcurrent()
2719 2719 for queue in _getqueues():
2720 2720 ui.write('%s' % (queue,))
2721 2721 if queue == current and not ui.quiet:
2722 2722 ui.write(_(' (active)\n'))
2723 2723 else:
2724 2724 ui.write('\n')
2725 2725 return
2726 2726
2727 2727 if not _validname(name):
2728 2728 raise util.Abort(
2729 2729 _('invalid queue name, may not contain the characters ":\\/."'))
2730 2730
2731 2731 existing = _getqueues()
2732 2732
2733 2733 if opts.get('create'):
2734 2734 if name in existing:
2735 2735 raise util.Abort(_('queue "%s" already exists') % name)
2736 2736 if _noqueues():
2737 2737 _addqueue(_defaultqueue)
2738 2738 _addqueue(name)
2739 2739 _setactive(name)
2740 2740 elif opts.get('rename'):
2741 2741 current = _getcurrent()
2742 2742 if name == current:
2743 2743 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
2744 2744 if name in existing:
2745 2745 raise util.Abort(_('queue "%s" already exists') % name)
2746 2746
2747 2747 olddir = _queuedir(current)
2748 2748 newdir = _queuedir(name)
2749 2749
2750 2750 if os.path.exists(newdir):
2751 2751 raise util.Abort(_('non-queue directory "%s" already exists') %
2752 2752 newdir)
2753 2753
2754 2754 fh = repo.opener('patches.queues.new', 'w')
2755 2755 for queue in existing:
2756 2756 if queue == current:
2757 2757 fh.write('%s\n' % (name,))
2758 2758 if os.path.exists(olddir):
2759 2759 util.rename(olddir, newdir)
2760 2760 else:
2761 2761 fh.write('%s\n' % (queue,))
2762 2762 fh.close()
2763 2763 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
2764 2764 _setactivenocheck(name)
2765 2765 elif opts.get('delete'):
2766 2766 _delete(name)
2767 2767 elif opts.get('purge'):
2768 2768 if name in existing:
2769 2769 _delete(name)
2770 2770 qdir = _queuedir(name)
2771 2771 if os.path.exists(qdir):
2772 2772 shutil.rmtree(qdir)
2773 2773 else:
2774 2774 if name not in existing:
2775 2775 raise util.Abort(_('use --create to create a new queue'))
2776 2776 _setactive(name)
2777 2777
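The qqueue command above keeps all of its state in two small files plus one directory per queue: .hg/patches.queues lists the registered queue names (one per line), .hg/patches.queue names the active queue (empty or absent for the default "patches" queue), and each named queue lives in .hg/patches-<name>. As a rough standalone illustration of that layout — not part of mq, and with the repopath argument and missing-file fallbacks assumed from the helpers above — the following sketch lists the queues and the active one by reading those files directly (it derives the active queue from the marker file, whereas mq itself derives it from the queue directory path):

    import os

    def list_mq_queues(repopath):
        """Return (queues, active) by reading mq's queue bookkeeping directly."""
        hgdir = os.path.join(repopath, '.hg')
        registry = os.path.join(hgdir, 'patches.queues')
        marker = os.path.join(hgdir, 'patches.queue')

        queues = ['patches']                  # default when nothing is registered
        if os.path.exists(registry):
            with open(registry) as fh:
                queues = [line.strip() for line in fh if line.strip()]

        active = 'patches'                    # empty/missing marker means the default queue
        if os.path.exists(marker):
            with open(marker) as fh:
                name = fh.read().strip()
            if name:
                active = name

        if active not in queues:
            queues.append(active)
        return sorted(queues), active

    # queues, active = list_mq_queues('/path/to/repo')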
2778 2778 def reposetup(ui, repo):
2779 2779 class mqrepo(repo.__class__):
2780 2780 @util.propertycache
2781 2781 def mq(self):
2782 2782 return queue(self.ui, self.join(""))
2783 2783
2784 2784 def abort_if_wdir_patched(self, errmsg, force=False):
2785 2785 if self.mq.applied and not force:
2786 2786 parent = self.dirstate.parents()[0]
2787 2787 if parent in [s.node for s in self.mq.applied]:
2788 2788 raise util.Abort(errmsg)
2789 2789
2790 2790 def commit(self, text="", user=None, date=None, match=None,
2791 2791 force=False, editor=False, extra={}):
2792 2792 self.abort_if_wdir_patched(
2793 2793 _('cannot commit over an applied mq patch'),
2794 2794 force)
2795 2795
2796 2796 return super(mqrepo, self).commit(text, user, date, match, force,
2797 2797 editor, extra)
2798 2798
2799 2799 def push(self, remote, force=False, revs=None, newbranch=False):
2800 2800 if self.mq.applied and not force and not revs:
2801 2801 raise util.Abort(_('source has mq patches applied'))
2802 2802 return super(mqrepo, self).push(remote, force, revs, newbranch)
2803 2803
2804 2804 def _findtags(self):
2805 2805 '''augment tags from base class with patch tags'''
2806 2806 result = super(mqrepo, self)._findtags()
2807 2807
2808 2808 q = self.mq
2809 2809 if not q.applied:
2810 2810 return result
2811 2811
2812 2812 mqtags = [(patch.node, patch.name) for patch in q.applied]
2813 2813
2814 2814 if mqtags[-1][0] not in self.changelog.nodemap:
2815 2815 self.ui.warn(_('mq status file refers to unknown node %s\n')
2816 2816 % short(mqtags[-1][0]))
2817 2817 return result
2818 2818
2819 2819 mqtags.append((mqtags[-1][0], 'qtip'))
2820 2820 mqtags.append((mqtags[0][0], 'qbase'))
2821 2821 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2822 2822 tags = result[0]
2823 2823 for patch in mqtags:
2824 2824 if patch[1] in tags:
2825 2825 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2826 2826 % patch[1])
2827 2827 else:
2828 2828 tags[patch[1]] = patch[0]
2829 2829
2830 2830 return result
2831 2831
2832 2832 def _branchtags(self, partial, lrev):
2833 2833 q = self.mq
2834 2834 if not q.applied:
2835 2835 return super(mqrepo, self)._branchtags(partial, lrev)
2836 2836
2837 2837 cl = self.changelog
2838 2838 qbasenode = q.applied[0].node
2839 2839 if qbasenode not in cl.nodemap:
2840 2840 self.ui.warn(_('mq status file refers to unknown node %s\n')
2841 2841 % short(qbasenode))
2842 2842 return super(mqrepo, self)._branchtags(partial, lrev)
2843 2843
2844 2844 qbase = cl.rev(qbasenode)
2845 2845 start = lrev + 1
2846 2846 if start < qbase:
2847 2847 # update the cache (excluding the patches) and save it
2848 2848 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
2849 2849 self._updatebranchcache(partial, ctxgen)
2850 2850 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
2851 2851 start = qbase
2852 2852 # if start = qbase, the cache is as updated as it should be.
2853 2853 # if start > qbase, the cache includes (part of) the patches.
2854 2854 # we might as well use it, but we won't save it.
2855 2855
2856 2856 # update the cache up to the tip
2857 2857 ctxgen = (self[r] for r in xrange(start, len(cl)))
2858 2858 self._updatebranchcache(partial, ctxgen)
2859 2859
2860 2860 return partial
2861 2861
2862 2862 if repo.local():
2863 2863 repo.__class__ = mqrepo
2864 2864
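The _findtags override above is what makes applied patches show up as tags, alongside the synthetic qbase, qtip and qparent tags, while any real tag of the same name wins and only triggers a warning. A small standalone sketch of that precedence, using plain strings in place of binary nodes (the patch names, tag names and node values here are invented for the example):

    applied = [('n1', 'fix-crash'), ('n2', 'new-feature')]   # (node, patch name)
    qparent_node = 'n0'                                      # parent of the first patch

    mqtags = list(applied)
    mqtags.append((applied[-1][0], 'qtip'))
    mqtags.append((applied[0][0], 'qbase'))
    mqtags.append((qparent_node, 'qparent'))

    tags = {'tip': 'n2', 'fix-crash': 'n9'}                  # tags found by the base class
    for node, name in mqtags:
        if name in tags:
            print('Tag %s overrides mq patch of the same name' % name)
        else:
            tags[name] = node

    # tags now also maps 'new-feature', 'qbase', 'qtip' and 'qparent',
    # while the pre-existing 'fix-crash' tag keeps its original node.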
2865 2865 def mqimport(orig, ui, repo, *args, **kwargs):
2866 2866 if (hasattr(repo, 'abort_if_wdir_patched')
2867 2867 and not kwargs.get('no_commit', False)):
2868 2868 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
2869 2869 kwargs.get('force'))
2870 2870 return orig(ui, repo, *args, **kwargs)
2871 2871
2872 2872 def mqinit(orig, ui, *args, **kwargs):
2873 2873 mq = kwargs.pop('mq', None)
2874 2874
2875 2875 if not mq:
2876 2876 return orig(ui, *args, **kwargs)
2877 2877
2878 2878 if args:
2879 2879 repopath = args[0]
2880 2880 if not hg.islocal(repopath):
2881 2881 raise util.Abort(_('only a local queue repository '
2882 2882 'may be initialized'))
2883 2883 else:
2884 2884 repopath = cmdutil.findrepo(os.getcwd())
2885 2885 if not repopath:
2886 raise util.Abort(_('There is no Mercurial repository here '
2886 raise util.Abort(_('there is no Mercurial repository here '
2887 2887 '(.hg not found)'))
2888 2888 repo = hg.repository(ui, repopath)
2889 2889 return qinit(ui, repo, True)
2890 2890
2891 2891 def mqcommand(orig, ui, repo, *args, **kwargs):
2892 2892 """Add --mq option to operate on patch repository instead of main"""
2893 2893
2894 2894 # some commands do not like getting unknown options
2895 2895 mq = kwargs.pop('mq', None)
2896 2896
2897 2897 if not mq:
2898 2898 return orig(ui, repo, *args, **kwargs)
2899 2899
2900 2900 q = repo.mq
2901 2901 r = q.qrepo()
2902 2902 if not r:
2903 2903 raise util.Abort(_('no queue repository'))
2904 2904 return orig(r.ui, r, *args, **kwargs)
2905 2905
2906 2906 def summary(orig, ui, repo, *args, **kwargs):
2907 2907 r = orig(ui, repo, *args, **kwargs)
2908 2908 q = repo.mq
2909 2909 m = []
2910 2910 a, u = len(q.applied), len(q.unapplied(repo))
2911 2911 if a:
2912 2912 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
2913 2913 if u:
2914 2914 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
2915 2915 if m:
2916 2916 ui.write("mq: %s\n" % ', '.join(m))
2917 2917 else:
2918 2918 ui.note(_("mq: (empty queue)\n"))
2919 2919 return r
2920 2920
2921 2921 def uisetup(ui):
2922 2922 mqopt = [('', 'mq', None, _("operate on patch repository"))]
2923 2923
2924 2924 extensions.wrapcommand(commands.table, 'import', mqimport)
2925 2925 extensions.wrapcommand(commands.table, 'summary', summary)
2926 2926
2927 2927 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
2928 2928 entry[1].extend(mqopt)
2929 2929
2930 2930 nowrap = set(commands.norepo.split(" ") + ['qrecord'])
2931 2931
2932 2932 def dotable(cmdtable):
2933 2933 for cmd in cmdtable.keys():
2934 2934 cmd = cmdutil.parsealiases(cmd)[0]
2935 2935 if cmd in nowrap:
2936 2936 continue
2937 2937 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
2938 2938 entry[1].extend(mqopt)
2939 2939
2940 2940 dotable(commands.table)
2941 2941
2942 2942 for extname, extmodule in extensions.extensions():
2943 2943 if extmodule.__file__ != __file__:
2944 2944 dotable(getattr(extmodule, 'cmdtable', {}))
2945 2945
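uisetup above injects a --mq flag into almost every command table entry and routes execution through the mqcommand wrapper, which pops the flag before the original command sees it. The pattern is easier to see in isolation; the sketch below re-implements a wrapcommand-style helper with plain tuples and dictionaries. It is only an illustration: the helper's signature is assumed from the calls above, and the 'log' command and its behaviour here are stand-ins, not Mercurial's.

    def wrapcommand(table, name, wrapper):
        """Replace table[name]'s function so it runs wrapper(orig, ...)."""
        func, options, synopsis = table[name]
        def wrapped(*args, **kwargs):
            return wrapper(func, *args, **kwargs)
        table[name] = (wrapped, options, synopsis)
        return table[name]

    def fakelog(ui, repo, **opts):
        print('log for %s' % repo)

    table = {'log': (fakelog, [], 'hg log')}

    def mqoption(orig, ui, repo, **opts):
        # pop the flag the original command does not know about
        if not opts.pop('mq', None):
            return orig(ui, repo, **opts)
        return orig(ui, 'patch repository of %s' % repo, **opts)

    entry = wrapcommand(table, 'log', mqoption)
    entry[1].append(('', 'mq', None, 'operate on patch repository'))

    table['log'][0](None, 'main-repo', mq=True)   # -> log for patch repository of main-repo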
2946 2946 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2947 2947
2948 2948 cmdtable = {
2949 2949 "qapplied":
2950 2950 (applied,
2951 2951 [('1', 'last', None, _('show only the last patch'))] + seriesopts,
2952 2952 _('hg qapplied [-1] [-s] [PATCH]')),
2953 2953 "qclone":
2954 2954 (clone,
2955 2955 [('', 'pull', None, _('use pull protocol to copy metadata')),
2956 2956 ('U', 'noupdate', None, _('do not update the new working directories')),
2957 2957 ('', 'uncompressed', None,
2958 2958 _('use uncompressed transfer (fast over LAN)')),
2959 2959 ('p', 'patches', '',
2960 2960 _('location of source patch repository'), _('REPO')),
2961 2961 ] + commands.remoteopts,
2962 2962 _('hg qclone [OPTION]... SOURCE [DEST]')),
2963 2963 "qcommit|qci":
2964 2964 (commit,
2965 2965 commands.table["^commit|ci"][1],
2966 2966 _('hg qcommit [OPTION]... [FILE]...')),
2967 2967 "^qdiff":
2968 2968 (diff,
2969 2969 commands.diffopts + commands.diffopts2 + commands.walkopts,
2970 2970 _('hg qdiff [OPTION]... [FILE]...')),
2971 2971 "qdelete|qremove|qrm":
2972 2972 (delete,
2973 2973 [('k', 'keep', None, _('keep patch file')),
2974 2974 ('r', 'rev', [],
2975 2975 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2976 2976 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2977 2977 'qfold':
2978 2978 (fold,
2979 2979 [('e', 'edit', None, _('edit patch header')),
2980 2980 ('k', 'keep', None, _('keep folded patch files')),
2981 2981 ] + commands.commitopts,
2982 2982 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2983 2983 'qgoto':
2984 2984 (goto,
2985 2985 [('f', 'force', None, _('overwrite any local changes'))],
2986 2986 _('hg qgoto [OPTION]... PATCH')),
2987 2987 'qguard':
2988 2988 (guard,
2989 2989 [('l', 'list', None, _('list all patches and guards')),
2990 2990 ('n', 'none', None, _('drop all guards'))],
2991 2991 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
2992 2992 'qheader': (header, [], _('hg qheader [PATCH]')),
2993 2993 "qimport":
2994 2994 (qimport,
2995 2995 [('e', 'existing', None, _('import file in patch directory')),
2996 2996 ('n', 'name', '',
2997 2997 _('name of patch file'), _('NAME')),
2998 2998 ('f', 'force', None, _('overwrite existing files')),
2999 2999 ('r', 'rev', [],
3000 3000 _('place existing revisions under mq control'), _('REV')),
3001 3001 ('g', 'git', None, _('use git extended diff format')),
3002 3002 ('P', 'push', None, _('qpush after importing'))],
3003 3003 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
3004 3004 "^qinit":
3005 3005 (init,
3006 3006 [('c', 'create-repo', None, _('create queue repository'))],
3007 3007 _('hg qinit [-c]')),
3008 3008 "^qnew":
3009 3009 (new,
3010 3010 [('e', 'edit', None, _('edit commit message')),
3011 3011 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
3012 3012 ('g', 'git', None, _('use git extended diff format')),
3013 3013 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
3014 3014 ('u', 'user', '',
3015 3015 _('add "From: <USER>" to patch'), _('USER')),
3016 3016 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
3017 3017 ('d', 'date', '',
3018 3018 _('add "Date: <DATE>" to patch'), _('DATE'))
3019 3019 ] + commands.walkopts + commands.commitopts,
3020 3020 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
3021 3021 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
3022 3022 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
3023 3023 "^qpop":
3024 3024 (pop,
3025 3025 [('a', 'all', None, _('pop all patches')),
3026 3026 ('n', 'name', '',
3027 3027 _('queue name to pop (DEPRECATED)'), _('NAME')),
3028 3028 ('f', 'force', None, _('forget any local changes to patched files'))],
3029 3029 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
3030 3030 "^qpush":
3031 3031 (push,
3032 3032 [('f', 'force', None, _('apply if the patch has rejects')),
3033 3033 ('l', 'list', None, _('list patch name in commit text')),
3034 3034 ('a', 'all', None, _('apply all patches')),
3035 3035 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
3036 3036 ('n', 'name', '',
3037 3037 _('merge queue name (DEPRECATED)'), _('NAME')),
3038 3038 ('', 'move', None, _('reorder patch series and apply only the patch'))],
3039 3039 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [--move] [PATCH | INDEX]')),
3040 3040 "^qrefresh":
3041 3041 (refresh,
3042 3042 [('e', 'edit', None, _('edit commit message')),
3043 3043 ('g', 'git', None, _('use git extended diff format')),
3044 3044 ('s', 'short', None,
3045 3045 _('refresh only files already in the patch and specified files')),
3046 3046 ('U', 'currentuser', None,
3047 3047 _('add/update author field in patch with current user')),
3048 3048 ('u', 'user', '',
3049 3049 _('add/update author field in patch with given user'), _('USER')),
3050 3050 ('D', 'currentdate', None,
3051 3051 _('add/update date field in patch with current date')),
3052 3052 ('d', 'date', '',
3053 3053 _('add/update date field in patch with given date'), _('DATE'))
3054 3054 ] + commands.walkopts + commands.commitopts,
3055 3055 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
3056 3056 'qrename|qmv':
3057 3057 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
3058 3058 "qrestore":
3059 3059 (restore,
3060 3060 [('d', 'delete', None, _('delete save entry')),
3061 3061 ('u', 'update', None, _('update queue working directory'))],
3062 3062 _('hg qrestore [-d] [-u] REV')),
3063 3063 "qsave":
3064 3064 (save,
3065 3065 [('c', 'copy', None, _('copy patch directory')),
3066 3066 ('n', 'name', '',
3067 3067 _('copy directory name'), _('NAME')),
3068 3068 ('e', 'empty', None, _('clear queue status file')),
3069 3069 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3070 3070 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
3071 3071 "qselect":
3072 3072 (select,
3073 3073 [('n', 'none', None, _('disable all guards')),
3074 3074 ('s', 'series', None, _('list all guards in series file')),
3075 3075 ('', 'pop', None, _('pop to before first guarded applied patch')),
3076 3076 ('', 'reapply', None, _('pop, then reapply patches'))],
3077 3077 _('hg qselect [OPTION]... [GUARD]...')),
3078 3078 "qseries":
3079 3079 (series,
3080 3080 [('m', 'missing', None, _('print patches not in series')),
3081 3081 ] + seriesopts,
3082 3082 _('hg qseries [-ms]')),
3083 3083 "strip":
3084 3084 (strip,
3085 3085 [('f', 'force', None, _('force removal of changesets even if the '
3086 3086 'working directory has uncommitted changes')),
3087 3087 ('b', 'backup', None, _('bundle only changesets with local revision'
3088 3088 ' number greater than REV which are not'
3089 3089 ' descendants of REV (DEPRECATED)')),
3090 3090 ('n', 'nobackup', None, _('no backups'))],
3091 3091 _('hg strip [-f] [-n] REV...')),
3092 3092 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
3093 3093 "qunapplied":
3094 3094 (unapplied,
3095 3095 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
3096 3096 _('hg qunapplied [-1] [-s] [PATCH]')),
3097 3097 "qfinish":
3098 3098 (finish,
3099 3099 [('a', 'applied', None, _('finish all applied changesets'))],
3100 3100 _('hg qfinish [-a] [REV]...')),
3101 3101 'qqueue':
3102 3102 (qqueue,
3103 3103 [
3104 3104 ('l', 'list', False, _('list all available queues')),
3105 3105 ('c', 'create', False, _('create new queue')),
3106 3106 ('', 'rename', False, _('rename active queue')),
3107 3107 ('', 'delete', False, _('delete reference to queue')),
3108 3108 ('', 'purge', False, _('delete queue, and remove patch dir')),
3109 3109 ],
3110 3110 _('[OPTION] [QUEUE]')),
3111 3111 }
3112 3112
3113 3113 colortable = {'qguard.negative': 'red',
3114 3114 'qguard.positive': 'yellow',
3115 3115 'qguard.unguarded': 'green',
3116 3116 'qseries.applied': 'blue bold underline',
3117 3117 'qseries.guarded': 'black bold',
3118 3118 'qseries.missing': 'red bold',
3119 3119 'qseries.unapplied': 'black bold'}
@@ -1,620 +1,620
1 1 # Patch transplanting extension for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to transplant changesets from another branch
9 9
10 10 This extension allows you to transplant patches from another branch.
11 11
12 12 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 13 map from a changeset hash to its hash in the source repository.
14 14 '''
15 15
16 16 from mercurial.i18n import _
17 17 import os, tempfile
18 18 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge, match
19 19 from mercurial import patch, revlog, util, error, discovery
20 20
21 21 class transplantentry(object):
22 22 def __init__(self, lnode, rnode):
23 23 self.lnode = lnode
24 24 self.rnode = rnode
25 25
26 26 class transplants(object):
27 27 def __init__(self, path=None, transplantfile=None, opener=None):
28 28 self.path = path
29 29 self.transplantfile = transplantfile
30 30 self.opener = opener
31 31
32 32 if not opener:
33 33 self.opener = util.opener(self.path)
34 34 self.transplants = []
35 35 self.dirty = False
36 36 self.read()
37 37
38 38 def read(self):
39 39 abspath = os.path.join(self.path, self.transplantfile)
40 40 if self.transplantfile and os.path.exists(abspath):
41 41 for line in self.opener(self.transplantfile).read().splitlines():
42 42 lnode, rnode = map(revlog.bin, line.split(':'))
43 43 self.transplants.append(transplantentry(lnode, rnode))
44 44
45 45 def write(self):
46 46 if self.dirty and self.transplantfile:
47 47 if not os.path.isdir(self.path):
48 48 os.mkdir(self.path)
49 49 fp = self.opener(self.transplantfile, 'w')
50 50 for c in self.transplants:
51 51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 52 fp.write(l + ':' + r + '\n')
53 53 fp.close()
54 54 self.dirty = False
55 55
56 56 def get(self, rnode):
57 57 return [t for t in self.transplants if t.rnode == rnode]
58 58
59 59 def set(self, lnode, rnode):
60 60 self.transplants.append(transplantentry(lnode, rnode))
61 61 self.dirty = True
62 62
63 63 def remove(self, transplant):
64 64 del self.transplants[self.transplants.index(transplant)]
65 65 self.dirty = True
66 66
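The transplants class above persists its map as plain text: one local:remote pair of hex node ids per line in .hg/transplant/transplants, as the docstring at the top of this file says. A minimal standalone reader and writer for that format, using binascii in place of revlog's bin/hex helpers (file path handling and the empty-line skip are this sketch's own choices):

    from binascii import hexlify, unhexlify

    def read_transplants(path):
        """Parse a transplants file into (local, remote) binary node pairs."""
        pairs = []
        with open(path) as fp:
            for line in fp.read().splitlines():
                if not line:
                    continue
                lhex, rhex = line.split(':')
                pairs.append((unhexlify(lhex), unhexlify(rhex)))
        return pairs

    def write_transplants(path, pairs):
        with open(path, 'w') as fp:
            for lnode, rnode in pairs:
                fp.write('%s:%s\n' % (hexlify(lnode).decode(), hexlify(rnode).decode()))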
67 67 class transplanter(object):
68 68 def __init__(self, ui, repo):
69 69 self.ui = ui
70 70 self.path = repo.join('transplant')
71 71 self.opener = util.opener(self.path)
72 72 self.transplants = transplants(self.path, 'transplants',
73 73 opener=self.opener)
74 74
75 75 def applied(self, repo, node, parent):
76 76 '''returns True if a node is already an ancestor of parent
77 77 or has already been transplanted'''
78 78 if hasnode(repo, node):
79 79 if node in repo.changelog.reachable(parent, stop=node):
80 80 return True
81 81 for t in self.transplants.get(node):
82 82 # it might have been stripped
83 83 if not hasnode(repo, t.lnode):
84 84 self.transplants.remove(t)
85 85 return False
86 86 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
87 87 return True
88 88 return False
89 89
90 90 def apply(self, repo, source, revmap, merges, opts={}):
91 91 '''apply the revisions in revmap one by one in revision order'''
92 92 revs = sorted(revmap)
93 93 p1, p2 = repo.dirstate.parents()
94 94 pulls = []
95 95 diffopts = patch.diffopts(self.ui, opts)
96 96 diffopts.git = True
97 97
98 98 lock = wlock = None
99 99 try:
100 100 wlock = repo.wlock()
101 101 lock = repo.lock()
102 102 for rev in revs:
103 103 node = revmap[rev]
104 104 revstr = '%s:%s' % (rev, revlog.short(node))
105 105
106 106 if self.applied(repo, node, p1):
107 107 self.ui.warn(_('skipping already applied revision %s\n') %
108 108 revstr)
109 109 continue
110 110
111 111 parents = source.changelog.parents(node)
112 112 if not opts.get('filter'):
113 113 # If the changeset parent is the same as the
114 114 # wdir's parent, just pull it.
115 115 if parents[0] == p1:
116 116 pulls.append(node)
117 117 p1 = node
118 118 continue
119 119 if pulls:
120 120 if source != repo:
121 121 repo.pull(source, heads=pulls)
122 122 merge.update(repo, pulls[-1], False, False, None)
123 123 p1, p2 = repo.dirstate.parents()
124 124 pulls = []
125 125
126 126 domerge = False
127 127 if node in merges:
128 128 # pulling all the merge revs at once would mean we
129 129 # couldn't transplant after the latest one, even if
130 130 # transplants before it fail.
131 131 domerge = True
132 132 if not hasnode(repo, node):
133 133 repo.pull(source, heads=[node])
134 134
135 135 if parents[1] != revlog.nullid:
136 136 self.ui.note(_('skipping merge changeset %s:%s\n')
137 137 % (rev, revlog.short(node)))
138 138 patchfile = None
139 139 else:
140 140 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
141 141 fp = os.fdopen(fd, 'w')
142 142 gen = patch.diff(source, parents[0], node, opts=diffopts)
143 143 for chunk in gen:
144 144 fp.write(chunk)
145 145 fp.close()
146 146
147 147 del revmap[rev]
148 148 if patchfile or domerge:
149 149 try:
150 150 n = self.applyone(repo, node,
151 151 source.changelog.read(node),
152 152 patchfile, merge=domerge,
153 153 log=opts.get('log'),
154 154 filter=opts.get('filter'))
155 155 if n and domerge:
156 156 self.ui.status(_('%s merged at %s\n') % (revstr,
157 157 revlog.short(n)))
158 158 elif n:
159 159 self.ui.status(_('%s transplanted to %s\n')
160 160 % (revlog.short(node),
161 161 revlog.short(n)))
162 162 finally:
163 163 if patchfile:
164 164 os.unlink(patchfile)
165 165 if pulls:
166 166 repo.pull(source, heads=pulls)
167 167 merge.update(repo, pulls[-1], False, False, None)
168 168 finally:
169 169 self.saveseries(revmap, merges)
170 170 self.transplants.write()
171 171 lock.release()
172 172 wlock.release()
173 173
174 174 def filter(self, filter, changelog, patchfile):
175 175 '''arbitrarily rewrite changeset before applying it'''
176 176
177 177 self.ui.status(_('filtering %s\n') % patchfile)
178 178 user, date, msg = (changelog[1], changelog[2], changelog[4])
179 179
180 180 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
181 181 fp = os.fdopen(fd, 'w')
182 182 fp.write("# HG changeset patch\n")
183 183 fp.write("# User %s\n" % user)
184 184 fp.write("# Date %d %d\n" % date)
185 185 fp.write(msg + '\n')
186 186 fp.close()
187 187
188 188 try:
189 189 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
190 190 util.shellquote(patchfile)),
191 191 environ={'HGUSER': changelog[1]},
192 192 onerr=util.Abort, errprefix=_('filter failed'))
193 193 user, date, msg = self.parselog(file(headerfile))[1:4]
194 194 finally:
195 195 os.unlink(headerfile)
196 196
197 197 return (user, date, msg)
198 198
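The filter method above hands the configured command two arguments: a header file ('# HG changeset patch', '# User', '# Date', then the commit message) and the patch itself, and then reads the possibly rewritten header back with parselog. A --filter script therefore only has to edit those files in place. The example below is entirely hypothetical (including the '[backport] ' prefix); it tags the first line of the commit message and leaves the patch untouched:

    #!/usr/bin/env python
    # invoked by transplant as: FILTER <headerfile> <patchfile>
    import sys

    headerfile = sys.argv[1]          # '# ...' header lines followed by the message
    # sys.argv[2] is the patch; this example does not modify it.

    with open(headerfile) as fp:
        lines = fp.readlines()

    for i, line in enumerate(lines):
        if not line.startswith('# '):             # first non-header line = message
            lines[i] = '[backport] ' + line
            break

    with open(headerfile, 'w') as fp:
        fp.writelines(lines)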
199 199 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
200 200 filter=None):
201 201 '''apply the patch in patchfile to the repository as a transplant'''
202 202 (manifest, user, (time, timezone), files, message) = cl[:5]
203 203 date = "%d %d" % (time, timezone)
204 204 extra = {'transplant_source': node}
205 205 if filter:
206 206 (user, date, message) = self.filter(filter, cl, patchfile)
207 207
208 208 if log:
209 209 # we don't translate messages inserted into commits
210 210 message += '\n(transplanted from %s)' % revlog.hex(node)
211 211
212 212 self.ui.status(_('applying %s\n') % revlog.short(node))
213 213 self.ui.note('%s %s\n%s\n' % (user, date, message))
214 214
215 215 if not patchfile and not merge:
216 216 raise util.Abort(_('can only omit patchfile if merging'))
217 217 if patchfile:
218 218 try:
219 219 files = {}
220 220 try:
221 221 patch.patch(patchfile, self.ui, cwd=repo.root,
222 222 files=files, eolmode=None)
223 223 if not files:
224 224 self.ui.warn(_('%s: empty changeset')
225 225 % revlog.hex(node))
226 226 return None
227 227 finally:
228 228 files = patch.updatedir(self.ui, repo, files)
229 229 except Exception, inst:
230 230 seriespath = os.path.join(self.path, 'series')
231 231 if os.path.exists(seriespath):
232 232 os.unlink(seriespath)
233 233 p1 = repo.dirstate.parents()[0]
234 234 p2 = node
235 235 self.log(user, date, message, p1, p2, merge=merge)
236 236 self.ui.write(str(inst) + '\n')
237 raise util.Abort(_('Fix up the merge and run '
237 raise util.Abort(_('fix up the merge and run '
238 238 'hg transplant --continue'))
239 239 else:
240 240 files = None
241 241 if merge:
242 242 p1, p2 = repo.dirstate.parents()
243 243 repo.dirstate.setparents(p1, node)
244 244 m = match.always(repo.root, '')
245 245 else:
246 246 m = match.exact(repo.root, '', files)
247 247
248 248 n = repo.commit(message, user, date, extra=extra, match=m)
249 249 if not n:
250 250 # Crash here to prevent an unclear crash later, in
251 251 # transplants.write(). This can happen if patch.patch()
252 252 # does nothing but claims success or if repo.status() fails
253 253 # to report changes done by patch.patch(). These both
254 254 # appear to be bugs in other parts of Mercurial, but dying
255 255 # here, as soon as we can detect the problem, is preferable
256 256 # to silently dropping changesets on the floor.
257 257 raise RuntimeError('nothing committed after transplant')
258 258 if not merge:
259 259 self.transplants.set(n, node)
260 260
261 261 return n
262 262
263 263 def resume(self, repo, source, opts=None):
264 264 '''recover last transaction and apply remaining changesets'''
265 265 if os.path.exists(os.path.join(self.path, 'journal')):
266 266 n, node = self.recover(repo)
267 267 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
268 268 revlog.short(n)))
269 269 seriespath = os.path.join(self.path, 'series')
270 270 if not os.path.exists(seriespath):
271 271 self.transplants.write()
272 272 return
273 273 nodes, merges = self.readseries()
274 274 revmap = {}
275 275 for n in nodes:
276 276 revmap[source.changelog.rev(n)] = n
277 277 os.unlink(seriespath)
278 278
279 279 self.apply(repo, source, revmap, merges, opts)
280 280
281 281 def recover(self, repo):
282 282 '''commit working directory using journal metadata'''
283 283 node, user, date, message, parents = self.readlog()
284 284 merge = len(parents) == 2
285 285
286 286 if not user or not date or not message or not parents[0]:
287 287 raise util.Abort(_('transplant log file is corrupt'))
288 288
289 289 extra = {'transplant_source': node}
290 290 wlock = repo.wlock()
291 291 try:
292 292 p1, p2 = repo.dirstate.parents()
293 293 if p1 != parents[0]:
294 294 raise util.Abort(
295 295 _('working dir not at transplant parent %s') %
296 296 revlog.hex(parents[0]))
297 297 if merge:
298 298 repo.dirstate.setparents(p1, parents[1])
299 299 n = repo.commit(message, user, date, extra=extra)
300 300 if not n:
301 301 raise util.Abort(_('commit failed'))
302 302 if not merge:
303 303 self.transplants.set(n, node)
304 304 self.unlog()
305 305
306 306 return n, node
307 307 finally:
308 308 wlock.release()
309 309
310 310 def readseries(self):
311 311 nodes = []
312 312 merges = []
313 313 cur = nodes
314 314 for line in self.opener('series').read().splitlines():
315 315 if line.startswith('# Merges'):
316 316 cur = merges
317 317 continue
318 318 cur.append(revlog.bin(line))
319 319
320 320 return (nodes, merges)
321 321
322 322 def saveseries(self, revmap, merges):
323 323 if not revmap:
324 324 return
325 325
326 326 if not os.path.isdir(self.path):
327 327 os.mkdir(self.path)
328 328 series = self.opener('series', 'w')
329 329 for rev in sorted(revmap):
330 330 series.write(revlog.hex(revmap[rev]) + '\n')
331 331 if merges:
332 332 series.write('# Merges\n')
333 333 for m in merges:
334 334 series.write(revlog.hex(m) + '\n')
335 335 series.close()
336 336
337 337 def parselog(self, fp):
338 338 parents = []
339 339 message = []
340 340 node = revlog.nullid
341 341 inmsg = False
342 342 for line in fp.read().splitlines():
343 343 if inmsg:
344 344 message.append(line)
345 345 elif line.startswith('# User '):
346 346 user = line[7:]
347 347 elif line.startswith('# Date '):
348 348 date = line[7:]
349 349 elif line.startswith('# Node ID '):
350 350 node = revlog.bin(line[10:])
351 351 elif line.startswith('# Parent '):
352 352 parents.append(revlog.bin(line[9:]))
353 353 elif not line.startswith('# '):
354 354 inmsg = True
355 355 message.append(line)
356 356 return (node, user, date, '\n'.join(message), parents)
357 357
358 358 def log(self, user, date, message, p1, p2, merge=False):
359 359 '''journal changelog metadata for later recover'''
360 360
361 361 if not os.path.isdir(self.path):
362 362 os.mkdir(self.path)
363 363 fp = self.opener('journal', 'w')
364 364 fp.write('# User %s\n' % user)
365 365 fp.write('# Date %s\n' % date)
366 366 fp.write('# Node ID %s\n' % revlog.hex(p2))
367 367 fp.write('# Parent ' + revlog.hex(p1) + '\n')
368 368 if merge:
369 369 fp.write('# Parent ' + revlog.hex(p2) + '\n')
370 370 fp.write(message.rstrip() + '\n')
371 371 fp.close()
372 372
373 373 def readlog(self):
374 374 return self.parselog(self.opener('journal'))
375 375
376 376 def unlog(self):
377 377 '''remove changelog journal'''
378 378 absdst = os.path.join(self.path, 'journal')
379 379 if os.path.exists(absdst):
380 380 os.unlink(absdst)
381 381
382 382 def transplantfilter(self, repo, source, root):
383 383 def matchfn(node):
384 384 if self.applied(repo, node, root):
385 385 return False
386 386 if source.changelog.parents(node)[1] != revlog.nullid:
387 387 return False
388 388 extra = source.changelog.read(node)[5]
389 389 cnode = extra.get('transplant_source')
390 390 if cnode and self.applied(repo, cnode, root):
391 391 return False
392 392 return True
393 393
394 394 return matchfn
395 395
396 396 def hasnode(repo, node):
397 397 try:
398 398 return repo.changelog.rev(node) is not None
399 399 except error.RevlogError:
400 400 return False
401 401
402 402 def browserevs(ui, repo, nodes, opts):
403 403 '''interactively transplant changesets'''
404 404 def browsehelp(ui):
405 405 ui.write(_('y: transplant this changeset\n'
406 406 'n: skip this changeset\n'
407 407 'm: merge at this changeset\n'
408 408 'p: show patch\n'
409 409 'c: commit selected changesets\n'
410 410 'q: cancel transplant\n'
411 411 '?: show this help\n'))
412 412
413 413 displayer = cmdutil.show_changeset(ui, repo, opts)
414 414 transplants = []
415 415 merges = []
416 416 for node in nodes:
417 417 displayer.show(repo[node])
418 418 action = None
419 419 while not action:
420 420 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
421 421 if action == '?':
422 422 browsehelp(ui)
423 423 action = None
424 424 elif action == 'p':
425 425 parent = repo.changelog.parents(node)[0]
426 426 for chunk in patch.diff(repo, parent, node):
427 427 ui.write(chunk)
428 428 action = None
429 429 elif action not in ('y', 'n', 'm', 'c', 'q'):
430 430 ui.write(_('no such option\n'))
431 431 action = None
432 432 if action == 'y':
433 433 transplants.append(node)
434 434 elif action == 'm':
435 435 merges.append(node)
436 436 elif action == 'c':
437 437 break
438 438 elif action == 'q':
439 439 transplants = ()
440 440 merges = ()
441 441 break
442 442 displayer.close()
443 443 return (transplants, merges)
444 444
445 445 def transplant(ui, repo, *revs, **opts):
446 446 '''transplant changesets from another branch
447 447
448 448 Selected changesets will be applied on top of the current working
449 449 directory with the log of the original changeset. If --log is
450 450 specified, log messages will have a comment appended of the form::
451 451
452 452 (transplanted from CHANGESETHASH)
453 453
454 454 You can rewrite the changelog message with the --filter option.
455 455 Its argument will be invoked with the current changelog message as
456 456 $1 and the patch as $2.
457 457
458 458 If --source/-s is specified, selects changesets from the named
459 459 repository. If --branch/-b is specified, selects changesets from
460 460 the branch holding the named revision, up to that revision. If
461 461 --all/-a is specified, all changesets on the branch will be
462 462 transplanted, otherwise you will be prompted to select the
463 463 changesets you want.
464 464
465 465 :hg:`transplant --branch REVISION --all` will rebase the selected
466 466 branch (up to the named revision) onto your current working
467 467 directory.
468 468
469 469 You can optionally mark selected transplanted changesets as merge
470 470 changesets. You will not be prompted to transplant any ancestors
471 471 of a merged transplant, and you can merge descendants of them
472 472 normally instead of transplanting them.
473 473
474 474 If no merges or revisions are provided, :hg:`transplant` will
475 475 start an interactive changeset browser.
476 476
477 477 If a changeset application fails, you can fix the merge by hand
478 478 and then resume where you left off by calling :hg:`transplant
479 479 --continue/-c`.
480 480 '''
481 481 def getremotechanges(repo, url):
482 482 sourcerepo = ui.expandpath(url)
483 483 source = hg.repository(ui, sourcerepo)
484 484 tmp = discovery.findcommonincoming(repo, source, force=True)
485 485 common, incoming, rheads = tmp
486 486 if not incoming:
487 487 return (source, None, None)
488 488
489 489 bundle = None
490 490 if not source.local():
491 491 if source.capable('changegroupsubset'):
492 492 cg = source.changegroupsubset(incoming, rheads, 'incoming')
493 493 else:
494 494 cg = source.changegroup(incoming, 'incoming')
495 495 bundle = changegroup.writebundle(cg, None, 'HG10UN')
496 496 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
497 497
498 498 return (source, incoming, bundle)
499 499
500 500 def incwalk(repo, incoming, branches, match=util.always):
501 501 if not branches:
502 502 branches = None
503 503 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
504 504 if match(node):
505 505 yield node
506 506
507 507 def transplantwalk(repo, root, branches, match=util.always):
508 508 if not branches:
509 509 branches = repo.heads()
510 510 ancestors = []
511 511 for branch in branches:
512 512 ancestors.append(repo.changelog.ancestor(root, branch))
513 513 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
514 514 if match(node):
515 515 yield node
516 516
517 517 def checkopts(opts, revs):
518 518 if opts.get('continue'):
519 519 if opts.get('branch') or opts.get('all') or opts.get('merge'):
520 520 raise util.Abort(_('--continue is incompatible with '
521 521 'branch, all or merge'))
522 522 return
523 523 if not (opts.get('source') or revs or
524 524 opts.get('merge') or opts.get('branch')):
525 525 raise util.Abort(_('no source URL, branch tag or revision '
526 526 'list provided'))
527 527 if opts.get('all'):
528 528 if not opts.get('branch'):
529 529 raise util.Abort(_('--all requires a branch revision'))
530 530 if revs:
531 531 raise util.Abort(_('--all is incompatible with a '
532 532 'revision list'))
533 533
534 534 checkopts(opts, revs)
535 535
536 536 if not opts.get('log'):
537 537 opts['log'] = ui.config('transplant', 'log')
538 538 if not opts.get('filter'):
539 539 opts['filter'] = ui.config('transplant', 'filter')
540 540
541 541 tp = transplanter(ui, repo)
542 542
543 543 p1, p2 = repo.dirstate.parents()
544 544 if len(repo) > 0 and p1 == revlog.nullid:
545 545 raise util.Abort(_('no revision checked out'))
546 546 if not opts.get('continue'):
547 547 if p2 != revlog.nullid:
548 548 raise util.Abort(_('outstanding uncommitted merges'))
549 549 m, a, r, d = repo.status()[:4]
550 550 if m or a or r or d:
551 551 raise util.Abort(_('outstanding local changes'))
552 552
553 553 bundle = None
554 554 source = opts.get('source')
555 555 if source:
556 556 (source, incoming, bundle) = getremotechanges(repo, source)
557 557 else:
558 558 source = repo
559 559
560 560 try:
561 561 if opts.get('continue'):
562 562 tp.resume(repo, source, opts)
563 563 return
564 564
565 565 tf = tp.transplantfilter(repo, source, p1)
566 566 if opts.get('prune'):
567 567 prune = [source.lookup(r)
568 568 for r in cmdutil.revrange(source, opts.get('prune'))]
569 569 matchfn = lambda x: tf(x) and x not in prune
570 570 else:
571 571 matchfn = tf
572 572 branches = map(source.lookup, opts.get('branch', ()))
573 573 merges = map(source.lookup, opts.get('merge', ()))
574 574 revmap = {}
575 575 if revs:
576 576 for r in cmdutil.revrange(source, revs):
577 577 revmap[int(r)] = source.lookup(r)
578 578 elif opts.get('all') or not merges:
579 579 if source != repo:
580 580 alltransplants = incwalk(source, incoming, branches,
581 581 match=matchfn)
582 582 else:
583 583 alltransplants = transplantwalk(source, p1, branches,
584 584 match=matchfn)
585 585 if opts.get('all'):
586 586 revs = alltransplants
587 587 else:
588 588 revs, newmerges = browserevs(ui, source, alltransplants, opts)
589 589 merges.extend(newmerges)
590 590 for r in revs:
591 591 revmap[source.changelog.rev(r)] = r
592 592 for r in merges:
593 593 revmap[source.changelog.rev(r)] = r
594 594
595 595 tp.apply(repo, source, revmap, merges, opts)
596 596 finally:
597 597 if bundle:
598 598 source.close()
599 599 os.unlink(bundle)
600 600
601 601 cmdtable = {
602 602 "transplant":
603 603 (transplant,
604 604 [('s', 'source', '',
605 605 _('pull patches from REPO'), _('REPO')),
606 606 ('b', 'branch', [],
607 607 _('pull patches from branch BRANCH'), _('BRANCH')),
608 608 ('a', 'all', None, _('pull all changesets up to BRANCH')),
609 609 ('p', 'prune', [],
610 610 _('skip over REV'), _('REV')),
611 611 ('m', 'merge', [],
612 612 _('merge at REV'), _('REV')),
613 613 ('', 'log', None, _('append transplant info to log message')),
614 614 ('c', 'continue', None, _('continue last transplant session '
615 615 'after repair')),
616 616 ('', 'filter', '',
617 617 _('filter changesets through command'), _('CMD'))],
618 618 _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
619 619 '[-m REV] [REV]...'))
620 620 }
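Both extension tables above, and the commands module below, register commands through tables of the same shape: each name maps to (function, options, synopsis), and each option is a (short, long, default, help[, metavar]) tuple — a shape inferred here only from the entries visible in this diff. A toy entry outside Mercurial, just to make that shape concrete (the 'hello' command and its flags are invented for the example):

    def hello(ui, repo, *pats, **opts):
        greeting = 'Hello, %s!' % (opts.get('name') or 'world')
        print(greeting + (' (loudly)' if opts.get('loud') else ''))

    cmdtable = {
        'hello':
            (hello,
             [('l', 'loud', None, 'shout the greeting'),
              ('n', 'name', '', 'greet NAME instead of the world', 'NAME')],
             'hg hello [-l] [-n NAME]'),
    }

    func, options, synopsis = cmdtable['hello']
    func(None, None, loud=True, name='transplant')   # -> Hello, transplant! (loudly)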
@@ -1,4476 +1,4476
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, sys, difflib, time, tempfile
12 12 import hg, util, revlog, bundlerepo, extensions, copies, error
13 13 import patch, help, mdiff, url, encoding, templatekw, discovery
14 14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
15 15 import merge as mergemod
16 16 import minirst, revset
17 17 import dagparser
18 18
19 19 # Commands start here, listed alphabetically
20 20
21 21 def add(ui, repo, *pats, **opts):
22 22 """add the specified files on the next commit
23 23
24 24 Schedule files to be version controlled and added to the
25 25 repository.
26 26
27 27 The files will be added to the repository at the next commit. To
28 28 undo an add before that, see :hg:`forget`.
29 29
30 30 If no names are given, add all files to the repository.
31 31
32 32 .. container:: verbose
33 33
34 34 An example showing how new (unknown) files are added
35 35 automatically by :hg:`add`::
36 36
37 37 $ ls
38 38 foo.c
39 39 $ hg status
40 40 ? foo.c
41 41 $ hg add
42 42 adding foo.c
43 43 $ hg status
44 44 A foo.c
45 45
46 46 Returns 0 if all files are successfully added.
47 47 """
48 48
49 49 bad = []
50 50 names = []
51 51 m = cmdutil.match(repo, pats, opts)
52 52 oldbad = m.bad
53 53 m.bad = lambda x, y: bad.append(x) or oldbad(x, y)
54 54
55 55 for f in repo.walk(m):
56 56 exact = m.exact(f)
57 57 if exact or f not in repo.dirstate:
58 58 names.append(f)
59 59 if ui.verbose or not exact:
60 60 ui.status(_('adding %s\n') % m.rel(f))
61 61 if not opts.get('dry_run'):
62 62 bad += [f for f in repo[None].add(names) if f in m.files()]
63 63 return bad and 1 or 0
64 64
65 65 def addremove(ui, repo, *pats, **opts):
66 66 """add all new files, delete all missing files
67 67
68 68 Add all new files and remove all missing files from the
69 69 repository.
70 70
71 71 New files are ignored if they match any of the patterns in
72 72 .hgignore. As with add, these changes take effect at the next
73 73 commit.
74 74
75 75 Use the -s/--similarity option to detect renamed files. With a
76 76 parameter greater than 0, this compares every removed file with
77 77 every added file and records those similar enough as renames. This
78 78 option takes a percentage between 0 (disabled) and 100 (files must
79 79 be identical) as its parameter. Detecting renamed files this way
80 80 can be expensive. After using this option, :hg:`status -C` can be
81 81 used to check which files were identified as moved or renamed.
82 82
83 83 Returns 0 if all files are successfully added.
84 84 """
85 85 try:
86 86 sim = float(opts.get('similarity') or 100)
87 87 except ValueError:
88 88 raise util.Abort(_('similarity must be a number'))
89 89 if sim < 0 or sim > 100:
90 90 raise util.Abort(_('similarity must be between 0 and 100'))
91 91 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
92 92
93 93 def annotate(ui, repo, *pats, **opts):
94 94 """show changeset information by line for each file
95 95
96 96 List changes in files, showing the revision id responsible for
97 97 each line.
98 98
99 99 This command is useful for discovering when a change was made and
100 100 by whom.
101 101
102 102 Without the -a/--text option, annotate will avoid processing files
103 103 it detects as binary. With -a, annotate will annotate the file
104 104 anyway, although the results will probably be neither useful
105 105 nor desirable.
106 106
107 107 Returns 0 on success.
108 108 """
109 109 if opts.get('follow'):
110 110 # --follow is deprecated and now just an alias for -f/--file
111 111 # to mimic the behavior of Mercurial before version 1.5
112 112 opts['file'] = 1
113 113
114 114 datefunc = ui.quiet and util.shortdate or util.datestr
115 115 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
116 116
117 117 if not pats:
118 118 raise util.Abort(_('at least one filename or pattern is required'))
119 119
120 120 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
121 121 ('number', lambda x: str(x[0].rev())),
122 122 ('changeset', lambda x: short(x[0].node())),
123 123 ('date', getdate),
124 124 ('file', lambda x: x[0].path()),
125 125 ]
126 126
127 127 if (not opts.get('user') and not opts.get('changeset')
128 128 and not opts.get('date') and not opts.get('file')):
129 129 opts['number'] = 1
130 130
131 131 linenumber = opts.get('line_number') is not None
132 132 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
133 133 raise util.Abort(_('at least one of -n/-c is required for -l'))
134 134
135 135 funcmap = [func for op, func in opmap if opts.get(op)]
136 136 if linenumber:
137 137 lastfunc = funcmap[-1]
138 138 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
139 139
140 140 ctx = repo[opts.get('rev')]
141 141 m = cmdutil.match(repo, pats, opts)
142 142 follow = not opts.get('no_follow')
143 143 for abs in ctx.walk(m):
144 144 fctx = ctx[abs]
145 145 if not opts.get('text') and util.binary(fctx.data()):
146 146 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
147 147 continue
148 148
149 149 lines = fctx.annotate(follow=follow, linenumber=linenumber)
150 150 pieces = []
151 151
152 152 for f in funcmap:
153 153 l = [f(n) for n, dummy in lines]
154 154 if l:
155 155 sized = [(x, encoding.colwidth(x)) for x in l]
156 156 ml = max([w for x, w in sized])
157 157 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
158 158
159 159 if pieces:
160 160 for p, l in zip(zip(*pieces), lines):
161 161 ui.write("%s: %s" % (" ".join(p), l[1]))
162 162
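annotate above collects one list per requested field, pads every value to its column's maximum width, and then zips the columns back into rows before printing each annotated line. The same idea in isolation, with made-up data and plain len() standing in for encoding.colwidth():

    rows = [('1', 'alice', 'first line'),
            ('12', 'bob', 'second line'),
            ('3', 'carol', 'third line')]

    # one list per metadata column (here: revision and user)
    columns = list(zip(*[(rev, user) for rev, user, _ in rows]))

    padded = []
    for col in columns:
        width = max(len(x) for x in col)
        padded.append([x.rjust(width) for x in col])

    for meta, (_, _, text) in zip(zip(*padded), rows):
        print('%s: %s' % (' '.join(meta), text))
    #  1 alice: first line
    # 12   bob: second line
    #  3 carol: third line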
163 163 def archive(ui, repo, dest, **opts):
164 164 '''create an unversioned archive of a repository revision
165 165
166 166 By default, the revision used is the parent of the working
167 167 directory; use -r/--rev to specify a different revision.
168 168
169 169 The archive type is automatically detected based on file
170 170 extension (or override using -t/--type).
171 171
172 172 Valid types are:
173 173
174 174 :``files``: a directory full of files (default)
175 175 :``tar``: tar archive, uncompressed
176 176 :``tbz2``: tar archive, compressed using bzip2
177 177 :``tgz``: tar archive, compressed using gzip
178 178 :``uzip``: zip archive, uncompressed
179 179 :``zip``: zip archive, compressed using deflate
180 180
181 181 The exact name of the destination archive or directory is given
182 182 using a format string; see :hg:`help export` for details.
183 183
184 184 Each member added to an archive file has a directory prefix
185 185 prepended. Use -p/--prefix to specify a format string for the
186 186 prefix. The default is the basename of the archive, with suffixes
187 187 removed.
188 188
189 189 Returns 0 on success.
190 190 '''
191 191
192 192 ctx = repo[opts.get('rev')]
193 193 if not ctx:
194 194 raise util.Abort(_('no working directory: please specify a revision'))
195 195 node = ctx.node()
196 196 dest = cmdutil.make_filename(repo, dest, node)
197 197 if os.path.realpath(dest) == repo.root:
198 198 raise util.Abort(_('repository root cannot be destination'))
199 199
200 200 kind = opts.get('type') or archival.guesskind(dest) or 'files'
201 201 prefix = opts.get('prefix')
202 202
203 203 if dest == '-':
204 204 if kind == 'files':
205 205 raise util.Abort(_('cannot archive plain files to stdout'))
206 206 dest = sys.stdout
207 207 if not prefix:
208 208 prefix = os.path.basename(repo.root) + '-%h'
209 209
210 210 prefix = cmdutil.make_filename(repo, prefix, node)
211 211 matchfn = cmdutil.match(repo, [], opts)
212 212 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
213 213 matchfn, prefix)
214 214
215 215 def backout(ui, repo, node=None, rev=None, **opts):
216 216 '''reverse effect of earlier changeset
217 217
218 218 Commit the backed out changes as a new changeset. The new
219 219 changeset is a child of the backed out changeset.
220 220
221 221 If you backout a changeset other than the tip, a new head is
222 222 created. This head will be the new tip and you should merge this
223 223 backout changeset with another head.
224 224
225 225 The --merge option remembers the parent of the working directory
226 226 before starting the backout, then merges the new head with that
227 227 changeset afterwards. This saves you from doing the merge by hand.
228 228 The result of this merge is not committed, as with a normal merge.
229 229
230 230 See :hg:`help dates` for a list of formats valid for -d/--date.
231 231
232 232 Returns 0 on success.
233 233 '''
234 234 if rev and node:
235 235 raise util.Abort(_("please specify just one revision"))
236 236
237 237 if not rev:
238 238 rev = node
239 239
240 240 if not rev:
241 241 raise util.Abort(_("please specify a revision to backout"))
242 242
243 243 date = opts.get('date')
244 244 if date:
245 245 opts['date'] = util.parsedate(date)
246 246
247 247 cmdutil.bail_if_changed(repo)
248 248 node = repo.lookup(rev)
249 249
250 250 op1, op2 = repo.dirstate.parents()
251 251 a = repo.changelog.ancestor(op1, node)
252 252 if a != node:
253 253 raise util.Abort(_('cannot backout change on a different branch'))
254 254
255 255 p1, p2 = repo.changelog.parents(node)
256 256 if p1 == nullid:
257 257 raise util.Abort(_('cannot backout a change with no parents'))
258 258 if p2 != nullid:
259 259 if not opts.get('parent'):
260 260 raise util.Abort(_('cannot backout a merge changeset without '
261 261 '--parent'))
262 262 p = repo.lookup(opts['parent'])
263 263 if p not in (p1, p2):
264 264 raise util.Abort(_('%s is not a parent of %s') %
265 265 (short(p), short(node)))
266 266 parent = p
267 267 else:
268 268 if opts.get('parent'):
269 269 raise util.Abort(_('cannot use --parent on non-merge changeset'))
270 270 parent = p1
271 271
272 272 # the backout should appear on the same branch
273 273 branch = repo.dirstate.branch()
274 274 hg.clean(repo, node, show_stats=False)
275 275 repo.dirstate.setbranch(branch)
276 276 revert_opts = opts.copy()
277 277 revert_opts['date'] = None
278 278 revert_opts['all'] = True
279 279 revert_opts['rev'] = hex(parent)
280 280 revert_opts['no_backup'] = None
281 281 revert(ui, repo, **revert_opts)
282 282 commit_opts = opts.copy()
283 283 commit_opts['addremove'] = False
284 284 if not commit_opts['message'] and not commit_opts['logfile']:
285 285 # we don't translate commit messages
286 286 commit_opts['message'] = "Backed out changeset %s" % short(node)
287 287 commit_opts['force_editor'] = True
288 288 commit(ui, repo, **commit_opts)
289 289 def nice(node):
290 290 return '%d:%s' % (repo.changelog.rev(node), short(node))
291 291 ui.status(_('changeset %s backs out changeset %s\n') %
292 292 (nice(repo.changelog.tip()), nice(node)))
293 293 if op1 != node:
294 294 hg.clean(repo, op1, show_stats=False)
295 295 if opts.get('merge'):
296 296 ui.status(_('merging with changeset %s\n')
297 297 % nice(repo.changelog.tip()))
298 298 hg.merge(repo, hex(repo.changelog.tip()))
299 299 else:
300 300 ui.status(_('the backout changeset is a new head - '
301 301 'do not forget to merge\n'))
302 302 ui.status(_('(use "backout --merge" '
303 303 'if you want to auto-merge)\n'))
304 304
305 305 def bisect(ui, repo, rev=None, extra=None, command=None,
306 306 reset=None, good=None, bad=None, skip=None, noupdate=None):
307 307 """subdivision search of changesets
308 308
309 309 This command helps to find changesets which introduce problems. To
310 310 use, mark the earliest changeset you know exhibits the problem as
311 311 bad, then mark the latest changeset which is free from the problem
312 312 as good. Bisect will update your working directory to a revision
313 313 for testing (unless the -U/--noupdate option is specified). Once
314 314 you have performed tests, mark the working directory as good or
315 315 bad, and bisect will either update to another candidate changeset
316 316 or announce that it has found the bad revision.
317 317
318 318 As a shortcut, you can also use the revision argument to mark a
319 319 revision as good or bad without checking it out first.
320 320
321 321 If you supply a command, it will be used for automatic bisection.
322 322 Its exit status will be used to mark revisions as good or bad:
323 323 status 0 means good, 125 means to skip the revision, 127
324 324 (command not found) will abort the bisection, and any other
325 325 non-zero exit status means the revision is bad.
326 326
327 327 Returns 0 on success.
328 328 """
329 329 def print_result(nodes, good):
330 330 displayer = cmdutil.show_changeset(ui, repo, {})
331 331 if len(nodes) == 1:
332 332 # narrowed it down to a single revision
333 333 if good:
334 334 ui.write(_("The first good revision is:\n"))
335 335 else:
336 336 ui.write(_("The first bad revision is:\n"))
337 337 displayer.show(repo[nodes[0]])
338 338 else:
339 339 # multiple possible revisions
340 340 if good:
341 341 ui.write(_("Due to skipped revisions, the first "
342 342 "good revision could be any of:\n"))
343 343 else:
344 344 ui.write(_("Due to skipped revisions, the first "
345 345 "bad revision could be any of:\n"))
346 346 for n in nodes:
347 347 displayer.show(repo[n])
348 348 displayer.close()
349 349
350 350 def check_state(state, interactive=True):
351 351 if not state['good'] or not state['bad']:
352 352 if (good or bad or skip or reset) and interactive:
353 353 return
354 354 if not state['good']:
355 355 raise util.Abort(_('cannot bisect (no known good revisions)'))
356 356 else:
357 357 raise util.Abort(_('cannot bisect (no known bad revisions)'))
358 358 return True
359 359
360 360 # backward compatibility
361 361 if rev in "good bad reset init".split():
362 362 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
363 363 cmd, rev, extra = rev, extra, None
364 364 if cmd == "good":
365 365 good = True
366 366 elif cmd == "bad":
367 367 bad = True
368 368 else:
369 369 reset = True
370 370 elif extra or good + bad + skip + reset + bool(command) > 1:
371 371 raise util.Abort(_('incompatible arguments'))
372 372
373 373 if reset:
374 374 p = repo.join("bisect.state")
375 375 if os.path.exists(p):
376 376 os.unlink(p)
377 377 return
378 378
379 379 state = hbisect.load_state(repo)
380 380
381 381 if command:
382 382 changesets = 1
383 383 try:
384 384 while changesets:
385 385 # update state
386 386 status = util.system(command)
387 387 if status == 125:
388 388 transition = "skip"
389 389 elif status == 0:
390 390 transition = "good"
391 391 # status < 0 means process was killed
392 392 elif status == 127:
393 393 raise util.Abort(_("failed to execute %s") % command)
394 394 elif status < 0:
395 395 raise util.Abort(_("%s killed") % command)
396 396 else:
397 397 transition = "bad"
398 398 ctx = repo[rev or '.']
399 399 state[transition].append(ctx.node())
400 400 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
401 401 check_state(state, interactive=False)
402 402 # bisect
403 403 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
404 404 # update to next check
405 405 cmdutil.bail_if_changed(repo)
406 406 hg.clean(repo, nodes[0], show_stats=False)
407 407 finally:
408 408 hbisect.save_state(repo, state)
409 409 print_result(nodes, good)
410 410 return
411 411
412 412 # update state
413 413 node = repo.lookup(rev or '.')
414 414 if good or bad or skip:
415 415 if good:
416 416 state['good'].append(node)
417 417 elif bad:
418 418 state['bad'].append(node)
419 419 elif skip:
420 420 state['skip'].append(node)
421 421 hbisect.save_state(repo, state)
422 422
423 423 if not check_state(state):
424 424 return
425 425
426 426 # actually bisect
427 427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
428 428 if changesets == 0:
429 429 print_result(nodes, good)
430 430 else:
431 431 assert len(nodes) == 1 # only a single node can be tested next
432 432 node = nodes[0]
433 433 # compute the approximate number of remaining tests
434 434 tests, size = 0, 2
435 435 while size <= changesets:
436 436 tests, size = tests + 1, size * 2
437 437 rev = repo.changelog.rev(node)
438 438 ui.write(_("Testing changeset %d:%s "
439 439 "(%d changesets remaining, ~%d tests)\n")
440 440 % (rev, short(node), changesets, tests))
441 441 if not noupdate:
442 442 cmdutil.bail_if_changed(repo)
443 443 return hg.clean(repo, node)
444 444
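When a command is supplied, the loop above maps its exit status onto bisection verdicts exactly as the docstring describes: 0 is good, 125 skips the revision, 127 aborts, and any other non-zero status is bad. A test script for `hg bisect --command` therefore only needs to translate its own result into those codes. In this sketch the build and test commands ('make', './run-tests') are placeholders for whatever the project actually uses:

    #!/usr/bin/env python
    import subprocess
    import sys

    # cannot even build this revision: ask bisect to skip it (status 125)
    if subprocess.call(['make', '-s']) != 0:
        sys.exit(125)

    # 0 -> good revision, any other non-zero (here 1) -> bad revision;
    # exiting with 127 would abort the whole bisection.
    status = subprocess.call(['./run-tests', 'test-regression'])
    sys.exit(0 if status == 0 else 1)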
445 445 def branch(ui, repo, label=None, **opts):
446 446 """set or show the current branch name
447 447
448 448 With no argument, show the current branch name. With one argument,
449 449 set the working directory branch name (the branch will not exist
450 450 in the repository until the next commit). Standard practice
451 451 recommends that primary development take place on the 'default'
452 452 branch.
453 453
454 454 Unless -f/--force is specified, branch will not let you set a
455 455 branch name that already exists, even if it's inactive.
456 456
457 457 Use -C/--clean to reset the working directory branch to that of
458 458 the parent of the working directory, negating a previous branch
459 459 change.
460 460
461 461 Use the command :hg:`update` to switch to an existing branch. Use
462 462 :hg:`commit --close-branch` to mark this branch as closed.
463 463
464 464 Returns 0 on success.
465 465 """
466 466
467 467 if opts.get('clean'):
468 468 label = repo[None].parents()[0].branch()
469 469 repo.dirstate.setbranch(label)
470 470 ui.status(_('reset working directory to branch %s\n') % label)
471 471 elif label:
472 472 utflabel = encoding.fromlocal(label)
473 473 if not opts.get('force') and utflabel in repo.branchtags():
474 474 if label not in [p.branch() for p in repo.parents()]:
475 475 raise util.Abort(_('a branch of the same name already exists'
476 476 " (use 'hg update' to switch to it)"))
477 477 repo.dirstate.setbranch(utflabel)
478 478 ui.status(_('marked working directory as branch %s\n') % label)
479 479 else:
480 480 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
481 481
482 482 def branches(ui, repo, active=False, closed=False):
483 483 """list repository named branches
484 484
485 485 List the repository's named branches, indicating which ones are
486 486 inactive. If -c/--closed is specified, also list branches which have
487 487 been marked closed (see :hg:`commit --close-branch`).
488 488
489 489 If -a/--active is specified, only show active branches. A branch
490 490 is considered active if it contains repository heads.
491 491
492 492 Use the command :hg:`update` to switch to an existing branch.
493 493
494 494 Returns 0.
495 495 """
496 496
497 497 hexfunc = ui.debugflag and hex or short
498 498 activebranches = [repo[n].branch() for n in repo.heads()]
499 499 def testactive(tag, node):
500 500 realhead = tag in activebranches
501 501 open = node in repo.branchheads(tag, closed=False)
502 502 return realhead and open
503 503 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
504 504 for tag, node in repo.branchtags().items()],
505 505 reverse=True)
506 506
507 507 for isactive, node, tag in branches:
508 508 if (not active) or isactive:
509 509 encodedtag = encoding.tolocal(tag)
510 510 if ui.quiet:
511 511 ui.write("%s\n" % encodedtag)
512 512 else:
513 513 hn = repo.lookup(node)
514 514 if isactive:
515 515 label = 'branches.active'
516 516 notice = ''
517 517 elif hn not in repo.branchheads(tag, closed=False):
518 518 if not closed:
519 519 continue
520 520 label = 'branches.closed'
521 521 notice = _(' (closed)')
522 522 else:
523 523 label = 'branches.inactive'
524 524 notice = _(' (inactive)')
525 525 if tag == repo.dirstate.branch():
526 526 label = 'branches.current'
527 527 rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
528 528 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
529 529 encodedtag = ui.label(encodedtag, label)
530 530 ui.write("%s %s%s\n" % (encodedtag, rev, notice))
531 531
532 532 def bundle(ui, repo, fname, dest=None, **opts):
533 533 """create a changegroup file
534 534
535 535 Generate a compressed changegroup file collecting changesets not
536 536 known to be in another repository.
537 537
538 538 If you omit the destination repository, then hg assumes the
539 539 destination will have all the nodes you specify with --base
540 540 parameters. To create a bundle containing all changesets, use
541 541 -a/--all (or --base null).
542 542
543 543 You can change the compression method with the -t/--type option.
544 544 The available compression methods are: none, bzip2, and
545 545 gzip (by default, bundles are compressed using bzip2).
546 546
547 547 The bundle file can then be transferred using conventional means
548 548 and applied to another repository with the unbundle or pull
549 549 command. This is useful when direct push and pull are not
550 550 available or when exporting an entire repository is undesirable.
551 551
552 552 Applying bundles preserves all changeset contents including
553 553 permissions, copy/rename information, and revision history.
554 554
555 555 Returns 0 on success, 1 if no changes found.
556 556 """
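# Illustrative shell usage of this command (editor's sketch; file names
# and the remote URL are hypothetical):
#   $ hg bundle --all all.hg                    # bundle every changeset
#   $ hg bundle -t gzip --base null all.hg.gz   # same, compressed with gzip
#   $ hg bundle out.hg ssh://example.com/repo   # changesets missing from the remote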
557 557 revs = opts.get('rev') or None
558 558 if opts.get('all'):
559 559 base = ['null']
560 560 else:
561 561 base = opts.get('base')
562 562 if base:
563 563 if dest:
564 564 raise util.Abort(_("--base is incompatible with specifying "
565 565 "a destination"))
566 566 base = [repo.lookup(rev) for rev in base]
567 567 # create the right base
568 568 # XXX: nodesbetween / changegroup* should be "fixed" instead
569 569 o = []
570 570 has = set((nullid,))
571 571 for n in base:
572 572 has.update(repo.changelog.reachable(n))
573 573 if revs:
574 574 revs = [repo.lookup(rev) for rev in revs]
575 575 visit = revs[:]
576 576 has.difference_update(visit)
577 577 else:
578 578 visit = repo.changelog.heads()
579 579 seen = {}
580 580 while visit:
581 581 n = visit.pop(0)
582 582 parents = [p for p in repo.changelog.parents(n) if p not in has]
583 583 if len(parents) == 0:
584 584 if n not in has:
585 585 o.append(n)
586 586 else:
587 587 for p in parents:
588 588 if p not in seen:
589 589 seen[p] = 1
590 590 visit.append(p)
591 591 else:
592 592 dest = ui.expandpath(dest or 'default-push', dest or 'default')
593 593 dest, branches = hg.parseurl(dest, opts.get('branch'))
594 594 other = hg.repository(hg.remoteui(repo, opts), dest)
595 595 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
596 596 if revs:
597 597 revs = [repo.lookup(rev) for rev in revs]
598 598 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
599 599
600 600 if not o:
601 601 ui.status(_("no changes found\n"))
602 602 return 1
603 603
604 604 if revs:
605 605 cg = repo.changegroupsubset(o, revs, 'bundle')
606 606 else:
607 607 cg = repo.changegroup(o, 'bundle')
608 608
609 609 bundletype = opts.get('type', 'bzip2').lower()
610 610 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
611 611 bundletype = btypes.get(bundletype)
612 612 if bundletype not in changegroup.bundletypes:
613 613 raise util.Abort(_('unknown bundle type specified with --type'))
614 614
615 615 changegroup.writebundle(cg, fname, bundletype)
616 616
617 617 def cat(ui, repo, file1, *pats, **opts):
618 618 """output the current or given revision of files
619 619
620 620 Print the specified files as they were at the given revision. If
621 621 no revision is given, the parent of the working directory is used,
622 622 or tip if no revision is checked out.
623 623
624 624 Output may be to a file, in which case the name of the file is
625 625 given using a format string. The formatting rules are the same as
626 626 for the export command, with the following additions:
627 627
628 628 :``%s``: basename of file being printed
629 629 :``%d``: dirname of file being printed, or '.' if in repository root
630 630 :``%p``: root-relative path name of file being printed
631 631
632 632 Returns 0 on success.
633 633 """
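# Illustrative shell usage of this command (editor's sketch; the revision
# and file names are hypothetical):
#   $ hg cat -r 1.0 README                      # print README as of revision 1.0
#   $ hg cat -r 1.0 -o '%d/%s.orig' src/main.c  # write output to a %d/%s-formatted path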
634 634 ctx = repo[opts.get('rev')]
635 635 err = 1
636 636 m = cmdutil.match(repo, (file1,) + pats, opts)
637 637 for abs in ctx.walk(m):
638 638 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
639 639 data = ctx[abs].data()
640 640 if opts.get('decode'):
641 641 data = repo.wwritedata(abs, data)
642 642 fp.write(data)
643 643 err = 0
644 644 return err
645 645
646 646 def clone(ui, source, dest=None, **opts):
647 647 """make a copy of an existing repository
648 648
649 649 Create a copy of an existing repository in a new directory.
650 650
651 651 If no destination directory name is specified, it defaults to the
652 652 basename of the source.
653 653
654 654 The location of the source is added to the new repository's
655 655 .hg/hgrc file, as the default to be used for future pulls.
656 656
657 657 See :hg:`help urls` for valid source format details.
658 658
659 659 It is possible to specify an ``ssh://`` URL as the destination, but no
660 660 .hg/hgrc or working directory will be created on the remote side.
661 661 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
662 662
663 663 A set of changesets (tags, or branch names) to pull may be specified
664 664 by listing each changeset (tag, or branch name) with -r/--rev.
665 665 If -r/--rev is used, the cloned repository will contain only a subset
666 666 of the changesets of the source repository. Only the set of changesets
667 667 defined by all -r/--rev options (including all their ancestors)
668 668 will be pulled into the destination repository.
669 669 No subsequent changesets (including subsequent tags) will be present
670 670 in the destination.
671 671
672 672 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
673 673 local source repositories.
674 674
675 675 For efficiency, hardlinks are used for cloning whenever the source
676 676 and destination are on the same filesystem (note this applies only
677 677 to the repository data, not to the working directory). Some
678 678 filesystems, such as AFS, implement hardlinking incorrectly, but
679 679 do not report errors. In these cases, use the --pull option to
680 680 avoid hardlinking.
681 681
682 682 In some cases, you can clone repositories and the working directory
683 683 using full hardlinks with ::
684 684
685 685 $ cp -al REPO REPOCLONE
686 686
687 687 This is the fastest way to clone, but it is not always safe. The
688 688 operation is not atomic (making sure REPO is not modified during
689 689 the operation is up to you) and you have to make sure your editor
690 690 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
691 691 this is not compatible with certain extensions that place their
692 692 metadata under the .hg directory, such as mq.
693 693
694 694 Mercurial will update the working directory to the first applicable
695 695 revision from this list:
696 696
697 697 a) null if -U or the source repository has no changesets
698 698 b) if -u . and the source repository is local, the first parent of
699 699 the source repository's working directory
700 700 c) the changeset specified with -u (if a branch name, this means the
701 701 latest head of that branch)
702 702 d) the changeset specified with -r
703 703 e) the tipmost head specified with -b
704 704 f) the tipmost head specified with the url#branch source syntax
705 705 g) the tipmost head of the default branch
706 706 h) tip
707 707
708 708 Returns 0 on success.
709 709 """
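# Illustrative shell usage of this command (editor's sketch; URLs and
# directory names are hypothetical):
#   $ hg clone http://example.com/repo          # clone into ./repo
#   $ hg clone -r 1.0 http://example.com/repo   # only changesets up to rev 1.0
#   $ hg clone -U http://example.com/repo bare  # skip the working directory update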
710 710 if opts.get('noupdate') and opts.get('updaterev'):
711 711 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
712 712
713 713 r = hg.clone(hg.remoteui(ui, opts), source, dest,
714 714 pull=opts.get('pull'),
715 715 stream=opts.get('uncompressed'),
716 716 rev=opts.get('rev'),
717 717 update=opts.get('updaterev') or not opts.get('noupdate'),
718 718 branch=opts.get('branch'))
719 719
720 720 return r is None
721 721
722 722 def commit(ui, repo, *pats, **opts):
723 723 """commit the specified files or all outstanding changes
724 724
725 725 Commit changes to the given files into the repository. Unlike a
726 726 centralized RCS, this operation is a local operation. See
727 727 :hg:`push` for a way to actively distribute your changes.
728 728
729 729 If a list of files is omitted, all changes reported by :hg:`status`
730 730 will be committed.
731 731
732 732 If you are committing the result of a merge, do not provide any
733 733 filenames or -I/-X filters.
734 734
735 735 If no commit message is specified, Mercurial starts your
736 736 configured editor where you can enter a message. In case your
737 737 commit fails, you will find a backup of your message in
738 738 ``.hg/last-message.txt``.
739 739
740 740 See :hg:`help dates` for a list of formats valid for -d/--date.
741 741
742 742 Returns 0 on success, 1 if nothing changed.
743 743 """
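# Illustrative shell usage of this command (editor's sketch; the commit
# messages are hypothetical):
#   $ hg commit -m "fix typo"                # commit all outstanding changes
#   $ hg commit --close-branch -m "done"     # mark this branch head as closed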
744 744 extra = {}
745 745 if opts.get('close_branch'):
746 746 if repo['.'].node() not in repo.branchheads():
747 747 # The topo heads set is included in the branch heads set of the
748 748 # current branch, so it's sufficient to test branchheads
749 749 raise util.Abort(_('can only close branch heads'))
750 750 extra['close'] = 1
751 751 e = cmdutil.commiteditor
752 752 if opts.get('force_editor'):
753 753 e = cmdutil.commitforceeditor
754 754
755 755 def commitfunc(ui, repo, message, match, opts):
756 756 return repo.commit(message, opts.get('user'), opts.get('date'), match,
757 757 editor=e, extra=extra)
758 758
759 759 branch = repo[None].branch()
760 760 bheads = repo.branchheads(branch)
761 761
762 762 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
763 763 if not node:
764 764 ui.status(_("nothing changed\n"))
765 765 return 1
766 766
767 767 ctx = repo[node]
768 768 parents = ctx.parents()
769 769
770 770 if bheads and not [x for x in parents
771 771 if x.node() in bheads and x.branch() == branch]:
772 772 ui.status(_('created new head\n'))
773 773 # The message is not printed for initial roots. For the other
774 774 # changesets, it is printed in the following situations:
775 775 #
776 776 # Par column: for the 2 parents with ...
777 777 # N: null or no parent
778 778 # B: parent is on another named branch
779 779 # C: parent is a regular non head changeset
780 780 # H: parent was a branch head of the current branch
781 781 # Msg column: whether we print "created new head" message
782 782 # In the following, it is assumed that there already exists some
783 783 # initial branch heads of the current branch, otherwise nothing is
784 784 # printed anyway.
785 785 #
786 786 # Par Msg Comment
787 787 # NN y additional topo root
788 788 #
789 789 # BN y additional branch root
790 790 # CN y additional topo head
791 791 # HN n usual case
792 792 #
793 793 # BB y weird additional branch root
794 794 # CB y branch merge
795 795 # HB n merge with named branch
796 796 #
797 797 # CC y additional head from merge
798 798 # CH n merge with a head
799 799 #
800 800 # HH n head merge: head count decreases
801 801
802 802 if not opts.get('close_branch'):
803 803 for r in parents:
804 804 if r.extra().get('close') and r.branch() == branch:
805 805 ui.status(_('reopening closed branch head %d\n') % r)
806 806
807 807 if ui.debugflag:
808 808 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
809 809 elif ui.verbose:
810 810 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
811 811
812 812 def copy(ui, repo, *pats, **opts):
813 813 """mark files as copied for the next commit
814 814
815 815 Mark dest as having copies of source files. If dest is a
816 816 directory, copies are put in that directory. If dest is a file,
817 817 the source must be a single file.
818 818
819 819 By default, this command copies the contents of files as they
820 820 exist in the working directory. If invoked with -A/--after, the
821 821 operation is recorded, but no copying is performed.
822 822
823 823 This command takes effect with the next commit. To undo a copy
824 824 before that, see :hg:`revert`.
825 825
826 826 Returns 0 on success, 1 if errors are encountered.
827 827 """
828 828 wlock = repo.wlock(False)
829 829 try:
830 830 return cmdutil.copy(ui, repo, pats, opts)
831 831 finally:
832 832 wlock.release()
833 833
834 834 def debugancestor(ui, repo, *args):
835 835 """find the ancestor revision of two revisions in a given index"""
836 836 if len(args) == 3:
837 837 index, rev1, rev2 = args
838 838 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
839 839 lookup = r.lookup
840 840 elif len(args) == 2:
841 841 if not repo:
842 raise util.Abort(_("There is no Mercurial repository here "
842 raise util.Abort(_("there is no Mercurial repository here "
843 843 "(.hg not found)"))
844 844 rev1, rev2 = args
845 845 r = repo.changelog
846 846 lookup = repo.lookup
847 847 else:
848 848 raise util.Abort(_('either two or three arguments required'))
849 849 a = r.ancestor(lookup(rev1), lookup(rev2))
850 850 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
851 851
852 852 def debugbuilddag(ui, repo, text,
853 853 mergeable_file=False,
854 854 appended_file=False,
855 855 overwritten_file=False,
856 856 new_file=False):
857 857 """builds a repo with a given DAG from scratch in the current empty repo
858 858
859 859 Elements:
860 860
861 861 - "+n" is a linear run of n nodes based on the current default parent
862 862 - "." is a single node based on the current default parent
863 863 - "$" resets the default parent to null (implied at the start);
864 864 otherwise the default parent is always the last node created
865 865 - "<p" sets the default parent to the backref p
866 866 - "*p" is a fork at parent p, which is a backref
867 867 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
868 868 - "/p2" is a merge of the preceding node and p2
869 869 - ":tag" defines a local tag for the preceding node
870 870 - "@branch" sets the named branch for subsequent nodes
871 871 - "!command" runs the command using your shell
872 872 - "!!my command\\n" is like "!", but to the end of the line
873 873 - "#...\\n" is a comment up to the end of the line
874 874
875 875 Whitespace between the above elements is ignored.
876 876
877 877 A backref is either
878 878
879 879 - a number n, which references the node curr-n, where curr is the current
880 880 node, or
881 881 - the name of a local tag you placed earlier using ":tag", or
882 882 - empty to denote the default parent.
883 883
884 884 All string-valued elements are either strictly alphanumeric, or must
885 885 be enclosed in double quotes ("..."), with "\\" as escape character.
886 886
887 887 Note that the --overwritten-file and --appended-file options imply the
888 888 use of "HGMERGE=internal:local" during DAG buildup.
889 889 """
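# An illustrative DAG text for the grammar above (editor's sketch, not part
# of the original source): "+3 :top $ +2 /top" creates three linear nodes,
# tags the last one "top", resets the default parent to null, adds two more
# nodes, and finally merges the preceding node with the node tagged "top".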
890 890
891 891 if not (mergeable_file or appended_file or overwritten_file or new_file):
892 892 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
893 893
894 894 if len(repo.changelog) > 0:
895 895 raise util.Abort(_('repository is not empty'))
896 896
897 897 if overwritten_file or appended_file:
898 898 # we don't want to fail in merges during buildup
899 899 os.environ['HGMERGE'] = 'internal:local'
900 900
901 901 def writefile(fname, text, fmode="wb"):
902 902 f = open(fname, fmode)
903 903 try:
904 904 f.write(text)
905 905 finally:
906 906 f.close()
907 907
908 908 if mergeable_file:
909 909 linesperrev = 2
910 910 # determine number of revs in DAG
911 911 n = 0
912 912 for type, data in dagparser.parsedag(text):
913 913 if type == 'n':
914 914 n += 1
915 915 # make a file with k lines per rev
916 916 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
917 917 + "\n")
918 918
919 919 at = -1
920 920 atbranch = 'default'
921 921 for type, data in dagparser.parsedag(text):
922 922 if type == 'n':
923 923 ui.status('node %s\n' % str(data))
924 924 id, ps = data
925 925 p1 = ps[0]
926 926 if p1 != at:
927 927 update(ui, repo, node=p1, clean=True)
928 928 at = p1
929 929 if repo.dirstate.branch() != atbranch:
930 930 branch(ui, repo, atbranch, force=True)
931 931 if len(ps) > 1:
932 932 p2 = ps[1]
933 933 merge(ui, repo, node=p2)
934 934
935 935 if mergeable_file:
936 936 f = open("mf", "rb+")
937 937 try:
938 938 lines = f.read().split("\n")
939 939 lines[id * linesperrev] += " r%i" % id
940 940 f.seek(0)
941 941 f.write("\n".join(lines))
942 942 finally:
943 943 f.close()
944 944
945 945 if appended_file:
946 946 writefile("af", "r%i\n" % id, "ab")
947 947
948 948 if overwritten_file:
949 949 writefile("of", "r%i\n" % id)
950 950
951 951 if new_file:
952 952 writefile("nf%i" % id, "r%i\n" % id)
953 953
954 954 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
955 955 at = id
956 956 elif type == 'l':
957 957 id, name = data
958 958 ui.status('tag %s\n' % name)
959 959 tag(ui, repo, name, local=True)
960 960 elif type == 'a':
961 961 ui.status('branch %s\n' % data)
962 962 atbranch = data
963 963 elif type in 'cC':
964 964 r = util.system(data, cwd=repo.root)
965 965 if r:
966 966 desc, r = util.explain_exit(r)
967 967 raise util.Abort(_('%s command %s') % (data, desc))
968 968
969 969 def debugcommands(ui, cmd='', *args):
970 970 """list all available commands and options"""
971 971 for cmd, vals in sorted(table.iteritems()):
972 972 cmd = cmd.split('|')[0].strip('^')
973 973 opts = ', '.join([i[1] for i in vals[1]])
974 974 ui.write('%s: %s\n' % (cmd, opts))
975 975
976 976 def debugcomplete(ui, cmd='', **opts):
977 977 """returns the completion list associated with the given command"""
978 978
979 979 if opts.get('options'):
980 980 options = []
981 981 otables = [globalopts]
982 982 if cmd:
983 983 aliases, entry = cmdutil.findcmd(cmd, table, False)
984 984 otables.append(entry[1])
985 985 for t in otables:
986 986 for o in t:
987 987 if "(DEPRECATED)" in o[3]:
988 988 continue
989 989 if o[0]:
990 990 options.append('-%s' % o[0])
991 991 options.append('--%s' % o[1])
992 992 ui.write("%s\n" % "\n".join(options))
993 993 return
994 994
995 995 cmdlist = cmdutil.findpossible(cmd, table)
996 996 if ui.verbose:
997 997 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
998 998 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
999 999
1000 1000 def debugfsinfo(ui, path = "."):
1001 1001 """show information detected about current filesystem"""
1002 1002 open('.debugfsinfo', 'w').write('')
1003 1003 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1004 1004 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1005 1005 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1006 1006 and 'yes' or 'no'))
1007 1007 os.unlink('.debugfsinfo')
1008 1008
1009 1009 def debugrebuildstate(ui, repo, rev="tip"):
1010 1010 """rebuild the dirstate as it would look like for the given revision"""
1011 1011 ctx = repo[rev]
1012 1012 wlock = repo.wlock()
1013 1013 try:
1014 1014 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1015 1015 finally:
1016 1016 wlock.release()
1017 1017
1018 1018 def debugcheckstate(ui, repo):
1019 1019 """validate the correctness of the current dirstate"""
1020 1020 parent1, parent2 = repo.dirstate.parents()
1021 1021 m1 = repo[parent1].manifest()
1022 1022 m2 = repo[parent2].manifest()
1023 1023 errors = 0
1024 1024 for f in repo.dirstate:
1025 1025 state = repo.dirstate[f]
1026 1026 if state in "nr" and f not in m1:
1027 1027 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1028 1028 errors += 1
1029 1029 if state in "a" and f in m1:
1030 1030 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1031 1031 errors += 1
1032 1032 if state in "m" and f not in m1 and f not in m2:
1033 1033 ui.warn(_("%s in state %s, but not in either manifest\n") %
1034 1034 (f, state))
1035 1035 errors += 1
1036 1036 for f in m1:
1037 1037 state = repo.dirstate[f]
1038 1038 if state not in "nrm":
1039 1039 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1040 1040 errors += 1
1041 1041 if errors:
1042 1042 error = _(".hg/dirstate inconsistent with current parent's manifest")
1043 1043 raise util.Abort(error)
1044 1044
1045 1045 def showconfig(ui, repo, *values, **opts):
1046 1046 """show combined config settings from all hgrc files
1047 1047
1048 1048 With no arguments, print names and values of all config items.
1049 1049
1050 1050 With one argument of the form section.name, print just the value
1051 1051 of that config item.
1052 1052
1053 1053 With multiple arguments, print names and values of all config
1054 1054 items with matching section names.
1055 1055
1056 1056 With --debug, the source (filename and line number) is printed
1057 1057 for each config item.
1058 1058
1059 1059 Returns 0 on success.
1060 1060 """
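# Illustrative shell usage of this command (editor's sketch):
#   $ hg showconfig                  # print all config items
#   $ hg showconfig ui.username      # print one item's value
#   $ hg showconfig ui --debug       # items in section "ui", with their source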
1061 1061
1062 1062 for f in util.rcpath():
1063 1063 ui.debug(_('read config from: %s\n') % f)
1064 1064 untrusted = bool(opts.get('untrusted'))
1065 1065 if values:
1066 1066 if len([v for v in values if '.' in v]) > 1:
1067 1067 raise util.Abort(_('only one config item permitted'))
1068 1068 for section, name, value in ui.walkconfig(untrusted=untrusted):
1069 1069 sectname = section + '.' + name
1070 1070 if values:
1071 1071 for v in values:
1072 1072 if v == section:
1073 1073 ui.debug('%s: ' %
1074 1074 ui.configsource(section, name, untrusted))
1075 1075 ui.write('%s=%s\n' % (sectname, value))
1076 1076 elif v == sectname:
1077 1077 ui.debug('%s: ' %
1078 1078 ui.configsource(section, name, untrusted))
1079 1079 ui.write(value, '\n')
1080 1080 else:
1081 1081 ui.debug('%s: ' %
1082 1082 ui.configsource(section, name, untrusted))
1083 1083 ui.write('%s=%s\n' % (sectname, value))
1084 1084
1085 1085 def debugpushkey(ui, repopath, namespace, *keyinfo):
1086 1086 '''access the pushkey key/value protocol
1087 1087
1088 1088 With two args, list the keys in the given namespace.
1089 1089
1090 1090 With five args, set a key to new if it currently is set to old.
1091 1091 Reports success or failure.
1092 1092 '''
1093 1093
1094 1094 target = hg.repository(ui, repopath)
1095 1095 if keyinfo:
1096 1096 key, old, new = keyinfo
1097 1097 r = target.pushkey(namespace, key, old, new)
1098 1098 ui.status(str(r) + '\n')
1099 1099 return not r
1100 1100 else:
1101 1101 for k, v in target.listkeys(namespace).iteritems():
1102 1102 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1103 1103 v.encode('string-escape')))
1104 1104
1105 1105 def debugrevspec(ui, repo, expr):
1106 1106 '''parse and apply a revision specification'''
1107 1107 if ui.verbose:
1108 1108 tree = revset.parse(expr)
1109 1109 ui.note(tree, "\n")
1110 1110 func = revset.match(expr)
1111 1111 for c in func(repo, range(len(repo))):
1112 1112 ui.write("%s\n" % c)
1113 1113
1114 1114 def debugsetparents(ui, repo, rev1, rev2=None):
1115 1115 """manually set the parents of the current working directory
1116 1116
1117 1117 This is useful for writing repository conversion tools, but should
1118 1118 be used with care.
1119 1119
1120 1120 Returns 0 on success.
1121 1121 """
1122 1122
1123 1123 if not rev2:
1124 1124 rev2 = hex(nullid)
1125 1125
1126 1126 wlock = repo.wlock()
1127 1127 try:
1128 1128 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1129 1129 finally:
1130 1130 wlock.release()
1131 1131
1132 1132 def debugstate(ui, repo, nodates=None):
1133 1133 """show the contents of the current dirstate"""
1134 1134 timestr = ""
1135 1135 showdate = not nodates
1136 1136 for file_, ent in sorted(repo.dirstate._map.iteritems()):
1137 1137 if showdate:
1138 1138 if ent[3] == -1:
1139 1139 # Pad or slice to locale representation
1140 1140 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1141 1141 time.localtime(0)))
1142 1142 timestr = 'unset'
1143 1143 timestr = (timestr[:locale_len] +
1144 1144 ' ' * (locale_len - len(timestr)))
1145 1145 else:
1146 1146 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1147 1147 time.localtime(ent[3]))
1148 1148 if ent[1] & 020000:
1149 1149 mode = 'lnk'
1150 1150 else:
1151 1151 mode = '%3o' % (ent[1] & 0777)
1152 1152 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1153 1153 for f in repo.dirstate.copies():
1154 1154 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1155 1155
1156 1156 def debugsub(ui, repo, rev=None):
1157 1157 if rev == '':
1158 1158 rev = None
1159 1159 for k, v in sorted(repo[rev].substate.items()):
1160 1160 ui.write('path %s\n' % k)
1161 1161 ui.write(' source %s\n' % v[0])
1162 1162 ui.write(' revision %s\n' % v[1])
1163 1163
1164 1164 def debugdag(ui, repo, file_=None, *revs, **opts):
1165 1165 """format the changelog or an index DAG as a concise textual description
1166 1166
1167 1167 If you pass a revlog index, the revlog's DAG is emitted. If you list
1168 1168 revision numbers, they get labelled in the output as rN.
1169 1169
1170 1170 Otherwise, the changelog DAG of the current repo is emitted.
1171 1171 """
1172 1172 spaces = opts.get('spaces')
1173 1173 dots = opts.get('dots')
1174 1174 if file_:
1175 1175 rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1176 1176 revs = set((int(r) for r in revs))
1177 1177 def events():
1178 1178 for r in rlog:
1179 1179 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1180 1180 if r in revs:
1181 1181 yield 'l', (r, "r%i" % r)
1182 1182 elif repo:
1183 1183 cl = repo.changelog
1184 1184 tags = opts.get('tags')
1185 1185 branches = opts.get('branches')
1186 1186 if tags:
1187 1187 labels = {}
1188 1188 for l, n in repo.tags().items():
1189 1189 labels.setdefault(cl.rev(n), []).append(l)
1190 1190 def events():
1191 1191 b = "default"
1192 1192 for r in cl:
1193 1193 if branches:
1194 1194 newb = cl.read(cl.node(r))[5]['branch']
1195 1195 if newb != b:
1196 1196 yield 'a', newb
1197 1197 b = newb
1198 1198 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1199 1199 if tags:
1200 1200 ls = labels.get(r)
1201 1201 if ls:
1202 1202 for l in ls:
1203 1203 yield 'l', (r, l)
1204 1204 else:
1205 1205 raise util.Abort(_('need repo for changelog dag'))
1206 1206
1207 1207 for line in dagparser.dagtextlines(events(),
1208 1208 addspaces=spaces,
1209 1209 wraplabels=True,
1210 1210 wrapannotations=True,
1211 1211 wrapnonlinear=dots,
1212 1212 usedots=dots,
1213 1213 maxlinewidth=70):
1214 1214 ui.write(line)
1215 1215 ui.write("\n")
1216 1216
1217 1217 def debugdata(ui, file_, rev):
1218 1218 """dump the contents of a data file revision"""
1219 1219 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
1220 1220 try:
1221 1221 ui.write(r.revision(r.lookup(rev)))
1222 1222 except KeyError:
1223 1223 raise util.Abort(_('invalid revision identifier %s') % rev)
1224 1224
1225 1225 def debugdate(ui, date, range=None, **opts):
1226 1226 """parse and display a date"""
1227 1227 if opts["extended"]:
1228 1228 d = util.parsedate(date, util.extendeddateformats)
1229 1229 else:
1230 1230 d = util.parsedate(date)
1231 1231 ui.write("internal: %s %s\n" % d)
1232 1232 ui.write("standard: %s\n" % util.datestr(d))
1233 1233 if range:
1234 1234 m = util.matchdate(range)
1235 1235 ui.write("match: %s\n" % m(d[0]))
1236 1236
1237 1237 def debugindex(ui, file_):
1238 1238 """dump the contents of an index file"""
1239 1239 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1240 1240 ui.write(" rev offset length base linkrev"
1241 1241 " nodeid p1 p2\n")
1242 1242 for i in r:
1243 1243 node = r.node(i)
1244 1244 try:
1245 1245 pp = r.parents(node)
1246 1246 except:
1247 1247 pp = [nullid, nullid]
1248 1248 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1249 1249 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1250 1250 short(node), short(pp[0]), short(pp[1])))
1251 1251
1252 1252 def debugindexdot(ui, file_):
1253 1253 """dump an index DAG as a graphviz dot file"""
1254 1254 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1255 1255 ui.write("digraph G {\n")
1256 1256 for i in r:
1257 1257 node = r.node(i)
1258 1258 pp = r.parents(node)
1259 1259 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1260 1260 if pp[1] != nullid:
1261 1261 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1262 1262 ui.write("}\n")
1263 1263
1264 1264 def debuginstall(ui):
1265 1265 '''test Mercurial installation
1266 1266
1267 1267 Returns 0 on success.
1268 1268 '''
1269 1269
1270 1270 def writetemp(contents):
1271 1271 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1272 1272 f = os.fdopen(fd, "wb")
1273 1273 f.write(contents)
1274 1274 f.close()
1275 1275 return name
1276 1276
1277 1277 problems = 0
1278 1278
1279 1279 # encoding
1280 1280 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1281 1281 try:
1282 1282 encoding.fromlocal("test")
1283 1283 except util.Abort, inst:
1284 1284 ui.write(" %s\n" % inst)
1285 1285 ui.write(_(" (check that your locale is properly set)\n"))
1286 1286 problems += 1
1287 1287
1288 1288 # compiled modules
1289 1289 ui.status(_("Checking installed modules (%s)...\n")
1290 1290 % os.path.dirname(__file__))
1291 1291 try:
1292 1292 import bdiff, mpatch, base85, osutil
1293 1293 except Exception, inst:
1294 1294 ui.write(" %s\n" % inst)
1295 1295 ui.write(_(" One or more extensions could not be found"))
1296 1296 ui.write(_(" (check that you compiled the extensions)\n"))
1297 1297 problems += 1
1298 1298
1299 1299 # templates
1300 1300 ui.status(_("Checking templates...\n"))
1301 1301 try:
1302 1302 import templater
1303 1303 templater.templater(templater.templatepath("map-cmdline.default"))
1304 1304 except Exception, inst:
1305 1305 ui.write(" %s\n" % inst)
1306 1306 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1307 1307 problems += 1
1308 1308
1309 1309 # patch
1310 1310 ui.status(_("Checking patch...\n"))
1311 1311 patchproblems = 0
1312 1312 a = "1\n2\n3\n4\n"
1313 1313 b = "1\n2\n3\ninsert\n4\n"
1314 1314 fa = writetemp(a)
1315 1315 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
1316 1316 os.path.basename(fa))
1317 1317 fd = writetemp(d)
1318 1318
1319 1319 files = {}
1320 1320 try:
1321 1321 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
1322 1322 except util.Abort, e:
1323 1323 ui.write(_(" patch call failed:\n"))
1324 1324 ui.write(" " + str(e) + "\n")
1325 1325 patchproblems += 1
1326 1326 else:
1327 1327 if list(files) != [os.path.basename(fa)]:
1328 1328 ui.write(_(" unexpected patch output!\n"))
1329 1329 patchproblems += 1
1330 1330 a = open(fa).read()
1331 1331 if a != b:
1332 1332 ui.write(_(" patch test failed!\n"))
1333 1333 patchproblems += 1
1334 1334
1335 1335 if patchproblems:
1336 1336 if ui.config('ui', 'patch'):
1337 1337 ui.write(_(" (Current patch tool may be incompatible with patch,"
1338 1338 " or misconfigured. Please check your .hgrc file)\n"))
1339 1339 else:
1340 1340 ui.write(_(" Internal patcher failure, please report this error"
1341 1341 " to http://mercurial.selenic.com/bts/\n"))
1342 1342 problems += patchproblems
1343 1343
1344 1344 os.unlink(fa)
1345 1345 os.unlink(fd)
1346 1346
1347 1347 # editor
1348 1348 ui.status(_("Checking commit editor...\n"))
1349 1349 editor = ui.geteditor()
1350 1350 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1351 1351 if not cmdpath:
1352 1352 if editor == 'vi':
1353 1353 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1354 1354 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1355 1355 else:
1356 1356 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1357 1357 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1358 1358 problems += 1
1359 1359
1360 1360 # check username
1361 1361 ui.status(_("Checking username...\n"))
1362 1362 try:
1363 1363 ui.username()
1364 1364 except util.Abort, e:
1365 1365 ui.write(" %s\n" % e)
1366 1366 ui.write(_(" (specify a username in your .hgrc file)\n"))
1367 1367 problems += 1
1368 1368
1369 1369 if not problems:
1370 1370 ui.status(_("No problems detected\n"))
1371 1371 else:
1372 1372 ui.write(_("%s problems detected,"
1373 1373 " please check your install!\n") % problems)
1374 1374
1375 1375 return problems
1376 1376
1377 1377 def debugrename(ui, repo, file1, *pats, **opts):
1378 1378 """dump rename information"""
1379 1379
1380 1380 ctx = repo[opts.get('rev')]
1381 1381 m = cmdutil.match(repo, (file1,) + pats, opts)
1382 1382 for abs in ctx.walk(m):
1383 1383 fctx = ctx[abs]
1384 1384 o = fctx.filelog().renamed(fctx.filenode())
1385 1385 rel = m.rel(abs)
1386 1386 if o:
1387 1387 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1388 1388 else:
1389 1389 ui.write(_("%s not renamed\n") % rel)
1390 1390
1391 1391 def debugwalk(ui, repo, *pats, **opts):
1392 1392 """show how files match on given patterns"""
1393 1393 m = cmdutil.match(repo, pats, opts)
1394 1394 items = list(repo.walk(m))
1395 1395 if not items:
1396 1396 return
1397 1397 fmt = 'f %%-%ds %%-%ds %%s' % (
1398 1398 max([len(abs) for abs in items]),
1399 1399 max([len(m.rel(abs)) for abs in items]))
1400 1400 for abs in items:
1401 1401 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1402 1402 ui.write("%s\n" % line.rstrip())
1403 1403
1404 1404 def diff(ui, repo, *pats, **opts):
1405 1405 """diff repository (or selected files)
1406 1406
1407 1407 Show differences between revisions for the specified files.
1408 1408
1409 1409 Differences between files are shown using the unified diff format.
1410 1410
1411 1411 NOTE: diff may generate unexpected results for merges, as it will
1412 1412 default to comparing against the working directory's first parent
1413 1413 changeset if no revisions are specified.
1414 1414
1415 1415 When two revision arguments are given, then changes are shown
1416 1416 between those revisions. If only one revision is specified then
1417 1417 that revision is compared to the working directory, and, when no
1418 1418 revisions are specified, the working directory files are compared
1419 1419 to its parent.
1420 1420
1421 1421 Alternatively you can specify -c/--change with a revision to see
1422 1422 the changes in that changeset relative to its first parent.
1423 1423
1424 1424 Without the -a/--text option, diff will avoid generating diffs of
1425 1425 files it detects as binary. With -a, diff will generate a diff
1426 1426 anyway, probably with undesirable results.
1427 1427
1428 1428 Use the -g/--git option to generate diffs in the git extended diff
1429 1429 format. For more information, read :hg:`help diffs`.
1430 1430
1431 1431 Returns 0 on success.
1432 1432 """
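# Illustrative shell usage of this command (editor's sketch; revision
# numbers are hypothetical):
#   $ hg diff                    # working directory vs. its first parent
#   $ hg diff -r 10 -r 12        # changes between revisions 10 and 12
#   $ hg diff -c 12 --git        # changes introduced by revision 12, git format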
1433 1433
1434 1434 revs = opts.get('rev')
1435 1435 change = opts.get('change')
1436 1436 stat = opts.get('stat')
1437 1437 reverse = opts.get('reverse')
1438 1438
1439 1439 if revs and change:
1440 1440 msg = _('cannot specify --rev and --change at the same time')
1441 1441 raise util.Abort(msg)
1442 1442 elif change:
1443 1443 node2 = repo.lookup(change)
1444 1444 node1 = repo[node2].parents()[0].node()
1445 1445 else:
1446 1446 node1, node2 = cmdutil.revpair(repo, revs)
1447 1447
1448 1448 if reverse:
1449 1449 node1, node2 = node2, node1
1450 1450
1451 1451 diffopts = patch.diffopts(ui, opts)
1452 1452 m = cmdutil.match(repo, pats, opts)
1453 1453 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat)
1454 1454
1455 1455 def export(ui, repo, *changesets, **opts):
1456 1456 """dump the header and diffs for one or more changesets
1457 1457
1458 1458 Print the changeset header and diffs for one or more revisions.
1459 1459
1460 1460 The information shown in the changeset header is: author, date,
1461 1461 branch name (if non-default), changeset hash, parent(s) and commit
1462 1462 comment.
1463 1463
1464 1464 NOTE: export may generate unexpected diff output for merge
1465 1465 changesets, as it will compare the merge changeset against its
1466 1466 first parent only.
1467 1467
1468 1468 Output may be to a file, in which case the name of the file is
1469 1469 given using a format string. The formatting rules are as follows:
1470 1470
1471 1471 :``%%``: literal "%" character
1472 1472 :``%H``: changeset hash (40 hexadecimal digits)
1473 1473 :``%N``: number of patches being generated
1474 1474 :``%R``: changeset revision number
1475 1475 :``%b``: basename of the exporting repository
1476 1476 :``%h``: short-form changeset hash (12 hexadecimal digits)
1477 1477 :``%n``: zero-padded sequence number, starting at 1
1478 1478 :``%r``: zero-padded changeset revision number
1479 1479
1480 1480 Without the -a/--text option, export will avoid generating diffs
1481 1481 of files it detects as binary. With -a, export will generate a
1482 1482 diff anyway, probably with undesirable results.
1483 1483
1484 1484 Use the -g/--git option to generate diffs in the git extended diff
1485 1485 format. See :hg:`help diffs` for more information.
1486 1486
1487 1487 With the --switch-parent option, the diff will be against the
1488 1488 second parent. It can be useful to review a merge.
1489 1489
1490 1490 Returns 0 on success.
1491 1491 """
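# Illustrative shell usage of this command (editor's sketch; revision
# numbers and the output pattern are hypothetical):
#   $ hg export 100                         # print the patch for revision 100
#   $ hg export -o "%b-%n.patch" 100 101    # one sequentially numbered file per changeset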
1492 1492 changesets += tuple(opts.get('rev', []))
1493 1493 if not changesets:
1494 1494 raise util.Abort(_("export requires at least one changeset"))
1495 1495 revs = cmdutil.revrange(repo, changesets)
1496 1496 if len(revs) > 1:
1497 1497 ui.note(_('exporting patches:\n'))
1498 1498 else:
1499 1499 ui.note(_('exporting patch:\n'))
1500 1500 cmdutil.export(repo, revs, template=opts.get('output'),
1501 1501 switch_parent=opts.get('switch_parent'),
1502 1502 opts=patch.diffopts(ui, opts))
1503 1503
1504 1504 def forget(ui, repo, *pats, **opts):
1505 1505 """forget the specified files on the next commit
1506 1506
1507 1507 Mark the specified files so they will no longer be tracked
1508 1508 after the next commit.
1509 1509
1510 1510 This only removes files from the current branch, not from the
1511 1511 entire project history, and it does not delete them from the
1512 1512 working directory.
1513 1513
1514 1514 To undo a forget before the next commit, see :hg:`add`.
1515 1515
1516 1516 Returns 0 on success.
1517 1517 """
1518 1518
1519 1519 if not pats:
1520 1520 raise util.Abort(_('no files specified'))
1521 1521
1522 1522 m = cmdutil.match(repo, pats, opts)
1523 1523 s = repo.status(match=m, clean=True)
1524 1524 forget = sorted(s[0] + s[1] + s[3] + s[6])
1525 1525 errs = 0
1526 1526
1527 1527 for f in m.files():
1528 1528 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1529 1529 ui.warn(_('not removing %s: file is already untracked\n')
1530 1530 % m.rel(f))
1531 1531 errs = 1
1532 1532
1533 1533 for f in forget:
1534 1534 if ui.verbose or not m.exact(f):
1535 1535 ui.status(_('removing %s\n') % m.rel(f))
1536 1536
1537 1537 repo[None].remove(forget, unlink=False)
1538 1538 return errs
1539 1539
1540 1540 def grep(ui, repo, pattern, *pats, **opts):
1541 1541 """search for a pattern in specified files and revisions
1542 1542
1543 1543 Search revisions of files for a regular expression.
1544 1544
1545 1545 This command behaves differently than Unix grep. It only accepts
1546 1546 Python/Perl regexps. It searches repository history, not the
1547 1547 working directory. It always prints the revision number in which a
1548 1548 match appears.
1549 1549
1550 1550 By default, grep only prints output for the first revision of a
1551 1551 file in which it finds a match. To get it to print every revision
1552 1552 that contains a change in match status ("-" for a match that
1553 1553 becomes a non-match, or "+" for a non-match that becomes a match),
1554 1554 use the --all flag.
1555 1555
1556 1556 Returns 0 if a match is found, 1 otherwise.
1557 1557 """
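# Illustrative shell usage of this command (editor's sketch; the pattern
# and file name are hypothetical):
#   $ hg grep -n "TODO" src/main.c    # first matching revision of each file, with line numbers
#   $ hg grep --all "TODO"            # every revision where the match status changes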
1558 1558 reflags = 0
1559 1559 if opts.get('ignore_case'):
1560 1560 reflags |= re.I
1561 1561 try:
1562 1562 regexp = re.compile(pattern, reflags)
1563 1563 except Exception, inst:
1564 1564 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1565 1565 return 1
1566 1566 sep, eol = ':', '\n'
1567 1567 if opts.get('print0'):
1568 1568 sep = eol = '\0'
1569 1569
1570 1570 getfile = util.lrucachefunc(repo.file)
1571 1571
1572 1572 def matchlines(body):
1573 1573 begin = 0
1574 1574 linenum = 0
1575 1575 while True:
1576 1576 match = regexp.search(body, begin)
1577 1577 if not match:
1578 1578 break
1579 1579 mstart, mend = match.span()
1580 1580 linenum += body.count('\n', begin, mstart) + 1
1581 1581 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1582 1582 begin = body.find('\n', mend) + 1 or len(body)
1583 1583 lend = begin - 1
1584 1584 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1585 1585
1586 1586 class linestate(object):
1587 1587 def __init__(self, line, linenum, colstart, colend):
1588 1588 self.line = line
1589 1589 self.linenum = linenum
1590 1590 self.colstart = colstart
1591 1591 self.colend = colend
1592 1592
1593 1593 def __hash__(self):
1594 1594 return hash((self.linenum, self.line))
1595 1595
1596 1596 def __eq__(self, other):
1597 1597 return self.line == other.line
1598 1598
1599 1599 matches = {}
1600 1600 copies = {}
1601 1601 def grepbody(fn, rev, body):
1602 1602 matches[rev].setdefault(fn, [])
1603 1603 m = matches[rev][fn]
1604 1604 for lnum, cstart, cend, line in matchlines(body):
1605 1605 s = linestate(line, lnum, cstart, cend)
1606 1606 m.append(s)
1607 1607
1608 1608 def difflinestates(a, b):
1609 1609 sm = difflib.SequenceMatcher(None, a, b)
1610 1610 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1611 1611 if tag == 'insert':
1612 1612 for i in xrange(blo, bhi):
1613 1613 yield ('+', b[i])
1614 1614 elif tag == 'delete':
1615 1615 for i in xrange(alo, ahi):
1616 1616 yield ('-', a[i])
1617 1617 elif tag == 'replace':
1618 1618 for i in xrange(alo, ahi):
1619 1619 yield ('-', a[i])
1620 1620 for i in xrange(blo, bhi):
1621 1621 yield ('+', b[i])
1622 1622
1623 1623 def display(fn, ctx, pstates, states):
1624 1624 rev = ctx.rev()
1625 1625 datefunc = ui.quiet and util.shortdate or util.datestr
1626 1626 found = False
1627 1627 filerevmatches = {}
1628 1628 if opts.get('all'):
1629 1629 iter = difflinestates(pstates, states)
1630 1630 else:
1631 1631 iter = [('', l) for l in states]
1632 1632 for change, l in iter:
1633 1633 cols = [fn, str(rev)]
1634 1634 before, match, after = None, None, None
1635 1635 if opts.get('line_number'):
1636 1636 cols.append(str(l.linenum))
1637 1637 if opts.get('all'):
1638 1638 cols.append(change)
1639 1639 if opts.get('user'):
1640 1640 cols.append(ui.shortuser(ctx.user()))
1641 1641 if opts.get('date'):
1642 1642 cols.append(datefunc(ctx.date()))
1643 1643 if opts.get('files_with_matches'):
1644 1644 c = (fn, rev)
1645 1645 if c in filerevmatches:
1646 1646 continue
1647 1647 filerevmatches[c] = 1
1648 1648 else:
1649 1649 before = l.line[:l.colstart]
1650 1650 match = l.line[l.colstart:l.colend]
1651 1651 after = l.line[l.colend:]
1652 1652 ui.write(sep.join(cols))
1653 1653 if before is not None:
1654 1654 ui.write(sep + before)
1655 1655 ui.write(match, label='grep.match')
1656 1656 ui.write(after)
1657 1657 ui.write(eol)
1658 1658 found = True
1659 1659 return found
1660 1660
1661 1661 skip = {}
1662 1662 revfiles = {}
1663 1663 matchfn = cmdutil.match(repo, pats, opts)
1664 1664 found = False
1665 1665 follow = opts.get('follow')
1666 1666
1667 1667 def prep(ctx, fns):
1668 1668 rev = ctx.rev()
1669 1669 pctx = ctx.parents()[0]
1670 1670 parent = pctx.rev()
1671 1671 matches.setdefault(rev, {})
1672 1672 matches.setdefault(parent, {})
1673 1673 files = revfiles.setdefault(rev, [])
1674 1674 for fn in fns:
1675 1675 flog = getfile(fn)
1676 1676 try:
1677 1677 fnode = ctx.filenode(fn)
1678 1678 except error.LookupError:
1679 1679 continue
1680 1680
1681 1681 copied = flog.renamed(fnode)
1682 1682 copy = follow and copied and copied[0]
1683 1683 if copy:
1684 1684 copies.setdefault(rev, {})[fn] = copy
1685 1685 if fn in skip:
1686 1686 if copy:
1687 1687 skip[copy] = True
1688 1688 continue
1689 1689 files.append(fn)
1690 1690
1691 1691 if fn not in matches[rev]:
1692 1692 grepbody(fn, rev, flog.read(fnode))
1693 1693
1694 1694 pfn = copy or fn
1695 1695 if pfn not in matches[parent]:
1696 1696 try:
1697 1697 fnode = pctx.filenode(pfn)
1698 1698 grepbody(pfn, parent, flog.read(fnode))
1699 1699 except error.LookupError:
1700 1700 pass
1701 1701
1702 1702 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1703 1703 rev = ctx.rev()
1704 1704 parent = ctx.parents()[0].rev()
1705 1705 for fn in sorted(revfiles.get(rev, [])):
1706 1706 states = matches[rev][fn]
1707 1707 copy = copies.get(rev, {}).get(fn)
1708 1708 if fn in skip:
1709 1709 if copy:
1710 1710 skip[copy] = True
1711 1711 continue
1712 1712 pstates = matches.get(parent, {}).get(copy or fn, [])
1713 1713 if pstates or states:
1714 1714 r = display(fn, ctx, pstates, states)
1715 1715 found = found or r
1716 1716 if r and not opts.get('all'):
1717 1717 skip[fn] = True
1718 1718 if copy:
1719 1719 skip[copy] = True
1720 1720 del matches[rev]
1721 1721 del revfiles[rev]
1722 1722
1723 1723 return not found
1724 1724
1725 1725 def heads(ui, repo, *branchrevs, **opts):
1726 1726 """show current repository heads or show branch heads
1727 1727
1728 1728 With no arguments, show all repository branch heads.
1729 1729
1730 1730 Repository "heads" are changesets with no child changesets. They are
1731 1731 where development generally takes place and are the usual targets
1732 1732 for update and merge operations. Branch heads are changesets that have
1733 1733 no child changeset on the same branch.
1734 1734
1735 1735 If one or more REVs are given, only branch heads on the branches
1736 1736 associated with the specified changesets are shown.
1737 1737
1738 1738 If -c/--closed is specified, also show branch heads marked closed
1739 1739 (see :hg:`commit --close-branch`).
1740 1740
1741 1741 If STARTREV is specified, only those heads that are descendants of
1742 1742 STARTREV will be displayed.
1743 1743
1744 1744 If -t/--topo is specified, named branch mechanics will be ignored and only
1745 1745 changesets without children will be shown.
1746 1746
1747 1747 Returns 0 if matching heads are found, 1 if not.
1748 1748 """
1749 1749
1750 1750 if opts.get('rev'):
1751 1751 start = repo.lookup(opts['rev'])
1752 1752 else:
1753 1753 start = None
1754 1754
1755 1755 if opts.get('topo'):
1756 1756 heads = [repo[h] for h in repo.heads(start)]
1757 1757 else:
1758 1758 heads = []
1759 1759 for b, ls in repo.branchmap().iteritems():
1760 1760 if start is None:
1761 1761 heads += [repo[h] for h in ls]
1762 1762 continue
1763 1763 startrev = repo.changelog.rev(start)
1764 1764 descendants = set(repo.changelog.descendants(startrev))
1765 1765 descendants.add(startrev)
1766 1766 rev = repo.changelog.rev
1767 1767 heads += [repo[h] for h in ls if rev(h) in descendants]
1768 1768
1769 1769 if branchrevs:
1770 1770 decode, encode = encoding.fromlocal, encoding.tolocal
1771 1771 branches = set(repo[decode(br)].branch() for br in branchrevs)
1772 1772 heads = [h for h in heads if h.branch() in branches]
1773 1773
1774 1774 if not opts.get('closed'):
1775 1775 heads = [h for h in heads if not h.extra().get('close')]
1776 1776
1777 1777 if opts.get('active') and branchrevs:
1778 1778 dagheads = repo.heads(start)
1779 1779 heads = [h for h in heads if h.node() in dagheads]
1780 1780
1781 1781 if branchrevs:
1782 1782 haveheads = set(h.branch() for h in heads)
1783 1783 if branches - haveheads:
1784 1784 headless = ', '.join(encode(b) for b in branches - haveheads)
1785 1785 msg = _('no open branch heads found on branches %s')
1786 1786 if opts.get('rev'):
1787 1787 msg += _(' (started at %s)') % opts['rev']
1788 1788 ui.warn((msg + '\n') % headless)
1789 1789
1790 1790 if not heads:
1791 1791 return 1
1792 1792
1793 1793 heads = sorted(heads, key=lambda x: -x.rev())
1794 1794 displayer = cmdutil.show_changeset(ui, repo, opts)
1795 1795 for ctx in heads:
1796 1796 displayer.show(ctx)
1797 1797 displayer.close()
1798 1798
1799 1799 def help_(ui, name=None, with_version=False, unknowncmd=False):
1800 1800 """show help for a given topic or a help overview
1801 1801
1802 1802 With no arguments, print a list of commands with short help messages.
1803 1803
1804 1804 Given a topic, extension, or command name, print help for that
1805 1805 topic.
1806 1806
1807 1807 Returns 0 if successful.
1808 1808 """
1809 1809 option_lists = []
1810 1810 textwidth = util.termwidth() - 2
1811 1811
1812 1812 def addglobalopts(aliases):
1813 1813 if ui.verbose:
1814 1814 option_lists.append((_("global options:"), globalopts))
1815 1815 if name == 'shortlist':
1816 1816 option_lists.append((_('use "hg help" for the full list '
1817 1817 'of commands'), ()))
1818 1818 else:
1819 1819 if name == 'shortlist':
1820 1820 msg = _('use "hg help" for the full list of commands '
1821 1821 'or "hg -v" for details')
1822 1822 elif aliases:
1823 1823 msg = _('use "hg -v help%s" to show aliases and '
1824 1824 'global options') % (name and " " + name or "")
1825 1825 else:
1826 1826 msg = _('use "hg -v help %s" to show global options') % name
1827 1827 option_lists.append((msg, ()))
1828 1828
1829 1829 def helpcmd(name):
1830 1830 if with_version:
1831 1831 version_(ui)
1832 1832 ui.write('\n')
1833 1833
1834 1834 try:
1835 1835 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
1836 1836 except error.AmbiguousCommand, inst:
1837 1837 # py3k fix: except vars can't be used outside the scope of the
1838 1838 # except block, nor can they be used inside a lambda. python issue4617
1839 1839 prefix = inst.args[0]
1840 1840 select = lambda c: c.lstrip('^').startswith(prefix)
1841 1841 helplist(_('list of commands:\n\n'), select)
1842 1842 return
1843 1843
1844 1844 # check if it's an invalid alias and display its error if it is
1845 1845 if getattr(entry[0], 'badalias', False):
1846 1846 if not unknowncmd:
1847 1847 entry[0](ui)
1848 1848 return
1849 1849
1850 1850 # synopsis
1851 1851 if len(entry) > 2:
1852 1852 if entry[2].startswith('hg'):
1853 1853 ui.write("%s\n" % entry[2])
1854 1854 else:
1855 1855 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
1856 1856 else:
1857 1857 ui.write('hg %s\n' % aliases[0])
1858 1858
1859 1859 # aliases
1860 1860 if not ui.quiet and len(aliases) > 1:
1861 1861 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1862 1862
1863 1863 # description
1864 1864 doc = gettext(entry[0].__doc__)
1865 1865 if not doc:
1866 1866 doc = _("(no help text available)")
1867 1867 if hasattr(entry[0], 'definition'): # aliased command
1868 1868 if entry[0].definition.startswith('!'): # shell alias
1869 1869 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
1870 1870 else:
1871 1871 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
1872 1872 if ui.quiet:
1873 1873 doc = doc.splitlines()[0]
1874 1874 keep = ui.verbose and ['verbose'] or []
1875 1875 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
1876 1876 ui.write("\n%s\n" % formatted)
1877 1877 if pruned:
1878 1878 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
1879 1879
1880 1880 if not ui.quiet:
1881 1881 # options
1882 1882 if entry[1]:
1883 1883 option_lists.append((_("options:\n"), entry[1]))
1884 1884
1885 1885 addglobalopts(False)
1886 1886
1887 1887 def helplist(header, select=None):
1888 1888 h = {}
1889 1889 cmds = {}
1890 1890 for c, e in table.iteritems():
1891 1891 f = c.split("|", 1)[0]
1892 1892 if select and not select(f):
1893 1893 continue
1894 1894 if (not select and name != 'shortlist' and
1895 1895 e[0].__module__ != __name__):
1896 1896 continue
1897 1897 if name == "shortlist" and not f.startswith("^"):
1898 1898 continue
1899 1899 f = f.lstrip("^")
1900 1900 if not ui.debugflag and f.startswith("debug"):
1901 1901 continue
1902 1902 doc = e[0].__doc__
1903 1903 if doc and 'DEPRECATED' in doc and not ui.verbose:
1904 1904 continue
1905 1905 doc = gettext(doc)
1906 1906 if not doc:
1907 1907 doc = _("(no help text available)")
1908 1908 h[f] = doc.splitlines()[0].rstrip()
1909 1909 cmds[f] = c.lstrip("^")
1910 1910
1911 1911 if not h:
1912 1912 ui.status(_('no commands defined\n'))
1913 1913 return
1914 1914
1915 1915 ui.status(header)
1916 1916 fns = sorted(h)
1917 1917 m = max(map(len, fns))
1918 1918 for f in fns:
1919 1919 if ui.verbose:
1920 1920 commands = cmds[f].replace("|",", ")
1921 1921 ui.write(" %s:\n %s\n"%(commands, h[f]))
1922 1922 else:
1923 1923 ui.write('%s\n' % (util.wrap(h[f],
1924 1924 initindent=' %-*s ' % (m, f),
1925 1925 hangindent=' ' * (m + 4))))
1926 1926
1927 1927 if not ui.quiet:
1928 1928 addglobalopts(True)
1929 1929
1930 1930 def helptopic(name):
1931 1931 for names, header, doc in help.helptable:
1932 1932 if name in names:
1933 1933 break
1934 1934 else:
1935 1935 raise error.UnknownCommand(name)
1936 1936
1937 1937 # description
1938 1938 if not doc:
1939 1939 doc = _("(no help text available)")
1940 1940 if hasattr(doc, '__call__'):
1941 1941 doc = doc()
1942 1942
1943 1943 ui.write("%s\n\n" % header)
1944 1944 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
1945 1945
1946 1946 def helpext(name):
1947 1947 try:
1948 1948 mod = extensions.find(name)
1949 1949 doc = gettext(mod.__doc__) or _('no help text available')
1950 1950 except KeyError:
1951 1951 mod = None
1952 1952 doc = extensions.disabledext(name)
1953 1953 if not doc:
1954 1954 raise error.UnknownCommand(name)
1955 1955
1956 1956 if '\n' not in doc:
1957 1957 head, tail = doc, ""
1958 1958 else:
1959 1959 head, tail = doc.split('\n', 1)
1960 1960 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
1961 1961 if tail:
1962 1962 ui.write(minirst.format(tail, textwidth))
1963 1963 ui.status('\n\n')
1964 1964
1965 1965 if mod:
1966 1966 try:
1967 1967 ct = mod.cmdtable
1968 1968 except AttributeError:
1969 1969 ct = {}
1970 1970 modcmds = set([c.split('|', 1)[0] for c in ct])
1971 1971 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1972 1972 else:
1973 1973 ui.write(_('use "hg help extensions" for information on enabling '
1974 1974 'extensions\n'))
1975 1975
1976 1976 def helpextcmd(name):
1977 1977 cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict'))
1978 1978 doc = gettext(mod.__doc__).splitlines()[0]
1979 1979
1980 1980 msg = help.listexts(_("'%s' is provided by the following "
1981 1981 "extension:") % cmd, {ext: doc}, len(ext),
1982 1982 indent=4)
1983 1983 ui.write(minirst.format(msg, textwidth))
1984 1984 ui.write('\n\n')
1985 1985 ui.write(_('use "hg help extensions" for information on enabling '
1986 1986 'extensions\n'))
1987 1987
1988 1988 if name and name != 'shortlist':
1989 1989 i = None
1990 1990 if unknowncmd:
1991 1991 queries = (helpextcmd,)
1992 1992 else:
1993 1993 queries = (helptopic, helpcmd, helpext, helpextcmd)
1994 1994 for f in queries:
1995 1995 try:
1996 1996 f(name)
1997 1997 i = None
1998 1998 break
1999 1999 except error.UnknownCommand, inst:
2000 2000 i = inst
2001 2001 if i:
2002 2002 raise i
2003 2003
2004 2004 else:
2005 2005 # program name
2006 2006 if ui.verbose or with_version:
2007 2007 version_(ui)
2008 2008 else:
2009 2009 ui.status(_("Mercurial Distributed SCM\n"))
2010 2010 ui.status('\n')
2011 2011
2012 2012 # list of commands
2013 2013 if name == "shortlist":
2014 2014 header = _('basic commands:\n\n')
2015 2015 else:
2016 2016 header = _('list of commands:\n\n')
2017 2017
2018 2018 helplist(header)
2019 2019 if name != 'shortlist':
2020 2020 exts, maxlength = extensions.enabled()
2021 2021 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2022 2022 if text:
2023 2023 ui.write("\n%s\n" % minirst.format(text, textwidth))
2024 2024
2025 2025 # list all option lists
2026 2026 opt_output = []
2027 2027 multioccur = False
2028 2028 for title, options in option_lists:
2029 2029 opt_output.append(("\n%s" % title, None))
2030 2030 for option in options:
2031 2031 if len(option) == 5:
2032 2032 shortopt, longopt, default, desc, optlabel = option
2033 2033 else:
2034 2034 shortopt, longopt, default, desc = option
2035 2035 optlabel = _("VALUE") # default label
2036 2036
2037 2037 if _("DEPRECATED") in desc and not ui.verbose:
2038 2038 continue
2039 2039 if isinstance(default, list):
2040 2040 numqualifier = " %s [+]" % optlabel
2041 2041 multioccur = True
2042 2042 elif (default is not None) and not isinstance(default, bool):
2043 2043 numqualifier = " %s" % optlabel
2044 2044 else:
2045 2045 numqualifier = ""
2046 2046 opt_output.append(("%2s%s" %
2047 2047 (shortopt and "-%s" % shortopt,
2048 2048 longopt and " --%s%s" %
2049 2049 (longopt, numqualifier)),
2050 2050 "%s%s" % (desc,
2051 2051 default
2052 2052 and _(" (default: %s)") % default
2053 2053 or "")))
2054 2054 if multioccur:
2055 2055 msg = _("\n[+] marked option can be specified multiple times")
2056 2056 if ui.verbose and name != 'shortlist':
2057 2057 opt_output.append((msg, None))
2058 2058 else:
2059 2059 opt_output.insert(-1, (msg, None))
2060 2060
2061 2061 if not name:
2062 2062 ui.write(_("\nadditional help topics:\n\n"))
2063 2063 topics = []
2064 2064 for names, header, doc in help.helptable:
2065 2065 topics.append((sorted(names, key=len, reverse=True)[0], header))
2066 2066 topics_len = max([len(s[0]) for s in topics])
2067 2067 for t, desc in topics:
2068 2068 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2069 2069
2070 2070 if opt_output:
2071 2071 colwidth = encoding.colwidth
2072 2072 # normalize: (opt or message, desc or None, width of opt)
2073 2073 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2074 2074 for opt, desc in opt_output]
2075 2075 hanging = max([e[2] for e in entries])
2076 2076 for opt, desc, width in entries:
2077 2077 if desc:
2078 2078 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2079 2079 hangindent = ' ' * (hanging + 3)
2080 2080 ui.write('%s\n' % (util.wrap(desc,
2081 2081 initindent=initindent,
2082 2082 hangindent=hangindent)))
2083 2083 else:
2084 2084 ui.write("%s\n" % opt)
2085 2085
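# Illustration only: a minimal, self-contained sketch of the hanging-indent
# layout produced by the option loop above, using the stdlib textwrap module
# in place of util.wrap/encoding.colwidth. The helper name and the 78-column
# width are hypothetical, not part of Mercurial's API.
import textwrap

def _sketch_optline(opt, desc, hanging):
    # first line starts with the option text padded to the widest option;
    # continuation lines hang under the start of the description
    initindent = ' %s%s  ' % (opt, ' ' * (hanging - len(opt)))
    hangindent = ' ' * (hanging + 3)
    return textwrap.fill(desc, width=78,
                         initial_indent=initindent,
                         subsequent_indent=hangindent)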
2086 2086 def identify(ui, repo, source=None,
2087 2087 rev=None, num=None, id=None, branch=None, tags=None):
2088 2088 """identify the working copy or specified revision
2089 2089
2090 2090 With no revision, print a summary of the current state of the
2091 2091 repository.
2092 2092
2093 2093 Specifying a path to a repository root or Mercurial bundle will
2094 2094 cause lookup to operate on that repository/bundle.
2095 2095
2096 2096 This summary identifies the repository state using one or two
2097 2097 parent hash identifiers, followed by a "+" if there are
2098 2098 uncommitted changes in the working directory, a list of tags for
2099 2099 this revision and a branch name for non-default branches.
2100 2100
2101 2101 Returns 0 if successful.
2102 2102 """
2103 2103
2104 2104 if not repo and not source:
2105 raise util.Abort(_("There is no Mercurial repository here "
2105 raise util.Abort(_("there is no Mercurial repository here "
2106 2106 "(.hg not found)"))
2107 2107
2108 2108 hexfunc = ui.debugflag and hex or short
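# with --debug, show the full 40-digit hash; otherwise the short 12-digit form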
2109 2109 default = not (num or id or branch or tags)
2110 2110 output = []
2111 2111
2112 2112 revs = []
2113 2113 if source:
2114 2114 source, branches = hg.parseurl(ui.expandpath(source))
2115 2115 repo = hg.repository(ui, source)
2116 2116 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2117 2117
2118 2118 if not repo.local():
2119 2119 if not rev and revs:
2120 2120 rev = revs[0]
2121 2121 if not rev:
2122 2122 rev = "tip"
2123 2123 if num or branch or tags:
2124 2124 raise util.Abort(
2125 2125 "can't query remote revision number, branch, or tags")
2126 2126 output = [hexfunc(repo.lookup(rev))]
2127 2127 elif not rev:
2128 2128 ctx = repo[None]
2129 2129 parents = ctx.parents()
2130 2130 changed = False
2131 2131 if default or id or num:
2132 2132 changed = util.any(repo.status())
2133 2133 if default or id:
2134 2134 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
2135 2135 (changed) and "+" or "")]
2136 2136 if num:
2137 2137 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
2138 2138 (changed) and "+" or ""))
2139 2139 else:
2140 2140 ctx = repo[rev]
2141 2141 if default or id:
2142 2142 output = [hexfunc(ctx.node())]
2143 2143 if num:
2144 2144 output.append(str(ctx.rev()))
2145 2145
2146 2146 if repo.local() and default and not ui.quiet:
2147 2147 b = encoding.tolocal(ctx.branch())
2148 2148 if b != 'default':
2149 2149 output.append("(%s)" % b)
2150 2150
2151 2151 # multiple tags for a single parent separated by '/'
2152 2152 t = "/".join(ctx.tags())
2153 2153 if t:
2154 2154 output.append(t)
2155 2155
2156 2156 if branch:
2157 2157 output.append(encoding.tolocal(ctx.branch()))
2158 2158
2159 2159 if tags:
2160 2160 output.extend(ctx.tags())
2161 2161
2162 2162 ui.write("%s\n" % ' '.join(output))
2163 2163
2164 2164 def import_(ui, repo, patch1, *patches, **opts):
2165 2165 """import an ordered set of patches
2166 2166
2167 2167 Import a list of patches and commit them individually (unless
2168 2168 --no-commit is specified).
2169 2169
2170 2170 If there are outstanding changes in the working directory, import
2171 2171 will abort unless given the -f/--force flag.
2172 2172
2173 2173 You can import a patch straight from a mail message. Even patches
2174 2174 as attachments work (to use the body part, it must have type
2175 2175 text/plain or text/x-patch). From and Subject headers of the email
2176 2176 message are used as the default committer and commit message. All
2177 2177 text/plain body parts before the first diff are added to the commit
2178 2178 message.
2179 2179
2180 2180 If the imported patch was generated by :hg:`export`, user and
2181 2181 description from patch override values from message headers and
2182 2182 body. Values given on command line with -m/--message and -u/--user
2183 2183 override these.
2184 2184
2185 2185 If --exact is specified, import will set the working directory to
2186 2186 the parent of each patch before applying it, and will abort if the
2187 2187 resulting changeset has a different ID than the one recorded in
2188 2188 the patch. This may happen due to character set problems or other
2189 2189 deficiencies in the text patch format.
2190 2190
2191 2191 With -s/--similarity, hg will attempt to discover renames and
2192 2192 copies in the patch in the same way as 'addremove'.
2193 2193
2194 2194 To read a patch from standard input, use "-" as the patch name. If
2195 2195 a URL is specified, the patch will be downloaded from it.
2196 2196 See :hg:`help dates` for a list of formats valid for -d/--date.
2197 2197
2198 2198 Returns 0 on success.
2199 2199 """
2200 2200 patches = (patch1,) + patches
2201 2201
2202 2202 date = opts.get('date')
2203 2203 if date:
2204 2204 opts['date'] = util.parsedate(date)
2205 2205
2206 2206 try:
2207 2207 sim = float(opts.get('similarity') or 0)
2208 2208 except ValueError:
2209 2209 raise util.Abort(_('similarity must be a number'))
2210 2210 if sim < 0 or sim > 100:
2211 2211 raise util.Abort(_('similarity must be between 0 and 100'))
2212 2212
2213 2213 if opts.get('exact') or not opts.get('force'):
2214 2214 cmdutil.bail_if_changed(repo)
2215 2215
2216 2216 d = opts["base"]
2217 2217 strip = opts["strip"]
2218 2218 wlock = lock = None
2219 2219
2220 2220 def tryone(ui, hunk):
2221 2221 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2222 2222 patch.extract(ui, hunk)
2223 2223
2224 2224 if not tmpname:
2225 2225 return None
2226 2226 commitid = _('to working directory')
2227 2227
2228 2228 try:
2229 2229 cmdline_message = cmdutil.logmessage(opts)
2230 2230 if cmdline_message:
2231 2231 # pickup the cmdline msg
2232 2232 message = cmdline_message
2233 2233 elif message:
2234 2234 # pickup the patch msg
2235 2235 message = message.strip()
2236 2236 else:
2237 2237 # launch the editor
2238 2238 message = None
2239 2239 ui.debug('message:\n%s\n' % message)
2240 2240
2241 2241 wp = repo.parents()
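# with --exact, check out the parents recorded in the patch before applying,
# so the resulting changeset ID can be compared with the recorded one below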
2242 2242 if opts.get('exact'):
2243 2243 if not nodeid or not p1:
2244 2244 raise util.Abort(_('not a Mercurial patch'))
2245 2245 p1 = repo.lookup(p1)
2246 2246 p2 = repo.lookup(p2 or hex(nullid))
2247 2247
2248 2248 if p1 != wp[0].node():
2249 2249 hg.clean(repo, p1)
2250 2250 repo.dirstate.setparents(p1, p2)
2251 2251 elif p2:
2252 2252 try:
2253 2253 p1 = repo.lookup(p1)
2254 2254 p2 = repo.lookup(p2)
2255 2255 if p1 == wp[0].node():
2256 2256 repo.dirstate.setparents(p1, p2)
2257 2257 except error.RepoError:
2258 2258 pass
2259 2259 if opts.get('exact') or opts.get('import_branch'):
2260 2260 repo.dirstate.setbranch(branch or 'default')
2261 2261
2262 2262 files = {}
2263 2263 try:
2264 2264 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2265 2265 files=files, eolmode=None)
2266 2266 finally:
2267 2267 files = patch.updatedir(ui, repo, files,
2268 2268 similarity=sim / 100.0)
2269 2269 if not opts.get('no_commit'):
2270 2270 if opts.get('exact'):
2271 2271 m = None
2272 2272 else:
2273 2273 m = cmdutil.matchfiles(repo, files or [])
2274 2274 n = repo.commit(message, opts.get('user') or user,
2275 2275 opts.get('date') or date, match=m,
2276 2276 editor=cmdutil.commiteditor)
2277 2277 if opts.get('exact'):
2278 2278 if hex(n) != nodeid:
2279 2279 repo.rollback()
2280 2280 raise util.Abort(_('patch is damaged'
2281 2281 ' or loses information'))
2282 2282 # Force a dirstate write so that the next transaction
2283 2283 # backs up an up-to-date file.
2284 2284 repo.dirstate.write()
2285 2285 if n:
2286 2286 commitid = short(n)
2287 2287
2288 2288 return commitid
2289 2289 finally:
2290 2290 os.unlink(tmpname)
2291 2291
2292 2292 try:
2293 2293 wlock = repo.wlock()
2294 2294 lock = repo.lock()
2295 2295 lastcommit = None
2296 2296 for p in patches:
2297 2297 pf = os.path.join(d, p)
2298 2298
2299 2299 if pf == '-':
2300 2300 ui.status(_("applying patch from stdin\n"))
2301 2301 pf = sys.stdin
2302 2302 else:
2303 2303 ui.status(_("applying %s\n") % p)
2304 2304 pf = url.open(ui, pf)
2305 2305
2306 2306 haspatch = False
2307 2307 for hunk in patch.split(pf):
2308 2308 commitid = tryone(ui, hunk)
2309 2309 if commitid:
2310 2310 haspatch = True
2311 2311 if lastcommit:
2312 2312 ui.status(_('applied %s\n') % lastcommit)
2313 2313 lastcommit = commitid
2314 2314
2315 2315 if not haspatch:
2316 2316 raise util.Abort(_('no diffs found'))
2317 2317
2318 2318 finally:
2319 2319 release(lock, wlock)
2320 2320
2321 2321 def incoming(ui, repo, source="default", **opts):
2322 2322 """show new changesets found in source
2323 2323
2324 2324 Show new changesets found in the specified path/URL or the default
2325 2325 pull location. These are the changesets that would have been pulled
2326 2326 if a pull had been requested at the time you issued this command.
2327 2327
2328 2328 For a remote repository, using --bundle avoids downloading the
2329 2329 changesets twice if the incoming command is followed by a pull.
2330 2330
2331 2331 See pull for valid source format details.
2332 2332
2333 2333 Returns 0 if there are incoming changes, 1 otherwise.
2334 2334 """
2335 2335 limit = cmdutil.loglimit(opts)
2336 2336 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2337 2337 other = hg.repository(hg.remoteui(repo, opts), source)
2338 2338 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2339 2339 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2340 2340 if revs:
2341 2341 revs = [other.lookup(rev) for rev in revs]
2342 2342
2343 2343 tmp = discovery.findcommonincoming(repo, other, heads=revs,
2344 2344 force=opts.get('force'))
2345 2345 common, incoming, rheads = tmp
2346 2346 if not incoming:
2347 2347 try:
2348 2348 os.unlink(opts["bundle"])
2349 2349 except:
2350 2350 pass
2351 2351 ui.status(_("no changes found\n"))
2352 2352 return 1
2353 2353
2354 2354 cleanup = None
2355 2355 try:
2356 2356 fname = opts["bundle"]
2357 2357 if fname or not other.local():
2358 2358 # create a bundle (uncompressed if other repo is not local)
2359 2359
2360 2360 if revs is None and other.capable('changegroupsubset'):
2361 2361 revs = rheads
2362 2362
2363 2363 if revs is None:
2364 2364 cg = other.changegroup(incoming, "incoming")
2365 2365 else:
2366 2366 cg = other.changegroupsubset(incoming, revs, 'incoming')
2367 2367 bundletype = other.local() and "HG10BZ" or "HG10UN"
2368 2368 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
2369 2369 # keep written bundle?
2370 2370 if opts["bundle"]:
2371 2371 cleanup = None
2372 2372 if not other.local():
2373 2373 # use the created uncompressed bundlerepo
2374 2374 other = bundlerepo.bundlerepository(ui, repo.root, fname)
2375 2375
2376 2376 o = other.changelog.nodesbetween(incoming, revs)[0]
2377 2377 if opts.get('newest_first'):
2378 2378 o.reverse()
2379 2379 displayer = cmdutil.show_changeset(ui, other, opts)
2380 2380 count = 0
2381 2381 for n in o:
2382 2382 if limit is not None and count >= limit:
2383 2383 break
2384 2384 parents = [p for p in other.changelog.parents(n) if p != nullid]
2385 2385 if opts.get('no_merges') and len(parents) == 2:
2386 2386 continue
2387 2387 count += 1
2388 2388 displayer.show(other[n])
2389 2389 displayer.close()
2390 2390 finally:
2391 2391 if hasattr(other, 'close'):
2392 2392 other.close()
2393 2393 if cleanup:
2394 2394 os.unlink(cleanup)
2395 2395
2396 2396 def init(ui, dest=".", **opts):
2397 2397 """create a new repository in the given directory
2398 2398
2399 2399 Initialize a new repository in the given directory. If the given
2400 2400 directory does not exist, it will be created.
2401 2401
2402 2402 If no directory is given, the current directory is used.
2403 2403
2404 2404 It is possible to specify an ``ssh://`` URL as the destination.
2405 2405 See :hg:`help urls` for more information.
2406 2406
2407 2407 Returns 0 on success.
2408 2408 """
2409 2409 hg.repository(hg.remoteui(ui, opts), dest, create=1)
2410 2410
2411 2411 def locate(ui, repo, *pats, **opts):
2412 2412 """locate files matching specific patterns
2413 2413
2414 2414 Print files under Mercurial control in the working directory whose
2415 2415 names match the given patterns.
2416 2416
2417 2417 By default, this command searches all directories in the working
2418 2418 directory. To search just the current directory and its
2419 2419 subdirectories, use "--include .".
2420 2420
2421 2421 If no patterns are given to match, this command prints the names
2422 2422 of all files under Mercurial control in the working directory.
2423 2423
2424 2424 If you want to feed the output of this command into the "xargs"
2425 2425 command, use the -0 option to both this command and "xargs". This
2426 2426 will avoid the problem of "xargs" treating single filenames that
2427 2427 contain whitespace as multiple filenames.
2428 2428
2429 2429 Returns 0 if a match is found, 1 otherwise.
2430 2430 """
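# example usage (illustrative only, mirroring the -0/xargs note above):
#   hg locate -0 --include . | xargs -0 touch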
2431 2431 end = opts.get('print0') and '\0' or '\n'
2432 2432 rev = opts.get('rev') or None
2433 2433
2434 2434 ret = 1
2435 2435 m = cmdutil.match(repo, pats, opts, default='relglob')
2436 2436 m.bad = lambda x, y: False
2437 2437 for abs in repo[rev].walk(m):
2438 2438 if not rev and abs not in repo.dirstate:
2439 2439 continue
2440 2440 if opts.get('fullpath'):
2441 2441 ui.write(repo.wjoin(abs), end)
2442 2442 else:
2443 2443 ui.write(((pats and m.rel(abs)) or abs), end)
2444 2444 ret = 0
2445 2445
2446 2446 return ret
2447 2447
2448 2448 def log(ui, repo, *pats, **opts):
2449 2449 """show revision history of entire repository or files
2450 2450
2451 2451 Print the revision history of the specified files or the entire
2452 2452 project.
2453 2453
2454 2454 File history is shown without following rename or copy history of
2455 2455 files. Use -f/--follow with a filename to follow history across
2456 2456 renames and copies. --follow without a filename will only show
2457 2457 ancestors or descendants of the starting revision. --follow-first
2458 2458 only follows the first parent of merge revisions.
2459 2459
2460 2460 If no revision range is specified, the default is tip:0 unless
2461 2461 --follow is set, in which case the working directory parent is
2462 2462 used as the starting revision. You can specify a revision set for
2463 2463 log; see :hg:`help revsets` for more information.
2464 2464
2465 2465 See :hg:`help dates` for a list of formats valid for -d/--date.
2466 2466
2467 2467 By default this command prints revision number and changeset id,
2468 2468 tags, non-trivial parents, user, date and time, and a summary for
2469 2469 each commit. When the -v/--verbose switch is used, the list of
2470 2470 changed files and full commit message are shown.
2471 2471
2472 2472 NOTE: log -p/--patch may generate unexpected diff output for merge
2473 2473 changesets, as it will only compare the merge changeset against
2474 2474 its first parent. Also, only files different from BOTH parents
2475 2475 will appear in files:.
2476 2476
2477 2477 Returns 0 on success.
2478 2478 """
2479 2479
2480 2480 matchfn = cmdutil.match(repo, pats, opts)
2481 2481 limit = cmdutil.loglimit(opts)
2482 2482 count = 0
2483 2483
2484 2484 endrev = None
2485 2485 if opts.get('copies') and opts.get('rev'):
2486 2486 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2487 2487
2488 2488 df = False
2489 2489 if opts["date"]:
2490 2490 df = util.matchdate(opts["date"])
2491 2491
2492 2492 branches = opts.get('branch', []) + opts.get('only_branch', [])
2493 2493 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2494 2494
2495 2495 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2496 2496 def prep(ctx, fns):
2497 2497 rev = ctx.rev()
2498 2498 parents = [p for p in repo.changelog.parentrevs(rev)
2499 2499 if p != nullrev]
2500 2500 if opts.get('no_merges') and len(parents) == 2:
2501 2501 return
2502 2502 if opts.get('only_merges') and len(parents) != 2:
2503 2503 return
2504 2504 if opts.get('branch') and ctx.branch() not in opts['branch']:
2505 2505 return
2506 2506 if df and not df(ctx.date()[0]):
2507 2507 return
2508 2508 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2509 2509 return
2510 2510 if opts.get('keyword'):
2511 2511 for k in [kw.lower() for kw in opts['keyword']]:
2512 2512 if (k in ctx.user().lower() or
2513 2513 k in ctx.description().lower() or
2514 2514 k in " ".join(ctx.files()).lower()):
2515 2515 break
2516 2516 else:
2517 2517 return
2518 2518
2519 2519 copies = None
2520 2520 if opts.get('copies') and rev:
2521 2521 copies = []
2522 2522 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2523 2523 for fn in ctx.files():
2524 2524 rename = getrenamed(fn, rev)
2525 2525 if rename:
2526 2526 copies.append((fn, rename[0]))
2527 2527
2528 2528 revmatchfn = None
2529 2529 if opts.get('patch') or opts.get('stat'):
2530 2530 revmatchfn = cmdutil.match(repo, fns, default='path')
2531 2531
2532 2532 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2533 2533
2534 2534 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2535 2535 if count == limit:
2536 2536 break
2537 2537 if displayer.flush(ctx.rev()):
2538 2538 count += 1
2539 2539 displayer.close()
2540 2540
2541 2541 def manifest(ui, repo, node=None, rev=None):
2542 2542 """output the current or given revision of the project manifest
2543 2543
2544 2544 Print a list of version controlled files for the given revision.
2545 2545 If no revision is given, the first parent of the working directory
2546 2546 is used, or the null revision if no revision is checked out.
2547 2547
2548 2548 With -v, print file permissions, symlink and executable bits.
2549 2549 With --debug, print file revision hashes.
2550 2550
2551 2551 Returns 0 on success.
2552 2552 """
2553 2553
2554 2554 if rev and node:
2555 2555 raise util.Abort(_("please specify just one revision"))
2556 2556
2557 2557 if not node:
2558 2558 node = rev
2559 2559
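# flag -> verbose-mode prefix: 'l' is a symlink (644 @), 'x' is executable
# (755 *), and '' is a regular file (644)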
2560 2560 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2561 2561 ctx = repo[node]
2562 2562 for f in ctx:
2563 2563 if ui.debugflag:
2564 2564 ui.write("%40s " % hex(ctx.manifest()[f]))
2565 2565 if ui.verbose:
2566 2566 ui.write(decor[ctx.flags(f)])
2567 2567 ui.write("%s\n" % f)
2568 2568
2569 2569 def merge(ui, repo, node=None, **opts):
2570 2570 """merge working directory with another revision
2571 2571
2572 2572 The current working directory is updated with all changes made in
2573 2573 the requested revision since the last common predecessor revision.
2574 2574
2575 2575 Files that changed between either parent are marked as changed for
2576 2576 the next commit and a commit must be performed before any further
2577 2577 updates to the repository are allowed. The next commit will have
2578 2578 two parents.
2579 2579
2580 2580 If no revision is specified, the working directory's parent is a
2581 2581 head revision, and the current branch contains exactly one other
2582 2582 head, the other head is merged with by default. Otherwise, an
2583 2583 explicit revision with which to merge must be provided.
2584 2584
2585 2585 To undo an uncommitted merge, use :hg:`update --clean .` which
2586 2586 will check out a clean copy of the original merge parent, losing
2587 2587 all changes.
2588 2588
2589 2589 Returns 0 on success, 1 if there are unresolved files.
2590 2590 """
2591 2591
2592 2592 if opts.get('rev') and node:
2593 2593 raise util.Abort(_("please specify just one revision"))
2594 2594 if not node:
2595 2595 node = opts.get('rev')
2596 2596
2597 2597 if not node:
2598 2598 branch = repo.changectx(None).branch()
2599 2599 bheads = repo.branchheads(branch)
2600 2600 if len(bheads) > 2:
2601 2601 raise util.Abort(_(
2602 2602 'branch \'%s\' has %d heads - '
2603 2603 'please merge with an explicit rev\n'
2604 2604 '(run \'hg heads .\' to see heads)')
2605 2605 % (branch, len(bheads)))
2606 2606
2607 2607 parent = repo.dirstate.parents()[0]
2608 2608 if len(bheads) == 1:
2609 2609 if len(repo.heads()) > 1:
2610 2610 raise util.Abort(_(
2611 2611 'branch \'%s\' has one head - '
2612 2612 'please merge with an explicit rev\n'
2613 2613 '(run \'hg heads\' to see all heads)')
2614 2614 % branch)
2615 2615 msg = _('there is nothing to merge')
2616 2616 if parent != repo.lookup(repo[None].branch()):
2617 2617 msg = _('%s - use "hg update" instead') % msg
2618 2618 raise util.Abort(msg)
2619 2619
2620 2620 if parent not in bheads:
2621 2621 raise util.Abort(_('working dir not at a head rev - '
2622 2622 'use "hg update" or merge with an explicit rev'))
2623 2623 node = parent == bheads[0] and bheads[-1] or bheads[0]
2624 2624
2625 2625 if opts.get('preview'):
2626 2626 # find nodes that are ancestors of p2 but not of p1
2627 2627 p1 = repo.lookup('.')
2628 2628 p2 = repo.lookup(node)
2629 2629 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2630 2630
2631 2631 displayer = cmdutil.show_changeset(ui, repo, opts)
2632 2632 for node in nodes:
2633 2633 displayer.show(repo[node])
2634 2634 displayer.close()
2635 2635 return 0
2636 2636
2637 2637 return hg.merge(repo, node, force=opts.get('force'))
2638 2638
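# Illustration only: the implicit merge-target choice described in the
# docstring of merge() above, as a tiny standalone helper (hypothetical name,
# not part of the command table). With exactly two heads on the branch, the
# head that is not the working directory parent is chosen.
def _sketch_pick_other_head(bheads, parent):
    if len(bheads) != 2 or parent not in bheads:
        raise ValueError('an explicit revision to merge with is required')
    return bheads[1] if bheads[0] == parent else bheads[0]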
2639 2639 def outgoing(ui, repo, dest=None, **opts):
2640 2640 """show changesets not found in the destination
2641 2641
2642 2642 Show changesets not found in the specified destination repository
2643 2643 or the default push location. These are the changesets that would
2644 2644 be pushed if a push was requested.
2645 2645
2646 2646 See pull for details of valid destination formats.
2647 2647
2648 2648 Returns 0 if there are outgoing changes, 1 otherwise.
2649 2649 """
2650 2650 limit = cmdutil.loglimit(opts)
2651 2651 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2652 2652 dest, branches = hg.parseurl(dest, opts.get('branch'))
2653 2653 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2654 2654 if revs:
2655 2655 revs = [repo.lookup(rev) for rev in revs]
2656 2656
2657 2657 other = hg.repository(hg.remoteui(repo, opts), dest)
2658 2658 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2659 2659 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
2660 2660 if not o:
2661 2661 ui.status(_("no changes found\n"))
2662 2662 return 1
2663 2663 o = repo.changelog.nodesbetween(o, revs)[0]
2664 2664 if opts.get('newest_first'):
2665 2665 o.reverse()
2666 2666 displayer = cmdutil.show_changeset(ui, repo, opts)
2667 2667 count = 0
2668 2668 for n in o:
2669 2669 if limit is not None and count >= limit:
2670 2670 break
2671 2671 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2672 2672 if opts.get('no_merges') and len(parents) == 2:
2673 2673 continue
2674 2674 count += 1
2675 2675 displayer.show(repo[n])
2676 2676 displayer.close()
2677 2677
2678 2678 def parents(ui, repo, file_=None, **opts):
2679 2679 """show the parents of the working directory or revision
2680 2680
2681 2681 Print the working directory's parent revisions. If a revision is
2682 2682 given via -r/--rev, the parent of that revision will be printed.
2683 2683 If a file argument is given, the revision in which the file was
2684 2684 last changed (before the working directory revision or the
2685 2685 argument to --rev if given) is printed.
2686 2686
2687 2687 Returns 0 on success.
2688 2688 """
2689 2689 rev = opts.get('rev')
2690 2690 if rev:
2691 2691 ctx = repo[rev]
2692 2692 else:
2693 2693 ctx = repo[None]
2694 2694
2695 2695 if file_:
2696 2696 m = cmdutil.match(repo, (file_,), opts)
2697 2697 if m.anypats() or len(m.files()) != 1:
2698 2698 raise util.Abort(_('can only specify an explicit filename'))
2699 2699 file_ = m.files()[0]
2700 2700 filenodes = []
2701 2701 for cp in ctx.parents():
2702 2702 if not cp:
2703 2703 continue
2704 2704 try:
2705 2705 filenodes.append(cp.filenode(file_))
2706 2706 except error.LookupError:
2707 2707 pass
2708 2708 if not filenodes:
2709 2709 raise util.Abort(_("'%s' not found in manifest!") % file_)
2710 2710 fl = repo.file(file_)
2711 2711 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2712 2712 else:
2713 2713 p = [cp.node() for cp in ctx.parents()]
2714 2714
2715 2715 displayer = cmdutil.show_changeset(ui, repo, opts)
2716 2716 for n in p:
2717 2717 if n != nullid:
2718 2718 displayer.show(repo[n])
2719 2719 displayer.close()
2720 2720
2721 2721 def paths(ui, repo, search=None):
2722 2722 """show aliases for remote repositories
2723 2723
2724 2724 Show definition of symbolic path name NAME. If no name is given,
2725 2725 show definition of all available names.
2726 2726
2727 2727 Path names are defined in the [paths] section of
2728 2728 ``/etc/mercurial/hgrc`` and ``$HOME/.hgrc``. If run inside a
2729 2729 repository, ``.hg/hgrc`` is used, too.
2730 2730
2731 2731 The path names ``default`` and ``default-push`` have a special
2732 2732 meaning. When performing a push or pull operation, they are used
2733 2733 as fallbacks if no location is specified on the command-line.
2734 2734 When ``default-push`` is set, it will be used for push and
2735 2735 ``default`` will be used for pull; otherwise ``default`` is used
2736 2736 as the fallback for both. When cloning a repository, the clone
2737 2737 source is written as ``default`` in ``.hg/hgrc``. Note that
2738 2738 ``default`` and ``default-push`` apply to all inbound (e.g.
2739 2739 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2740 2740 :hg:`bundle`) operations.
2741 2741
2742 2742 See :hg:`help urls` for more information.
2743 2743
2744 2744 Returns 0 on success.
2745 2745 """
2746 2746 if search:
2747 2747 for name, path in ui.configitems("paths"):
2748 2748 if name == search:
2749 2749 ui.write("%s\n" % url.hidepassword(path))
2750 2750 return
2751 2751 ui.warn(_("not found!\n"))
2752 2752 return 1
2753 2753 else:
2754 2754 for name, path in ui.configitems("paths"):
2755 2755 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2756 2756
2757 2757 def postincoming(ui, repo, modheads, optupdate, checkout):
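# modheads is the number of heads added by the pull: 0 means nothing came in;
# with --update an automatic update is only done when at most one head was
# added, the branch still has a single head, or an explicit checkout is given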
2758 2758 if modheads == 0:
2759 2759 return
2760 2760 if optupdate:
2761 2761 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2762 2762 return hg.update(repo, checkout)
2763 2763 else:
2764 2764 ui.status(_("not updating, since new heads added\n"))
2765 2765 if modheads > 1:
2766 2766 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2767 2767 else:
2768 2768 ui.status(_("(run 'hg update' to get a working copy)\n"))
2769 2769
2770 2770 def pull(ui, repo, source="default", **opts):
2771 2771 """pull changes from the specified source
2772 2772
2773 2773 Pull changes from a remote repository to a local one.
2774 2774
2775 2775 This finds all changes from the repository at the specified path
2776 2776 or URL and adds them to a local repository (the current one unless
2777 2777 -R is specified). By default, this does not update the copy of the
2778 2778 project in the working directory.
2779 2779
2780 2780 Use :hg:`incoming` if you want to see what would have been added
2781 2781 by a pull at the time you issued this command. If you then decide
2782 2782 to add those changes to the repository, you should use :hg:`pull
2783 2783 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
2784 2784
2785 2785 If SOURCE is omitted, the 'default' path will be used.
2786 2786 See :hg:`help urls` for more information.
2787 2787
2788 2788 Returns 0 on success, 1 if an update had unresolved files.
2789 2789 """
2790 2790 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2791 2791 other = hg.repository(hg.remoteui(repo, opts), source)
2792 2792 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2793 2793 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2794 2794 if revs:
2795 2795 try:
2796 2796 revs = [other.lookup(rev) for rev in revs]
2797 2797 except error.CapabilityError:
2798 2798 err = _("Other repository doesn't support revision lookup, "
2799 2799 "so a rev cannot be specified.")
2800 2800 raise util.Abort(err)
2801 2801
2802 2802 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2803 2803 if checkout:
2804 2804 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2805 2805 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2806 2806
2807 2807 def push(ui, repo, dest=None, **opts):
2808 2808 """push changes to the specified destination
2809 2809
2810 2810 Push changesets from the local repository to the specified
2811 2811 destination.
2812 2812
2813 2813 This operation is symmetrical to pull: it is identical to a pull
2814 2814 in the destination repository from the current one.
2815 2815
2816 2816 By default, push will not allow creation of new heads at the
2817 2817 destination, since multiple heads would make it unclear which head
2818 2818 to use. In this situation, it is recommended to pull and merge
2819 2819 before pushing.
2820 2820
2821 2821 Use --new-branch if you want to allow push to create a new named
2822 2822 branch that is not present at the destination. This allows you to
2823 2823 only create a new branch without forcing other changes.
2824 2824
2825 2825 Use -f/--force to override the default behavior and push all
2826 2826 changesets on all branches.
2827 2827
2828 2828 If -r/--rev is used, the specified revision and all its ancestors
2829 2829 will be pushed to the remote repository.
2830 2830
2831 2831 Please see :hg:`help urls` for important details about ``ssh://``
2832 2832 URLs. If DESTINATION is omitted, a default path will be used.
2833 2833
2834 2834 Returns 0 if push was successful, 1 if nothing to push.
2835 2835 """
2836 2836 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2837 2837 dest, branches = hg.parseurl(dest, opts.get('branch'))
2838 2838 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2839 2839 other = hg.repository(hg.remoteui(repo, opts), dest)
2840 2840 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2841 2841 if revs:
2842 2842 revs = [repo.lookup(rev) for rev in revs]
2843 2843
2844 2844 # push subrepos depth-first for coherent ordering
2845 2845 c = repo['']
2846 2846 subs = c.substate # only repos that are committed
2847 2847 for s in sorted(subs):
2848 2848 if not c.sub(s).push(opts.get('force')):
2849 2849 return False
2850 2850
2851 2851 r = repo.push(other, opts.get('force'), revs=revs,
2852 2852 newbranch=opts.get('new_branch'))
2853 2853 return r == 0
2854 2854
2855 2855 def recover(ui, repo):
2856 2856 """roll back an interrupted transaction
2857 2857
2858 2858 Recover from an interrupted commit or pull.
2859 2859
2860 2860 This command tries to fix the repository status after an
2861 2861 interrupted operation. It should only be necessary when Mercurial
2862 2862 suggests it.
2863 2863
2864 2864 Returns 0 if successful, 1 if nothing to recover or verify fails.
2865 2865 """
2866 2866 if repo.recover():
2867 2867 return hg.verify(repo)
2868 2868 return 1
2869 2869
2870 2870 def remove(ui, repo, *pats, **opts):
2871 2871 """remove the specified files on the next commit
2872 2872
2873 2873 Schedule the indicated files for removal from the repository.
2874 2874
2875 2875 This only removes files from the current branch, not from the
2876 2876 entire project history. -A/--after can be used to remove only
2877 2877 files that have already been deleted, -f/--force can be used to
2878 2878 force deletion, and -Af can be used to remove files from the next
2879 2879 revision without deleting them from the working directory.
2880 2880
2881 2881 The following table details the behavior of remove for different
2882 2882 file states (columns) and option combinations (rows). The file
2883 2883 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2884 2884 reported by :hg:`status`). The actions are Warn, Remove (from
2885 2885 branch) and Delete (from disk)::
2886 2886
2887 2887 A C M !
2888 2888 none W RD W R
2889 2889 -f R RD RD R
2890 2890 -A W W W R
2891 2891 -Af R R R R
2892 2892
2893 2893 This command schedules the files to be removed at the next commit.
2894 2894 To undo a remove before that, see :hg:`revert`.
2895 2895
2896 2896 Returns 0 on success, 1 if any warnings encountered.
2897 2897 """
2898 2898
2899 2899 ret = 0
2900 2900 after, force = opts.get('after'), opts.get('force')
2901 2901 if not pats and not after:
2902 2902 raise util.Abort(_('no files specified'))
2903 2903
2904 2904 m = cmdutil.match(repo, pats, opts)
2905 2905 s = repo.status(match=m, clean=True)
2906 2906 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2907 2907
2908 2908 for f in m.files():
2909 2909 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2910 2910 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2911 2911 ret = 1
2912 2912
2913 2913 def warn(files, reason):
2914 2914 for f in files:
2915 2915 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2916 2916 % (m.rel(f), reason))
2917 2917 ret = 1
2918 2918
2919 2919 if force:
2920 2920 remove, forget = modified + deleted + clean, added
2921 2921 elif after:
2922 2922 remove, forget = deleted, []
2923 2923 warn(modified + added + clean, _('still exists'))
2924 2924 else:
2925 2925 remove, forget = deleted + clean, []
2926 2926 warn(modified, _('is modified'))
2927 2927 warn(added, _('has been marked for add'))
2928 2928
2929 2929 for f in sorted(remove + forget):
2930 2930 if ui.verbose or not m.exact(f):
2931 2931 ui.status(_('removing %s\n') % m.rel(f))
2932 2932
2933 2933 repo[None].forget(forget)
2934 2934 repo[None].remove(remove, unlink=not after)
2935 2935 return ret
2936 2936
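# Illustration only: the behavior table from the remove() docstring above,
# encoded as a plain dict (hypothetical constant, unused by the command).
# Keys are option row and file-state column; values are the actions
# W = warn, R = remove from branch, D = delete from disk.
_REMOVE_TABLE = {
    'none': {'A': 'W',  'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R',  'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W',  'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R',  'C': 'R',  'M': 'R',  '!': 'R'},
}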
2937 2937 def rename(ui, repo, *pats, **opts):
2938 2938 """rename files; equivalent of copy + remove
2939 2939
2940 2940 Mark dest as copies of sources; mark sources for deletion. If dest
2941 2941 is a directory, copies are put in that directory. If dest is a
2942 2942 file, there can only be one source.
2943 2943
2944 2944 By default, this command copies the contents of files as they
2945 2945 exist in the working directory. If invoked with -A/--after, the
2946 2946 operation is recorded, but no copying is performed.
2947 2947
2948 2948 This command takes effect at the next commit. To undo a rename
2949 2949 before that, see :hg:`revert`.
2950 2950
2951 2951 Returns 0 on success, 1 if errors are encountered.
2952 2952 """
2953 2953 wlock = repo.wlock(False)
2954 2954 try:
2955 2955 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2956 2956 finally:
2957 2957 wlock.release()
2958 2958
2959 2959 def resolve(ui, repo, *pats, **opts):
2960 2960 """redo merges or set/view the merge status of files
2961 2961
2962 2962 Merges with unresolved conflicts are often the result of
2963 2963 non-interactive merging using the ``internal:merge`` hgrc setting,
2964 2964 or a command-line merge tool like ``diff3``. The resolve command
2965 2965 is used to manage the files involved in a merge, after :hg:`merge`
2966 2966 has been run, and before :hg:`commit` is run (i.e. the working
2967 2967 directory must have two parents).
2968 2968
2969 2969 The resolve command can be used in the following ways:
2970 2970
2971 2971 - :hg:`resolve FILE...`: attempt to re-merge the specified files,
2972 2972 discarding any previous merge attempts. Re-merging is not
2973 2973 performed for files already marked as resolved. Use ``--all/-a``
2974 2974 to select all unresolved files.
2975 2975
2976 2976 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
2977 2977 (e.g. after having manually fixed-up the files). The default is
2978 2978 to mark all unresolved files.
2979 2979
2980 2980 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
2981 2981 default is to mark all resolved files.
2982 2982
2983 2983 - :hg:`resolve -l`: list files which had or still have conflicts.
2984 2984 In the printed list, ``U`` = unresolved and ``R`` = resolved.
2985 2985
2986 2986 Note that Mercurial will not let you commit files with unresolved
2987 2987 merge conflicts. You must use :hg:`resolve -m ...` before you can
2988 2988 commit after a conflicting merge.
2989 2989
2990 2990 Returns 0 on success, 1 if any files fail a resolve attempt.
2991 2991 """
2992 2992
2993 2993 all, mark, unmark, show, nostatus = \
2994 2994 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2995 2995
2996 2996 if (show and (mark or unmark)) or (mark and unmark):
2997 2997 raise util.Abort(_("too many options specified"))
2998 2998 if pats and all:
2999 2999 raise util.Abort(_("can't specify --all and patterns"))
3000 3000 if not (all or pats or show or mark or unmark):
3001 3001 raise util.Abort(_('no files or directories specified; '
3002 3002 'use --all to remerge all files'))
3003 3003
3004 3004 ms = mergemod.mergestate(repo)
3005 3005 m = cmdutil.match(repo, pats, opts)
3006 3006 ret = 0
3007 3007
3008 3008 for f in ms:
3009 3009 if m(f):
3010 3010 if show:
3011 3011 if nostatus:
3012 3012 ui.write("%s\n" % f)
3013 3013 else:
3014 3014 ui.write("%s %s\n" % (ms[f].upper(), f),
3015 3015 label='resolve.' +
3016 3016 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3017 3017 elif mark:
3018 3018 ms.mark(f, "r")
3019 3019 elif unmark:
3020 3020 ms.mark(f, "u")
3021 3021 else:
3022 3022 wctx = repo[None]
3023 3023 mctx = wctx.parents()[-1]
3024 3024
3025 3025 # backup pre-resolve (merge uses .orig for its own purposes)
3026 3026 a = repo.wjoin(f)
3027 3027 util.copyfile(a, a + ".resolve")
3028 3028
3029 3029 # resolve file
3030 3030 if ms.resolve(f, wctx, mctx):
3031 3031 ret = 1
3032 3032
3033 3033 # replace filemerge's .orig file with our resolve file
3034 3034 util.rename(a + ".resolve", a + ".orig")
3035 3035 return ret
3036 3036
3037 3037 def revert(ui, repo, *pats, **opts):
3038 3038 """restore individual files or directories to an earlier state
3039 3039
3040 3040 NOTE: This command is most likely not what you are looking for. revert
3041 3041 will partially overwrite content in the working directory without changing
3042 3042 the working directory parents. Use :hg:`update -r rev` to check out earlier
3043 3043 revisions, or :hg:`update --clean .` to undo a merge which has added
3044 3044 another parent.
3045 3045
3046 3046 With no revision specified, revert the named files or directories
3047 3047 to the contents they had in the parent of the working directory.
3048 3048 This restores the contents of the affected files to an unmodified
3049 3049 state and unschedules adds, removes, copies, and renames. If the
3050 3050 working directory has two parents, you must explicitly specify a
3051 3051 revision.
3052 3052
3053 3053 Using the -r/--rev option, revert the given files or directories
3054 3054 to their contents as of a specific revision. This can be helpful
3055 3055 to "roll back" some or all of an earlier change. See :hg:`help
3056 3056 dates` for a list of formats valid for -d/--date.
3057 3057
3058 3058 Revert modifies the working directory. It does not commit any
3059 3059 changes, or change the parent of the working directory. If you
3060 3060 revert to a revision other than the parent of the working
3061 3061 directory, the reverted files will thus appear modified
3062 3062 afterwards.
3063 3063
3064 3064 If a file has been deleted, it is restored. If the executable mode
3065 3065 of a file was changed, it is reset.
3066 3066
3067 3067 If names are given, all files matching the names are reverted.
3068 3068 If no arguments are given, no files are reverted.
3069 3069
3070 3070 Modified files are saved with a .orig suffix before reverting.
3071 3071 To disable these backups, use --no-backup.
3072 3072
3073 3073 Returns 0 on success.
3074 3074 """
3075 3075
3076 3076 if opts.get("date"):
3077 3077 if opts.get("rev"):
3078 3078 raise util.Abort(_("you can't specify a revision and a date"))
3079 3079 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3080 3080
3081 3081 if not pats and not opts.get('all'):
3082 3082 raise util.Abort(_('no files or directories specified; '
3083 3083 'use --all to revert the whole repo'))
3084 3084
3085 3085 parent, p2 = repo.dirstate.parents()
3086 3086 if not opts.get('rev') and p2 != nullid:
3087 3087 raise util.Abort(_('uncommitted merge - please provide a '
3088 3088 'specific revision'))
3089 3089 ctx = repo[opts.get('rev')]
3090 3090 node = ctx.node()
3091 3091 mf = ctx.manifest()
3092 3092 if node == parent:
3093 3093 pmf = mf
3094 3094 else:
3095 3095 pmf = None
3096 3096
3097 3097 # need all matching names in dirstate and manifest of target rev,
3098 3098 # so have to walk both. do not print errors if files exist in one
3099 3099 # but not other.
3100 3100
3101 3101 names = {}
3102 3102
3103 3103 wlock = repo.wlock()
3104 3104 try:
3105 3105 # walk dirstate.
3106 3106
3107 3107 m = cmdutil.match(repo, pats, opts)
3108 3108 m.bad = lambda x, y: False
3109 3109 for abs in repo.walk(m):
3110 3110 names[abs] = m.rel(abs), m.exact(abs)
3111 3111
3112 3112 # walk target manifest.
3113 3113
3114 3114 def badfn(path, msg):
3115 3115 if path in names:
3116 3116 return
3117 3117 path_ = path + '/'
3118 3118 for f in names:
3119 3119 if f.startswith(path_):
3120 3120 return
3121 3121 ui.warn("%s: %s\n" % (m.rel(path), msg))
3122 3122
3123 3123 m = cmdutil.match(repo, pats, opts)
3124 3124 m.bad = badfn
3125 3125 for abs in repo[node].walk(m):
3126 3126 if abs not in names:
3127 3127 names[abs] = m.rel(abs), m.exact(abs)
3128 3128
3129 3129 m = cmdutil.matchfiles(repo, names)
3130 3130 changes = repo.status(match=m)[:4]
3131 3131 modified, added, removed, deleted = map(set, changes)
3132 3132
3133 3133 # if f is a rename, also revert the source
3134 3134 cwd = repo.getcwd()
3135 3135 for f in added:
3136 3136 src = repo.dirstate.copied(f)
3137 3137 if src and src not in names and repo.dirstate[src] == 'r':
3138 3138 removed.add(src)
3139 3139 names[src] = (repo.pathto(src, cwd), True)
3140 3140
3141 3141 def removeforget(abs):
3142 3142 if repo.dirstate[abs] == 'a':
3143 3143 return _('forgetting %s\n')
3144 3144 return _('removing %s\n')
3145 3145
3146 3146 revert = ([], _('reverting %s\n'))
3147 3147 add = ([], _('adding %s\n'))
3148 3148 remove = ([], removeforget)
3149 3149 undelete = ([], _('undeleting %s\n'))
3150 3150
3151 3151 disptable = (
3152 3152 # dispatch table:
3153 3153 # file state
3154 3154 # action if in target manifest
3155 3155 # action if not in target manifest
3156 3156 # make backup if in target manifest
3157 3157 # make backup if not in target manifest
3158 3158 (modified, revert, remove, True, True),
3159 3159 (added, revert, remove, True, False),
3160 3160 (removed, undelete, None, False, False),
3161 3161 (deleted, revert, remove, False, False),
3162 3162 )
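# for example, a file in `modified` that still exists in the target manifest
# hits the first row: it is reverted and a .orig backup is made (unless
# --no-backup was given)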
3163 3163
3164 3164 for abs, (rel, exact) in sorted(names.items()):
3165 3165 mfentry = mf.get(abs)
3166 3166 target = repo.wjoin(abs)
3167 3167 def handle(xlist, dobackup):
3168 3168 xlist[0].append(abs)
3169 3169 if (dobackup and not opts.get('no_backup') and
3170 3170 os.path.lexists(target)):
3171 3171 bakname = "%s.orig" % rel
3172 3172 ui.note(_('saving current version of %s as %s\n') %
3173 3173 (rel, bakname))
3174 3174 if not opts.get('dry_run'):
3175 3175 util.rename(target, bakname)
3176 3176 if ui.verbose or not exact:
3177 3177 msg = xlist[1]
3178 3178 if not isinstance(msg, basestring):
3179 3179 msg = msg(abs)
3180 3180 ui.status(msg % rel)
3181 3181 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3182 3182 if abs not in table:
3183 3183 continue
3184 3184 # file has changed in dirstate
3185 3185 if mfentry:
3186 3186 handle(hitlist, backuphit)
3187 3187 elif misslist is not None:
3188 3188 handle(misslist, backupmiss)
3189 3189 break
3190 3190 else:
3191 3191 if abs not in repo.dirstate:
3192 3192 if mfentry:
3193 3193 handle(add, True)
3194 3194 elif exact:
3195 3195 ui.warn(_('file not managed: %s\n') % rel)
3196 3196 continue
3197 3197 # file has not changed in dirstate
3198 3198 if node == parent:
3199 3199 if exact:
3200 3200 ui.warn(_('no changes needed to %s\n') % rel)
3201 3201 continue
3202 3202 if pmf is None:
3203 3203 # only need parent manifest in this unlikely case,
3204 3204 # so do not read by default
3205 3205 pmf = repo[parent].manifest()
3206 3206 if abs in pmf:
3207 3207 if mfentry:
3208 3208 # if version of file is same in parent and target
3209 3209 # manifests, do nothing
3210 3210 if (pmf[abs] != mfentry or
3211 3211 pmf.flags(abs) != mf.flags(abs)):
3212 3212 handle(revert, False)
3213 3213 else:
3214 3214 handle(remove, False)
3215 3215
3216 3216 if not opts.get('dry_run'):
3217 3217 def checkout(f):
3218 3218 fc = ctx[f]
3219 3219 repo.wwrite(f, fc.data(), fc.flags())
3220 3220
3221 3221 audit_path = util.path_auditor(repo.root)
3222 3222 for f in remove[0]:
3223 3223 if repo.dirstate[f] == 'a':
3224 3224 repo.dirstate.forget(f)
3225 3225 continue
3226 3226 audit_path(f)
3227 3227 try:
3228 3228 util.unlink(repo.wjoin(f))
3229 3229 except OSError:
3230 3230 pass
3231 3231 repo.dirstate.remove(f)
3232 3232
3233 3233 normal = None
3234 3234 if node == parent:
3235 3235 # We're reverting to our parent. If possible, we'd like status
3236 3236 # to report the file as clean. We have to use normallookup for
3237 3237 # merges to avoid losing information about merged/dirty files.
3238 3238 if p2 != nullid:
3239 3239 normal = repo.dirstate.normallookup
3240 3240 else:
3241 3241 normal = repo.dirstate.normal
3242 3242 for f in revert[0]:
3243 3243 checkout(f)
3244 3244 if normal:
3245 3245 normal(f)
3246 3246
3247 3247 for f in add[0]:
3248 3248 checkout(f)
3249 3249 repo.dirstate.add(f)
3250 3250
3251 3251 normal = repo.dirstate.normallookup
3252 3252 if node == parent and p2 == nullid:
3253 3253 normal = repo.dirstate.normal
3254 3254 for f in undelete[0]:
3255 3255 checkout(f)
3256 3256 normal(f)
3257 3257
3258 3258 finally:
3259 3259 wlock.release()
3260 3260
3261 3261 def rollback(ui, repo, **opts):
3262 3262 """roll back the last transaction (dangerous)
3263 3263
3264 3264 This command should be used with care. There is only one level of
3265 3265 rollback, and there is no way to undo a rollback. It will also
3266 3266 restore the dirstate at the time of the last transaction, losing
3267 3267 any dirstate changes since that time. This command does not alter
3268 3268 the working directory.
3269 3269
3270 3270 Transactions are used to encapsulate the effects of all commands
3271 3271 that create new changesets or propagate existing changesets into a
3272 3272 repository. For example, the following commands are transactional,
3273 3273 and their effects can be rolled back:
3274 3274
3275 3275 - commit
3276 3276 - import
3277 3277 - pull
3278 3278 - push (with this repository as the destination)
3279 3279 - unbundle
3280 3280
3281 3281 This command is not intended for use on public repositories. Once
3282 3282 changes are visible for pull by other users, rolling a transaction
3283 3283 back locally is ineffective (someone else may already have pulled
3284 3284 the changes). Furthermore, a race is possible with readers of the
3285 3285 repository; for example an in-progress pull from the repository
3286 3286 may fail if a rollback is performed.
3287 3287
3288 3288 Returns 0 on success, 1 if no rollback data is available.
3289 3289 """
3290 3290 return repo.rollback(opts.get('dry_run'))
3291 3291
3292 3292 def root(ui, repo):
3293 3293 """print the root (top) of the current working directory
3294 3294
3295 3295 Print the root directory of the current repository.
3296 3296
3297 3297 Returns 0 on success.
3298 3298 """
3299 3299 ui.write(repo.root + "\n")
3300 3300
3301 3301 def serve(ui, repo, **opts):
3302 3302 """start stand-alone webserver
3303 3303
3304 3304 Start a local HTTP repository browser and pull server. You can use
3305 3305 this for ad-hoc sharing and browsing of repositories. It is
3306 3306 recommended to use a real web server to serve a repository for
3307 3307 longer periods of time.
3308 3308
3309 3309 Please note that the server does not implement access control.
3310 3310 This means that, by default, anybody can read from the server and
3311 3311 nobody can write to it. Set the ``web.allow_push``
3312 3312 option to ``*`` to allow everybody to push to the server. You
3313 3313 should use a real web server if you need to authenticate users.
3314 3314
3315 3315 By default, the server logs accesses to stdout and errors to
3316 3316 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3317 3317 files.
3318 3318
3319 3319 To have the server choose a free port number to listen on, specify
3320 3320 a port number of 0; in this case, the server will print the port
3321 3321 number it uses.
3322 3322
3323 3323 Returns 0 on success.
3324 3324 """
3325 3325
3326 3326 if opts["stdio"]:
3327 3327 if repo is None:
3328 3328 raise error.RepoError(_("There is no Mercurial repository here"
3329 3329 " (.hg not found)"))
3330 3330 s = sshserver.sshserver(ui, repo)
3331 3331 s.serve_forever()
3332 3332
3333 3333 # this way we can check if something was given in the command-line
3334 3334 if opts.get('port'):
3335 3335 opts['port'] = int(opts.get('port'))
3336 3336
3337 3337 baseui = repo and repo.baseui or ui
3338 3338 optlist = ("name templates style address port prefix ipv6"
3339 3339 " accesslog errorlog certificate encoding")
3340 3340 for o in optlist.split():
3341 3341 val = opts.get(o, '')
3342 3342 if val in (None, ''): # should check against default options instead
3343 3343 continue
3344 3344 baseui.setconfig("web", o, val)
3345 3345 if repo and repo.ui != baseui:
3346 3346 repo.ui.setconfig("web", o, val)
3347 3347
3348 3348 o = opts.get('web_conf') or opts.get('webdir_conf')
3349 3349 if not o:
3350 3350 if not repo:
3351 3351 raise error.RepoError(_("There is no Mercurial repository"
3352 3352 " here (.hg not found)"))
3353 3353 o = repo.root
3354 3354
3355 3355 app = hgweb.hgweb(o, baseui=ui)
3356 3356
3357 3357 class service(object):
3358 3358 def init(self):
3359 3359 util.set_signal_handler()
3360 3360 self.httpd = hgweb.server.create_server(ui, app)
3361 3361
3362 3362 if opts['port'] and not ui.verbose:
3363 3363 return
3364 3364
3365 3365 if self.httpd.prefix:
3366 3366 prefix = self.httpd.prefix.strip('/') + '/'
3367 3367 else:
3368 3368 prefix = ''
3369 3369
3370 3370 port = ':%d' % self.httpd.port
3371 3371 if port == ':80':
3372 3372 port = ''
3373 3373
3374 3374 bindaddr = self.httpd.addr
3375 3375 if bindaddr == '0.0.0.0':
3376 3376 bindaddr = '*'
3377 3377 elif ':' in bindaddr: # IPv6
3378 3378 bindaddr = '[%s]' % bindaddr
3379 3379
3380 3380 fqaddr = self.httpd.fqaddr
3381 3381 if ':' in fqaddr:
3382 3382 fqaddr = '[%s]' % fqaddr
3383 3383 if opts['port']:
3384 3384 write = ui.status
3385 3385 else:
3386 3386 write = ui.write
3387 3387 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3388 3388 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3389 3389
3390 3390 def run(self):
3391 3391 self.httpd.serve_forever()
3392 3392
3393 3393 service = service()
3394 3394
3395 3395 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3396 3396
3397 3397 def status(ui, repo, *pats, **opts):
3398 3398 """show changed files in the working directory
3399 3399
3400 3400 Show status of files in the repository. If names are given, only
3401 3401 files that match are shown. Files that are clean or ignored or
3402 3402 the source of a copy/move operation are not listed unless
3403 3403 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3404 3404 Unless options described with "show only ..." are given, the
3405 3405 options -mardu are used.
3406 3406
3407 3407 Option -q/--quiet hides untracked (unknown and ignored) files
3408 3408 unless explicitly requested with -u/--unknown or -i/--ignored.
3409 3409
3410 3410 NOTE: status may appear to disagree with diff if permissions have
3411 3411 changed or a merge has occurred. The standard diff format does not
3412 3412 report permission changes and diff only reports changes relative
3413 3413 to one merge parent.
3414 3414
3415 3415 If one revision is given, it is used as the base revision.
3416 3416 If two revisions are given, the differences between them are
3417 3417 shown. The --change option can also be used as a shortcut to list
3418 3418 the changed files of a revision from its first parent.
3419 3419
3420 3420 The codes used to show the status of files are::
3421 3421
3422 3422 M = modified
3423 3423 A = added
3424 3424 R = removed
3425 3425 C = clean
3426 3426 ! = missing (deleted by non-hg command, but still tracked)
3427 3427 ? = not tracked
3428 3428 I = ignored
3429 3429 = origin of the previous file listed as A (added)
3430 3430
3431 3431 Returns 0 on success.
3432 3432 """
3433 3433
3434 3434 revs = opts.get('rev')
3435 3435 change = opts.get('change')
3436 3436
3437 3437 if revs and change:
3438 3438 msg = _('cannot specify --rev and --change at the same time')
3439 3439 raise util.Abort(msg)
3440 3440 elif change:
3441 3441 node2 = repo.lookup(change)
3442 3442 node1 = repo[node2].parents()[0].node()
3443 3443 else:
3444 3444 node1, node2 = cmdutil.revpair(repo, revs)
3445 3445
3446 3446 cwd = (pats and repo.getcwd()) or ''
3447 3447 end = opts.get('print0') and '\0' or '\n'
3448 3448 copy = {}
3449 3449 states = 'modified added removed deleted unknown ignored clean'.split()
3450 3450 show = [k for k in states if opts.get(k)]
3451 3451 if opts.get('all'):
3452 3452 show += ui.quiet and (states[:4] + ['clean']) or states
3453 3453 if not show:
3454 3454 show = ui.quiet and states[:4] or states[:5]
3455 3455
3456 3456 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3457 3457 'ignored' in show, 'clean' in show, 'unknown' in show)
3458 3458 changestates = zip(states, 'MAR!?IC', stat)
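# pairs each state name with its status letter and its file list; the letters
# follow the order of repo.status(): M A R ! ? I C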
3459 3459
3460 3460 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3461 3461 ctxn = repo[nullid]
3462 3462 ctx1 = repo[node1]
3463 3463 ctx2 = repo[node2]
3464 3464 added = stat[1]
3465 3465 if node2 is None:
3466 3466 added = stat[0] + stat[1] # merged?
3467 3467
3468 3468 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3469 3469 if k in added:
3470 3470 copy[k] = v
3471 3471 elif v in added:
3472 3472 copy[v] = k
3473 3473
3474 3474 for state, char, files in changestates:
3475 3475 if state in show:
3476 3476 format = "%s %%s%s" % (char, end)
3477 3477 if opts.get('no_status'):
3478 3478 format = "%%s%s" % end
3479 3479
3480 3480 for f in files:
3481 3481 ui.write(format % repo.pathto(f, cwd),
3482 3482 label='status.' + state)
3483 3483 if f in copy:
3484 3484 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3485 3485 label='status.copied')
3486 3486
3487 3487 def summary(ui, repo, **opts):
3488 3488 """summarize working directory state
3489 3489
3490 3490 This generates a brief summary of the working directory state,
3491 3491 including parents, branch, commit status, and available updates.
3492 3492
3493 3493 With the --remote option, this will check the default paths for
3494 3494 incoming and outgoing changes. This can be time-consuming.
3495 3495
3496 3496 Returns 0 on success.
3497 3497 """
3498 3498
3499 3499 ctx = repo[None]
3500 3500 parents = ctx.parents()
3501 3501 pnode = parents[0].node()
3502 3502
3503 3503 for p in parents:
3504 3504 # label with log.changeset (instead of log.parent) since this
3505 3505 # shows a working directory parent *changeset*:
3506 3506 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3507 3507 label='log.changeset')
3508 3508 ui.write(' '.join(p.tags()), label='log.tag')
3509 3509 if p.rev() == -1:
3510 3510 if not len(repo):
3511 3511 ui.write(_(' (empty repository)'))
3512 3512 else:
3513 3513 ui.write(_(' (no revision checked out)'))
3514 3514 ui.write('\n')
3515 3515 if p.description():
3516 3516 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3517 3517 label='log.summary')
3518 3518
3519 3519 branch = ctx.branch()
3520 3520 bheads = repo.branchheads(branch)
3521 3521 m = _('branch: %s\n') % branch
3522 3522 if branch != 'default':
3523 3523 ui.write(m, label='log.branch')
3524 3524 else:
3525 3525 ui.status(m, label='log.branch')
3526 3526
3527 3527 st = list(repo.status(unknown=True))[:6]
3528 3528
3529 3529 c = repo.dirstate.copies()
3530 3530 copied, renamed = [], []
3531 3531 for d, s in c.iteritems():
3532 3532 if s in st[2]:
3533 3533 st[2].remove(s)
3534 3534 renamed.append(d)
3535 3535 else:
3536 3536 copied.append(d)
3537 3537 if d in st[1]:
3538 3538 st[1].remove(d)
3539 3539 st.insert(3, renamed)
3540 3540 st.insert(4, copied)
3541 3541
3542 3542 ms = mergemod.mergestate(repo)
3543 3543 st.append([f for f in ms if ms[f] == 'u'])
3544 3544
3545 3545 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3546 3546 st.append(subs)
3547 3547
3548 3548 labels = [ui.label(_('%d modified'), 'status.modified'),
3549 3549 ui.label(_('%d added'), 'status.added'),
3550 3550 ui.label(_('%d removed'), 'status.removed'),
3551 3551 ui.label(_('%d renamed'), 'status.copied'),
3552 3552 ui.label(_('%d copied'), 'status.copied'),
3553 3553 ui.label(_('%d deleted'), 'status.deleted'),
3554 3554 ui.label(_('%d unknown'), 'status.unknown'),
3555 3555 ui.label(_('%d ignored'), 'status.ignored'),
3556 3556 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3557 3557 ui.label(_('%d subrepos'), 'status.modified')]
3558 3558 t = []
3559 3559 for s, l in zip(st, labels):
3560 3560 if s:
3561 3561 t.append(l % len(s))
3562 3562
3563 3563 t = ', '.join(t)
3564 3564 cleanworkdir = False
3565 3565
3566 3566 if len(parents) > 1:
3567 3567 t += _(' (merge)')
3568 3568 elif branch != parents[0].branch():
3569 3569 t += _(' (new branch)')
3570 3570 elif (parents[0].extra().get('close') and
3571 3571 pnode in repo.branchheads(branch, closed=True)):
3572 3572 t += _(' (head closed)')
3573 3573 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3574 3574 t += _(' (clean)')
3575 3575 cleanworkdir = True
3576 3576 elif pnode not in bheads:
3577 3577 t += _(' (new branch head)')
3578 3578
3579 3579 if cleanworkdir:
3580 3580 ui.status(_('commit: %s\n') % t.strip())
3581 3581 else:
3582 3582 ui.write(_('commit: %s\n') % t.strip())
3583 3583
3584 3584 # all ancestors of branch heads - all ancestors of parent = new csets
3585 3585 new = [0] * len(repo)
3586 3586 cl = repo.changelog
3587 3587 for a in [cl.rev(n) for n in bheads]:
3588 3588 new[a] = 1
3589 3589 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3590 3590 new[a] = 1
3591 3591 for a in [p.rev() for p in parents]:
3592 3592 if a >= 0:
3593 3593 new[a] = 0
3594 3594 for a in cl.ancestors(*[p.rev() for p in parents]):
3595 3595 new[a] = 0
3596 3596 new = sum(new)
3597 3597
3598 3598 if new == 0:
3599 3599 ui.status(_('update: (current)\n'))
3600 3600 elif pnode not in bheads:
3601 3601 ui.write(_('update: %d new changesets (update)\n') % new)
3602 3602 else:
3603 3603 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3604 3604 (new, len(bheads)))
3605 3605
3606 3606 if opts.get('remote'):
3607 3607 t = []
3608 3608 source, branches = hg.parseurl(ui.expandpath('default'))
3609 3609 other = hg.repository(hg.remoteui(repo, {}), source)
3610 3610 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3611 3611 ui.debug('comparing with %s\n' % url.hidepassword(source))
3612 3612 repo.ui.pushbuffer()
3613 3613 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3614 3614 repo.ui.popbuffer()
3615 3615 if incoming:
3616 3616 t.append(_('1 or more incoming'))
3617 3617
3618 3618 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3619 3619 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3620 3620 other = hg.repository(hg.remoteui(repo, {}), dest)
3621 3621 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3622 3622 repo.ui.pushbuffer()
3623 3623 o = discovery.findoutgoing(repo, other)
3624 3624 repo.ui.popbuffer()
3625 3625 o = repo.changelog.nodesbetween(o, None)[0]
3626 3626 if o:
3627 3627 t.append(_('%d outgoing') % len(o))
3628 3628
3629 3629 if t:
3630 3630 ui.write(_('remote: %s\n') % (', '.join(t)))
3631 3631 else:
3632 3632 ui.status(_('remote: (synced)\n'))
3633 3633
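# --- Editor's illustration (not part of Mercurial) --------------------------
# The "update:" line above counts changesets that are ancestors of (or equal
# to) the branch heads but not ancestors of (or equal to) the working
# directory parents -- see the comment "all ancestors of branch heads - all
# ancestors of parent = new csets".  A sketch of the same calculation written
# with sets, assuming `repo`, `bheads` and `parents` as bound in summary():
def _example_count_new(repo, bheads, parents):
    cl = repo.changelog
    headrevs = set(cl.rev(n) for n in bheads)
    headrevs.update(cl.ancestors(*headrevs))
    parentrevs = set(p.rev() for p in parents if p.rev() >= 0)
    parentrevs.update(cl.ancestors(*parentrevs))
    return len(headrevs - parentrevs)
# -----------------------------------------------------------------------------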
3634 3634 def tag(ui, repo, name1, *names, **opts):
3635 3635 """add one or more tags for the current or given revision
3636 3636
3637 3637 Name a particular revision using <name>.
3638 3638
3639 3639 Tags are used to name particular revisions of the repository and are
3640 3640 very useful for comparing different revisions, going back to significant
3641 3641 earlier versions, or marking branch points as releases, etc.
3642 3642
3643 3643 If no revision is given, the parent of the working directory is
3644 3644 used, or tip if no revision is checked out.
3645 3645
3646 3646 To facilitate version control, distribution, and merging of tags,
3647 3647 they are stored as a file named ".hgtags" which is managed
3648 3648 similarly to other project files and can be hand-edited if
3649 3649 necessary. The file '.hg/localtags' is used for local tags (not
3650 3650 shared among repositories).
3651 3651
3652 3652 See :hg:`help dates` for a list of formats valid for -d/--date.
3653 3653
3654 3654 Since tag names have priority over branch names during revision
3655 3655 lookup, using an existing branch name as a tag name is discouraged.
3656 3656
3657 3657 Returns 0 on success.
3658 3658 """
3659 3659
3660 3660 rev_ = "."
3661 3661 names = [t.strip() for t in (name1,) + names]
3662 3662 if len(names) != len(set(names)):
3663 3663 raise util.Abort(_('tag names must be unique'))
3664 3664 for n in names:
3665 3665 if n in ['tip', '.', 'null']:
3666 3666 raise util.Abort(_('the name \'%s\' is reserved') % n)
3667 3667 if not n:
3668 3668 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
3669 3669 if opts.get('rev') and opts.get('remove'):
3670 3670 raise util.Abort(_("--rev and --remove are incompatible"))
3671 3671 if opts.get('rev'):
3672 3672 rev_ = opts['rev']
3673 3673 message = opts.get('message')
3674 3674 if opts.get('remove'):
3675 3675 expectedtype = opts.get('local') and 'local' or 'global'
3676 3676 for n in names:
3677 3677 if not repo.tagtype(n):
3678 3678 raise util.Abort(_('tag \'%s\' does not exist') % n)
3679 3679 if repo.tagtype(n) != expectedtype:
3680 3680 if expectedtype == 'global':
3681 3681 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3682 3682 else:
3683 3683 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3684 3684 rev_ = nullid
3685 3685 if not message:
3686 3686 # we don't translate commit messages
3687 3687 message = 'Removed tag %s' % ', '.join(names)
3688 3688 elif not opts.get('force'):
3689 3689 for n in names:
3690 3690 if n in repo.tags():
3691 3691 raise util.Abort(_('tag \'%s\' already exists '
3692 3692 '(use -f to force)') % n)
3693 3693 if not rev_ and repo.dirstate.parents()[1] != nullid:
3694 3694 raise util.Abort(_('uncommitted merge - please provide a '
3695 3695 'specific revision'))
3696 3696 r = repo[rev_].node()
3697 3697
3698 3698 if not message:
3699 3699 # we don't translate commit messages
3700 3700 message = ('Added tag %s for changeset %s' %
3701 3701 (', '.join(names), short(r)))
3702 3702
3703 3703 date = opts.get('date')
3704 3704 if date:
3705 3705 date = util.parsedate(date)
3706 3706
3707 3707 if opts.get('edit'):
3708 3708 message = ui.edit(message, ui.username())
3709 3709
3710 3710 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3711 3711
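# --- Editor's illustration (not part of Mercurial) --------------------------
# As the docstring above notes, global tags live in a versioned .hgtags file;
# each line pairs the full 40-character hexadecimal changeset id with the tag
# name, while .hg/localtags holds local tags in the same format.  A sketch of
# tagging programmatically through the repo.tag() call that tag() ends with,
# assuming `repo` as above (the helper name is hypothetical):
def _example_add_local_tag(repo, name, rev='.'):
    node = repo[rev].node()
    # names, node, message, local, user, date -- mirroring tag() above
    repo.tag([name], node, 'Added tag %s' % name, True, None, None)
# -----------------------------------------------------------------------------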
3712 3712 def tags(ui, repo):
3713 3713 """list repository tags
3714 3714
3715 3715 This lists both regular and local tags. When the -v/--verbose
3716 3716 switch is used, a third column "local" is printed for local tags.
3717 3717
3718 3718 Returns 0 on success.
3719 3719 """
3720 3720
3721 3721 hexfunc = ui.debugflag and hex or short
3722 3722 tagtype = ""
3723 3723
3724 3724 for t, n in reversed(repo.tagslist()):
3725 3725 if ui.quiet:
3726 3726 ui.write("%s\n" % t)
3727 3727 continue
3728 3728
3729 3729 try:
3730 3730 hn = hexfunc(n)
3731 3731 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3732 3732 except error.LookupError:
3733 3733 r = " ?:%s" % hn
3734 3734 else:
3735 3735 spaces = " " * (30 - encoding.colwidth(t))
3736 3736 if ui.verbose:
3737 3737 if repo.tagtype(t) == 'local':
3738 3738 tagtype = " local"
3739 3739 else:
3740 3740 tagtype = ""
3741 3741 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3742 3742
3743 3743 def tip(ui, repo, **opts):
3744 3744 """show the tip revision
3745 3745
3746 3746 The tip revision (usually just called the tip) is the changeset
3747 3747 most recently added to the repository (and therefore the most
3748 3748 recently changed head).
3749 3749
3750 3750 If you have just made a commit, that commit will be the tip. If
3751 3751 you have just pulled changes from another repository, the tip of
3752 3752 that repository becomes the current tip. The "tip" tag is special
3753 3753 and cannot be renamed or assigned to a different changeset.
3754 3754
3755 3755 Returns 0 on success.
3756 3756 """
3757 3757 displayer = cmdutil.show_changeset(ui, repo, opts)
3758 3758 displayer.show(repo[len(repo) - 1])
3759 3759 displayer.close()
3760 3760
3761 3761 def unbundle(ui, repo, fname1, *fnames, **opts):
3762 3762 """apply one or more changegroup files
3763 3763
3764 3764 Apply one or more compressed changegroup files generated by the
3765 3765 bundle command.
3766 3766
3767 3767 Returns 0 on success, 1 if an update has unresolved files.
3768 3768 """
3769 3769 fnames = (fname1,) + fnames
3770 3770
3771 3771 lock = repo.lock()
3772 3772 try:
3773 3773 for fname in fnames:
3774 3774 f = url.open(ui, fname)
3775 3775 gen = changegroup.readbundle(f, fname)
3776 3776 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
3777 3777 lock=lock)
3778 3778 finally:
3779 3779 lock.release()
3780 3780
3781 3781 return postincoming(ui, repo, modheads, opts.get('update'), None)
3782 3782
3783 3783 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3784 3784 """update working directory (or switch revisions)
3785 3785
3786 3786 Update the repository's working directory to the specified
3787 3787 changeset.
3788 3788
3789 3789 If no changeset is specified, attempt to update to the tip of the
3790 3790 current branch. If this changeset is a descendant of the working
3791 3791 directory's parent, update to it; otherwise, abort.
3792 3792
3793 3793 The following rules apply when the working directory contains
3794 3794 uncommitted changes:
3795 3795
3796 3796 1. If neither -c/--check nor -C/--clean is specified, and if
3797 3797 the requested changeset is an ancestor or descendant of
3798 3798 the working directory's parent, the uncommitted changes
3799 3799 are merged into the requested changeset and the merged
3800 3800 result is left uncommitted. If the requested changeset is
3801 3801 not an ancestor or descendant (that is, it is on another
3802 3802 branch), the update is aborted and the uncommitted changes
3803 3803 are preserved.
3804 3804
3805 3805 2. With the -c/--check option, the update is aborted and the
3806 3806 uncommitted changes are preserved.
3807 3807
3808 3808 3. With the -C/--clean option, uncommitted changes are discarded and
3809 3809 the working directory is updated to the requested changeset.
3810 3810
3811 3811 Use null as the changeset to remove the working directory (like
3812 3812 :hg:`clone -U`).
3813 3813
3814 3814 If you want to update just one file to an older changeset, use :hg:`revert`.
3815 3815
3816 3816 See :hg:`help dates` for a list of formats valid for -d/--date.
3817 3817
3818 3818 Returns 0 on success, 1 if there are unresolved files.
3819 3819 """
3820 3820 if rev and node:
3821 3821 raise util.Abort(_("please specify just one revision"))
3822 3822
3823 3823 if not rev:
3824 3824 rev = node
3825 3825
3826 3826 if check and clean:
3827 3827 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3828 3828
3829 3829 if check:
3830 3830 # we could use dirty() but we can ignore merge and branch trivia
3831 3831 c = repo[None]
3832 3832 if c.modified() or c.added() or c.removed():
3833 3833 raise util.Abort(_("uncommitted local changes"))
3834 3834
3835 3835 if date:
3836 3836 if rev:
3837 3837 raise util.Abort(_("you can't specify a revision and a date"))
3838 3838 rev = cmdutil.finddate(ui, repo, date)
3839 3839
3840 3840 if clean or check:
3841 3841 return hg.clean(repo, rev)
3842 3842 else:
3843 3843 return hg.update(repo, rev)
3844 3844
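# --- Editor's illustration (not part of Mercurial) --------------------------
# A sketch of how the three docstring rules above map onto the two calls
# update() ends with: -C/--clean discards uncommitted changes via hg.clean(),
# while the default path (and -c/--check, once its "uncommitted local changes"
# guard has passed) goes through hg.update(), which merges or aborts as
# described in rule 1.  The helper name and `discard` flag are hypothetical;
# the return value is propagated as the exit status, as in update() above.
def _example_update_to(repo, rev, discard=False):
    from mercurial import hg
    if discard:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
# -----------------------------------------------------------------------------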
3845 3845 def verify(ui, repo):
3846 3846 """verify the integrity of the repository
3847 3847
3848 3848 Verify the integrity of the current repository.
3849 3849
3850 3850 This will perform an extensive check of the repository's
3851 3851 integrity, validating the hashes and checksums of each entry in
3852 3852 the changelog, manifest, and tracked files, as well as the
3853 3853 integrity of their crosslinks and indices.
3854 3854
3855 3855 Returns 0 on success, 1 if errors are encountered.
3856 3856 """
3857 3857 return hg.verify(repo)
3858 3858
3859 3859 def version_(ui):
3860 3860 """output version and copyright information"""
3861 3861 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3862 3862 % util.version())
3863 3863 ui.status(_(
3864 3864 "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n"
3865 3865 "This is free software; see the source for copying conditions. "
3866 3866 "There is NO\nwarranty; "
3867 3867 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3868 3868 ))
3869 3869
3870 3870 # Command options and aliases are listed here, alphabetically
3871 3871
3872 3872 globalopts = [
3873 3873 ('R', 'repository', '',
3874 3874 _('repository root directory or name of overlay bundle file'),
3875 3875 _('REPO')),
3876 3876 ('', 'cwd', '',
3877 3877 _('change working directory'), _('DIR')),
3878 3878 ('y', 'noninteractive', None,
3879 3879 _('do not prompt, assume \'yes\' for any required answers')),
3880 3880 ('q', 'quiet', None, _('suppress output')),
3881 3881 ('v', 'verbose', None, _('enable additional output')),
3882 3882 ('', 'config', [],
3883 3883 _('set/override config option (use \'section.name=value\')'),
3884 3884 _('CONFIG')),
3885 3885 ('', 'debug', None, _('enable debugging output')),
3886 3886 ('', 'debugger', None, _('start debugger')),
3887 3887 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
3888 3888 _('ENCODE')),
3889 3889 ('', 'encodingmode', encoding.encodingmode,
3890 3890 _('set the charset encoding mode'), _('MODE')),
3891 3891 ('', 'traceback', None, _('always print a traceback on exception')),
3892 3892 ('', 'time', None, _('time how long the command takes')),
3893 3893 ('', 'profile', None, _('print command execution profile')),
3894 3894 ('', 'version', None, _('output version information and exit')),
3895 3895 ('h', 'help', None, _('display help and exit')),
3896 3896 ]
3897 3897
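# --- Editor's note (not part of Mercurial) -----------------------------------
# Each entry in globalopts above and the option tables below is a
# fancyopts-style tuple of (short flag, long flag, default value, help text),
# optionally followed by a fifth element naming the value placeholder shown in
# --help output.  The type of the default (None/bool for switches, str for
# single values, list for repeatable options, int for numbers) roughly
# determines how the command-line value is parsed.  A hypothetical entry:
#
#     ('j', 'jobs', 1, _('number of jobs to run'), _('NUM'))
# -----------------------------------------------------------------------------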
3898 3898 dryrunopts = [('n', 'dry-run', None,
3899 3899 _('do not perform actions, just print output'))]
3900 3900
3901 3901 remoteopts = [
3902 3902 ('e', 'ssh', '',
3903 3903 _('specify ssh command to use'), _('CMD')),
3904 3904 ('', 'remotecmd', '',
3905 3905 _('specify hg command to run on the remote side'), _('CMD')),
3906 3906 ]
3907 3907
3908 3908 walkopts = [
3909 3909 ('I', 'include', [],
3910 3910 _('include names matching the given patterns'), _('PATTERN')),
3911 3911 ('X', 'exclude', [],
3912 3912 _('exclude names matching the given patterns'), _('PATTERN')),
3913 3913 ]
3914 3914
3915 3915 commitopts = [
3916 3916 ('m', 'message', '',
3917 3917 _('use text as commit message'), _('TEXT')),
3918 3918 ('l', 'logfile', '',
3919 3919 _('read commit message from file'), _('FILE')),
3920 3920 ]
3921 3921
3922 3922 commitopts2 = [
3923 3923 ('d', 'date', '',
3924 3924 _('record datecode as commit date'), _('DATE')),
3925 3925 ('u', 'user', '',
3926 3926 _('record the specified user as committer'), _('USER')),
3927 3927 ]
3928 3928
3929 3929 templateopts = [
3930 3930 ('', 'style', '',
3931 3931 _('display using template map file'), _('STYLE')),
3932 3932 ('', 'template', '',
3933 3933 _('display with template'), _('TEMPLATE')),
3934 3934 ]
3935 3935
3936 3936 logopts = [
3937 3937 ('p', 'patch', None, _('show patch')),
3938 3938 ('g', 'git', None, _('use git extended diff format')),
3939 3939 ('l', 'limit', '',
3940 3940 _('limit number of changes displayed'), _('NUM')),
3941 3941 ('M', 'no-merges', None, _('do not show merges')),
3942 3942 ('', 'stat', None, _('output diffstat-style summary of changes')),
3943 3943 ] + templateopts
3944 3944
3945 3945 diffopts = [
3946 3946 ('a', 'text', None, _('treat all files as text')),
3947 3947 ('g', 'git', None, _('use git extended diff format')),
3948 3948 ('', 'nodates', None, _('omit dates from diff headers'))
3949 3949 ]
3950 3950
3951 3951 diffopts2 = [
3952 3952 ('p', 'show-function', None, _('show which function each change is in')),
3953 3953 ('', 'reverse', None, _('produce a diff that undoes the changes')),
3954 3954 ('w', 'ignore-all-space', None,
3955 3955 _('ignore white space when comparing lines')),
3956 3956 ('b', 'ignore-space-change', None,
3957 3957 _('ignore changes in the amount of white space')),
3958 3958 ('B', 'ignore-blank-lines', None,
3959 3959 _('ignore changes whose lines are all blank')),
3960 3960 ('U', 'unified', '',
3961 3961 _('number of lines of context to show'), _('NUM')),
3962 3962 ('', 'stat', None, _('output diffstat-style summary of changes')),
3963 3963 ]
3964 3964
3965 3965 similarityopts = [
3966 3966 ('s', 'similarity', '',
3967 3967 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
3968 3968 ]
3969 3969
3970 3970 table = {
3971 3971 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3972 3972 "addremove":
3973 3973 (addremove, similarityopts + walkopts + dryrunopts,
3974 3974 _('[OPTION]... [FILE]...')),
3975 3975 "^annotate|blame":
3976 3976 (annotate,
3977 3977 [('r', 'rev', '',
3978 3978 _('annotate the specified revision'), _('REV')),
3979 3979 ('', 'follow', None,
3980 3980 _('follow copies/renames and list the filename (DEPRECATED)')),
3981 3981 ('', 'no-follow', None, _("don't follow copies and renames")),
3982 3982 ('a', 'text', None, _('treat all files as text')),
3983 3983 ('u', 'user', None, _('list the author (long with -v)')),
3984 3984 ('f', 'file', None, _('list the filename')),
3985 3985 ('d', 'date', None, _('list the date (short with -q)')),
3986 3986 ('n', 'number', None, _('list the revision number (default)')),
3987 3987 ('c', 'changeset', None, _('list the changeset')),
3988 3988 ('l', 'line-number', None,
3989 3989 _('show line number at the first appearance'))
3990 3990 ] + walkopts,
3991 3991 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3992 3992 "archive":
3993 3993 (archive,
3994 3994 [('', 'no-decode', None, _('do not pass files through decoders')),
3995 3995 ('p', 'prefix', '',
3996 3996 _('directory prefix for files in archive'), _('PREFIX')),
3997 3997 ('r', 'rev', '',
3998 3998 _('revision to distribute'), _('REV')),
3999 3999 ('t', 'type', '',
4000 4000 _('type of distribution to create'), _('TYPE')),
4001 4001 ] + walkopts,
4002 4002 _('[OPTION]... DEST')),
4003 4003 "backout":
4004 4004 (backout,
4005 4005 [('', 'merge', None,
4006 4006 _('merge with old dirstate parent after backout')),
4007 4007 ('', 'parent', '',
4008 4008 _('parent to choose when backing out merge'), _('REV')),
4009 4009 ('r', 'rev', '',
4010 4010 _('revision to backout'), _('REV')),
4011 4011 ] + walkopts + commitopts + commitopts2,
4012 4012 _('[OPTION]... [-r] REV')),
4013 4013 "bisect":
4014 4014 (bisect,
4015 4015 [('r', 'reset', False, _('reset bisect state')),
4016 4016 ('g', 'good', False, _('mark changeset good')),
4017 4017 ('b', 'bad', False, _('mark changeset bad')),
4018 4018 ('s', 'skip', False, _('skip testing changeset')),
4019 4019 ('c', 'command', '',
4020 4020 _('use command to check changeset state'), _('CMD')),
4021 4021 ('U', 'noupdate', False, _('do not update to target'))],
4022 4022 _("[-gbsr] [-U] [-c CMD] [REV]")),
4023 4023 "branch":
4024 4024 (branch,
4025 4025 [('f', 'force', None,
4026 4026 _('set branch name even if it shadows an existing branch')),
4027 4027 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4028 4028 _('[-fC] [NAME]')),
4029 4029 "branches":
4030 4030 (branches,
4031 4031 [('a', 'active', False,
4032 4032 _('show only branches that have unmerged heads')),
4033 4033 ('c', 'closed', False,
4034 4034 _('show normal and closed branches'))],
4035 4035 _('[-ac]')),
4036 4036 "bundle":
4037 4037 (bundle,
4038 4038 [('f', 'force', None,
4039 4039 _('run even when the destination is unrelated')),
4040 4040 ('r', 'rev', [],
4041 4041 _('a changeset intended to be added to the destination'),
4042 4042 _('REV')),
4043 4043 ('b', 'branch', [],
4044 4044 _('a specific branch you would like to bundle'),
4045 4045 _('BRANCH')),
4046 4046 ('', 'base', [],
4047 4047 _('a base changeset assumed to be available at the destination'),
4048 4048 _('REV')),
4049 4049 ('a', 'all', None, _('bundle all changesets in the repository')),
4050 4050 ('t', 'type', 'bzip2',
4051 4051 _('bundle compression type to use'), _('TYPE')),
4052 4052 ] + remoteopts,
4053 4053 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4054 4054 "cat":
4055 4055 (cat,
4056 4056 [('o', 'output', '',
4057 4057 _('print output to file with formatted name'), _('FORMAT')),
4058 4058 ('r', 'rev', '',
4059 4059 _('print the given revision'), _('REV')),
4060 4060 ('', 'decode', None, _('apply any matching decode filter')),
4061 4061 ] + walkopts,
4062 4062 _('[OPTION]... FILE...')),
4063 4063 "^clone":
4064 4064 (clone,
4065 4065 [('U', 'noupdate', None,
4066 4066 _('the clone will include an empty working copy (only a repository)')),
4067 4067 ('u', 'updaterev', '',
4068 4068 _('revision, tag or branch to check out'), _('REV')),
4069 4069 ('r', 'rev', [],
4070 4070 _('include the specified changeset'), _('REV')),
4071 4071 ('b', 'branch', [],
4072 4072 _('clone only the specified branch'), _('BRANCH')),
4073 4073 ('', 'pull', None, _('use pull protocol to copy metadata')),
4074 4074 ('', 'uncompressed', None,
4075 4075 _('use uncompressed transfer (fast over LAN)')),
4076 4076 ] + remoteopts,
4077 4077 _('[OPTION]... SOURCE [DEST]')),
4078 4078 "^commit|ci":
4079 4079 (commit,
4080 4080 [('A', 'addremove', None,
4081 4081 _('mark new/missing files as added/removed before committing')),
4082 4082 ('', 'close-branch', None,
4083 4083 _('mark a branch as closed, hiding it from the branch list')),
4084 4084 ] + walkopts + commitopts + commitopts2,
4085 4085 _('[OPTION]... [FILE]...')),
4086 4086 "copy|cp":
4087 4087 (copy,
4088 4088 [('A', 'after', None, _('record a copy that has already occurred')),
4089 4089 ('f', 'force', None,
4090 4090 _('forcibly copy over an existing managed file')),
4091 4091 ] + walkopts + dryrunopts,
4092 4092 _('[OPTION]... [SOURCE]... DEST')),
4093 4093 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4094 4094 "debugbuilddag":
4095 4095 (debugbuilddag,
4096 4096 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4097 4097 ('a', 'appended-file', None, _('add single file all revs append to')),
4098 4098 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4099 4099 ('n', 'new-file', None, _('add new file at each rev')),
4100 4100 ],
4101 4101 _('[OPTION]... TEXT')),
4102 4102 "debugcheckstate": (debugcheckstate, [], ''),
4103 4103 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4104 4104 "debugcomplete":
4105 4105 (debugcomplete,
4106 4106 [('o', 'options', None, _('show the command options'))],
4107 4107 _('[-o] CMD')),
4108 4108 "debugdag":
4109 4109 (debugdag,
4110 4110 [('t', 'tags', None, _('use tags as labels')),
4111 4111 ('b', 'branches', None, _('annotate with branch names')),
4112 4112 ('', 'dots', None, _('use dots for runs')),
4113 4113 ('s', 'spaces', None, _('separate elements by spaces')),
4114 4114 ],
4115 4115 _('[OPTION]... [FILE [REV]...]')),
4116 4116 "debugdate":
4117 4117 (debugdate,
4118 4118 [('e', 'extended', None, _('try extended date formats'))],
4119 4119 _('[-e] DATE [RANGE]')),
4120 4120 "debugdata": (debugdata, [], _('FILE REV')),
4121 4121 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4122 4122 "debugindex": (debugindex, [], _('FILE')),
4123 4123 "debugindexdot": (debugindexdot, [], _('FILE')),
4124 4124 "debuginstall": (debuginstall, [], ''),
4125 4125 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4126 4126 "debugrebuildstate":
4127 4127 (debugrebuildstate,
4128 4128 [('r', 'rev', '',
4129 4129 _('revision to rebuild to'), _('REV'))],
4130 4130 _('[-r REV] [REV]')),
4131 4131 "debugrename":
4132 4132 (debugrename,
4133 4133 [('r', 'rev', '',
4134 4134 _('revision to debug'), _('REV'))],
4135 4135 _('[-r REV] FILE')),
4136 4136 "debugrevspec":
4137 4137 (debugrevspec, [], ('REVSPEC')),
4138 4138 "debugsetparents":
4139 4139 (debugsetparents, [], _('REV1 [REV2]')),
4140 4140 "debugstate":
4141 4141 (debugstate,
4142 4142 [('', 'nodates', None, _('do not display the saved mtime'))],
4143 4143 _('[OPTION]...')),
4144 4144 "debugsub":
4145 4145 (debugsub,
4146 4146 [('r', 'rev', '',
4147 4147 _('revision to check'), _('REV'))],
4148 4148 _('[-r REV] [REV]')),
4149 4149 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4150 4150 "^diff":
4151 4151 (diff,
4152 4152 [('r', 'rev', [],
4153 4153 _('revision'), _('REV')),
4154 4154 ('c', 'change', '',
4155 4155 _('change made by revision'), _('REV'))
4156 4156 ] + diffopts + diffopts2 + walkopts,
4157 4157 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4158 4158 "^export":
4159 4159 (export,
4160 4160 [('o', 'output', '',
4161 4161 _('print output to file with formatted name'), _('FORMAT')),
4162 4162 ('', 'switch-parent', None, _('diff against the second parent')),
4163 4163 ('r', 'rev', [],
4164 4164 _('revisions to export'), _('REV')),
4165 4165 ] + diffopts,
4166 4166 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4167 4167 "^forget":
4168 4168 (forget,
4169 4169 [] + walkopts,
4170 4170 _('[OPTION]... FILE...')),
4171 4171 "grep":
4172 4172 (grep,
4173 4173 [('0', 'print0', None, _('end fields with NUL')),
4174 4174 ('', 'all', None, _('print all revisions that match')),
4175 4175 ('f', 'follow', None,
4176 4176 _('follow changeset history,'
4177 4177 ' or file history across copies and renames')),
4178 4178 ('i', 'ignore-case', None, _('ignore case when matching')),
4179 4179 ('l', 'files-with-matches', None,
4180 4180 _('print only filenames and revisions that match')),
4181 4181 ('n', 'line-number', None, _('print matching line numbers')),
4182 4182 ('r', 'rev', [],
4183 4183 _('only search files changed within revision range'), _('REV')),
4184 4184 ('u', 'user', None, _('list the author (long with -v)')),
4185 4185 ('d', 'date', None, _('list the date (short with -q)')),
4186 4186 ] + walkopts,
4187 4187 _('[OPTION]... PATTERN [FILE]...')),
4188 4188 "heads":
4189 4189 (heads,
4190 4190 [('r', 'rev', '',
4191 4191 _('show only heads which are descendants of REV'), _('REV')),
4192 4192 ('t', 'topo', False, _('show topological heads only')),
4193 4193 ('a', 'active', False,
4194 4194 _('show active branchheads only (DEPRECATED)')),
4195 4195 ('c', 'closed', False,
4196 4196 _('show normal and closed branch heads')),
4197 4197 ] + templateopts,
4198 4198 _('[-ac] [-r REV] [REV]...')),
4199 4199 "help": (help_, [], _('[TOPIC]')),
4200 4200 "identify|id":
4201 4201 (identify,
4202 4202 [('r', 'rev', '',
4203 4203 _('identify the specified revision'), _('REV')),
4204 4204 ('n', 'num', None, _('show local revision number')),
4205 4205 ('i', 'id', None, _('show global revision id')),
4206 4206 ('b', 'branch', None, _('show branch')),
4207 4207 ('t', 'tags', None, _('show tags'))],
4208 4208 _('[-nibt] [-r REV] [SOURCE]')),
4209 4209 "import|patch":
4210 4210 (import_,
4211 4211 [('p', 'strip', 1,
4212 4212 _('directory strip option for patch. This has the same '
4213 4213 'meaning as the corresponding patch option'),
4214 4214 _('NUM')),
4215 4215 ('b', 'base', '',
4216 4216 _('base path'), _('PATH')),
4217 4217 ('f', 'force', None,
4218 4218 _('skip check for outstanding uncommitted changes')),
4219 4219 ('', 'no-commit', None,
4220 4220 _("don't commit, just update the working directory")),
4221 4221 ('', 'exact', None,
4222 4222 _('apply patch to the nodes from which it was generated')),
4223 4223 ('', 'import-branch', None,
4224 4224 _('use any branch information in patch (implied by --exact)'))] +
4225 4225 commitopts + commitopts2 + similarityopts,
4226 4226 _('[OPTION]... PATCH...')),
4227 4227 "incoming|in":
4228 4228 (incoming,
4229 4229 [('f', 'force', None,
4230 4230 _('run even if remote repository is unrelated')),
4231 4231 ('n', 'newest-first', None, _('show newest record first')),
4232 4232 ('', 'bundle', '',
4233 4233 _('file to store the bundles into'), _('FILE')),
4234 4234 ('r', 'rev', [],
4235 4235 _('a remote changeset intended to be added'), _('REV')),
4236 4236 ('b', 'branch', [],
4237 4237 _('a specific branch you would like to pull'), _('BRANCH')),
4238 4238 ] + logopts + remoteopts,
4239 4239 _('[-p] [-n] [-M] [-f] [-r REV]...'
4240 4240 ' [--bundle FILENAME] [SOURCE]')),
4241 4241 "^init":
4242 4242 (init,
4243 4243 remoteopts,
4244 4244 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4245 4245 "locate":
4246 4246 (locate,
4247 4247 [('r', 'rev', '',
4248 4248 _('search the repository as it is in REV'), _('REV')),
4249 4249 ('0', 'print0', None,
4250 4250 _('end filenames with NUL, for use with xargs')),
4251 4251 ('f', 'fullpath', None,
4252 4252 _('print complete paths from the filesystem root')),
4253 4253 ] + walkopts,
4254 4254 _('[OPTION]... [PATTERN]...')),
4255 4255 "^log|history":
4256 4256 (log,
4257 4257 [('f', 'follow', None,
4258 4258 _('follow changeset history,'
4259 4259 ' or file history across copies and renames')),
4260 4260 ('', 'follow-first', None,
4261 4261 _('only follow the first parent of merge changesets')),
4262 4262 ('d', 'date', '',
4263 4263 _('show revisions matching date spec'), _('DATE')),
4264 4264 ('C', 'copies', None, _('show copied files')),
4265 4265 ('k', 'keyword', [],
4266 4266 _('do case-insensitive search for a given text'), _('TEXT')),
4267 4267 ('r', 'rev', [],
4268 4268 _('show the specified revision or range'), _('REV')),
4269 4269 ('', 'removed', None, _('include revisions where files were removed')),
4270 4270 ('m', 'only-merges', None, _('show only merges')),
4271 4271 ('u', 'user', [],
4272 4272 _('revisions committed by user'), _('USER')),
4273 4273 ('', 'only-branch', [],
4274 4274 _('show only changesets within the given named branch (DEPRECATED)'),
4275 4275 _('BRANCH')),
4276 4276 ('b', 'branch', [],
4277 4277 _('show changesets within the given named branch'), _('BRANCH')),
4278 4278 ('P', 'prune', [],
4279 4279 _('do not display revision or any of its ancestors'), _('REV')),
4280 4280 ] + logopts + walkopts,
4281 4281 _('[OPTION]... [FILE]')),
4282 4282 "manifest":
4283 4283 (manifest,
4284 4284 [('r', 'rev', '',
4285 4285 _('revision to display'), _('REV'))],
4286 4286 _('[-r REV]')),
4287 4287 "^merge":
4288 4288 (merge,
4289 4289 [('f', 'force', None, _('force a merge with outstanding changes')),
4290 4290 ('r', 'rev', '',
4291 4291 _('revision to merge'), _('REV')),
4292 4292 ('P', 'preview', None,
4293 4293 _('review revisions to merge (no merge is performed)'))],
4294 4294 _('[-P] [-f] [[-r] REV]')),
4295 4295 "outgoing|out":
4296 4296 (outgoing,
4297 4297 [('f', 'force', None,
4298 4298 _('run even when the destination is unrelated')),
4299 4299 ('r', 'rev', [],
4300 4300 _('a changeset intended to be included in the destination'),
4301 4301 _('REV')),
4302 4302 ('n', 'newest-first', None, _('show newest record first')),
4303 4303 ('b', 'branch', [],
4304 4304 _('a specific branch you would like to push'), _('BRANCH')),
4305 4305 ] + logopts + remoteopts,
4306 4306 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4307 4307 "parents":
4308 4308 (parents,
4309 4309 [('r', 'rev', '',
4310 4310 _('show parents of the specified revision'), _('REV')),
4311 4311 ] + templateopts,
4312 4312 _('[-r REV] [FILE]')),
4313 4313 "paths": (paths, [], _('[NAME]')),
4314 4314 "^pull":
4315 4315 (pull,
4316 4316 [('u', 'update', None,
4317 4317 _('update to new branch head if changesets were pulled')),
4318 4318 ('f', 'force', None,
4319 4319 _('run even when remote repository is unrelated')),
4320 4320 ('r', 'rev', [],
4321 4321 _('a remote changeset intended to be added'), _('REV')),
4322 4322 ('b', 'branch', [],
4323 4323 _('a specific branch you would like to pull'), _('BRANCH')),
4324 4324 ] + remoteopts,
4325 4325 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4326 4326 "^push":
4327 4327 (push,
4328 4328 [('f', 'force', None, _('force push')),
4329 4329 ('r', 'rev', [],
4330 4330 _('a changeset intended to be included in the destination'),
4331 4331 _('REV')),
4332 4332 ('b', 'branch', [],
4333 4333 _('a specific branch you would like to push'), _('BRANCH')),
4334 4334 ('', 'new-branch', False, _('allow pushing a new branch')),
4335 4335 ] + remoteopts,
4336 4336 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4337 4337 "recover": (recover, []),
4338 4338 "^remove|rm":
4339 4339 (remove,
4340 4340 [('A', 'after', None, _('record delete for missing files')),
4341 4341 ('f', 'force', None,
4342 4342 _('remove (and delete) file even if added or modified')),
4343 4343 ] + walkopts,
4344 4344 _('[OPTION]... FILE...')),
4345 4345 "rename|mv":
4346 4346 (rename,
4347 4347 [('A', 'after', None, _('record a rename that has already occurred')),
4348 4348 ('f', 'force', None,
4349 4349 _('forcibly copy over an existing managed file')),
4350 4350 ] + walkopts + dryrunopts,
4351 4351 _('[OPTION]... SOURCE... DEST')),
4352 4352 "resolve":
4353 4353 (resolve,
4354 4354 [('a', 'all', None, _('select all unresolved files')),
4355 4355 ('l', 'list', None, _('list state of files needing merge')),
4356 4356 ('m', 'mark', None, _('mark files as resolved')),
4357 4357 ('u', 'unmark', None, _('mark files as unresolved')),
4358 4358 ('n', 'no-status', None, _('hide status prefix'))]
4359 4359 + walkopts,
4360 4360 _('[OPTION]... [FILE]...')),
4361 4361 "revert":
4362 4362 (revert,
4363 4363 [('a', 'all', None, _('revert all changes when no arguments given')),
4364 4364 ('d', 'date', '',
4365 4365 _('tipmost revision matching date'), _('DATE')),
4366 4366 ('r', 'rev', '',
4367 4367 _('revert to the specified revision'), _('REV')),
4368 4368 ('', 'no-backup', None, _('do not save backup copies of files')),
4369 4369 ] + walkopts + dryrunopts,
4370 4370 _('[OPTION]... [-r REV] [NAME]...')),
4371 4371 "rollback": (rollback, dryrunopts),
4372 4372 "root": (root, []),
4373 4373 "^serve":
4374 4374 (serve,
4375 4375 [('A', 'accesslog', '',
4376 4376 _('name of access log file to write to'), _('FILE')),
4377 4377 ('d', 'daemon', None, _('run server in background')),
4378 4378 ('', 'daemon-pipefds', '',
4379 4379 _('used internally by daemon mode'), _('NUM')),
4380 4380 ('E', 'errorlog', '',
4381 4381 _('name of error log file to write to'), _('FILE')),
4382 4382 # use string type, then we can check if something was passed
4383 4383 ('p', 'port', '',
4384 4384 _('port to listen on (default: 8000)'), _('PORT')),
4385 4385 ('a', 'address', '',
4386 4386 _('address to listen on (default: all interfaces)'), _('ADDR')),
4387 4387 ('', 'prefix', '',
4388 4388 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4389 4389 ('n', 'name', '',
4390 4390 _('name to show in web pages (default: working directory)'),
4391 4391 _('NAME')),
4392 4392 ('', 'web-conf', '',
4393 4393 _('name of the hgweb config file (serve more than one repository)'),
4394 4394 _('FILE')),
4395 4395 ('', 'webdir-conf', '',
4396 4396 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4397 4397 ('', 'pid-file', '',
4398 4398 _('name of file to write process ID to'), _('FILE')),
4399 4399 ('', 'stdio', None, _('for remote clients')),
4400 4400 ('t', 'templates', '',
4401 4401 _('web templates to use'), _('TEMPLATE')),
4402 4402 ('', 'style', '',
4403 4403 _('template style to use'), _('STYLE')),
4404 4404 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4405 4405 ('', 'certificate', '',
4406 4406 _('SSL certificate file'), _('FILE'))],
4407 4407 _('[OPTION]...')),
4408 4408 "showconfig|debugconfig":
4409 4409 (showconfig,
4410 4410 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4411 4411 _('[-u] [NAME]...')),
4412 4412 "^summary|sum":
4413 4413 (summary,
4414 4414 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4415 4415 "^status|st":
4416 4416 (status,
4417 4417 [('A', 'all', None, _('show status of all files')),
4418 4418 ('m', 'modified', None, _('show only modified files')),
4419 4419 ('a', 'added', None, _('show only added files')),
4420 4420 ('r', 'removed', None, _('show only removed files')),
4421 4421 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4422 4422 ('c', 'clean', None, _('show only files without changes')),
4423 4423 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4424 4424 ('i', 'ignored', None, _('show only ignored files')),
4425 4425 ('n', 'no-status', None, _('hide status prefix')),
4426 4426 ('C', 'copies', None, _('show source of copied files')),
4427 4427 ('0', 'print0', None,
4428 4428 _('end filenames with NUL, for use with xargs')),
4429 4429 ('', 'rev', [],
4430 4430 _('show difference from revision'), _('REV')),
4431 4431 ('', 'change', '',
4432 4432 _('list the changed files of a revision'), _('REV')),
4433 4433 ] + walkopts,
4434 4434 _('[OPTION]... [FILE]...')),
4435 4435 "tag":
4436 4436 (tag,
4437 4437 [('f', 'force', None, _('replace existing tag')),
4438 4438 ('l', 'local', None, _('make the tag local')),
4439 4439 ('r', 'rev', '',
4440 4440 _('revision to tag'), _('REV')),
4441 4441 ('', 'remove', None, _('remove a tag')),
4442 4442 # -l/--local is already there, commitopts cannot be used
4443 4443 ('e', 'edit', None, _('edit commit message')),
4444 4444 ('m', 'message', '',
4445 4445 _('use <text> as commit message'), _('TEXT')),
4446 4446 ] + commitopts2,
4447 4447 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4448 4448 "tags": (tags, [], ''),
4449 4449 "tip":
4450 4450 (tip,
4451 4451 [('p', 'patch', None, _('show patch')),
4452 4452 ('g', 'git', None, _('use git extended diff format')),
4453 4453 ] + templateopts,
4454 4454 _('[-p] [-g]')),
4455 4455 "unbundle":
4456 4456 (unbundle,
4457 4457 [('u', 'update', None,
4458 4458 _('update to new branch head if changesets were unbundled'))],
4459 4459 _('[-u] FILE...')),
4460 4460 "^update|up|checkout|co":
4461 4461 (update,
4462 4462 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4463 4463 ('c', 'check', None, _('check for uncommitted changes')),
4464 4464 ('d', 'date', '',
4465 4465 _('tipmost revision matching date'), _('DATE')),
4466 4466 ('r', 'rev', '',
4467 4467 _('revision'), _('REV'))],
4468 4468 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4469 4469 "verify": (verify, []),
4470 4470 "version": (version_, []),
4471 4471 }
4472 4472
4473 4473 norepo = ("clone init version help debugcommands debugcomplete debugdata"
4474 4474 " debugindex debugindexdot debugdate debuginstall debugfsinfo"
4475 4475 " debugpushkey")
4476 4476 optionalrepo = ("identify paths serve showconfig debugancestor debugdag")
@@ -1,1088 +1,1088
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, nullrev, short, hex
9 9 from i18n import _
10 10 import ancestor, bdiff, error, util, subrepo, patch
11 11 import os, errno, stat
12 12
13 13 propertycache = util.propertycache
14 14
15 15 class changectx(object):
16 16 """A changecontext object makes access to data related to a particular
17 17 changeset convenient."""
18 18 def __init__(self, repo, changeid=''):
19 19 """changeid is a revision number, node, or tag"""
20 20 if changeid == '':
21 21 changeid = '.'
22 22 self._repo = repo
23 23 if isinstance(changeid, (long, int)):
24 24 self._rev = changeid
25 25 self._node = self._repo.changelog.node(changeid)
26 26 else:
27 27 self._node = self._repo.lookup(changeid)
28 28 self._rev = self._repo.changelog.rev(self._node)
29 29
30 30 def __str__(self):
31 31 return short(self.node())
32 32
33 33 def __int__(self):
34 34 return self.rev()
35 35
36 36 def __repr__(self):
37 37 return "<changectx %s>" % str(self)
38 38
39 39 def __hash__(self):
40 40 try:
41 41 return hash(self._rev)
42 42 except AttributeError:
43 43 return id(self)
44 44
45 45 def __eq__(self, other):
46 46 try:
47 47 return self._rev == other._rev
48 48 except AttributeError:
49 49 return False
50 50
51 51 def __ne__(self, other):
52 52 return not (self == other)
53 53
54 54 def __nonzero__(self):
55 55 return self._rev != nullrev
56 56
57 57 @propertycache
58 58 def _changeset(self):
59 59 return self._repo.changelog.read(self.node())
60 60
61 61 @propertycache
62 62 def _manifest(self):
63 63 return self._repo.manifest.read(self._changeset[0])
64 64
65 65 @propertycache
66 66 def _manifestdelta(self):
67 67 return self._repo.manifest.readdelta(self._changeset[0])
68 68
69 69 @propertycache
70 70 def _parents(self):
71 71 p = self._repo.changelog.parentrevs(self._rev)
72 72 if p[1] == nullrev:
73 73 p = p[:-1]
74 74 return [changectx(self._repo, x) for x in p]
75 75
76 76 @propertycache
77 77 def substate(self):
78 78 return subrepo.state(self, self._repo.ui)
79 79
80 80 def __contains__(self, key):
81 81 return key in self._manifest
82 82
83 83 def __getitem__(self, key):
84 84 return self.filectx(key)
85 85
86 86 def __iter__(self):
87 87 for f in sorted(self._manifest):
88 88 yield f
89 89
90 90 def changeset(self):
91 91 return self._changeset
92 92 def manifest(self):
93 93 return self._manifest
94 94 def manifestnode(self):
95 95 return self._changeset[0]
96 96
97 97 def rev(self):
98 98 return self._rev
99 99 def node(self):
100 100 return self._node
101 101 def hex(self):
102 102 return hex(self._node)
103 103 def user(self):
104 104 return self._changeset[1]
105 105 def date(self):
106 106 return self._changeset[2]
107 107 def files(self):
108 108 return self._changeset[3]
109 109 def description(self):
110 110 return self._changeset[4]
111 111 def branch(self):
112 112 return self._changeset[5].get("branch")
113 113 def extra(self):
114 114 return self._changeset[5]
115 115 def tags(self):
116 116 return self._repo.nodetags(self._node)
117 117
118 118 def parents(self):
119 119 """return contexts for each parent changeset"""
120 120 return self._parents
121 121
122 122 def p1(self):
123 123 return self._parents[0]
124 124
125 125 def p2(self):
126 126 if len(self._parents) == 2:
127 127 return self._parents[1]
128 128 return changectx(self._repo, -1)
129 129
130 130 def children(self):
131 131 """return contexts for each child changeset"""
132 132 c = self._repo.changelog.children(self._node)
133 133 return [changectx(self._repo, x) for x in c]
134 134
135 135 def ancestors(self):
136 136 for a in self._repo.changelog.ancestors(self._rev):
137 137 yield changectx(self._repo, a)
138 138
139 139 def descendants(self):
140 140 for d in self._repo.changelog.descendants(self._rev):
141 141 yield changectx(self._repo, d)
142 142
143 143 def _fileinfo(self, path):
144 144 if '_manifest' in self.__dict__:
145 145 try:
146 146 return self._manifest[path], self._manifest.flags(path)
147 147 except KeyError:
148 148 raise error.LookupError(self._node, path,
149 149 _('not found in manifest'))
150 150 if '_manifestdelta' in self.__dict__ or path in self.files():
151 151 if path in self._manifestdelta:
152 152 return self._manifestdelta[path], self._manifestdelta.flags(path)
153 153 node, flag = self._repo.manifest.find(self._changeset[0], path)
154 154 if not node:
155 155 raise error.LookupError(self._node, path,
156 156 _('not found in manifest'))
157 157
158 158 return node, flag
159 159
160 160 def filenode(self, path):
161 161 return self._fileinfo(path)[0]
162 162
163 163 def flags(self, path):
164 164 try:
165 165 return self._fileinfo(path)[1]
166 166 except error.LookupError:
167 167 return ''
168 168
169 169 def filectx(self, path, fileid=None, filelog=None):
170 170 """get a file context from this changeset"""
171 171 if fileid is None:
172 172 fileid = self.filenode(path)
173 173 return filectx(self._repo, path, fileid=fileid,
174 174 changectx=self, filelog=filelog)
175 175
176 176 def ancestor(self, c2):
177 177 """
178 178 return the ancestor context of self and c2
179 179 """
180 180 # deal with workingctxs
181 181 n2 = c2._node
182 182 if n2 == None:
183 183 n2 = c2._parents[0]._node
184 184 n = self._repo.changelog.ancestor(self._node, n2)
185 185 return changectx(self._repo, n)
186 186
187 187 def walk(self, match):
188 188 fset = set(match.files())
189 189 # for dirstate.walk, files=['.'] means "walk the whole tree".
190 190 # follow that here, too
191 191 fset.discard('.')
192 192 for fn in self:
193 193 for ffn in fset:
194 194 # match if the file is the exact name or a directory
195 195 if ffn == fn or fn.startswith("%s/" % ffn):
196 196 fset.remove(ffn)
197 197 break
198 198 if match(fn):
199 199 yield fn
200 200 for fn in sorted(fset):
201 if match.bad(fn, _('No such file in rev %s') % self) and match(fn):
201 if match.bad(fn, _('no such file in rev %s') % self) and match(fn):
202 202 yield fn
203 203
204 204 def sub(self, path):
205 205 return subrepo.subrepo(self, path)
206 206
207 207 def diff(self, ctx2=None, match=None, **opts):
208 208 """Returns a diff generator for the given contexts and matcher"""
209 209 if ctx2 is None:
210 210 ctx2 = self.p1()
211 211 if ctx2 is not None and not isinstance(ctx2, changectx):
212 212 ctx2 = self._repo[ctx2]
213 213 diffopts = patch.diffopts(self._repo.ui, opts)
214 214 return patch.diff(self._repo, ctx2.node(), self.node(),
215 215 match=match, opts=diffopts)
216 216
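# --- Editor's illustration (not part of Mercurial) --------------------------
# A minimal sketch, assuming a repository object `repo` (e.g. opened with
# mercurial.hg.repository), of the read-only access changectx provides:
# repo[changeid] builds the context, and indexing the context with a path
# yields a filectx for that file.  The helper name is hypothetical.
def _example_describe(repo, changeid='.'):
    ctx = repo[changeid]
    print '%d:%s by %s on branch %s' % (ctx.rev(), ctx, ctx.user(),
                                        ctx.branch())
    for f in ctx.files():       # files touched by this changeset
        if f in ctx:            # still present in its manifest
            print '  %s (%d bytes)' % (f, len(ctx[f].data()))
# -----------------------------------------------------------------------------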
217 217 class filectx(object):
218 218 """A filecontext object makes access to data related to a particular
219 219 filerevision convenient."""
220 220 def __init__(self, repo, path, changeid=None, fileid=None,
221 221 filelog=None, changectx=None):
222 222 """changeid can be a changeset revision, node, or tag.
223 223 fileid can be a file revision or node."""
224 224 self._repo = repo
225 225 self._path = path
226 226
227 227 assert (changeid is not None
228 228 or fileid is not None
229 229 or changectx is not None), \
230 230 ("bad args: changeid=%r, fileid=%r, changectx=%r"
231 231 % (changeid, fileid, changectx))
232 232
233 233 if filelog:
234 234 self._filelog = filelog
235 235
236 236 if changeid is not None:
237 237 self._changeid = changeid
238 238 if changectx is not None:
239 239 self._changectx = changectx
240 240 if fileid is not None:
241 241 self._fileid = fileid
242 242
243 243 @propertycache
244 244 def _changectx(self):
245 245 return changectx(self._repo, self._changeid)
246 246
247 247 @propertycache
248 248 def _filelog(self):
249 249 return self._repo.file(self._path)
250 250
251 251 @propertycache
252 252 def _changeid(self):
253 253 if '_changectx' in self.__dict__:
254 254 return self._changectx.rev()
255 255 else:
256 256 return self._filelog.linkrev(self._filerev)
257 257
258 258 @propertycache
259 259 def _filenode(self):
260 260 if '_fileid' in self.__dict__:
261 261 return self._filelog.lookup(self._fileid)
262 262 else:
263 263 return self._changectx.filenode(self._path)
264 264
265 265 @propertycache
266 266 def _filerev(self):
267 267 return self._filelog.rev(self._filenode)
268 268
269 269 @propertycache
270 270 def _repopath(self):
271 271 return self._path
272 272
273 273 def __nonzero__(self):
274 274 try:
275 275 self._filenode
276 276 return True
277 277 except error.LookupError:
278 278 # file is missing
279 279 return False
280 280
281 281 def __str__(self):
282 282 return "%s@%s" % (self.path(), short(self.node()))
283 283
284 284 def __repr__(self):
285 285 return "<filectx %s>" % str(self)
286 286
287 287 def __hash__(self):
288 288 try:
289 289 return hash((self._path, self._filenode))
290 290 except AttributeError:
291 291 return id(self)
292 292
293 293 def __eq__(self, other):
294 294 try:
295 295 return (self._path == other._path
296 296 and self._filenode == other._filenode)
297 297 except AttributeError:
298 298 return False
299 299
300 300 def __ne__(self, other):
301 301 return not (self == other)
302 302
303 303 def filectx(self, fileid):
304 304 '''opens an arbitrary revision of the file without
305 305 opening a new filelog'''
306 306 return filectx(self._repo, self._path, fileid=fileid,
307 307 filelog=self._filelog)
308 308
309 309 def filerev(self):
310 310 return self._filerev
311 311 def filenode(self):
312 312 return self._filenode
313 313 def flags(self):
314 314 return self._changectx.flags(self._path)
315 315 def filelog(self):
316 316 return self._filelog
317 317
318 318 def rev(self):
319 319 if '_changectx' in self.__dict__:
320 320 return self._changectx.rev()
321 321 if '_changeid' in self.__dict__:
322 322 return self._changectx.rev()
323 323 return self._filelog.linkrev(self._filerev)
324 324
325 325 def linkrev(self):
326 326 return self._filelog.linkrev(self._filerev)
327 327 def node(self):
328 328 return self._changectx.node()
329 329 def hex(self):
330 330 return hex(self.node())
331 331 def user(self):
332 332 return self._changectx.user()
333 333 def date(self):
334 334 return self._changectx.date()
335 335 def files(self):
336 336 return self._changectx.files()
337 337 def description(self):
338 338 return self._changectx.description()
339 339 def branch(self):
340 340 return self._changectx.branch()
341 341 def extra(self):
342 342 return self._changectx.extra()
343 343 def manifest(self):
344 344 return self._changectx.manifest()
345 345 def changectx(self):
346 346 return self._changectx
347 347
348 348 def data(self):
349 349 return self._filelog.read(self._filenode)
350 350 def path(self):
351 351 return self._path
352 352 def size(self):
353 353 return self._filelog.size(self._filerev)
354 354
355 355 def cmp(self, fctx):
356 356 """compare with other file context
357 357
358 358 returns True if different from fctx.
359 359 """
360 360 return self._filelog.cmp(self._filenode, fctx.data())
361 361
362 362 def renamed(self):
363 363 """check if file was actually renamed in this changeset revision
364 364
365 365 If a rename is logged in the file revision, we report the copy for the
366 366 changeset only if the file revision's linkrev points back to the changeset
367 367 in question or if both changeset parents contain different file revisions.
368 368 """
369 369
370 370 renamed = self._filelog.renamed(self._filenode)
371 371 if not renamed:
372 372 return renamed
373 373
374 374 if self.rev() == self.linkrev():
375 375 return renamed
376 376
377 377 name = self.path()
378 378 fnode = self._filenode
379 379 for p in self._changectx.parents():
380 380 try:
381 381 if fnode == p.filenode(name):
382 382 return None
383 383 except error.LookupError:
384 384 pass
385 385 return renamed
386 386
387 387 def parents(self):
388 388 p = self._path
389 389 fl = self._filelog
390 390 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
391 391
392 392 r = self._filelog.renamed(self._filenode)
393 393 if r:
394 394 pl[0] = (r[0], r[1], None)
395 395
396 396 return [filectx(self._repo, p, fileid=n, filelog=l)
397 397 for p, n, l in pl if n != nullid]
398 398
399 399 def children(self):
400 400 # hard for renames
401 401 c = self._filelog.children(self._filenode)
402 402 return [filectx(self._repo, self._path, fileid=x,
403 403 filelog=self._filelog) for x in c]
404 404
405 405 def annotate(self, follow=False, linenumber=None):
406 406 '''returns a list of tuples of (ctx, line) for each line
407 407 in the file, where ctx is the filectx of the node where
408 408 that line was last changed.
409 409 If the "linenumber" parameter is not None, this instead returns
410 410 tuples of ((ctx, linenumber), line) for each line, where
411 411 linenumber is the line number of the line's first appearance
412 412 in the managed file.
413 413 To reduce annotation cost, if the "linenumber" parameter is
414 414 False, the fixed value False is returned as the linenumber
415 415 instead.'''
416 416
417 417 def decorate_compat(text, rev):
418 418 return ([rev] * len(text.splitlines()), text)
419 419
420 420 def without_linenumber(text, rev):
421 421 return ([(rev, False)] * len(text.splitlines()), text)
422 422
423 423 def with_linenumber(text, rev):
424 424 size = len(text.splitlines())
425 425 return ([(rev, i) for i in xrange(1, size + 1)], text)
426 426
427 427 decorate = (((linenumber is None) and decorate_compat) or
428 428 (linenumber and with_linenumber) or
429 429 without_linenumber)
430 430
431 431 def pair(parent, child):
432 432 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
433 433 child[0][b1:b2] = parent[0][a1:a2]
434 434 return child
435 435
436 436 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
437 437 def getctx(path, fileid):
438 438 log = path == self._path and self._filelog or getlog(path)
439 439 return filectx(self._repo, path, fileid=fileid, filelog=log)
440 440 getctx = util.lrucachefunc(getctx)
441 441
442 442 def parents(f):
443 443 # we want to reuse filectx objects as much as possible
444 444 p = f._path
445 445 if f._filerev is None: # working dir
446 446 pl = [(n.path(), n.filerev()) for n in f.parents()]
447 447 else:
448 448 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
449 449
450 450 if follow:
451 451 r = f.renamed()
452 452 if r:
453 453 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
454 454
455 455 return [getctx(p, n) for p, n in pl if n != nullrev]
456 456
457 457 # use linkrev to find the first changeset where self appeared
458 458 if self.rev() != self.linkrev():
459 459 base = self.filectx(self.filerev())
460 460 else:
461 461 base = self
462 462
463 463 # find all ancestors
464 464 needed = {base: 1}
465 465 visit = [base]
466 466 files = [base._path]
467 467 while visit:
468 468 f = visit.pop(0)
469 469 for p in parents(f):
470 470 if p not in needed:
471 471 needed[p] = 1
472 472 visit.append(p)
473 473 if p._path not in files:
474 474 files.append(p._path)
475 475 else:
476 476 # count how many times we'll use this
477 477 needed[p] += 1
478 478
479 479 # sort by revision (per file) which is a topological order
480 480 visit = []
481 481 for f in files:
482 482 visit.extend(n for n in needed if n._path == f)
483 483
484 484 hist = {}
485 485 for f in sorted(visit, key=lambda x: x.rev()):
486 486 curr = decorate(f.data(), f)
487 487 for p in parents(f):
488 488 curr = pair(hist[p], curr)
489 489 # trim the history of unneeded revs
490 490 needed[p] -= 1
491 491 if not needed[p]:
492 492 del hist[p]
493 493 hist[f] = curr
494 494
495 495 return zip(hist[f][0], hist[f][1].splitlines(True))
496 496
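# Editor's note (not part of Mercurial): in annotate() above, each file
# revision's text is "decorated" with one annotation entry per line, and
# pair() copies entries from a parent onto the child wherever bdiff.blocks()
# reports a matching block (parent lines a1:a2 equal child lines b1:b2).
# Lines not covered by any parent's blocks keep the child revision as their
# origin.  For example, if a parent's lines 0-2 survive unchanged as the
# child's lines 1-3, blocks() yields (0, 3, 1, 4) and pair() overwrites
# child[0][1:4] with parent[0][0:3].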
497 497 def ancestor(self, fc2, actx=None):
498 498 """
499 499 find the common ancestor file context, if any, of self, and fc2
500 500
501 501 If actx is given, it must be the changectx of the common ancestor
502 502 of self's and fc2's respective changesets.
503 503 """
504 504
505 505 if actx is None:
506 506 actx = self.changectx().ancestor(fc2.changectx())
507 507
508 508 # the trivial case: changesets are unrelated, files must be too
509 509 if not actx:
510 510 return None
511 511
512 512 # the easy case: no (relevant) renames
513 513 if fc2.path() == self.path() and self.path() in actx:
514 514 return actx[self.path()]
515 515 acache = {}
516 516
517 517 # prime the ancestor cache for the working directory
518 518 for c in (self, fc2):
519 519 if c._filerev is None:
520 520 pl = [(n.path(), n.filenode()) for n in c.parents()]
521 521 acache[(c._path, None)] = pl
522 522
523 523 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
524 524 def parents(vertex):
525 525 if vertex in acache:
526 526 return acache[vertex]
527 527 f, n = vertex
528 528 if f not in flcache:
529 529 flcache[f] = self._repo.file(f)
530 530 fl = flcache[f]
531 531 pl = [(f, p) for p in fl.parents(n) if p != nullid]
532 532 re = fl.renamed(n)
533 533 if re:
534 534 pl.append(re)
535 535 acache[vertex] = pl
536 536 return pl
537 537
538 538 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
539 539 v = ancestor.ancestor(a, b, parents)
540 540 if v:
541 541 f, n = v
542 542 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
543 543
544 544 return None
545 545
546 546 def ancestors(self):
547 547 seen = set([str(self)])
548 548 visit = [self]
549 549 while visit:
550 550 for parent in visit.pop(0).parents():
551 551 s = str(parent)
552 552 if s not in seen:
553 553 visit.append(parent)
554 554 seen.add(s)
555 555 yield parent
556 556
557 557 class workingctx(changectx):
558 558 """A workingctx object makes access to data related to
559 559 the current working directory convenient.
560 560 date - any valid date string or (unixtime, offset), or None.
561 561 user - username string, or None.
562 562 extra - a dictionary of extra values, or None.
563 563 changes - a list of file lists as returned by localrepo.status()
564 564 or None to use the repository status.
565 565 """
566 566 def __init__(self, repo, text="", user=None, date=None, extra=None,
567 567 changes=None):
568 568 self._repo = repo
569 569 self._rev = None
570 570 self._node = None
571 571 self._text = text
572 572 if date:
573 573 self._date = util.parsedate(date)
574 574 if user:
575 575 self._user = user
576 576 if changes:
577 577 self._status = list(changes[:4])
578 578 self._unknown = changes[4]
579 579 self._ignored = changes[5]
580 580 self._clean = changes[6]
581 581 else:
582 582 self._unknown = None
583 583 self._ignored = None
584 584 self._clean = None
585 585
586 586 self._extra = {}
587 587 if extra:
588 588 self._extra = extra.copy()
589 589 if 'branch' not in self._extra:
590 590 branch = self._repo.dirstate.branch()
591 591 try:
592 592 branch = branch.decode('UTF-8').encode('UTF-8')
593 593 except UnicodeDecodeError:
594 594 raise util.Abort(_('branch name not in UTF-8!'))
595 595 self._extra['branch'] = branch
596 596 if self._extra['branch'] == '':
597 597 self._extra['branch'] = 'default'
598 598
599 599 def __str__(self):
600 600 return str(self._parents[0]) + "+"
601 601
602 602 def __nonzero__(self):
603 603 return True
604 604
605 605 def __contains__(self, key):
606 606 return self._repo.dirstate[key] not in "?r"
607 607
608 608 @propertycache
609 609 def _manifest(self):
610 610 """generate a manifest corresponding to the working directory"""
611 611
612 612 if self._unknown is None:
613 613 self.status(unknown=True)
614 614
615 615 man = self._parents[0].manifest().copy()
616 616 copied = self._repo.dirstate.copies()
617 617 if len(self._parents) > 1:
618 618 man2 = self.p2().manifest()
619 619 def getman(f):
620 620 if f in man:
621 621 return man
622 622 return man2
623 623 else:
624 624 getman = lambda f: man
625 625 def cf(f):
626 626 f = copied.get(f, f)
627 627 return getman(f).flags(f)
628 628 ff = self._repo.dirstate.flagfunc(cf)
629 629 modified, added, removed, deleted = self._status
630 630 unknown = self._unknown
631 631 for i, l in (("a", added), ("m", modified), ("u", unknown)):
632 632 for f in l:
633 633 orig = copied.get(f, f)
634 634 man[f] = getman(orig).get(orig, nullid) + i
635 635 try:
636 636 man.set(f, ff(f))
637 637 except OSError:
638 638 pass
639 639
640 640 for f in deleted + removed:
641 641 if f in man:
642 642 del man[f]
643 643
644 644 return man
645 645
646 646 @propertycache
647 647 def _status(self):
648 648 return self._repo.status()[:4]
649 649
650 650 @propertycache
651 651 def _user(self):
652 652 return self._repo.ui.username()
653 653
654 654 @propertycache
655 655 def _date(self):
656 656 return util.makedate()
657 657
658 658 @propertycache
659 659 def _parents(self):
660 660 p = self._repo.dirstate.parents()
661 661 if p[1] == nullid:
662 662 p = p[:-1]
663 663 self._parents = [changectx(self._repo, x) for x in p]
664 664 return self._parents
665 665
666 666 def status(self, ignored=False, clean=False, unknown=False):
667 667 """Explicit status query
668 668 Unless this method is used to query the working copy status, the
669 669 _status property will implicitly read the status using its default
670 670 arguments."""
671 671 stat = self._repo.status(ignored=ignored, clean=clean, unknown=unknown)
672 672 self._unknown = self._ignored = self._clean = None
673 673 if unknown:
674 674 self._unknown = stat[4]
675 675 if ignored:
676 676 self._ignored = stat[5]
677 677 if clean:
678 678 self._clean = stat[6]
679 679 self._status = stat[:4]
680 680 return stat
681 681
682 682 def manifest(self):
683 683 return self._manifest
684 684 def user(self):
685 685 return self._user or self._repo.ui.username()
686 686 def date(self):
687 687 return self._date
688 688 def description(self):
689 689 return self._text
690 690 def files(self):
691 691 return sorted(self._status[0] + self._status[1] + self._status[2])
692 692
693 693 def modified(self):
694 694 return self._status[0]
695 695 def added(self):
696 696 return self._status[1]
697 697 def removed(self):
698 698 return self._status[2]
699 699 def deleted(self):
700 700 return self._status[3]
701 701 def unknown(self):
702 702 assert self._unknown is not None # must call status first
703 703 return self._unknown
704 704 def ignored(self):
705 705 assert self._ignored is not None # must call status first
706 706 return self._ignored
707 707 def clean(self):
708 708 assert self._clean is not None # must call status first
709 709 return self._clean
710 710 def branch(self):
711 711 return self._extra['branch']
712 712 def extra(self):
713 713 return self._extra
714 714
715 715 def tags(self):
716 716 t = []
717 717 for p in self.parents(): t.extend(p.tags())
718 718 return t
719 719
720 720 def children(self):
721 721 return []
722 722
723 723 def flags(self, path):
724 724 if '_manifest' in self.__dict__:
725 725 try:
726 726 return self._manifest.flags(path)
727 727 except KeyError:
728 728 return ''
729 729
730 730 orig = self._repo.dirstate.copies().get(path, path)
731 731
732 732 def findflag(ctx):
733 733 mnode = ctx.changeset()[0]
734 734 node, flag = self._repo.manifest.find(mnode, orig)
735 735 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
736 736 try:
737 737 return ff(path)
738 738 except OSError:
739 739 pass
740 740
741 741 flag = findflag(self._parents[0])
742 742 if flag is None and len(self.parents()) > 1:
743 743 flag = findflag(self._parents[1])
744 744 if flag is None or self._repo.dirstate[path] == 'r':
745 745 return ''
746 746 return flag
747 747
748 748 def filectx(self, path, filelog=None):
749 749 """get a file context from the working directory"""
750 750 return workingfilectx(self._repo, path, workingctx=self,
751 751 filelog=filelog)
752 752
753 753 def ancestor(self, c2):
754 754 """return the ancestor context of self and c2"""
755 755 return self._parents[0].ancestor(c2) # punt on two parents for now
756 756
757 757 def walk(self, match):
758 758 return sorted(self._repo.dirstate.walk(match, self.substate.keys(),
759 759 True, False))
760 760
761 761 def dirty(self, missing=False):
762 762 "check whether a working directory is modified"
763 763 # check subrepos first
764 764 for s in self.substate:
765 765 if self.sub(s).dirty():
766 766 return True
767 767 # check current working dir
768 768 return (self.p2() or self.branch() != self.p1().branch() or
769 769 self.modified() or self.added() or self.removed() or
770 770 (missing and self.deleted()))
771 771
772 772 def add(self, list):
773 773 wlock = self._repo.wlock()
774 774 ui, ds = self._repo.ui, self._repo.dirstate
775 775 try:
776 776 rejected = []
777 777 for f in list:
778 778 p = self._repo.wjoin(f)
779 779 try:
780 780 st = os.lstat(p)
781 781 except:
782 782 ui.warn(_("%s does not exist!\n") % f)
783 783 rejected.append(f)
784 784 continue
785 785 if st.st_size > 10000000:
786 786 ui.warn(_("%s: up to %d MB of RAM may be required "
787 787 "to manage this file\n"
788 788 "(use 'hg revert %s' to cancel the "
789 789 "pending addition)\n")
790 790 % (f, 3 * st.st_size // 1000000, f))
791 791 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
792 792 ui.warn(_("%s not added: only files and symlinks "
793 793 "supported currently\n") % f)
794 794 rejected.append(p)
795 795 elif ds[f] in 'amn':
796 796 ui.warn(_("%s already tracked!\n") % f)
797 797 elif ds[f] == 'r':
798 798 ds.normallookup(f)
799 799 else:
800 800 ds.add(f)
801 801 return rejected
802 802 finally:
803 803 wlock.release()
804 804
805 805 def forget(self, list):
806 806 wlock = self._repo.wlock()
807 807 try:
808 808 for f in list:
809 809 if self._repo.dirstate[f] != 'a':
810 810 self._repo.ui.warn(_("%s not added!\n") % f)
811 811 else:
812 812 self._repo.dirstate.forget(f)
813 813 finally:
814 814 wlock.release()
815 815
816 816 def remove(self, list, unlink=False):
817 817 if unlink:
818 818 for f in list:
819 819 try:
820 820 util.unlink(self._repo.wjoin(f))
821 821 except OSError, inst:
822 822 if inst.errno != errno.ENOENT:
823 823 raise
824 824 wlock = self._repo.wlock()
825 825 try:
826 826 for f in list:
827 827 if unlink and os.path.exists(self._repo.wjoin(f)):
828 828 self._repo.ui.warn(_("%s still exists!\n") % f)
829 829 elif self._repo.dirstate[f] == 'a':
830 830 self._repo.dirstate.forget(f)
831 831 elif f not in self._repo.dirstate:
832 832 self._repo.ui.warn(_("%s not tracked!\n") % f)
833 833 else:
834 834 self._repo.dirstate.remove(f)
835 835 finally:
836 836 wlock.release()
837 837
838 838 def undelete(self, list):
839 839 pctxs = self.parents()
840 840 wlock = self._repo.wlock()
841 841 try:
842 842 for f in list:
843 843 if self._repo.dirstate[f] != 'r':
844 844 self._repo.ui.warn(_("%s not removed!\n") % f)
845 845 else:
846 846 fctx = f in pctxs[0] and pctxs[0] or pctxs[1]
847 847 t = fctx.data()
848 848 self._repo.wwrite(f, t, fctx.flags())
849 849 self._repo.dirstate.normal(f)
850 850 finally:
851 851 wlock.release()
852 852
853 853 def copy(self, source, dest):
854 854 p = self._repo.wjoin(dest)
855 855 if not (os.path.exists(p) or os.path.islink(p)):
856 856 self._repo.ui.warn(_("%s does not exist!\n") % dest)
857 857 elif not (os.path.isfile(p) or os.path.islink(p)):
858 858 self._repo.ui.warn(_("copy failed: %s is not a file or a "
859 859 "symbolic link\n") % dest)
860 860 else:
861 861 wlock = self._repo.wlock()
862 862 try:
863 863 if self._repo.dirstate[dest] in '?r':
864 864 self._repo.dirstate.add(dest)
865 865 self._repo.dirstate.copy(source, dest)
866 866 finally:
867 867 wlock.release()
868 868
869 869 class workingfilectx(filectx):
870 870 """A workingfilectx object makes access to data related to a particular
871 871 file in the working directory convenient."""
872 872 def __init__(self, repo, path, filelog=None, workingctx=None):
873 873 """changeid can be a changeset revision, node, or tag.
874 874 fileid can be a file revision or node."""
875 875 self._repo = repo
876 876 self._path = path
877 877 self._changeid = None
878 878 self._filerev = self._filenode = None
879 879
880 880 if filelog:
881 881 self._filelog = filelog
882 882 if workingctx:
883 883 self._changectx = workingctx
884 884
885 885 @propertycache
886 886 def _changectx(self):
887 887 return workingctx(self._repo)
888 888
889 889 def __nonzero__(self):
890 890 return True
891 891
892 892 def __str__(self):
893 893 return "%s@%s" % (self.path(), self._changectx)
894 894
895 895 def data(self):
896 896 return self._repo.wread(self._path)
897 897 def renamed(self):
898 898 rp = self._repo.dirstate.copied(self._path)
899 899 if not rp:
900 900 return None
901 901 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
902 902
903 903 def parents(self):
904 904 '''return parent filectxs, following copies if necessary'''
905 905 def filenode(ctx, path):
906 906 return ctx._manifest.get(path, nullid)
907 907
908 908 path = self._path
909 909 fl = self._filelog
910 910 pcl = self._changectx._parents
911 911 renamed = self.renamed()
912 912
913 913 if renamed:
914 914 pl = [renamed + (None,)]
915 915 else:
916 916 pl = [(path, filenode(pcl[0], path), fl)]
917 917
918 918 for pc in pcl[1:]:
919 919 pl.append((path, filenode(pc, path), fl))
920 920
921 921 return [filectx(self._repo, p, fileid=n, filelog=l)
922 922 for p, n, l in pl if n != nullid]
923 923
924 924 def children(self):
925 925 return []
926 926
927 927 def size(self):
928 928 return os.lstat(self._repo.wjoin(self._path)).st_size
929 929 def date(self):
930 930 t, tz = self._changectx.date()
931 931 try:
932 932 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
933 933 except OSError, err:
934 934 if err.errno != errno.ENOENT:
935 935 raise
936 936 return (t, tz)
937 937
938 938 def cmp(self, fctx):
939 939 """compare with other file context
940 940
941 941 returns True if different than fctx.
942 942 """
943 943 # fctx should be a filectx (not a wfctx)
944 944 # invert comparison to reuse the same code path
945 945 return fctx.cmp(self)
946 946
947 947 class memctx(object):
948 948 """Use memctx to perform in-memory commits via localrepo.commitctx().
949 949
950 950 Revision information is supplied at initialization time, while
951 951 related file data is made available through a callback
952 952 mechanism. 'repo' is the current localrepo, 'parents' is a
953 953 sequence of two parent revisions identifiers (pass None for every
954 954 missing parent), 'text' is the commit message and 'files' lists
955 955 names of files touched by the revision (normalized and relative to
956 956 repository root).
957 957
958 958 filectxfn(repo, memctx, path) is a callable receiving the
959 959 repository, the current memctx object and the normalized path of
960 960 requested file, relative to repository root. It is fired by the
961 961 commit function for every file in 'files', but the call order is
962 962 undefined. If the file is available in the revision being
963 963 committed (updated or added), filectxfn returns a memfilectx
964 964 object. If the file was removed, filectxfn raises an
965 965 IOError. Moved files are represented by marking the source file
966 966 removed and the new file added with copy information (see
967 967 memfilectx).
968 968
969 969 'user' is the committer name and defaults to the current repository
970 970 username; 'date' is the commit date, in any format supported by
971 971 util.parsedate(), and defaults to the current date; 'extra' is a
972 972 dictionary of metadata, or is left empty.
973 973 """
974 974 def __init__(self, repo, parents, text, files, filectxfn, user=None,
975 975 date=None, extra=None):
976 976 self._repo = repo
977 977 self._rev = None
978 978 self._node = None
979 979 self._text = text
980 980 self._date = date and util.parsedate(date) or util.makedate()
981 981 self._user = user
982 982 parents = [(p or nullid) for p in parents]
983 983 p1, p2 = parents
984 984 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
985 985 files = sorted(set(files))
986 986 self._status = [files, [], [], [], [], [], []]
987 987 self._filectxfn = filectxfn
988 988
989 989 self._extra = extra and extra.copy() or {}
990 990 if 'branch' not in self._extra:
991 991 self._extra['branch'] = 'default'
992 992 elif self._extra.get('branch') == '':
993 993 self._extra['branch'] = 'default'
994 994
995 995 def __str__(self):
996 996 return str(self._parents[0]) + "+"
997 997
998 998 def __int__(self):
999 999 return self._rev
1000 1000
1001 1001 def __nonzero__(self):
1002 1002 return True
1003 1003
1004 1004 def __getitem__(self, key):
1005 1005 return self.filectx(key)
1006 1006
1007 1007 def p1(self):
1008 1008 return self._parents[0]
1009 1009 def p2(self):
1010 1010 return self._parents[1]
1011 1011
1012 1012 def user(self):
1013 1013 return self._user or self._repo.ui.username()
1014 1014 def date(self):
1015 1015 return self._date
1016 1016 def description(self):
1017 1017 return self._text
1018 1018 def files(self):
1019 1019 return self.modified()
1020 1020 def modified(self):
1021 1021 return self._status[0]
1022 1022 def added(self):
1023 1023 return self._status[1]
1024 1024 def removed(self):
1025 1025 return self._status[2]
1026 1026 def deleted(self):
1027 1027 return self._status[3]
1028 1028 def unknown(self):
1029 1029 return self._status[4]
1030 1030 def ignored(self):
1031 1031 return self._status[5]
1032 1032 def clean(self):
1033 1033 return self._status[6]
1034 1034 def branch(self):
1035 1035 return self._extra['branch']
1036 1036 def extra(self):
1037 1037 return self._extra
1038 1038 def flags(self, f):
1039 1039 return self[f].flags()
1040 1040
1041 1041 def parents(self):
1042 1042 """return contexts for each parent changeset"""
1043 1043 return self._parents
1044 1044
1045 1045 def filectx(self, path, filelog=None):
1046 1046 """get a file context from the working directory"""
1047 1047 return self._filectxfn(self._repo, self, path)
1048 1048
1049 1049 def commit(self):
1050 1050 """commit context to the repo"""
1051 1051 return self._repo.commitctx(self)
1052 1052
1053 1053 class memfilectx(object):
1054 1054 """memfilectx represents an in-memory file to commit.
1055 1055
1056 1056 See memctx for more details.
1057 1057 """
1058 1058 def __init__(self, path, data, islink=False, isexec=False, copied=None):
1059 1059 """
1060 1060 path is the normalized file path relative to repository root.
1061 1061 data is the file content as a string.
1062 1062 islink is True if the file is a symbolic link.
1063 1063 isexec is True if the file is executable.
1064 1064 copied is the source file path if current file was copied in the
1065 1065 revision being committed, or None."""
1066 1066 self._path = path
1067 1067 self._data = data
1068 1068 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1069 1069 self._copied = None
1070 1070 if copied:
1071 1071 self._copied = (copied, nullid)
1072 1072
1073 1073 def __nonzero__(self):
1074 1074 return True
1075 1075 def __str__(self):
1076 1076 return "%s@%s" % (self.path(), self._changectx)
1077 1077 def path(self):
1078 1078 return self._path
1079 1079 def data(self):
1080 1080 return self._data
1081 1081 def flags(self):
1082 1082 return self._flags
1083 1083 def isexec(self):
1084 1084 return 'x' in self._flags
1085 1085 def islink(self):
1086 1086 return 'l' in self._flags
1087 1087 def renamed(self):
1088 1088 return self._copied
@@ -1,586 +1,586
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
10 10 import util, commands, hg, fancyopts, extensions, hook, error
11 11 import cmdutil, encoding
12 12 import ui as uimod
13 13
14 14 def run():
15 15 "run the command in sys.argv"
16 16 sys.exit(dispatch(sys.argv[1:]))
17 17
18 18 def dispatch(args):
19 19 "run the command specified in args"
20 20 try:
21 21 u = uimod.ui()
22 22 if '--traceback' in args:
23 23 u.setconfig('ui', 'traceback', 'on')
24 24 except util.Abort, inst:
25 25 sys.stderr.write(_("abort: %s\n") % inst)
26 26 if inst.hint:
27 27 sys.stderr.write(_("(%s)\n") % inst.hint)
28 28 return -1
29 29 except error.ParseError, inst:
30 30 if len(inst.args) > 1:
31 31 sys.stderr.write(_("hg: parse error at %s: %s\n") %
32 32 (inst.args[1], inst.args[0]))
33 33 else:
34 34 sys.stderr.write(_("hg: parse error: %s\n") % inst.args[0])
35 35 return -1
36 36 return _runcatch(u, args)
37 37
38 38 def _runcatch(ui, args):
39 39 def catchterm(*args):
40 40 raise error.SignalInterrupt
41 41
42 42 try:
43 43 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
44 44 num = getattr(signal, name, None)
45 45 if num:
46 46 signal.signal(num, catchterm)
47 47 except ValueError:
48 48 pass # happens if called in a thread
49 49
50 50 try:
51 51 try:
52 52 # enter the debugger before command execution
53 53 if '--debugger' in args:
54 54 ui.warn(_("entering debugger - "
55 55 "type c to continue starting hg or h for help\n"))
56 56 pdb.set_trace()
57 57 try:
58 58 return _dispatch(ui, args)
59 59 finally:
60 60 ui.flush()
61 61 except:
62 62 # enter the debugger when we hit an exception
63 63 if '--debugger' in args:
64 64 traceback.print_exc()
65 65 pdb.post_mortem(sys.exc_info()[2])
66 66 ui.traceback()
67 67 raise
68 68
69 69 # Global exception handling, alphabetically
70 70 # Mercurial-specific first, followed by built-in and library exceptions
71 71 except error.AmbiguousCommand, inst:
72 72 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
73 73 (inst.args[0], " ".join(inst.args[1])))
74 74 except error.ParseError, inst:
75 75 if len(inst.args) > 1:
76 76 ui.warn(_("hg: parse error at %s: %s\n") %
77 77 (inst.args[1], inst.args[0]))
78 78 else:
79 79 ui.warn(_("hg: parse error: %s\n") % inst.args[0])
80 80 return -1
81 81 except error.LockHeld, inst:
82 82 if inst.errno == errno.ETIMEDOUT:
83 83 reason = _('timed out waiting for lock held by %s') % inst.locker
84 84 else:
85 85 reason = _('lock held by %s') % inst.locker
86 86 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
87 87 except error.LockUnavailable, inst:
88 88 ui.warn(_("abort: could not lock %s: %s\n") %
89 89 (inst.desc or inst.filename, inst.strerror))
90 90 except error.CommandError, inst:
91 91 if inst.args[0]:
92 92 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
93 93 commands.help_(ui, inst.args[0])
94 94 else:
95 95 ui.warn(_("hg: %s\n") % inst.args[1])
96 96 commands.help_(ui, 'shortlist')
97 97 except error.RepoError, inst:
98 98 ui.warn(_("abort: %s!\n") % inst)
99 99 except error.ResponseError, inst:
100 100 ui.warn(_("abort: %s") % inst.args[0])
101 101 if not isinstance(inst.args[1], basestring):
102 102 ui.warn(" %r\n" % (inst.args[1],))
103 103 elif not inst.args[1]:
104 104 ui.warn(_(" empty string\n"))
105 105 else:
106 106 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
107 107 except error.RevlogError, inst:
108 108 ui.warn(_("abort: %s!\n") % inst)
109 109 except error.SignalInterrupt:
110 110 ui.warn(_("killed!\n"))
111 111 except error.UnknownCommand, inst:
112 112 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
113 113 try:
114 114 # check if the command is in a disabled extension
115 115 # (but don't check for extensions themselves)
116 116 commands.help_(ui, inst.args[0], unknowncmd=True)
117 117 except error.UnknownCommand:
118 118 commands.help_(ui, 'shortlist')
119 119 except util.Abort, inst:
120 120 ui.warn(_("abort: %s\n") % inst)
121 121 if inst.hint:
122 122 ui.warn(_("(%s)\n") % inst.hint)
123 123 except ImportError, inst:
124 124 ui.warn(_("abort: %s!\n") % inst)
125 125 m = str(inst).split()[-1]
126 126 if m in "mpatch bdiff".split():
127 127 ui.warn(_("(did you forget to compile extensions?)\n"))
128 128 elif m in "zlib".split():
129 129 ui.warn(_("(is your Python install correct?)\n"))
130 130 except IOError, inst:
131 131 if hasattr(inst, "code"):
132 132 ui.warn(_("abort: %s\n") % inst)
133 133 elif hasattr(inst, "reason"):
134 134 try: # usually it is in the form (errno, strerror)
135 135 reason = inst.reason.args[1]
136 136 except: # it might be anything, for example a string
137 137 reason = inst.reason
138 138 ui.warn(_("abort: error: %s\n") % reason)
139 139 elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE:
140 140 if ui.debugflag:
141 141 ui.warn(_("broken pipe\n"))
142 142 elif getattr(inst, "strerror", None):
143 143 if getattr(inst, "filename", None):
144 144 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
145 145 else:
146 146 ui.warn(_("abort: %s\n") % inst.strerror)
147 147 else:
148 148 raise
149 149 except OSError, inst:
150 150 if getattr(inst, "filename", None):
151 151 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
152 152 else:
153 153 ui.warn(_("abort: %s\n") % inst.strerror)
154 154 except KeyboardInterrupt:
155 155 try:
156 156 ui.warn(_("interrupted!\n"))
157 157 except IOError, inst:
158 158 if inst.errno == errno.EPIPE:
159 159 if ui.debugflag:
160 160 ui.warn(_("\nbroken pipe\n"))
161 161 else:
162 162 raise
163 163 except MemoryError:
164 164 ui.warn(_("abort: out of memory\n"))
165 165 except SystemExit, inst:
166 166 # Commands shouldn't sys.exit directly, but give a return code.
167 167 # Just in case catch this and pass exit code to caller.
168 168 return inst.code
169 169 except socket.error, inst:
170 170 ui.warn(_("abort: %s\n") % inst.args[-1])
171 171 except:
172 172 ui.warn(_("** unknown exception encountered, details follow\n"))
173 173 ui.warn(_("** report bug details to "
174 174 "http://mercurial.selenic.com/bts/\n"))
175 175 ui.warn(_("** or mercurial@selenic.com\n"))
176 176 ui.warn(_("** Python %s\n") % sys.version.replace('\n', ''))
177 177 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
178 178 % util.version())
179 179 ui.warn(_("** Extensions loaded: %s\n")
180 180 % ", ".join([x[0] for x in extensions.extensions()]))
181 181 raise
182 182
183 183 return -1
184 184
185 185 def aliasargs(fn):
186 186 if hasattr(fn, 'args'):
187 187 return fn.args
188 188 return []
189 189
190 190 class cmdalias(object):
191 191 def __init__(self, name, definition, cmdtable):
192 192 self.name = self.cmd = name
193 193 self.definition = definition
194 194 self.args = []
195 195 self.opts = []
196 196 self.help = ''
197 197 self.norepo = True
198 198 self.badalias = False
199 199
200 200 try:
201 201 aliases, entry = cmdutil.findcmd(self.name, cmdtable)
202 202 for alias, e in cmdtable.iteritems():
203 203 if e is entry:
204 204 self.cmd = alias
205 205 break
206 206 self.shadows = True
207 207 except error.UnknownCommand:
208 208 self.shadows = False
209 209
210 210 if not self.definition:
211 211 def fn(ui, *args):
212 212 ui.warn(_("no definition for alias '%s'\n") % self.name)
213 213 return 1
214 214 self.fn = fn
215 215 self.badalias = True
216 216
217 217 return
218 218
219 219 if self.definition.startswith('!'):
220 220 def fn(ui, *args):
221 221 env = {'HG_ARGS': ' '.join((self.name,) + args)}
222 222 def _checkvar(m):
223 223 if int(m.groups()[0]) <= len(args):
224 224 return m.group()
225 225 else:
226 226 return ''
227 227 cmd = re.sub(r'\$(\d+)', _checkvar, self.definition[1:])
228 228 replace = dict((str(i + 1), arg) for i, arg in enumerate(args))
229 229 replace['0'] = self.name
230 230 replace['@'] = ' '.join(args)
231 231 cmd = util.interpolate(r'\$', replace, cmd)
232 232 return util.system(cmd, environ=env)
233 233 self.fn = fn
234 234 return
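# hgrc entry this branch would expand (illustrative values):
#     [alias]
#     show = !hg log -r $1 --template '{desc}\n'
# "$1" is kept only when a first argument is actually supplied, "$0"
# expands to the alias name and "$@" to all arguments joined by spaces.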
235 235
236 236 args = shlex.split(self.definition)
237 237 cmd = args.pop(0)
238 238 args = map(util.expandpath, args)
239 239
240 240 for invalidarg in ("--cwd", "-R", "--repository", "--repo"):
241 241 if _earlygetopt([invalidarg], args):
242 242 def fn(ui, *args):
243 243 ui.warn(_("error in definition for alias '%s': %s may only "
244 244 "be given on the command line\n")
245 245 % (self.name, invalidarg))
246 246 return 1
247 247
248 248 self.fn = fn
249 249 self.badalias = True
250 250 return
251 251
252 252 try:
253 253 tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
254 254 if len(tableentry) > 2:
255 255 self.fn, self.opts, self.help = tableentry
256 256 else:
257 257 self.fn, self.opts = tableentry
258 258
259 259 self.args = aliasargs(self.fn) + args
260 260 if cmd not in commands.norepo.split(' '):
261 261 self.norepo = False
262 262 if self.help.startswith("hg " + cmd):
263 263 # drop prefix in old-style help lines so hg shows the alias
264 264 self.help = self.help[4 + len(cmd):]
265 265 self.__doc__ = self.fn.__doc__
266 266
267 267 except error.UnknownCommand:
268 268 def fn(ui, *args):
269 269 ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
270 270 % (self.name, cmd))
271 271 try:
272 272 # check if the command is in a disabled extension
273 273 commands.help_(ui, cmd, unknowncmd=True)
274 274 except error.UnknownCommand:
275 275 pass
276 276 return 1
277 277 self.fn = fn
278 278 self.badalias = True
279 279 except error.AmbiguousCommand:
280 280 def fn(ui, *args):
281 281 ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
282 282 % (self.name, cmd))
283 283 return 1
284 284 self.fn = fn
285 285 self.badalias = True
286 286
287 287 def __call__(self, ui, *args, **opts):
288 288 if self.shadows:
289 289 ui.debug("alias '%s' shadows command\n" % self.name)
290 290
291 291 if self.definition.startswith('!'):
292 292 return self.fn(ui, *args, **opts)
293 293 else:
294 294 return util.checksignature(self.fn)(ui, *args, **opts)
295 295
296 296 def addaliases(ui, cmdtable):
297 297 # aliases are processed after extensions have been loaded, so they
298 298 # may use extension commands. Aliases can also use other alias definitions,
299 299 # but only if they have been defined prior to the current definition.
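# Example [alias] section these definitions could come from (illustrative):
#     [alias]
#     latest = log --limit 5
#     mine = latest --user alice    # may reuse an alias defined earlier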
300 300 for alias, definition in ui.configitems('alias'):
301 301 aliasdef = cmdalias(alias, definition, cmdtable)
302 302 cmdtable[aliasdef.cmd] = (aliasdef, aliasdef.opts, aliasdef.help)
303 303 if aliasdef.norepo:
304 304 commands.norepo += ' %s' % alias
305 305
306 306 def _parse(ui, args):
307 307 options = {}
308 308 cmdoptions = {}
309 309
310 310 try:
311 311 args = fancyopts.fancyopts(args, commands.globalopts, options)
312 312 except fancyopts.getopt.GetoptError, inst:
313 313 raise error.CommandError(None, inst)
314 314
315 315 if args:
316 316 cmd, args = args[0], args[1:]
317 317 aliases, entry = cmdutil.findcmd(cmd, commands.table,
318 318 ui.config("ui", "strict"))
319 319 cmd = aliases[0]
320 320 args = aliasargs(entry[0]) + args
321 321 defaults = ui.config("defaults", cmd)
322 322 if defaults:
323 323 args = map(util.expandpath, shlex.split(defaults)) + args
324 324 c = list(entry[1])
325 325 else:
326 326 cmd = None
327 327 c = []
328 328
329 329 # combine global options into local
330 330 for o in commands.globalopts:
331 331 c.append((o[0], o[1], options[o[1]], o[3]))
332 332
333 333 try:
334 334 args = fancyopts.fancyopts(args, c, cmdoptions, True)
335 335 except fancyopts.getopt.GetoptError, inst:
336 336 raise error.CommandError(cmd, inst)
337 337
338 338 # separate global options back out
339 339 for o in commands.globalopts:
340 340 n = o[1]
341 341 options[n] = cmdoptions[n]
342 342 del cmdoptions[n]
343 343
344 344 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
345 345
346 346 def _parseconfig(ui, config):
347 347 """parse the --config options from the command line"""
348 348 for cfg in config:
349 349 try:
350 350 name, value = cfg.split('=', 1)
351 351 section, name = name.split('.', 1)
352 352 if not section or not name:
353 353 raise IndexError
354 354 ui.setconfig(section, name, value)
355 355 except (IndexError, ValueError):
356 356 raise util.Abort(_('malformed --config option: %r '
357 357 '(use --config section.name=value)') % cfg)
358 358
359 359 def _earlygetopt(aliases, args):
360 360 """Return list of values for an option (or aliases).
361 361
362 362 The values are listed in the order they appear in args.
363 363 The options and values are removed from args.
364 364 """
365 365 try:
366 366 argcount = args.index("--")
367 367 except ValueError:
368 368 argcount = len(args)
369 369 shortopts = [opt for opt in aliases if len(opt) == 2]
370 370 values = []
371 371 pos = 0
372 372 while pos < argcount:
373 373 if args[pos] in aliases:
374 374 if pos + 1 >= argcount:
375 375 # ignore and let getopt report an error if there is no value
376 376 break
377 377 del args[pos]
378 378 values.append(args.pop(pos))
379 379 argcount -= 2
380 380 elif args[pos][:2] in shortopts:
381 381 # short option can have no following space, e.g. hg log -Rfoo
382 382 values.append(args.pop(pos)[2:])
383 383 argcount -= 1
384 384 else:
385 385 pos += 1
386 386 return values
387 387
388 388 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
389 389 # run pre-hook, and abort if it fails
390 390 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs),
391 391 pats=cmdpats, opts=cmdoptions)
392 392 if ret:
393 393 return ret
394 394 ret = _runcommand(ui, options, cmd, d)
395 395 # run post-hook, passing command result
396 396 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
397 397 result=ret, pats=cmdpats, opts=cmdoptions)
398 398 return ret
399 399
400 400 _loaded = set()
401 401 def _dispatch(ui, args):
402 402 # read --config before doing anything else
403 403 # (e.g. to change trust settings for reading .hg/hgrc)
404 404 _parseconfig(ui, _earlygetopt(['--config'], args))
405 405
406 406 # check for cwd
407 407 cwd = _earlygetopt(['--cwd'], args)
408 408 if cwd:
409 409 os.chdir(cwd[-1])
410 410
411 411 # read the local repository .hgrc into a local ui object
412 412 try:
413 413 wd = os.getcwd()
414 414 except OSError, e:
415 415 raise util.Abort(_("error getting current working directory: %s") %
416 416 e.strerror)
417 417 path = cmdutil.findrepo(wd) or ""
418 418 if not path:
419 419 lui = ui
420 420 else:
421 421 try:
422 422 lui = ui.copy()
423 423 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
424 424 except IOError:
425 425 pass
426 426
427 427 # now we can expand paths, even ones in .hg/hgrc
428 428 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
429 429 if rpath:
430 430 path = lui.expandpath(rpath[-1])
431 431 lui = ui.copy()
432 432 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
433 433
434 434 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
435 435 # reposetup. Programs like TortoiseHg will call _dispatch several
436 436 # times so we keep track of configured extensions in _loaded.
437 437 extensions.loadall(lui)
438 438 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
439 439 # Propagate any changes to lui.__class__ by extensions
440 440 ui.__class__ = lui.__class__
441 441
442 442 # (uisetup and extsetup are handled in extensions.loadall)
443 443
444 444 for name, module in exts:
445 445 cmdtable = getattr(module, 'cmdtable', {})
446 446 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
447 447 if overrides:
448 448 ui.warn(_("extension '%s' overrides commands: %s\n")
449 449 % (name, " ".join(overrides)))
450 450 commands.table.update(cmdtable)
451 451 _loaded.add(name)
452 452
453 453 # (reposetup is handled in hg.repository)
454 454
455 455 addaliases(lui, commands.table)
456 456
457 457 # check for fallback encoding
458 458 fallback = lui.config('ui', 'fallbackencoding')
459 459 if fallback:
460 460 encoding.fallbackencoding = fallback
461 461
462 462 fullargs = args
463 463 cmd, func, args, options, cmdoptions = _parse(lui, args)
464 464
465 465 if options["config"]:
466 raise util.Abort(_("Option --config may not be abbreviated!"))
466 raise util.Abort(_("option --config may not be abbreviated!"))
467 467 if options["cwd"]:
468 raise util.Abort(_("Option --cwd may not be abbreviated!"))
468 raise util.Abort(_("option --cwd may not be abbreviated!"))
469 469 if options["repository"]:
470 470 raise util.Abort(_(
471 471 "Option -R has to be separated from other options (e.g. not -qR) "
472 472 "and --repository may only be abbreviated as --repo!"))
473 473
474 474 if options["encoding"]:
475 475 encoding.encoding = options["encoding"]
476 476 if options["encodingmode"]:
477 477 encoding.encodingmode = options["encodingmode"]
478 478 if options["time"]:
479 479 def get_times():
480 480 t = os.times()
481 481 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
482 482 t = (t[0], t[1], t[2], t[3], time.clock())
483 483 return t
484 484 s = get_times()
485 485 def print_time():
486 486 t = get_times()
487 487 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
488 488 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
489 489 atexit.register(print_time)
490 490
491 491 if options['verbose'] or options['debug'] or options['quiet']:
492 492 ui.setconfig('ui', 'verbose', str(bool(options['verbose'])))
493 493 ui.setconfig('ui', 'debug', str(bool(options['debug'])))
494 494 ui.setconfig('ui', 'quiet', str(bool(options['quiet'])))
495 495 if options['traceback']:
496 496 ui.setconfig('ui', 'traceback', 'on')
497 497 if options['noninteractive']:
498 498 ui.setconfig('ui', 'interactive', 'off')
499 499
500 500 if options['help']:
501 501 return commands.help_(ui, cmd, options['version'])
502 502 elif options['version']:
503 503 return commands.version_(ui)
504 504 elif not cmd:
505 505 return commands.help_(ui, 'shortlist')
506 506
507 507 repo = None
508 508 cmdpats = args[:]
509 509 if cmd not in commands.norepo.split():
510 510 try:
511 511 repo = hg.repository(ui, path=path)
512 512 ui = repo.ui
513 513 if not repo.local():
514 514 raise util.Abort(_("repository '%s' is not local") % path)
515 515 ui.setconfig("bundle", "mainreporoot", repo.root)
516 516 except error.RepoError:
517 517 if cmd not in commands.optionalrepo.split():
518 518 if args and not path: # try to infer -R from command args
519 519 repos = map(cmdutil.findrepo, args)
520 520 guess = repos[0]
521 521 if guess and repos.count(guess) == len(repos):
522 522 return _dispatch(ui, ['--repository', guess] + fullargs)
523 523 if not path:
524 524 raise error.RepoError(_("There is no Mercurial repository"
525 525 " here (.hg not found)"))
526 526 raise
527 527 args.insert(0, repo)
528 528 elif rpath:
529 529 ui.warn(_("warning: --repository ignored\n"))
530 530
531 531 msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
532 532 ui.log("command", msg + "\n")
533 533 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
534 534 return runcommand(lui, repo, cmd, fullargs, ui, options, d,
535 535 cmdpats, cmdoptions)
536 536
537 537 def _runcommand(ui, options, cmd, cmdfunc):
538 538 def checkargs():
539 539 try:
540 540 return cmdfunc()
541 541 except error.SignatureError:
542 542 raise error.CommandError(cmd, _("invalid arguments"))
543 543
544 544 if options['profile']:
545 545 format = ui.config('profiling', 'format', default='text')
546 546
547 547 if not format in ['text', 'kcachegrind']:
548 548 ui.warn(_("unrecognized profiling format '%s'"
549 549 " - Ignored\n") % format)
550 550 format = 'text'
551 551
552 552 output = ui.config('profiling', 'output')
553 553
554 554 if output:
555 555 path = ui.expandpath(output)
556 556 ostream = open(path, 'wb')
557 557 else:
558 558 ostream = sys.stderr
559 559
560 560 try:
561 561 from mercurial import lsprof
562 562 except ImportError:
563 563 raise util.Abort(_(
564 564 'lsprof not available - install from '
565 565 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
566 566 p = lsprof.Profiler()
567 567 p.enable(subcalls=True)
568 568 try:
569 569 return checkargs()
570 570 finally:
571 571 p.disable()
572 572
573 573 if format == 'kcachegrind':
574 574 import lsprofcalltree
575 575 calltree = lsprofcalltree.KCacheGrind(p)
576 576 calltree.output(ostream)
577 577 else:
578 578 # format == 'text'
579 579 stats = lsprof.Stats(p.getstats())
580 580 stats.sort()
581 581 stats.pprint(top=10, file=ostream, climit=5)
582 582
583 583 if output:
584 584 ostream.close()
585 585 else:
586 586 return checkargs()
@@ -1,155 +1,155
1 1 # changelog bisection for mercurial
2 2 #
3 3 # Copyright 2007 Matt Mackall
4 4 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
5 5 #
6 6 # Inspired by git bisect, extension skeleton taken from mq.py.
7 7 #
8 8 # This software may be used and distributed according to the terms of the
9 9 # GNU General Public License version 2 or any later version.
10 10
11 11 import os
12 12 from i18n import _
13 13 from node import short, hex
14 14 import util
15 15
16 16 def bisect(changelog, state):
17 17 """find the next node (if any) for testing during a bisect search.
18 18 returns a (nodes, number, good) tuple.
19 19
20 20 'nodes' is the final result of the bisect if 'number' is 0.
21 21 Otherwise 'number' indicates the remaining possible candidates for
22 22 the search and 'nodes' contains the next bisect target.
23 23 'good' is True if bisect is searching for a first good changeset, False
24 24 if searching for a first bad one.
25 25 """
26 26
27 27 clparents = changelog.parentrevs
28 28 skip = set([changelog.rev(n) for n in state['skip']])
29 29
30 30 def buildancestors(bad, good):
31 31 # only the earliest bad revision matters
32 32 badrev = min([changelog.rev(n) for n in bad])
33 33 goodrevs = [changelog.rev(n) for n in good]
34 34 goodrev = min(goodrevs)
35 35 # build visit array
36 36 ancestors = [None] * (len(changelog) + 1) # an extra for [-1]
37 37
38 38 # set nodes descended from goodrev
39 39 ancestors[goodrev] = []
40 40 for rev in xrange(goodrev + 1, len(changelog)):
41 41 for prev in clparents(rev):
42 42 if ancestors[prev] == []:
43 43 ancestors[rev] = []
44 44
45 45 # clear good revs from array
46 46 for node in goodrevs:
47 47 ancestors[node] = None
48 48 for rev in xrange(len(changelog), -1, -1):
49 49 if ancestors[rev] is None:
50 50 for prev in clparents(rev):
51 51 ancestors[prev] = None
52 52
53 53 if ancestors[badrev] is None:
54 54 return badrev, None
55 55 return badrev, ancestors
56 56
57 57 good = 0
58 58 badrev, ancestors = buildancestors(state['bad'], state['good'])
59 59 if not ancestors: # looking for bad to good transition?
60 60 good = 1
61 61 badrev, ancestors = buildancestors(state['good'], state['bad'])
62 62 bad = changelog.node(badrev)
63 63 if not ancestors: # now we're confused
64 64 if len(state['bad']) == 1 and len(state['good']) == 1:
65 65 raise util.Abort(_("starting revisions are not directly related"))
66 raise util.Abort(_("Inconsistent state, %s:%s is good and bad")
66 raise util.Abort(_("inconsistent state, %s:%s is good and bad")
67 67 % (badrev, short(bad)))
68 68
69 69 # build children dict
70 70 children = {}
71 71 visit = [badrev]
72 72 candidates = []
73 73 while visit:
74 74 rev = visit.pop(0)
75 75 if ancestors[rev] == []:
76 76 candidates.append(rev)
77 77 for prev in clparents(rev):
78 78 if prev != -1:
79 79 if prev in children:
80 80 children[prev].append(rev)
81 81 else:
82 82 children[prev] = [rev]
83 83 visit.append(prev)
84 84
85 85 candidates.sort()
86 86 # have we narrowed it down to one entry?
87 87 # or have all other possible candidates besides 'bad' been skipped?
88 88 tot = len(candidates)
89 89 unskipped = [c for c in candidates if (c not in skip) and (c != badrev)]
90 90 if tot == 1 or not unskipped:
91 91 return ([changelog.node(rev) for rev in candidates], 0, good)
92 92 perfect = tot // 2
93 93
94 94 # find the best node to test
95 95 best_rev = None
96 96 best_len = -1
97 97 poison = set()
98 98 for rev in candidates:
99 99 if rev in poison:
100 100 # poison children
101 101 poison.update(children.get(rev, []))
102 102 continue
103 103
104 104 a = ancestors[rev] or [rev]
105 105 ancestors[rev] = None
106 106
107 107 x = len(a) # number of ancestors
108 108 y = tot - x # number of non-ancestors
109 109 value = min(x, y) # how good is this test?
110 110 if value > best_len and rev not in skip:
111 111 best_len = value
112 112 best_rev = rev
113 113 if value == perfect: # found a perfect candidate? quit early
114 114 break
115 115
116 116 if y < perfect and rev not in skip: # all downhill from here?
117 117 # poison children
118 118 poison.update(children.get(rev, []))
119 119 continue
120 120
121 121 for c in children.get(rev, []):
122 122 if ancestors[c]:
123 123 ancestors[c] = list(set(ancestors[c] + a))
124 124 else:
125 125 ancestors[c] = a + [c]
126 126
127 127 assert best_rev is not None
128 128 best_node = changelog.node(best_rev)
129 129
130 130 return ([best_node], tot, good)
131 131
132 132
133 133 def load_state(repo):
134 134 state = {'good': [], 'bad': [], 'skip': []}
135 135 if os.path.exists(repo.join("bisect.state")):
136 136 for l in repo.opener("bisect.state"):
137 137 kind, node = l[:-1].split()
138 138 node = repo.lookup(node)
139 139 if kind not in state:
140 140 raise util.Abort(_("unknown bisect kind %s") % kind)
141 141 state[kind].append(node)
142 142 return state
143 143
144 144
145 145 def save_state(repo, state):
146 146 f = repo.opener("bisect.state", "w", atomictemp=True)
147 147 wlock = repo.wlock()
148 148 try:
149 149 for kind in state:
150 150 for node in state[kind]:
151 151 f.write("%s %s\n" % (kind, hex(node)))
152 152 f.rename()
153 153 finally:
154 154 wlock.release()
155 155
@@ -1,1805 +1,1805
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo, discovery, pushkey
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import lock, transaction, store, encoding
13 13 import util, extensions, hook, error
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 import url as urlmod
18 18 from lock import release
19 19 import weakref, errno, os, time, inspect
20 20 propertycache = util.propertycache
21 21
22 22 class localrepository(repo.repository):
23 23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey'))
24 24 supported = set('revlogv1 store fncache shared parentdelta'.split())
25 25
26 26 def __init__(self, baseui, path=None, create=0):
27 27 repo.repository.__init__(self)
28 28 self.root = os.path.realpath(util.expandpath(path))
29 29 self.path = os.path.join(self.root, ".hg")
30 30 self.origroot = path
31 31 self.opener = util.opener(self.path)
32 32 self.wopener = util.opener(self.root)
33 33 self.baseui = baseui
34 34 self.ui = baseui.copy()
35 35
36 36 try:
37 37 self.ui.readconfig(self.join("hgrc"), self.root)
38 38 extensions.loadall(self.ui)
39 39 except IOError:
40 40 pass
41 41
42 42 if not os.path.isdir(self.path):
43 43 if create:
44 44 if not os.path.exists(path):
45 45 util.makedirs(path)
46 46 os.mkdir(self.path)
47 47 requirements = ["revlogv1"]
48 48 if self.ui.configbool('format', 'usestore', True):
49 49 os.mkdir(os.path.join(self.path, "store"))
50 50 requirements.append("store")
51 51 if self.ui.configbool('format', 'usefncache', True):
52 52 requirements.append("fncache")
53 53 # create an invalid changelog
54 54 self.opener("00changelog.i", "a").write(
55 55 '\0\0\0\2' # represents revlogv2
56 56 ' dummy changelog to prevent using the old repo layout'
57 57 )
58 58 if self.ui.configbool('format', 'parentdelta', False):
59 59 requirements.append("parentdelta")
60 60 reqfile = self.opener("requires", "w")
61 61 for r in requirements:
62 62 reqfile.write("%s\n" % r)
63 63 reqfile.close()
64 64 else:
65 65 raise error.RepoError(_("repository %s not found") % path)
66 66 elif create:
67 67 raise error.RepoError(_("repository %s already exists") % path)
68 68 else:
69 69 # find requirements
70 70 requirements = set()
71 71 try:
72 72 requirements = set(self.opener("requires").read().splitlines())
73 73 except IOError, inst:
74 74 if inst.errno != errno.ENOENT:
75 75 raise
76 76 for r in requirements - self.supported:
77 77 raise error.RepoError(_("requirement '%s' not supported") % r)
78 78
79 79 self.sharedpath = self.path
80 80 try:
81 81 s = os.path.realpath(self.opener("sharedpath").read())
82 82 if not os.path.exists(s):
83 83 raise error.RepoError(
84 84 _('.hg/sharedpath points to nonexistent directory %s') % s)
85 85 self.sharedpath = s
86 86 except IOError, inst:
87 87 if inst.errno != errno.ENOENT:
88 88 raise
89 89
90 90 self.store = store.store(requirements, self.sharedpath, util.opener)
91 91 self.spath = self.store.path
92 92 self.sopener = self.store.opener
93 93 self.sjoin = self.store.join
94 94 self.opener.createmode = self.store.createmode
95 95 self.sopener.options = {}
96 96 if 'parentdelta' in requirements:
97 97 self.sopener.options['parentdelta'] = 1
98 98
99 99 # These two define the set of tags for this repository. _tags
100 100 # maps tag name to node; _tagtypes maps tag name to 'global' or
101 101 # 'local'. (Global tags are defined by .hgtags across all
102 102 # heads, and local tags are defined in .hg/localtags.) They
103 103 # constitute the in-memory cache of tags.
104 104 self._tags = None
105 105 self._tagtypes = None
106 106
107 107 self._branchcache = None # in UTF-8
108 108 self._branchcachetip = None
109 109 self.nodetagscache = None
110 110 self.filterpats = {}
111 111 self._datafilters = {}
112 112 self._transref = self._lockref = self._wlockref = None
113 113
114 114 @propertycache
115 115 def changelog(self):
116 116 c = changelog.changelog(self.sopener)
117 117 if 'HG_PENDING' in os.environ:
118 118 p = os.environ['HG_PENDING']
119 119 if p.startswith(self.root):
120 120 c.readpending('00changelog.i.a')
121 121 self.sopener.options['defversion'] = c.version
122 122 return c
123 123
124 124 @propertycache
125 125 def manifest(self):
126 126 return manifest.manifest(self.sopener)
127 127
128 128 @propertycache
129 129 def dirstate(self):
130 130 return dirstate.dirstate(self.opener, self.ui, self.root)
131 131
132 132 def __getitem__(self, changeid):
133 133 if changeid is None:
134 134 return context.workingctx(self)
135 135 return context.changectx(self, changeid)
136 136
137 137 def __contains__(self, changeid):
138 138 try:
139 139 return bool(self.lookup(changeid))
140 140 except error.RepoLookupError:
141 141 return False
142 142
143 143 def __nonzero__(self):
144 144 return True
145 145
146 146 def __len__(self):
147 147 return len(self.changelog)
148 148
149 149 def __iter__(self):
150 150 for i in xrange(len(self)):
151 151 yield i
152 152
153 153 def url(self):
154 154 return 'file:' + self.root
155 155
156 156 def hook(self, name, throw=False, **args):
157 157 return hook.hook(self.ui, self, name, throw, **args)
158 158
159 159 tag_disallowed = ':\r\n'
160 160
161 161 def _tag(self, names, node, message, local, user, date, extra={}):
162 162 if isinstance(names, str):
163 163 allchars = names
164 164 names = (names,)
165 165 else:
166 166 allchars = ''.join(names)
167 167 for c in self.tag_disallowed:
168 168 if c in allchars:
169 169 raise util.Abort(_('%r cannot be used in a tag name') % c)
170 170
171 171 branches = self.branchmap()
172 172 for name in names:
173 173 self.hook('pretag', throw=True, node=hex(node), tag=name,
174 174 local=local)
175 175 if name in branches:
176 176 self.ui.warn(_("warning: tag %s conflicts with existing"
177 177 " branch name\n") % name)
178 178
179 179 def writetags(fp, names, munge, prevtags):
180 180 fp.seek(0, 2)
181 181 if prevtags and prevtags[-1] != '\n':
182 182 fp.write('\n')
183 183 for name in names:
184 184 m = munge and munge(name) or name
185 185 if self._tagtypes and name in self._tagtypes:
186 186 old = self._tags.get(name, nullid)
187 187 fp.write('%s %s\n' % (hex(old), m))
188 188 fp.write('%s %s\n' % (hex(node), m))
189 189 fp.close()
190 190
191 191 prevtags = ''
192 192 if local:
193 193 try:
194 194 fp = self.opener('localtags', 'r+')
195 195 except IOError:
196 196 fp = self.opener('localtags', 'a')
197 197 else:
198 198 prevtags = fp.read()
199 199
200 200 # local tags are stored in the current charset
201 201 writetags(fp, names, None, prevtags)
202 202 for name in names:
203 203 self.hook('tag', node=hex(node), tag=name, local=local)
204 204 return
205 205
206 206 try:
207 207 fp = self.wfile('.hgtags', 'rb+')
208 208 except IOError:
209 209 fp = self.wfile('.hgtags', 'ab')
210 210 else:
211 211 prevtags = fp.read()
212 212
213 213 # committed tags are stored in UTF-8
214 214 writetags(fp, names, encoding.fromlocal, prevtags)
215 215
216 216 if '.hgtags' not in self.dirstate:
217 217 self[None].add(['.hgtags'])
218 218
219 219 m = matchmod.exact(self.root, '', ['.hgtags'])
220 220 tagnode = self.commit(message, user, date, extra=extra, match=m)
221 221
222 222 for name in names:
223 223 self.hook('tag', node=hex(node), tag=name, local=local)
224 224
225 225 return tagnode
226 226
227 227 def tag(self, names, node, message, local, user, date):
228 228 '''tag a revision with one or more symbolic names.
229 229
230 230 names is a list of strings or, when adding a single tag, names may be a
231 231 string.
232 232
233 233 if local is True, the tags are stored in a per-repository file.
234 234 otherwise, they are stored in the .hgtags file, and a new
235 235 changeset is committed with the change.
236 236
237 237 keyword arguments:
238 238
239 239 local: whether to store tags in non-version-controlled file
240 240 (default False)
241 241
242 242 message: commit message to use if committing
243 243
244 244 user: name of user to use if committing
245 245
246 246 date: date tuple to use if committing'''
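# Call sketch (tag name, message and user values are illustrative):
#     repo.tag(['v1.0'], repo['.'].node(), 'Added tag v1.0', False,
#              'alice <alice@example.com>', None)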
247 247
248 248 for x in self.status()[:5]:
249 249 if '.hgtags' in x:
250 250 raise util.Abort(_('working copy of .hgtags is changed '
251 251 '(please commit .hgtags manually)'))
252 252
253 253 self.tags() # instantiate the cache
254 254 self._tag(names, node, message, local, user, date)
255 255
256 256 def tags(self):
257 257 '''return a mapping of tag to node'''
258 258 if self._tags is None:
259 259 (self._tags, self._tagtypes) = self._findtags()
260 260
261 261 return self._tags
262 262
263 263 def _findtags(self):
264 264 '''Do the hard work of finding tags. Return a pair of dicts
265 265 (tags, tagtypes) where tags maps tag name to node, and tagtypes
266 266 maps tag name to a string like \'global\' or \'local\'.
267 267 Subclasses or extensions are free to add their own tags, but
268 268 should be aware that the returned dicts will be retained for the
269 269 duration of the localrepo object.'''
270 270
271 271 # XXX what tagtype should subclasses/extensions use? Currently
272 272 # mq and bookmarks add tags, but do not set the tagtype at all.
273 273 # Should each extension invent its own tag type? Should there
274 274 # be one tagtype for all such "virtual" tags? Or is the status
275 275 # quo fine?
276 276
277 277 alltags = {} # map tag name to (node, hist)
278 278 tagtypes = {}
279 279
280 280 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
281 281 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
282 282
283 283 # Build the return dicts. Have to re-encode tag names because
284 284 # the tags module always uses UTF-8 (in order not to lose info
285 285 # writing to the cache), but the rest of Mercurial wants them in
286 286 # local encoding.
287 287 tags = {}
288 288 for (name, (node, hist)) in alltags.iteritems():
289 289 if node != nullid:
290 290 tags[encoding.tolocal(name)] = node
291 291 tags['tip'] = self.changelog.tip()
292 292 tagtypes = dict([(encoding.tolocal(name), value)
293 293 for (name, value) in tagtypes.iteritems()])
294 294 return (tags, tagtypes)
295 295
296 296 def tagtype(self, tagname):
297 297 '''
298 298 return the type of the given tag. result can be:
299 299
300 300 'local' : a local tag
301 301 'global' : a global tag
302 302 None : tag does not exist
303 303 '''
304 304
305 305 self.tags()
306 306
307 307 return self._tagtypes.get(tagname)
308 308
309 309 def tagslist(self):
310 310 '''return a list of tags ordered by revision'''
311 311 l = []
312 312 for t, n in self.tags().iteritems():
313 313 try:
314 314 r = self.changelog.rev(n)
315 315 except:
316 316 r = -2 # sort to the beginning of the list if unknown
317 317 l.append((r, t, n))
318 318 return [(t, n) for r, t, n in sorted(l)]
319 319
320 320 def nodetags(self, node):
321 321 '''return the tags associated with a node'''
322 322 if not self.nodetagscache:
323 323 self.nodetagscache = {}
324 324 for t, n in self.tags().iteritems():
325 325 self.nodetagscache.setdefault(n, []).append(t)
326 326 for tags in self.nodetagscache.itervalues():
327 327 tags.sort()
328 328 return self.nodetagscache.get(node, [])
329 329
330 330 def _branchtags(self, partial, lrev):
331 331 # TODO: rename this function?
332 332 tiprev = len(self) - 1
333 333 if lrev != tiprev:
334 334 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
335 335 self._updatebranchcache(partial, ctxgen)
336 336 self._writebranchcache(partial, self.changelog.tip(), tiprev)
337 337
338 338 return partial
339 339
340 340 def updatebranchcache(self):
341 341 tip = self.changelog.tip()
342 342 if self._branchcache is not None and self._branchcachetip == tip:
343 343 return self._branchcache
344 344
345 345 oldtip = self._branchcachetip
346 346 self._branchcachetip = tip
347 347 if oldtip is None or oldtip not in self.changelog.nodemap:
348 348 partial, last, lrev = self._readbranchcache()
349 349 else:
350 350 lrev = self.changelog.rev(oldtip)
351 351 partial = self._branchcache
352 352
353 353 self._branchtags(partial, lrev)
354 354 # this private cache holds all heads (not just tips)
355 355 self._branchcache = partial
356 356
357 357 def branchmap(self):
358 358 '''returns a dictionary {branch: [branchheads]}'''
359 359 self.updatebranchcache()
360 360 return self._branchcache
361 361
362 362 def branchtags(self):
363 363 '''return a dict where branch names map to the tipmost head of
364 364 the branch, open heads come before closed'''
365 365 bt = {}
366 366 for bn, heads in self.branchmap().iteritems():
367 367 tip = heads[-1]
368 368 for h in reversed(heads):
369 369 if 'close' not in self.changelog.read(h)[5]:
370 370 tip = h
371 371 break
372 372 bt[bn] = tip
373 373 return bt
374 374
375 375
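
branchmap() and branchtags() expose the same branch cache in two shapes: every head of every branch (oldest to newest) versus a single preferred tip per branch, where an open head wins over a closed one. A short illustrative sketch, assuming an already opened repo object as above:

    from mercurial.node import short

    # all heads, per branch
    for branch, heads in repo.branchmap().iteritems():
        print branch, [short(h) for h in heads]

    # one node per branch, preferring the tipmost open head
    for branch, tip in repo.branchtags().iteritems():
        print branch, short(tip)
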
376 376 def _readbranchcache(self):
377 377 partial = {}
378 378 try:
379 379 f = self.opener("branchheads.cache")
380 380 lines = f.read().split('\n')
381 381 f.close()
382 382 except (IOError, OSError):
383 383 return {}, nullid, nullrev
384 384
385 385 try:
386 386 last, lrev = lines.pop(0).split(" ", 1)
387 387 last, lrev = bin(last), int(lrev)
388 388 if lrev >= len(self) or self[lrev].node() != last:
389 389 # invalidate the cache
390 390 raise ValueError('invalidating branch cache (tip differs)')
391 391 for l in lines:
392 392 if not l:
393 393 continue
394 394 node, label = l.split(" ", 1)
395 395 partial.setdefault(label.strip(), []).append(bin(node))
396 396 except KeyboardInterrupt:
397 397 raise
398 398 except Exception, inst:
399 399 if self.ui.debugflag:
400 400 self.ui.warn(str(inst), '\n')
401 401 partial, last, lrev = {}, nullid, nullrev
402 402 return partial, last, lrev
403 403
404 404 def _writebranchcache(self, branches, tip, tiprev):
405 405 try:
406 406 f = self.opener("branchheads.cache", "w", atomictemp=True)
407 407 f.write("%s %s\n" % (hex(tip), tiprev))
408 408 for label, nodes in branches.iteritems():
409 409 for node in nodes:
410 410 f.write("%s %s\n" % (hex(node), label))
411 411 f.rename()
412 412 except (IOError, OSError):
413 413 pass
414 414
415 415 def _updatebranchcache(self, partial, ctxgen):
416 416 # collect new branch entries
417 417 newbranches = {}
418 418 for c in ctxgen:
419 419 newbranches.setdefault(c.branch(), []).append(c.node())
420 420 # if older branchheads are reachable from new ones, they aren't
421 421 # really branchheads. Note checking parents is insufficient:
422 422 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
423 423 for branch, newnodes in newbranches.iteritems():
424 424 bheads = partial.setdefault(branch, [])
425 425 bheads.extend(newnodes)
426 426 if len(bheads) <= 1:
427 427 continue
428 428 # starting from tip means fewer passes over reachable
429 429 while newnodes:
430 430 latest = newnodes.pop()
431 431 if latest not in bheads:
432 432 continue
433 433 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
434 434 reachable = self.changelog.reachable(latest, minbhrev)
435 435 reachable.remove(latest)
436 436 bheads = [b for b in bheads if b not in reachable]
437 437 partial[branch] = bheads
438 438
439 439 def lookup(self, key):
440 440 if isinstance(key, int):
441 441 return self.changelog.node(key)
442 442 elif key == '.':
443 443 return self.dirstate.parents()[0]
444 444 elif key == 'null':
445 445 return nullid
446 446 elif key == 'tip':
447 447 return self.changelog.tip()
448 448 n = self.changelog._match(key)
449 449 if n:
450 450 return n
451 451 if key in self.tags():
452 452 return self.tags()[key]
453 453 if key in self.branchtags():
454 454 return self.branchtags()[key]
455 455 n = self.changelog._partialmatch(key)
456 456 if n:
457 457 return n
458 458
459 459 # can't find key, check if it might have come from damaged dirstate
460 460 if key in self.dirstate.parents():
461 461 raise error.Abort(_("working directory has unknown parent '%s'!")
462 462 % short(key))
463 463 try:
464 464 if len(key) == 20:
465 465 key = hex(key)
466 466 except:
467 467 pass
468 468 raise error.RepoLookupError(_("unknown revision '%s'") % key)
469 469
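
lookup() tries its resolution strategies in a fixed order: integer revision, the special names '.', 'null' and 'tip', an exact node, a tag, a branch name, and finally an unambiguous node prefix; anything else ends in RepoLookupError. An illustrative sketch, assuming a non-empty repository:

    from mercurial import error
    from mercurial.node import short

    for key in [0, '.', 'null', 'tip', 'default']:
        try:
            print key, '->', short(repo.lookup(key))
        except error.RepoLookupError, inst:
            print key, '->', inst
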
470 470 def lookupbranch(self, key, remote=None):
471 471 repo = remote or self
472 472 if key in repo.branchmap():
473 473 return key
474 474
475 475 repo = (remote and remote.local()) and remote or self
476 476 return repo[key].branch()
477 477
478 478 def local(self):
479 479 return True
480 480
481 481 def join(self, f):
482 482 return os.path.join(self.path, f)
483 483
484 484 def wjoin(self, f):
485 485 return os.path.join(self.root, f)
486 486
487 487 def file(self, f):
488 488 if f[0] == '/':
489 489 f = f[1:]
490 490 return filelog.filelog(self.sopener, f)
491 491
492 492 def changectx(self, changeid):
493 493 return self[changeid]
494 494
495 495 def parents(self, changeid=None):
496 496 '''get list of changectxs for parents of changeid'''
497 497 return self[changeid].parents()
498 498
499 499 def filectx(self, path, changeid=None, fileid=None):
500 500 """changeid can be a changeset revision, node, or tag.
501 501 fileid can be a file revision or node."""
502 502 return context.filectx(self, path, changeid, fileid)
503 503
504 504 def getcwd(self):
505 505 return self.dirstate.getcwd()
506 506
507 507 def pathto(self, f, cwd=None):
508 508 return self.dirstate.pathto(f, cwd)
509 509
510 510 def wfile(self, f, mode='r'):
511 511 return self.wopener(f, mode)
512 512
513 513 def _link(self, f):
514 514 return os.path.islink(self.wjoin(f))
515 515
516 516 def _loadfilter(self, filter):
517 517 if filter not in self.filterpats:
518 518 l = []
519 519 for pat, cmd in self.ui.configitems(filter):
520 520 if cmd == '!':
521 521 continue
522 522 mf = matchmod.match(self.root, '', [pat])
523 523 fn = None
524 524 params = cmd
525 525 for name, filterfn in self._datafilters.iteritems():
526 526 if cmd.startswith(name):
527 527 fn = filterfn
528 528 params = cmd[len(name):].lstrip()
529 529 break
530 530 if not fn:
531 531 fn = lambda s, c, **kwargs: util.filter(s, c)
532 532 # Wrap old filters not supporting keyword arguments
533 533 if not inspect.getargspec(fn)[2]:
534 534 oldfn = fn
535 535 fn = lambda s, c, **kwargs: oldfn(s, c)
536 536 l.append((mf, fn, params))
537 537 self.filterpats[filter] = l
538 538
539 539 def _filter(self, filter, filename, data):
540 540 self._loadfilter(filter)
541 541
542 542 for mf, fn, cmd in self.filterpats[filter]:
543 543 if mf(filename):
544 544 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
545 545 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
546 546 break
547 547
548 548 return data
549 549
550 550 def adddatafilter(self, name, filter):
551 551 self._datafilters[name] = filter
552 552
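
adddatafilter() is how extensions plug named filters into the encode/decode machinery above: a configuration entry whose command starts with the registered name routes matching files through the Python function instead of a shell command, and _filter() passes the data plus ui, repo and filename keyword arguments. A hedged sketch of such an extension; the filter name 'upper:' and the hgrc lines are invented for illustration:

    # hypothetical extension registering a data filter named 'upper:'
    def uppercase(data, params, ui=None, repo=None, filename=None, **kwargs):
        # data is the raw file content, params is whatever follows the name
        return data.upper()

    def reposetup(ui, repo):
        if repo.local():
            repo.adddatafilter('upper:', uppercase)

    # illustrative hgrc entries routing *.txt through the named filter:
    #   [encode]
    #   **.txt = upper:
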
553 553 def wread(self, filename):
554 554 if self._link(filename):
555 555 data = os.readlink(self.wjoin(filename))
556 556 else:
557 557 data = self.wopener(filename, 'r').read()
558 558 return self._filter("encode", filename, data)
559 559
560 560 def wwrite(self, filename, data, flags):
561 561 data = self._filter("decode", filename, data)
562 562 try:
563 563 os.unlink(self.wjoin(filename))
564 564 except OSError:
565 565 pass
566 566 if 'l' in flags:
567 567 self.wopener.symlink(data, filename)
568 568 else:
569 569 self.wopener(filename, 'w').write(data)
570 570 if 'x' in flags:
571 571 util.set_flags(self.wjoin(filename), False, True)
572 572
573 573 def wwritedata(self, filename, data):
574 574 return self._filter("decode", filename, data)
575 575
576 576 def transaction(self, desc):
577 577 tr = self._transref and self._transref() or None
578 578 if tr and tr.running():
579 579 return tr.nest()
580 580
581 581 # abort here if the journal already exists
582 582 if os.path.exists(self.sjoin("journal")):
583 583 raise error.RepoError(
584 584 _("abandoned transaction found - run hg recover"))
585 585
586 586 # save dirstate for rollback
587 587 try:
588 588 ds = self.opener("dirstate").read()
589 589 except IOError:
590 590 ds = ""
591 591 self.opener("journal.dirstate", "w").write(ds)
592 592 self.opener("journal.branch", "w").write(self.dirstate.branch())
593 593 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
594 594
595 595 renames = [(self.sjoin("journal"), self.sjoin("undo")),
596 596 (self.join("journal.dirstate"), self.join("undo.dirstate")),
597 597 (self.join("journal.branch"), self.join("undo.branch")),
598 598 (self.join("journal.desc"), self.join("undo.desc"))]
599 599 tr = transaction.transaction(self.ui.warn, self.sopener,
600 600 self.sjoin("journal"),
601 601 aftertrans(renames),
602 602 self.store.createmode)
603 603 self._transref = weakref.ref(tr)
604 604 return tr
605 605
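
transaction() either nests into an already running transaction or opens a new one, journalling the dirstate, branch and description so rollback() can later undo the whole operation. Callers are expected to hold the store lock and to close or release the transaction explicitly, roughly as commitctx() does further down; schematically (illustrative, not from the source):

    lock = repo.lock()
    try:
        tr = repo.transaction('my-operation')   # desc ends up in undo.desc
        try:
            # ... write revlog data through the transaction here ...
            tr.close()                          # commit the journal
        finally:
            tr.release()                        # aborts if not closed
    finally:
        lock.release()
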
606 606 def recover(self):
607 607 lock = self.lock()
608 608 try:
609 609 if os.path.exists(self.sjoin("journal")):
610 610 self.ui.status(_("rolling back interrupted transaction\n"))
611 611 transaction.rollback(self.sopener, self.sjoin("journal"),
612 612 self.ui.warn)
613 613 self.invalidate()
614 614 return True
615 615 else:
616 616 self.ui.warn(_("no interrupted transaction available\n"))
617 617 return False
618 618 finally:
619 619 lock.release()
620 620
621 621 def rollback(self, dryrun=False):
622 622 wlock = lock = None
623 623 try:
624 624 wlock = self.wlock()
625 625 lock = self.lock()
626 626 if os.path.exists(self.sjoin("undo")):
627 627 try:
628 628 args = self.opener("undo.desc", "r").read().splitlines()
629 629 if len(args) >= 3 and self.ui.verbose:
630 630 desc = _("rolling back to revision %s"
631 631 " (undo %s: %s)\n") % (
632 632 int(args[0]) - 1, args[1], args[2])
633 633 elif len(args) >= 2:
634 634 desc = _("rolling back to revision %s (undo %s)\n") % (
635 635 int(args[0]) - 1, args[1])
636 636 except IOError:
637 637 desc = _("rolling back unknown transaction\n")
638 638 self.ui.status(desc)
639 639 if dryrun:
640 640 return
641 641 transaction.rollback(self.sopener, self.sjoin("undo"),
642 642 self.ui.warn)
643 643 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
644 644 try:
645 645 branch = self.opener("undo.branch").read()
646 646 self.dirstate.setbranch(branch)
647 647 except IOError:
648 648 self.ui.warn(_("Named branch could not be reset, "
649 649 "current branch is still: %s\n")
650 650 % encoding.tolocal(self.dirstate.branch()))
651 651 self.invalidate()
652 652 self.dirstate.invalidate()
653 653 self.destroyed()
654 654 else:
655 655 self.ui.warn(_("no rollback information available\n"))
656 656 return 1
657 657 finally:
658 658 release(lock, wlock)
659 659
660 660 def invalidatecaches(self):
661 661 self._tags = None
662 662 self._tagtypes = None
663 663 self.nodetagscache = None
664 664 self._branchcache = None # in UTF-8
665 665 self._branchcachetip = None
666 666
667 667 def invalidate(self):
668 668 for a in "changelog manifest".split():
669 669 if a in self.__dict__:
670 670 delattr(self, a)
671 671 self.invalidatecaches()
672 672
673 673 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
674 674 try:
675 675 l = lock.lock(lockname, 0, releasefn, desc=desc)
676 676 except error.LockHeld, inst:
677 677 if not wait:
678 678 raise
679 679 self.ui.warn(_("waiting for lock on %s held by %r\n") %
680 680 (desc, inst.locker))
681 681 # default to 600 seconds timeout
682 682 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
683 683 releasefn, desc=desc)
684 684 if acquirefn:
685 685 acquirefn()
686 686 return l
687 687
688 688 def lock(self, wait=True):
689 689 '''Lock the repository store (.hg/store) and return a weak reference
690 690 to the lock. Use this before modifying the store (e.g. committing or
691 691 stripping). If you are opening a transaction, get a lock as well.'''
692 692 l = self._lockref and self._lockref()
693 693 if l is not None and l.held:
694 694 l.lock()
695 695 return l
696 696
697 697 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
698 698 _('repository %s') % self.origroot)
699 699 self._lockref = weakref.ref(l)
700 700 return l
701 701
702 702 def wlock(self, wait=True):
703 703 '''Lock the non-store parts of the repository (everything under
704 704 .hg except .hg/store) and return a weak reference to the lock.
705 705 Use this before modifying files in .hg.'''
706 706 l = self._wlockref and self._wlockref()
707 707 if l is not None and l.held:
708 708 l.lock()
709 709 return l
710 710
711 711 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
712 712 self.dirstate.invalidate, _('working directory of %s') %
713 713 self.origroot)
714 714 self._wlockref = weakref.ref(l)
715 715 return l
716 716
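
When both locks are needed, the convention used throughout this file (see rollback() and commit()) is to take wlock() before lock() and to release them in reverse order, which keeps working-copy and store operations from deadlocking against each other. Schematically:

    wlock = repo.wlock()            # guards .hg/dirstate and friends
    try:
        lock = repo.lock()          # guards .hg/store
        try:
            pass                    # ... modify store and working state ...
        finally:
            lock.release()
    finally:
        wlock.release()
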
717 717 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
718 718 """
719 719 commit an individual file as part of a larger transaction
720 720 """
721 721
722 722 fname = fctx.path()
723 723 text = fctx.data()
724 724 flog = self.file(fname)
725 725 fparent1 = manifest1.get(fname, nullid)
726 726 fparent2 = fparent2o = manifest2.get(fname, nullid)
727 727
728 728 meta = {}
729 729 copy = fctx.renamed()
730 730 if copy and copy[0] != fname:
731 731 # Mark the new revision of this file as a copy of another
732 732 # file. This copy data will effectively act as a parent
733 733 # of this new revision. If this is a merge, the first
734 734 # parent will be the nullid (meaning "look up the copy data")
735 735 # and the second one will be the other parent. For example:
736 736 #
737 737 # 0 --- 1 --- 3 rev1 changes file foo
738 738 # \ / rev2 renames foo to bar and changes it
739 739 # \- 2 -/ rev3 should have bar with all changes and
740 740 # should record that bar descends from
741 741 # bar in rev2 and foo in rev1
742 742 #
743 743 # this allows this merge to succeed:
744 744 #
745 745 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
746 746 # \ / merging rev3 and rev4 should use bar@rev2
747 747 # \- 2 --- 4 as the merge base
748 748 #
749 749
750 750 cfname = copy[0]
751 751 crev = manifest1.get(cfname)
752 752 newfparent = fparent2
753 753
754 754 if manifest2: # branch merge
755 755 if fparent2 == nullid or crev is None: # copied on remote side
756 756 if cfname in manifest2:
757 757 crev = manifest2[cfname]
758 758 newfparent = fparent1
759 759
760 760 # find source in nearest ancestor if we've lost track
761 761 if not crev:
762 762 self.ui.debug(" %s: searching for copy revision for %s\n" %
763 763 (fname, cfname))
764 764 for ancestor in self['.'].ancestors():
765 765 if cfname in ancestor:
766 766 crev = ancestor[cfname].filenode()
767 767 break
768 768
769 769 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
770 770 meta["copy"] = cfname
771 771 meta["copyrev"] = hex(crev)
772 772 fparent1, fparent2 = nullid, newfparent
773 773 elif fparent2 != nullid:
774 774 # is one parent an ancestor of the other?
775 775 fparentancestor = flog.ancestor(fparent1, fparent2)
776 776 if fparentancestor == fparent1:
777 777 fparent1, fparent2 = fparent2, nullid
778 778 elif fparentancestor == fparent2:
779 779 fparent2 = nullid
780 780
781 781 # is the file changed?
782 782 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
783 783 changelist.append(fname)
784 784 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
785 785
786 786 # are just the flags changed during merge?
787 787 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
788 788 changelist.append(fname)
789 789
790 790 return fparent1
791 791
792 792 def commit(self, text="", user=None, date=None, match=None, force=False,
793 793 editor=False, extra={}):
794 794 """Add a new revision to current repository.
795 795
796 796 Revision information is gathered from the working directory,
797 797 match can be used to filter the committed files. If editor is
798 798 supplied, it is called to get a commit message.
799 799 """
800 800
801 801 def fail(f, msg):
802 802 raise util.Abort('%s: %s' % (f, msg))
803 803
804 804 if not match:
805 805 match = matchmod.always(self.root, '')
806 806
807 807 if not force:
808 808 vdirs = []
809 809 match.dir = vdirs.append
810 810 match.bad = fail
811 811
812 812 wlock = self.wlock()
813 813 try:
814 814 wctx = self[None]
815 815 merge = len(wctx.parents()) > 1
816 816
817 817 if (not force and merge and match and
818 818 (match.files() or match.anypats())):
819 819 raise util.Abort(_('cannot partially commit a merge '
820 820 '(do not specify files or patterns)'))
821 821
822 822 changes = self.status(match=match, clean=force)
823 823 if force:
824 824 changes[0].extend(changes[6]) # mq may commit unchanged files
825 825
826 826 # check subrepos
827 827 subs = []
828 828 removedsubs = set()
829 829 for p in wctx.parents():
830 830 removedsubs.update(s for s in p.substate if match(s))
831 831 for s in wctx.substate:
832 832 removedsubs.discard(s)
833 833 if match(s) and wctx.sub(s).dirty():
834 834 subs.append(s)
835 835 if (subs or removedsubs):
836 836 if (not match('.hgsub') and
837 837 '.hgsub' in (wctx.modified() + wctx.added())):
838 838 raise util.Abort(_("can't commit subrepos without .hgsub"))
839 839 if '.hgsubstate' not in changes[0]:
840 840 changes[0].insert(0, '.hgsubstate')
841 841
842 842 # make sure all explicit patterns are matched
843 843 if not force and match.files():
844 844 matched = set(changes[0] + changes[1] + changes[2])
845 845
846 846 for f in match.files():
847 847 if f == '.' or f in matched or f in wctx.substate:
848 848 continue
849 849 if f in changes[3]: # missing
850 850 fail(f, _('file not found!'))
851 851 if f in vdirs: # visited directory
852 852 d = f + '/'
853 853 for mf in matched:
854 854 if mf.startswith(d):
855 855 break
856 856 else:
857 857 fail(f, _("no match under directory!"))
858 858 elif f not in self.dirstate:
859 859 fail(f, _("file not tracked!"))
860 860
861 861 if (not force and not extra.get("close") and not merge
862 862 and not (changes[0] or changes[1] or changes[2])
863 863 and wctx.branch() == wctx.p1().branch()):
864 864 return None
865 865
866 866 ms = mergemod.mergestate(self)
867 867 for f in changes[0]:
868 868 if f in ms and ms[f] == 'u':
869 869 raise util.Abort(_("unresolved merge conflicts "
870 870 "(see hg resolve)"))
871 871
872 872 cctx = context.workingctx(self, text, user, date, extra, changes)
873 873 if editor:
874 874 cctx._text = editor(self, cctx, subs)
875 875 edited = (text != cctx._text)
876 876
877 877 # commit subs
878 878 if subs or removedsubs:
879 879 state = wctx.substate.copy()
880 880 for s in subs:
881 881 sub = wctx.sub(s)
882 882 self.ui.status(_('committing subrepository %s\n') %
883 883 subrepo.relpath(sub))
884 884 sr = sub.commit(cctx._text, user, date)
885 885 state[s] = (state[s][0], sr)
886 886 subrepo.writestate(self, state)
887 887
888 888 # Save commit message in case this transaction gets rolled back
889 889 # (e.g. by a pretxncommit hook). Leave the content alone on
890 890 # the assumption that the user will use the same editor again.
891 891 msgfile = self.opener('last-message.txt', 'wb')
892 892 msgfile.write(cctx._text)
893 893 msgfile.close()
894 894
895 895 p1, p2 = self.dirstate.parents()
896 896 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
897 897 try:
898 898 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
899 899 ret = self.commitctx(cctx, True)
900 900 except:
901 901 if edited:
902 902 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
903 903 self.ui.write(
904 904 _('note: commit message saved in %s\n') % msgfn)
905 905 raise
906 906
907 907 # update dirstate and mergestate
908 908 for f in changes[0] + changes[1]:
909 909 self.dirstate.normal(f)
910 910 for f in changes[2]:
911 911 self.dirstate.forget(f)
912 912 self.dirstate.setparents(ret)
913 913 ms.reset()
914 914 finally:
915 915 wlock.release()
916 916
917 917 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
918 918 return ret
919 919
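
From the caller's side, commit() usually only needs a message and, optionally, a match restricting the committed files; status collection, subrepo handling, hooks and the dirstate update all happen inside. A minimal illustrative sketch committing a single modified, tracked file (the filename and user are invented):

    from mercurial import match as matchmod
    from mercurial.node import short

    m = matchmod.match(repo.root, repo.getcwd(), ['hello.txt'])
    node = repo.commit(text='tweak hello.txt', user='someone@example.com', match=m)
    if node is None:
        repo.ui.status('nothing changed\n')
    else:
        repo.ui.status('committed %s\n' % short(node))
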
920 920 def commitctx(self, ctx, error=False):
921 921 """Add a new revision to current repository.
922 922 Revision information is passed via the context argument.
923 923 """
924 924
925 925 tr = lock = None
926 926 removed = ctx.removed()
927 927 p1, p2 = ctx.p1(), ctx.p2()
928 928 m1 = p1.manifest().copy()
929 929 m2 = p2.manifest()
930 930 user = ctx.user()
931 931
932 932 lock = self.lock()
933 933 try:
934 934 tr = self.transaction("commit")
935 935 trp = weakref.proxy(tr)
936 936
937 937 # check in files
938 938 new = {}
939 939 changed = []
940 940 linkrev = len(self)
941 941 for f in sorted(ctx.modified() + ctx.added()):
942 942 self.ui.note(f + "\n")
943 943 try:
944 944 fctx = ctx[f]
945 945 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
946 946 changed)
947 947 m1.set(f, fctx.flags())
948 948 except OSError, inst:
949 949 self.ui.warn(_("trouble committing %s!\n") % f)
950 950 raise
951 951 except IOError, inst:
952 952 errcode = getattr(inst, 'errno', errno.ENOENT)
953 953 if error or errcode and errcode != errno.ENOENT:
954 954 self.ui.warn(_("trouble committing %s!\n") % f)
955 955 raise
956 956 else:
957 957 removed.append(f)
958 958
959 959 # update manifest
960 960 m1.update(new)
961 961 removed = [f for f in sorted(removed) if f in m1 or f in m2]
962 962 drop = [f for f in removed if f in m1]
963 963 for f in drop:
964 964 del m1[f]
965 965 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
966 966 p2.manifestnode(), (new, drop))
967 967
968 968 # update changelog
969 969 self.changelog.delayupdate()
970 970 n = self.changelog.add(mn, changed + removed, ctx.description(),
971 971 trp, p1.node(), p2.node(),
972 972 user, ctx.date(), ctx.extra().copy())
973 973 p = lambda: self.changelog.writepending() and self.root or ""
974 974 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
975 975 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
976 976 parent2=xp2, pending=p)
977 977 self.changelog.finalize(trp)
978 978 tr.close()
979 979
980 980 if self._branchcache:
981 981 self.updatebranchcache()
982 982 return n
983 983 finally:
984 984 if tr:
985 985 tr.release()
986 986 lock.release()
987 987
988 988 def destroyed(self):
989 989 '''Inform the repository that nodes have been destroyed.
990 990 Intended for use by strip and rollback, so there's a common
991 991 place for anything that has to be done after destroying history.'''
992 992 # XXX it might be nice if we could take the list of destroyed
993 993 # nodes, but I don't see an easy way for rollback() to do that
994 994
995 995 # Ensure the persistent tag cache is updated. Doing it now
996 996 # means that the tag cache only has to worry about destroyed
997 997 # heads immediately after a strip/rollback. That in turn
998 998 # guarantees that "cachetip == currenttip" (comparing both rev
999 999 # and node) always means no nodes have been added or destroyed.
1000 1000
1001 1001 # XXX this is suboptimal when qrefresh'ing: we strip the current
1002 1002 # head, refresh the tag cache, then immediately add a new head.
1003 1003 # But I think doing it this way is necessary for the "instant
1004 1004 # tag cache retrieval" case to work.
1005 1005 self.invalidatecaches()
1006 1006
1007 1007 def walk(self, match, node=None):
1008 1008 '''
1009 1009 walk recursively through the directory tree or a given
1010 1010 changeset, finding all files matched by the match
1011 1011 function
1012 1012 '''
1013 1013 return self[node].walk(match)
1014 1014
1015 1015 def status(self, node1='.', node2=None, match=None,
1016 1016 ignored=False, clean=False, unknown=False):
1017 1017 """return status of files between two nodes, or between a node and the working directory
1018 1018
1019 1019 If node1 is None, use the first dirstate parent instead.
1020 1020 If node2 is None, compare node1 with working directory.
1021 1021 """
1022 1022
1023 1023 def mfmatches(ctx):
1024 1024 mf = ctx.manifest().copy()
1025 1025 for fn in mf.keys():
1026 1026 if not match(fn):
1027 1027 del mf[fn]
1028 1028 return mf
1029 1029
1030 1030 if isinstance(node1, context.changectx):
1031 1031 ctx1 = node1
1032 1032 else:
1033 1033 ctx1 = self[node1]
1034 1034 if isinstance(node2, context.changectx):
1035 1035 ctx2 = node2
1036 1036 else:
1037 1037 ctx2 = self[node2]
1038 1038
1039 1039 working = ctx2.rev() is None
1040 1040 parentworking = working and ctx1 == self['.']
1041 1041 match = match or matchmod.always(self.root, self.getcwd())
1042 1042 listignored, listclean, listunknown = ignored, clean, unknown
1043 1043
1044 1044 # load earliest manifest first for caching reasons
1045 1045 if not working and ctx2.rev() < ctx1.rev():
1046 1046 ctx2.manifest()
1047 1047
1048 1048 if not parentworking:
1049 1049 def bad(f, msg):
1050 1050 if f not in ctx1:
1051 1051 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1052 1052 match.bad = bad
1053 1053
1054 1054 if working: # we need to scan the working dir
1055 1055 subrepos = []
1056 1056 if '.hgsub' in self.dirstate:
1057 1057 subrepos = ctx1.substate.keys()
1058 1058 s = self.dirstate.status(match, subrepos, listignored,
1059 1059 listclean, listunknown)
1060 1060 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1061 1061
1062 1062 # check for any possibly clean files
1063 1063 if parentworking and cmp:
1064 1064 fixup = []
1065 1065 # do a full compare of any files that might have changed
1066 1066 for f in sorted(cmp):
1067 1067 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1068 1068 or ctx1[f].cmp(ctx2[f])):
1069 1069 modified.append(f)
1070 1070 else:
1071 1071 fixup.append(f)
1072 1072
1073 1073 # update dirstate for files that are actually clean
1074 1074 if fixup:
1075 1075 if listclean:
1076 1076 clean += fixup
1077 1077
1078 1078 try:
1079 1079 # updating the dirstate is optional
1080 1080 # so we don't wait on the lock
1081 1081 wlock = self.wlock(False)
1082 1082 try:
1083 1083 for f in fixup:
1084 1084 self.dirstate.normal(f)
1085 1085 finally:
1086 1086 wlock.release()
1087 1087 except error.LockError:
1088 1088 pass
1089 1089
1090 1090 if not parentworking:
1091 1091 mf1 = mfmatches(ctx1)
1092 1092 if working:
1093 1093 # we are comparing working dir against non-parent
1094 1094 # generate a pseudo-manifest for the working dir
1095 1095 mf2 = mfmatches(self['.'])
1096 1096 for f in cmp + modified + added:
1097 1097 mf2[f] = None
1098 1098 mf2.set(f, ctx2.flags(f))
1099 1099 for f in removed:
1100 1100 if f in mf2:
1101 1101 del mf2[f]
1102 1102 else:
1103 1103 # we are comparing two revisions
1104 1104 deleted, unknown, ignored = [], [], []
1105 1105 mf2 = mfmatches(ctx2)
1106 1106
1107 1107 modified, added, clean = [], [], []
1108 1108 for fn in mf2:
1109 1109 if fn in mf1:
1110 1110 if (mf1.flags(fn) != mf2.flags(fn) or
1111 1111 (mf1[fn] != mf2[fn] and
1112 1112 (mf2[fn] or ctx1[fn].cmp(ctx2[fn])))):
1113 1113 modified.append(fn)
1114 1114 elif listclean:
1115 1115 clean.append(fn)
1116 1116 del mf1[fn]
1117 1117 else:
1118 1118 added.append(fn)
1119 1119 removed = mf1.keys()
1120 1120
1121 1121 r = modified, added, removed, deleted, unknown, ignored, clean
1122 1122 [l.sort() for l in r]
1123 1123 return r
1124 1124
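
status() always returns the same seven lists in a fixed order; unknown, ignored and clean are only populated when the corresponding flag asks for them (clean in particular is expensive). An illustrative unpacking of the working directory against its first parent:

    modified, added, removed, deleted, unknown, ignored, clean = repo.status(
        unknown=True, ignored=True, clean=True)

    print '%d modified, %d added, %d removed' % (len(modified), len(added), len(removed))
    print '%d deleted, %d unknown, %d ignored, %d clean' % (
        len(deleted), len(unknown), len(ignored), len(clean))
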
1125 1125 def heads(self, start=None):
1126 1126 heads = self.changelog.heads(start)
1127 1127 # sort the output in rev descending order
1128 1128 heads = [(-self.changelog.rev(h), h) for h in heads]
1129 1129 return [n for (r, n) in sorted(heads)]
1130 1130
1131 1131 def branchheads(self, branch=None, start=None, closed=False):
1132 1132 '''return a (possibly filtered) list of heads for the given branch
1133 1133
1134 1134 Heads are returned in topological order, from newest to oldest.
1135 1135 If branch is None, use the dirstate branch.
1136 1136 If start is not None, return only heads reachable from start.
1137 1137 If closed is True, return heads that are marked as closed as well.
1138 1138 '''
1139 1139 if branch is None:
1140 1140 branch = self[None].branch()
1141 1141 branches = self.branchmap()
1142 1142 if branch not in branches:
1143 1143 return []
1144 1144 # the cache returns heads ordered lowest to highest
1145 1145 bheads = list(reversed(branches[branch]))
1146 1146 if start is not None:
1147 1147 # filter out the heads that cannot be reached from startrev
1148 1148 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1149 1149 bheads = [h for h in bheads if h in fbheads]
1150 1150 if not closed:
1151 1151 bheads = [h for h in bheads if
1152 1152 ('close' not in self.changelog.read(h)[5])]
1153 1153 return bheads
1154 1154
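
branchheads() narrows branchmap() down to a single branch and flips the order to newest first, optionally filtering by reachability from start and dropping closed heads. An illustrative sketch listing the open heads of the branch the working directory is on:

    from mercurial.node import short

    heads = repo.branchheads()        # dirstate branch, closed heads excluded
    print 'open heads of %s:' % repo[None].branch()
    for h in heads:
        print '  %d:%s' % (repo.changelog.rev(h), short(h))
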
1155 1155 def branches(self, nodes):
1156 1156 if not nodes:
1157 1157 nodes = [self.changelog.tip()]
1158 1158 b = []
1159 1159 for n in nodes:
1160 1160 t = n
1161 1161 while 1:
1162 1162 p = self.changelog.parents(n)
1163 1163 if p[1] != nullid or p[0] == nullid:
1164 1164 b.append((t, n, p[0], p[1]))
1165 1165 break
1166 1166 n = p[0]
1167 1167 return b
1168 1168
1169 1169 def between(self, pairs):
1170 1170 r = []
1171 1171
1172 1172 for top, bottom in pairs:
1173 1173 n, l, i = top, [], 0
1174 1174 f = 1
1175 1175
1176 1176 while n != bottom and n != nullid:
1177 1177 p = self.changelog.parents(n)[0]
1178 1178 if i == f:
1179 1179 l.append(n)
1180 1180 f = f * 2
1181 1181 n = p
1182 1182 i += 1
1183 1183
1184 1184 r.append(l)
1185 1185
1186 1186 return r
1187 1187
1188 1188 def pull(self, remote, heads=None, force=False):
1189 1189 lock = self.lock()
1190 1190 try:
1191 1191 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1192 1192 force=force)
1193 1193 common, fetch, rheads = tmp
1194 1194 if not fetch:
1195 1195 self.ui.status(_("no changes found\n"))
1196 1196 return 0
1197 1197
1198 1198 if fetch == [nullid]:
1199 1199 self.ui.status(_("requesting all changes\n"))
1200 1200 elif heads is None and remote.capable('changegroupsubset'):
1201 1201 # issue1320, avoid a race if remote changed after discovery
1202 1202 heads = rheads
1203 1203
1204 1204 if heads is None:
1205 1205 cg = remote.changegroup(fetch, 'pull')
1206 1206 else:
1207 1207 if not remote.capable('changegroupsubset'):
1208 raise util.Abort(_("Partial pull cannot be done because "
1208 raise util.Abort(_("partial pull cannot be done because "
1209 1209 "other repository doesn't support "
1210 1210 "changegroupsubset."))
1211 1211 cg = remote.changegroupsubset(fetch, heads, 'pull')
1212 1212 return self.addchangegroup(cg, 'pull', remote.url(), lock=lock)
1213 1213 finally:
1214 1214 lock.release()
1215 1215
1216 1216 def push(self, remote, force=False, revs=None, newbranch=False):
1217 1217 '''Push outgoing changesets (limited by revs) from the current
1218 1218 repository to remote. Return an integer:
1219 1219 - 0 means HTTP error *or* nothing to push
1220 1220 - 1 means we pushed and remote head count is unchanged *or*
1221 1221 we have outgoing changesets but refused to push
1222 1222 - other values as described by addchangegroup()
1223 1223 '''
1224 1224 # there are two ways to push to remote repo:
1225 1225 #
1226 1226 # addchangegroup assumes local user can lock remote
1227 1227 # repo (local filesystem, old ssh servers).
1228 1228 #
1229 1229 # unbundle assumes local user cannot lock remote repo (new ssh
1230 1230 # servers, http servers).
1231 1231
1232 1232 lock = None
1233 1233 unbundle = remote.capable('unbundle')
1234 1234 if not unbundle:
1235 1235 lock = remote.lock()
1236 1236 try:
1237 1237 ret = discovery.prepush(self, remote, force, revs, newbranch)
1238 1238 if ret[0] is None:
1239 1239 # and here we return 0 for "nothing to push" or 1 for
1240 1240 # "something to push but I refuse"
1241 1241 return ret[1]
1242 1242
1243 1243 cg, remote_heads = ret
1244 1244 if unbundle:
1245 1245 # local repo finds heads on server, finds out what revs it must
1246 1246 # push. once revs transferred, if server finds it has
1247 1247 # different heads (someone else won commit/push race), server
1248 1248 # aborts.
1249 1249 if force:
1250 1250 remote_heads = ['force']
1251 1251 # ssh: return remote's addchangegroup()
1252 1252 # http: return remote's addchangegroup() or 0 for error
1253 1253 return remote.unbundle(cg, remote_heads, 'push')
1254 1254 else:
1255 1255 # we return an integer indicating remote head count change
1256 1256 return remote.addchangegroup(cg, 'push', self.url(), lock=lock)
1257 1257 finally:
1258 1258 if lock is not None:
1259 1259 lock.release()
1260 1260
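
Because the unbundle path forwards the remote's addchangegroup() result, the integer coming back from push() mixes "nothing to push" (0), "pushed or refused, head count unchanged" (1) and the head-delta encoding described at addchangegroup() below. A defensive illustrative sketch against a local clone (the path is invented):

    from mercurial import hg

    other = hg.repository(repo.ui, '/path/to/other-clone')
    ret = repo.push(other)
    if ret == 0:
        repo.ui.status('nothing to push (or remote error)\n')
    elif ret == 1:
        repo.ui.status('pushed (or refused), remote head count unchanged\n')
    elif ret > 1:
        repo.ui.status('pushed, %d new remote head(s)\n' % (ret - 1))
    else:
        repo.ui.status('pushed, %d remote head(s) gone\n' % (-ret - 1))
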
1261 1261 def changegroupinfo(self, nodes, source):
1262 1262 if self.ui.verbose or source == 'bundle':
1263 1263 self.ui.status(_("%d changesets found\n") % len(nodes))
1264 1264 if self.ui.debugflag:
1265 1265 self.ui.debug("list of changesets:\n")
1266 1266 for node in nodes:
1267 1267 self.ui.debug("%s\n" % hex(node))
1268 1268
1269 1269 def changegroupsubset(self, bases, heads, source, extranodes=None):
1270 1270 """Compute a changegroup consisting of all the nodes that are
1271 1271 descendants of any of the bases and ancestors of any of the heads.
1272 1272 Return a chunkbuffer object whose read() method will return
1273 1273 successive changegroup chunks.
1274 1274
1275 1275 It is fairly complex as determining which filenodes and which
1276 1276 manifest nodes need to be included for the changeset to be complete
1277 1277 is non-trivial.
1278 1278
1279 1279 Another wrinkle is doing the reverse, figuring out which changeset in
1280 1280 the changegroup a particular filenode or manifestnode belongs to.
1281 1281
1282 1282 The caller can specify some nodes that must be included in the
1283 1283 changegroup using the extranodes argument. It should be a dict
1284 1284 where the keys are the filenames (or 1 for the manifest), and the
1285 1285 values are lists of (node, linknode) tuples, where node is a wanted
1286 1286 node and linknode is the changelog node that should be transmitted as
1287 1287 the linkrev.
1288 1288 """
1289 1289
1290 1290 # Set up some initial variables
1291 1291 # Make it easy to refer to self.changelog
1292 1292 cl = self.changelog
1293 1293 # Compute the list of changesets in this changegroup.
1294 1294 # Some bases may turn out to be superfluous, and some heads may be
1295 1295 # too. nodesbetween will return the minimal set of bases and heads
1296 1296 # necessary to re-create the changegroup.
1297 1297 if not bases:
1298 1298 bases = [nullid]
1299 1299 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1300 1300
1301 1301 if extranodes is None:
1302 1302 # can we go through the fast path ?
1303 1303 heads.sort()
1304 1304 allheads = self.heads()
1305 1305 allheads.sort()
1306 1306 if heads == allheads:
1307 1307 return self._changegroup(msng_cl_lst, source)
1308 1308
1309 1309 # slow path
1310 1310 self.hook('preoutgoing', throw=True, source=source)
1311 1311
1312 1312 self.changegroupinfo(msng_cl_lst, source)
1313 1313
1314 1314 # We assume that all ancestors of bases are known
1315 1315 commonrevs = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1316 1316
1317 1317 # Make it easy to refer to self.manifest
1318 1318 mnfst = self.manifest
1319 1319 # We don't know which manifests are missing yet
1320 1320 msng_mnfst_set = {}
1321 1321 # Nor do we know which filenodes are missing.
1322 1322 msng_filenode_set = {}
1323 1323
1324 1324 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1325 1325 junk = None
1326 1326
1327 1327 # A changeset always belongs to itself, so the changenode lookup
1328 1328 # function for a changenode is identity.
1329 1329 def identity(x):
1330 1330 return x
1331 1331
1332 1332 # A function generating function that sets up the initial environment
1333 1333 # for the inner function.
1334 1334 def filenode_collector(changedfiles):
1335 1335 # This gathers information from each manifestnode included in the
1336 1336 # changegroup about which filenodes the manifest node references
1337 1337 # so we can include those in the changegroup too.
1338 1338 #
1339 1339 # It also remembers which changenode each filenode belongs to. It
1340 1340 # does this by assuming that a filenode belongs to the changenode
1341 1341 # that the first manifest referencing it belongs to.
1342 1342 def collect_msng_filenodes(mnfstnode):
1343 1343 r = mnfst.rev(mnfstnode)
1344 1344 if r - 1 in mnfst.parentrevs(r):
1345 1345 # If the previous rev is one of the parents,
1346 1346 # we only need to see a diff.
1347 1347 deltamf = mnfst.readdelta(mnfstnode)
1348 1348 # For each line in the delta
1349 1349 for f, fnode in deltamf.iteritems():
1350 1350 # And if the file is in the list of files we care
1351 1351 # about.
1352 1352 if f in changedfiles:
1353 1353 # Get the changenode this manifest belongs to
1354 1354 clnode = msng_mnfst_set[mnfstnode]
1355 1355 # Create the set of filenodes for the file if
1356 1356 # there isn't one already.
1357 1357 ndset = msng_filenode_set.setdefault(f, {})
1358 1358 # And set the filenode's changelog node to the
1359 1359 # manifest's if it hasn't been set already.
1360 1360 ndset.setdefault(fnode, clnode)
1361 1361 else:
1362 1362 # Otherwise we need a full manifest.
1363 1363 m = mnfst.read(mnfstnode)
1364 1364 # For every file we care about.
1365 1365 for f in changedfiles:
1366 1366 fnode = m.get(f, None)
1367 1367 # If it's in the manifest
1368 1368 if fnode is not None:
1369 1369 # See comments above.
1370 1370 clnode = msng_mnfst_set[mnfstnode]
1371 1371 ndset = msng_filenode_set.setdefault(f, {})
1372 1372 ndset.setdefault(fnode, clnode)
1373 1373 return collect_msng_filenodes
1374 1374
1375 1375 # If we determine that a particular file or manifest node must be a
1376 1376 # node that the recipient of the changegroup will already have, we can
1377 1377 # also assume the recipient will have all the parents. This function
1378 1378 # prunes them from the set of missing nodes.
1379 1379 def prune(revlog, missingnodes):
1380 1380 hasset = set()
1381 1381 # If a 'missing' filenode thinks it belongs to a changenode we
1382 1382 # assume the recipient must have, then the recipient must have
1383 1383 # that filenode.
1384 1384 for n in missingnodes:
1385 1385 clrev = revlog.linkrev(revlog.rev(n))
1386 1386 if clrev in commonrevs:
1387 1387 hasset.add(n)
1388 1388 for n in hasset:
1389 1389 missingnodes.pop(n, None)
1390 1390 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1391 1391 missingnodes.pop(revlog.node(r), None)
1392 1392
1393 1393 # Add the nodes that were explicitly requested.
1394 1394 def add_extra_nodes(name, nodes):
1395 1395 if not extranodes or name not in extranodes:
1396 1396 return
1397 1397
1398 1398 for node, linknode in extranodes[name]:
1399 1399 if node not in nodes:
1400 1400 nodes[node] = linknode
1401 1401
1402 1402 # Now that we have all theses utility functions to help out and
1403 1403 # logically divide up the task, generate the group.
1404 1404 def gengroup():
1405 1405 # The set of changed files starts empty.
1406 1406 changedfiles = set()
1407 1407 collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)
1408 1408
1409 1409 # Create a changenode group generator that will call our functions
1410 1410 # back to lookup the owning changenode and collect information.
1411 1411 group = cl.group(msng_cl_lst, identity, collect)
1412 1412 for cnt, chnk in enumerate(group):
1413 1413 yield chnk
1414 1414 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
1415 1415 self.ui.progress(_('bundling changes'), None)
1416 1416
1417 1417 prune(mnfst, msng_mnfst_set)
1418 1418 add_extra_nodes(1, msng_mnfst_set)
1419 1419 msng_mnfst_lst = msng_mnfst_set.keys()
1420 1420 # Sort the manifestnodes by revision number.
1421 1421 msng_mnfst_lst.sort(key=mnfst.rev)
1422 1422 # Create a generator for the manifestnodes that calls our lookup
1423 1423 # and data collection functions back.
1424 1424 group = mnfst.group(msng_mnfst_lst,
1425 1425 lambda mnode: msng_mnfst_set[mnode],
1426 1426 filenode_collector(changedfiles))
1427 1427 for cnt, chnk in enumerate(group):
1428 1428 yield chnk
1429 1429 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
1430 1430 self.ui.progress(_('bundling manifests'), None)
1431 1431
1432 1432 # These are no longer needed, dereference and toss the memory for
1433 1433 # them.
1434 1434 msng_mnfst_lst = None
1435 1435 msng_mnfst_set.clear()
1436 1436
1437 1437 if extranodes:
1438 1438 for fname in extranodes:
1439 1439 if isinstance(fname, int):
1440 1440 continue
1441 1441 msng_filenode_set.setdefault(fname, {})
1442 1442 changedfiles.add(fname)
1443 1443 # Go through all our files in order sorted by name.
1444 1444 cnt = 0
1445 1445 for fname in sorted(changedfiles):
1446 1446 filerevlog = self.file(fname)
1447 1447 if not len(filerevlog):
1448 1448 raise util.Abort(_("empty or missing revlog for %s") % fname)
1449 1449 # Toss out the filenodes that the recipient isn't really
1450 1450 # missing.
1451 1451 missingfnodes = msng_filenode_set.pop(fname, {})
1452 1452 prune(filerevlog, missingfnodes)
1453 1453 add_extra_nodes(fname, missingfnodes)
1454 1454 # If any filenodes are left, generate the group for them,
1455 1455 # otherwise don't bother.
1456 1456 if missingfnodes:
1457 1457 yield changegroup.chunkheader(len(fname))
1458 1458 yield fname
1459 1459 # Sort the filenodes by their revision # (topological order)
1460 1460 nodeiter = list(missingfnodes)
1461 1461 nodeiter.sort(key=filerevlog.rev)
1462 1462 # Create a group generator and only pass in a changenode
1463 1463 # lookup function as we need to collect no information
1464 1464 # from filenodes.
1465 1465 group = filerevlog.group(nodeiter,
1466 1466 lambda fnode: missingfnodes[fnode])
1467 1467 for chnk in group:
1468 1468 self.ui.progress(
1469 1469 _('bundling files'), cnt, item=fname, unit=_('chunks'))
1470 1470 cnt += 1
1471 1471 yield chnk
1472 1472 # Signal that no more groups are left.
1473 1473 yield changegroup.closechunk()
1474 1474 self.ui.progress(_('bundling files'), None)
1475 1475
1476 1476 if msng_cl_lst:
1477 1477 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1478 1478
1479 1479 return util.chunkbuffer(gengroup())
1480 1480
1481 1481 def changegroup(self, basenodes, source):
1482 1482 # to avoid a race we use changegroupsubset() (issue1320)
1483 1483 return self.changegroupsubset(basenodes, self.heads(), source)
1484 1484
1485 1485 def _changegroup(self, nodes, source):
1486 1486 """Compute the changegroup of all nodes that we have that a recipient
1487 1487 doesn't. Return a chunkbuffer object whose read() method will return
1488 1488 successive changegroup chunks.
1489 1489
1490 1490 This is much easier than the previous function as we can assume that
1491 1491 the recipient has any changenode we aren't sending them.
1492 1492
1493 1493 nodes is the set of nodes to send"""
1494 1494
1495 1495 self.hook('preoutgoing', throw=True, source=source)
1496 1496
1497 1497 cl = self.changelog
1498 1498 revset = set([cl.rev(n) for n in nodes])
1499 1499 self.changegroupinfo(nodes, source)
1500 1500
1501 1501 def identity(x):
1502 1502 return x
1503 1503
1504 1504 def gennodelst(log):
1505 1505 for r in log:
1506 1506 if log.linkrev(r) in revset:
1507 1507 yield log.node(r)
1508 1508
1509 1509 def lookuplinkrev_func(revlog):
1510 1510 def lookuplinkrev(n):
1511 1511 return cl.node(revlog.linkrev(revlog.rev(n)))
1512 1512 return lookuplinkrev
1513 1513
1514 1514 def gengroup():
1515 1515 '''yield a sequence of changegroup chunks (strings)'''
1516 1516 # construct a list of all changed files
1517 1517 changedfiles = set()
1518 1518 mmfs = {}
1519 1519 collect = changegroup.collector(cl, mmfs, changedfiles)
1520 1520
1521 1521 for cnt, chnk in enumerate(cl.group(nodes, identity, collect)):
1522 1522 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
1523 1523 yield chnk
1524 1524 self.ui.progress(_('bundling changes'), None)
1525 1525
1526 1526 mnfst = self.manifest
1527 1527 nodeiter = gennodelst(mnfst)
1528 1528 for cnt, chnk in enumerate(mnfst.group(nodeiter,
1529 1529 lookuplinkrev_func(mnfst))):
1530 1530 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
1531 1531 yield chnk
1532 1532 self.ui.progress(_('bundling manifests'), None)
1533 1533
1534 1534 cnt = 0
1535 1535 for fname in sorted(changedfiles):
1536 1536 filerevlog = self.file(fname)
1537 1537 if not len(filerevlog):
1538 1538 raise util.Abort(_("empty or missing revlog for %s") % fname)
1539 1539 nodeiter = gennodelst(filerevlog)
1540 1540 nodeiter = list(nodeiter)
1541 1541 if nodeiter:
1542 1542 yield changegroup.chunkheader(len(fname))
1543 1543 yield fname
1544 1544 lookup = lookuplinkrev_func(filerevlog)
1545 1545 for chnk in filerevlog.group(nodeiter, lookup):
1546 1546 self.ui.progress(
1547 1547 _('bundling files'), cnt, item=fname, unit=_('chunks'))
1548 1548 cnt += 1
1549 1549 yield chnk
1550 1550 self.ui.progress(_('bundling files'), None)
1551 1551
1552 1552 yield changegroup.closechunk()
1553 1553
1554 1554 if nodes:
1555 1555 self.hook('outgoing', node=hex(nodes[0]), source=source)
1556 1556
1557 1557 return util.chunkbuffer(gengroup())
1558 1558
1559 1559 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1560 1560 """Add the changegroup returned by source.read() to this repo.
1561 1561 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1562 1562 the URL of the repo where this changegroup is coming from.
1563 1563
1564 1564 Return an integer summarizing the change to this repo:
1565 1565 - nothing changed or no source: 0
1566 1566 - more heads than before: 1+added heads (2..n)
1567 1567 - fewer heads than before: -1-removed heads (-2..-n)
1568 1568 - number of heads stays the same: 1
1569 1569 """
1570 1570 def csmap(x):
1571 1571 self.ui.debug("add changeset %s\n" % short(x))
1572 1572 return len(cl)
1573 1573
1574 1574 def revmap(x):
1575 1575 return cl.rev(x)
1576 1576
1577 1577 if not source:
1578 1578 return 0
1579 1579
1580 1580 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1581 1581
1582 1582 changesets = files = revisions = 0
1583 1583 efiles = set()
1584 1584
1585 1585 # write changelog data to temp files so concurrent readers will not see
1586 1586 # inconsistent view
1587 1587 cl = self.changelog
1588 1588 cl.delayupdate()
1589 1589 oldheads = len(cl.heads())
1590 1590
1591 1591 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
1592 1592 try:
1593 1593 trp = weakref.proxy(tr)
1594 1594 # pull off the changeset group
1595 1595 self.ui.status(_("adding changesets\n"))
1596 1596 clstart = len(cl)
1597 1597 class prog(object):
1598 1598 step = _('changesets')
1599 1599 count = 1
1600 1600 ui = self.ui
1601 1601 total = None
1602 1602 def __call__(self):
1603 1603 self.ui.progress(self.step, self.count, unit=_('chunks'),
1604 1604 total=self.total)
1605 1605 self.count += 1
1606 1606 pr = prog()
1607 1607 chunkiter = changegroup.chunkiter(source, progress=pr)
1608 1608 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1609 1609 raise util.Abort(_("received changelog group is empty"))
1610 1610 clend = len(cl)
1611 1611 changesets = clend - clstart
1612 1612 for c in xrange(clstart, clend):
1613 1613 efiles.update(self[c].files())
1614 1614 efiles = len(efiles)
1615 1615 self.ui.progress(_('changesets'), None)
1616 1616
1617 1617 # pull off the manifest group
1618 1618 self.ui.status(_("adding manifests\n"))
1619 1619 pr.step = _('manifests')
1620 1620 pr.count = 1
1621 1621 pr.total = changesets # manifests <= changesets
1622 1622 chunkiter = changegroup.chunkiter(source, progress=pr)
1623 1623 # no need to check for empty manifest group here:
1624 1624 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1625 1625 # no new manifest will be created and the manifest group will
1626 1626 # be empty during the pull
1627 1627 self.manifest.addgroup(chunkiter, revmap, trp)
1628 1628 self.ui.progress(_('manifests'), None)
1629 1629
1630 1630 needfiles = {}
1631 1631 if self.ui.configbool('server', 'validate', default=False):
1632 1632 # validate incoming csets have their manifests
1633 1633 for cset in xrange(clstart, clend):
1634 1634 mfest = self.changelog.read(self.changelog.node(cset))[0]
1635 1635 mfest = self.manifest.readdelta(mfest)
1636 1636 # store file nodes we must see
1637 1637 for f, n in mfest.iteritems():
1638 1638 needfiles.setdefault(f, set()).add(n)
1639 1639
1640 1640 # process the files
1641 1641 self.ui.status(_("adding file changes\n"))
1642 1642 pr.step = 'files'
1643 1643 pr.count = 1
1644 1644 pr.total = efiles
1645 1645 while 1:
1646 1646 f = changegroup.getchunk(source)
1647 1647 if not f:
1648 1648 break
1649 1649 self.ui.debug("adding %s revisions\n" % f)
1650 1650 pr()
1651 1651 fl = self.file(f)
1652 1652 o = len(fl)
1653 1653 chunkiter = changegroup.chunkiter(source)
1654 1654 if fl.addgroup(chunkiter, revmap, trp) is None:
1655 1655 raise util.Abort(_("received file revlog group is empty"))
1656 1656 revisions += len(fl) - o
1657 1657 files += 1
1658 1658 if f in needfiles:
1659 1659 needs = needfiles[f]
1660 1660 for new in xrange(o, len(fl)):
1661 1661 n = fl.node(new)
1662 1662 if n in needs:
1663 1663 needs.remove(n)
1664 1664 if not needs:
1665 1665 del needfiles[f]
1666 1666 self.ui.progress(_('files'), None)
1667 1667
1668 1668 for f, needs in needfiles.iteritems():
1669 1669 fl = self.file(f)
1670 1670 for n in needs:
1671 1671 try:
1672 1672 fl.rev(n)
1673 1673 except error.LookupError:
1674 1674 raise util.Abort(
1675 1675 _('missing file data for %s:%s - run hg verify') %
1676 1676 (f, hex(n)))
1677 1677
1678 1678 newheads = len(cl.heads())
1679 1679 heads = ""
1680 1680 if oldheads and newheads != oldheads:
1681 1681 heads = _(" (%+d heads)") % (newheads - oldheads)
1682 1682
1683 1683 self.ui.status(_("added %d changesets"
1684 1684 " with %d changes to %d files%s\n")
1685 1685 % (changesets, revisions, files, heads))
1686 1686
1687 1687 if changesets > 0:
1688 1688 p = lambda: cl.writepending() and self.root or ""
1689 1689 self.hook('pretxnchangegroup', throw=True,
1690 1690 node=hex(cl.node(clstart)), source=srctype,
1691 1691 url=url, pending=p)
1692 1692
1693 1693 # make changelog see real files again
1694 1694 cl.finalize(trp)
1695 1695
1696 1696 tr.close()
1697 1697 finally:
1698 1698 tr.release()
1699 1699 if lock:
1700 1700 lock.release()
1701 1701
1702 1702 if changesets > 0:
1703 1703 # forcefully update the on-disk branch cache
1704 1704 self.ui.debug("updating the branch cache\n")
1705 1705 self.updatebranchcache()
1706 1706 self.hook("changegroup", node=hex(cl.node(clstart)),
1707 1707 source=srctype, url=url)
1708 1708
1709 1709 for i in xrange(clstart, clend):
1710 1710 self.hook("incoming", node=hex(cl.node(i)),
1711 1711 source=srctype, url=url)
1712 1712
1713 1713 # never return 0 here:
1714 1714 if newheads < oldheads:
1715 1715 return newheads - oldheads - 1
1716 1716 else:
1717 1717 return newheads - oldheads + 1
1718 1718
1719 1719
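
The return value is deliberately never 0 once a changegroup has been processed: positive values encode added heads (1 meaning the head count is unchanged), negative values encode removed heads. A tiny illustrative helper that converts it back into a plain head delta:

    def headdelta(ret):
        '''turn a non-zero addchangegroup()/push() result into the change
        in head count'''
        if ret > 0:
            return ret - 1      # 1 -> unchanged, 2 -> one head added, ...
        return ret + 1          # -2 -> one head removed, ...

    assert headdelta(1) == 0
    assert headdelta(3) == 2
    assert headdelta(-2) == -1
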
1720 1720 def stream_in(self, remote):
1721 1721 fp = remote.stream_out()
1722 1722 l = fp.readline()
1723 1723 try:
1724 1724 resp = int(l)
1725 1725 except ValueError:
1726 1726 raise error.ResponseError(
1727 1727 _('Unexpected response from remote server:'), l)
1728 1728 if resp == 1:
1729 1729 raise util.Abort(_('operation forbidden by server'))
1730 1730 elif resp == 2:
1731 1731 raise util.Abort(_('locking the remote repository failed'))
1732 1732 elif resp != 0:
1733 1733 raise util.Abort(_('the server sent an unknown error code'))
1734 1734 self.ui.status(_('streaming all changes\n'))
1735 1735 l = fp.readline()
1736 1736 try:
1737 1737 total_files, total_bytes = map(int, l.split(' ', 1))
1738 1738 except (ValueError, TypeError):
1739 1739 raise error.ResponseError(
1740 1740 _('Unexpected response from remote server:'), l)
1741 1741 self.ui.status(_('%d files to transfer, %s of data\n') %
1742 1742 (total_files, util.bytecount(total_bytes)))
1743 1743 start = time.time()
1744 1744 for i in xrange(total_files):
1745 1745 # XXX doesn't support '\n' or '\r' in filenames
1746 1746 l = fp.readline()
1747 1747 try:
1748 1748 name, size = l.split('\0', 1)
1749 1749 size = int(size)
1750 1750 except (ValueError, TypeError):
1751 1751 raise error.ResponseError(
1752 1752 _('Unexpected response from remote server:'), l)
1753 1753 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1754 1754 # for backwards compat, name was partially encoded
1755 1755 ofp = self.sopener(store.decodedir(name), 'w')
1756 1756 for chunk in util.filechunkiter(fp, limit=size):
1757 1757 ofp.write(chunk)
1758 1758 ofp.close()
1759 1759 elapsed = time.time() - start
1760 1760 if elapsed <= 0:
1761 1761 elapsed = 0.001
1762 1762 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1763 1763 (util.bytecount(total_bytes), elapsed,
1764 1764 util.bytecount(total_bytes / elapsed)))
1765 1765 self.invalidate()
1766 1766 return len(self.heads()) + 1
1767 1767
1768 1768 def clone(self, remote, heads=[], stream=False):
1769 1769 '''clone remote repository.
1770 1770
1771 1771 keyword arguments:
1772 1772 heads: list of revs to clone (forces use of pull)
1773 1773 stream: use streaming clone if possible'''
1774 1774
1775 1775 # now, all clients that can request uncompressed clones can
1776 1776 # read repo formats supported by all servers that can serve
1777 1777 # them.
1778 1778
1779 1779 # if revlog format changes, client will have to check version
1780 1780 # and format flags on "stream" capability, and use
1781 1781 # uncompressed only if compatible.
1782 1782
1783 1783 if stream and not heads and remote.capable('stream'):
1784 1784 return self.stream_in(remote)
1785 1785 return self.pull(remote, heads)
1786 1786
1787 1787 def pushkey(self, namespace, key, old, new):
1788 1788 return pushkey.push(self, namespace, key, old, new)
1789 1789
1790 1790 def listkeys(self, namespace):
1791 1791 return pushkey.list(self, namespace)
1792 1792
1793 1793 # used to avoid circular references so destructors work
1794 1794 def aftertrans(files):
1795 1795 renamefiles = [tuple(t) for t in files]
1796 1796 def a():
1797 1797 for src, dest in renamefiles:
1798 1798 util.rename(src, dest)
1799 1799 return a
1800 1800
1801 1801 def instance(ui, path, create):
1802 1802 return localrepository(ui, util.drop_scheme('file', path), create)
1803 1803
1804 1804 def islocal(path):
1805 1805 return True