dirstate: add __contains__ and make __getitem__ more useful...
Matt Mackall
r4906:30847b8a default
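
The hunks below update two call sites (the convert extension's hg sink and the gpg extension) to use the new dirstate membership test this changeset introduces; the dirstate.py change itself is not part of this view. A minimal sketch of the call-site idiom, assuming a repo object opened with hg.repository(ui, path) -- the helper names are illustrative only:

    def track_if_unknown(repo, f):
        # old idiom: ask for the one-letter state; '?' means untracked
        if repo.dirstate.state(f) == '?':
            repo.dirstate.add(f)

    def track_if_unknown_contains(repo, f):
        # new idiom: __contains__ turns the untracked check into a
        # plain membership test
        if f not in repo.dirstate:
            repo.dirstate.add(f)

The membership form reads more naturally and no longer depends on the '?' sentinel value at the call site.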
@@ -1,97 +1,97 @@
1 1 # hg backend for convert extension
2 2
3 3 import os, time
4 4 from mercurial import hg
5 5
6 6 from common import NoRepo, converter_sink
7 7
8 8 class convert_mercurial(converter_sink):
9 9 def __init__(self, ui, path):
10 10 self.path = path
11 11 self.ui = ui
12 12 try:
13 13 self.repo = hg.repository(self.ui, path)
14 14 except:
15 15 raise NoRepo("could not open hg repo %s" % path)
16 16
17 17 def mapfile(self):
18 18 return os.path.join(self.path, ".hg", "shamap")
19 19
20 20 def authorfile(self):
21 21 return os.path.join(self.path, ".hg", "authormap")
22 22
23 23 def getheads(self):
24 24 h = self.repo.changelog.heads()
25 25 return [ hg.hex(x) for x in h ]
26 26
27 27 def putfile(self, f, e, data):
28 28 self.repo.wwrite(f, data, e)
29 -  if self.repo.dirstate.state(f) == '?':
29 +  if f not in self.repo.dirstate:
30 30 self.repo.dirstate.add(f)
31 31
32 32 def copyfile(self, source, dest):
33 33 self.repo.copy(source, dest)
34 34
35 35 def delfile(self, f):
36 36 try:
37 37 os.unlink(self.repo.wjoin(f))
38 38 #self.repo.remove([f])
39 39 except:
40 40 pass
41 41
42 42 def putcommit(self, files, parents, commit):
43 43 seen = {}
44 44 pl = []
45 45 for p in parents:
46 46 if p not in seen:
47 47 pl.append(p)
48 48 seen[p] = 1
49 49 parents = pl
50 50
51 51 if len(parents) < 2: parents.append("0" * 40)
52 52 if len(parents) < 2: parents.append("0" * 40)
53 53 p2 = parents.pop(0)
54 54
55 55 text = commit.desc
56 56 extra = {}
57 57 if commit.branch:
58 58 extra['branch'] = commit.branch
59 59 if commit.rev:
60 60 extra['convert_revision'] = commit.rev
61 -
61 +
62 62 while parents:
63 63 p1 = p2
64 64 p2 = parents.pop(0)
65 65 a = self.repo.rawcommit(files, text, commit.author, commit.date,
66 66 hg.bin(p1), hg.bin(p2), extra=extra)
67 67 text = "(octopus merge fixup)\n"
68 68 p2 = hg.hex(self.repo.changelog.tip())
69 69
70 70 return p2
71 71
72 72 def puttags(self, tags):
73 73 try:
74 74 old = self.repo.wfile(".hgtags").read()
75 75 oldlines = old.splitlines(1)
76 76 oldlines.sort()
77 77 except:
78 78 oldlines = []
79 79
80 80 k = tags.keys()
81 81 k.sort()
82 82 newlines = []
83 83 for tag in k:
84 84 newlines.append("%s %s\n" % (tags[tag], tag))
85 85
86 86 newlines.sort()
87 87
88 88 if newlines != oldlines:
89 89 self.ui.status("updating tags\n")
90 90 f = self.repo.wfile(".hgtags", "w")
91 91 f.write("".join(newlines))
92 92 f.close()
93 93 if not oldlines: self.repo.add([".hgtags"])
94 94 date = "%s 0" % int(time.mktime(time.gmtime()))
95 95 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
96 96 date, self.repo.changelog.tip(), hg.nullid)
97 97 return hg.hex(self.repo.changelog.tip())
@@ -1,280 +1,280 @@
1 1 # GnuPG signing extension for Mercurial
2 2 #
3 3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, tempfile, binascii
9 9 from mercurial import util
10 10 from mercurial import node as hgnode
11 11 from mercurial.i18n import _
12 12
13 13 class gpg:
14 14 def __init__(self, path, key=None):
15 15 self.path = path
16 16 self.key = (key and " --local-user \"%s\"" % key) or ""
17 17
18 18 def sign(self, data):
19 19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 20 return util.filter(data, gpgcmd)
21 21
22 22 def verify(self, data, sig):
23 23 """ returns of the good and bad signatures"""
24 24 sigfile = datafile = None
25 25 try:
26 26 # create temporary files
27 27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 28 fp = os.fdopen(fd, 'wb')
29 29 fp.write(sig)
30 30 fp.close()
31 31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 32 fp = os.fdopen(fd, 'wb')
33 33 fp.write(data)
34 34 fp.close()
35 35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 37 ret = util.filter("", gpgcmd)
38 38 finally:
39 39 for f in (sigfile, datafile):
40 40 try:
41 41 if f: os.unlink(f)
42 42 except: pass
43 43 keys = []
44 44 key, fingerprint = None, None
45 45 err = ""
46 46 for l in ret.splitlines():
47 47 # see DETAILS in the gnupg documentation
48 48 # filter the logger output
49 49 if not l.startswith("[GNUPG:]"):
50 50 continue
51 51 l = l[9:]
52 52 if l.startswith("ERRSIG"):
53 53 err = _("error while verifying signature")
54 54 break
55 55 elif l.startswith("VALIDSIG"):
56 56 # fingerprint of the primary key
57 57 fingerprint = l.split()[10]
58 58 elif (l.startswith("GOODSIG") or
59 59 l.startswith("EXPSIG") or
60 60 l.startswith("EXPKEYSIG") or
61 61 l.startswith("BADSIG")):
62 62 if key is not None:
63 63 keys.append(key + [fingerprint])
64 64 key = l.split(" ", 2)
65 65 fingerprint = None
66 66 if err:
67 67 return err, []
68 68 if key is not None:
69 69 keys.append(key + [fingerprint])
70 70 return err, keys
71 71
72 72 def newgpg(ui, **opts):
73 73 """create a new gpg instance"""
74 74 gpgpath = ui.config("gpg", "cmd", "gpg")
75 75 gpgkey = opts.get('key')
76 76 if not gpgkey:
77 77 gpgkey = ui.config("gpg", "key", None)
78 78 return gpg(gpgpath, gpgkey)
79 79
80 80 def sigwalk(repo):
81 81 """
82 82 walk over every sig, yielding a pair
83 83 ((node, version, sig), (filename, linenumber))
84 84 """
85 85 def parsefile(fileiter, context):
86 86 ln = 1
87 87 for l in fileiter:
88 88 if not l:
89 89 continue
90 90 yield (l.split(" ", 2), (context, ln))
91 91 ln +=1
92 92
93 93 fl = repo.file(".hgsigs")
94 94 h = fl.heads()
95 95 h.reverse()
96 96 # read the heads
97 97 for r in h:
98 98 fn = ".hgsigs|%s" % hgnode.short(r)
99 99 for item in parsefile(fl.read(r).splitlines(), fn):
100 100 yield item
101 101 try:
102 102 # read local signatures
103 103 fn = "localsigs"
104 104 for item in parsefile(repo.opener(fn), fn):
105 105 yield item
106 106 except IOError:
107 107 pass
108 108
109 109 def getkeys(ui, repo, mygpg, sigdata, context):
110 110 """get the keys who signed a data"""
111 111 fn, ln = context
112 112 node, version, sig = sigdata
113 113 prefix = "%s:%d" % (fn, ln)
114 114 node = hgnode.bin(node)
115 115
116 116 data = node2txt(repo, node, version)
117 117 sig = binascii.a2b_base64(sig)
118 118 err, keys = mygpg.verify(data, sig)
119 119 if err:
120 120 ui.warn("%s:%d %s\n" % (fn, ln , err))
121 121 return None
122 122
123 123 validkeys = []
124 124 # warn for expired key and/or sigs
125 125 for key in keys:
126 126 if key[0] == "BADSIG":
127 127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
128 128 continue
129 129 if key[0] == "EXPSIG":
130 130 ui.write(_("%s Note: Signature has expired"
131 131 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 132 elif key[0] == "EXPKEYSIG":
133 133 ui.write(_("%s Note: This key has expired"
134 134 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 135 validkeys.append((key[1], key[2], key[3]))
136 136 return validkeys
137 137
138 138 def sigs(ui, repo):
139 139 """list signed changesets"""
140 140 mygpg = newgpg(ui)
141 141 revs = {}
142 142
143 143 for data, context in sigwalk(repo):
144 144 node, version, sig = data
145 145 fn, ln = context
146 146 try:
147 147 n = repo.lookup(node)
148 148 except KeyError:
149 149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
150 150 continue
151 151 r = repo.changelog.rev(n)
152 152 keys = getkeys(ui, repo, mygpg, data, context)
153 153 if not keys:
154 154 continue
155 155 revs.setdefault(r, [])
156 156 revs[r].extend(keys)
157 157 nodes = list(revs)
158 158 nodes.reverse()
159 159 for rev in nodes:
160 160 for k in revs[rev]:
161 161 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
162 162 ui.write("%-30s %s\n" % (keystr(ui, k), r))
163 163
164 164 def check(ui, repo, rev):
165 165 """verify all the signatures there may be for a particular revision"""
166 166 mygpg = newgpg(ui)
167 167 rev = repo.lookup(rev)
168 168 hexrev = hgnode.hex(rev)
169 169 keys = []
170 170
171 171 for data, context in sigwalk(repo):
172 172 node, version, sig = data
173 173 if node == hexrev:
174 174 k = getkeys(ui, repo, mygpg, data, context)
175 175 if k:
176 176 keys.extend(k)
177 177
178 178 if not keys:
179 179 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
180 180 return
181 181
182 182 # print summary
183 183 ui.write("%s is signed by:\n" % hgnode.short(rev))
184 184 for key in keys:
185 185 ui.write(" %s\n" % keystr(ui, key))
186 186
187 187 def keystr(ui, key):
188 188 """associate a string to a key (username, comment)"""
189 189 keyid, user, fingerprint = key
190 190 comment = ui.config("gpg", fingerprint, None)
191 191 if comment:
192 192 return "%s (%s)" % (user, comment)
193 193 else:
194 194 return user
195 195
196 196 def sign(ui, repo, *revs, **opts):
197 197 """add a signature for the current or given revision
198 198
199 199 If no revision is given, the parent of the working directory is used,
200 200 or tip if no revision is checked out.
201 201 """
202 202
203 203 mygpg = newgpg(ui, **opts)
204 204 sigver = "0"
205 205 sigmessage = ""
206 206 if revs:
207 207 nodes = [repo.lookup(n) for n in revs]
208 208 else:
209 209 nodes = [node for node in repo.dirstate.parents()
210 210 if node != hgnode.nullid]
211 211 if len(nodes) > 1:
212 212 raise util.Abort(_('uncommitted merge - please provide a '
213 213 'specific revision'))
214 214 if not nodes:
215 215 nodes = [repo.changelog.tip()]
216 216
217 217 for n in nodes:
218 218 hexnode = hgnode.hex(n)
219 219 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
220 220 hgnode.short(n)))
221 221 # build data
222 222 data = node2txt(repo, n, sigver)
223 223 sig = mygpg.sign(data)
224 224 if not sig:
225 225 raise util.Abort(_("Error while signing"))
226 226 sig = binascii.b2a_base64(sig)
227 227 sig = sig.replace("\n", "")
228 228 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
229 229
230 230 # write it
231 231 if opts['local']:
232 232 repo.opener("localsigs", "ab").write(sigmessage)
233 233 return
234 234
235 235 for x in repo.status()[:5]:
236 236 if ".hgsigs" in x and not opts["force"]:
237 237 raise util.Abort(_("working copy of .hgsigs is changed "
238 238 "(please commit .hgsigs manually "
239 239 "or use --force)"))
240 240
241 241 repo.wfile(".hgsigs", "ab").write(sigmessage)
242 242
243 -  if repo.dirstate.state(".hgsigs") == '?':
243 +  if '.hgsigs' not in repo.dirstate:
244 244 repo.add([".hgsigs"])
245 245
246 246 if opts["no_commit"]:
247 247 return
248 248
249 249 message = opts['message']
250 250 if not message:
251 251 message = "\n".join([_("Added signature for changeset %s")
252 252 % hgnode.hex(n)
253 253 for n in nodes])
254 254 try:
255 255 repo.commit([".hgsigs"], message, opts['user'], opts['date'])
256 256 except ValueError, inst:
257 257 raise util.Abort(str(inst))
258 258
259 259 def node2txt(repo, node, ver):
260 260 """map a manifest into some text"""
261 261 if ver == "0":
262 262 return "%s\n" % hgnode.hex(node)
263 263 else:
264 264 raise util.Abort(_("unknown signature version"))
265 265
266 266 cmdtable = {
267 267 "sign":
268 268 (sign,
269 269 [('l', 'local', None, _('make the signature local')),
270 270 ('f', 'force', None, _('sign even if the sigfile is modified')),
271 271 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
272 272 ('m', 'message', '', _('commit message')),
273 273 ('d', 'date', '', _('date code')),
274 274 ('u', 'user', '', _('user')),
275 275 ('k', 'key', '', _('the key id to sign with'))],
276 276 _('hg sign [OPTION]... [REVISION]...')),
277 277 "sigcheck": (check, [], _('hg sigcheck REVISION')),
278 278 "sigs": (sigs, [], _('hg sigs')),
279 279 }
280 280
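
The mq hunk that follows shows surrounding context only; no dirstate.state() call sites appear in the visible portion. For the other half of the changeset title, a hedged sketch of the __getitem__ side: the dirstate.py hunk is not included in this view, so the exact return values are an assumption here. dirstate[f] is taken to return the same one-letter status that state(f) returned, with '?' for untracked files; the describe helper below is illustrative only:

    def describe(repo, f):
        # assumption: repo.dirstate[f] yields 'n' (normal), 'a' (added),
        # 'r' (removed), 'm' (merge) or '?' (untracked), mirroring the
        # old repo.dirstate.state(f) call
        st = repo.dirstate[f]
        if st == '?':
            return "%s is not tracked" % f
        return "%s has dirstate entry %s" % (f, st)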
@@ -1,2235 +1,2235 @@
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 34 from mercurial import repair
35 35 import os, sys, re, errno
36 36
37 37 commands.norepo += " qclone qversion"
38 38
39 39 # Patch names look like unix file names.
40 40 # They must be joinable with queue directory and result in the patch path.
41 41 normname = util.normpath
42 42
43 43 class statusentry:
44 44 def __init__(self, rev, name=None):
45 45 if not name:
46 46 fields = rev.split(':', 1)
47 47 if len(fields) == 2:
48 48 self.rev, self.name = fields
49 49 else:
50 50 self.rev, self.name = None, None
51 51 else:
52 52 self.rev, self.name = rev, name
53 53
54 54 def __str__(self):
55 55 return self.rev + ':' + self.name
56 56
57 57 class queue:
58 58 def __init__(self, ui, path, patchdir=None):
59 59 self.basepath = path
60 60 self.path = patchdir or os.path.join(path, "patches")
61 61 self.opener = util.opener(self.path)
62 62 self.ui = ui
63 63 self.applied = []
64 64 self.full_series = []
65 65 self.applied_dirty = 0
66 66 self.series_dirty = 0
67 67 self.series_path = "series"
68 68 self.status_path = "status"
69 69 self.guards_path = "guards"
70 70 self.active_guards = None
71 71 self.guards_dirty = False
72 72 self._diffopts = None
73 73
74 74 if os.path.exists(self.join(self.series_path)):
75 75 self.full_series = self.opener(self.series_path).read().splitlines()
76 76 self.parse_series()
77 77
78 78 if os.path.exists(self.join(self.status_path)):
79 79 lines = self.opener(self.status_path).read().splitlines()
80 80 self.applied = [statusentry(l) for l in lines]
81 81
82 82 def diffopts(self):
83 83 if self._diffopts is None:
84 84 self._diffopts = patch.diffopts(self.ui)
85 85 return self._diffopts
86 86
87 87 def join(self, *p):
88 88 return os.path.join(self.path, *p)
89 89
90 90 def find_series(self, patch):
91 91 pre = re.compile("(\s*)([^#]+)")
92 92 index = 0
93 93 for l in self.full_series:
94 94 m = pre.match(l)
95 95 if m:
96 96 s = m.group(2)
97 97 s = s.rstrip()
98 98 if s == patch:
99 99 return index
100 100 index += 1
101 101 return None
102 102
103 103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104 104
105 105 def parse_series(self):
106 106 self.series = []
107 107 self.series_guards = []
108 108 for l in self.full_series:
109 109 h = l.find('#')
110 110 if h == -1:
111 111 patch = l
112 112 comment = ''
113 113 elif h == 0:
114 114 continue
115 115 else:
116 116 patch = l[:h]
117 117 comment = l[h:]
118 118 patch = patch.strip()
119 119 if patch:
120 120 if patch in self.series:
121 121 raise util.Abort(_('%s appears more than once in %s') %
122 122 (patch, self.join(self.series_path)))
123 123 self.series.append(patch)
124 124 self.series_guards.append(self.guard_re.findall(comment))
125 125
126 126 def check_guard(self, guard):
127 127 bad_chars = '# \t\r\n\f'
128 128 first = guard[0]
129 129 for c in '-+':
130 130 if first == c:
131 131 return (_('guard %r starts with invalid character: %r') %
132 132 (guard, c))
133 133 for c in bad_chars:
134 134 if c in guard:
135 135 return _('invalid character in guard %r: %r') % (guard, c)
136 136
137 137 def set_active(self, guards):
138 138 for guard in guards:
139 139 bad = self.check_guard(guard)
140 140 if bad:
141 141 raise util.Abort(bad)
142 142 guards = dict.fromkeys(guards).keys()
143 143 guards.sort()
144 144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 145 self.active_guards = guards
146 146 self.guards_dirty = True
147 147
148 148 def active(self):
149 149 if self.active_guards is None:
150 150 self.active_guards = []
151 151 try:
152 152 guards = self.opener(self.guards_path).read().split()
153 153 except IOError, err:
154 154 if err.errno != errno.ENOENT: raise
155 155 guards = []
156 156 for i, guard in enumerate(guards):
157 157 bad = self.check_guard(guard)
158 158 if bad:
159 159 self.ui.warn('%s:%d: %s\n' %
160 160 (self.join(self.guards_path), i + 1, bad))
161 161 else:
162 162 self.active_guards.append(guard)
163 163 return self.active_guards
164 164
165 165 def set_guards(self, idx, guards):
166 166 for g in guards:
167 167 if len(g) < 2:
168 168 raise util.Abort(_('guard %r too short') % g)
169 169 if g[0] not in '-+':
170 170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 171 bad = self.check_guard(g[1:])
172 172 if bad:
173 173 raise util.Abort(bad)
174 174 drop = self.guard_re.sub('', self.full_series[idx])
175 175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 176 self.parse_series()
177 177 self.series_dirty = True
178 178
179 179 def pushable(self, idx):
180 180 if isinstance(idx, str):
181 181 idx = self.series.index(idx)
182 182 patchguards = self.series_guards[idx]
183 183 if not patchguards:
184 184 return True, None
185 185 default = False
186 186 guards = self.active()
187 187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 188 if exactneg:
189 189 return False, exactneg[0]
190 190 pos = [g for g in patchguards if g[0] == '+']
191 191 exactpos = [g for g in pos if g[1:] in guards]
192 192 if pos:
193 193 if exactpos:
194 194 return True, exactpos[0]
195 195 return False, pos
196 196 return True, ''
197 197
198 198 def explain_pushable(self, idx, all_patches=False):
199 199 write = all_patches and self.ui.write or self.ui.warn
200 200 if all_patches or self.ui.verbose:
201 201 if isinstance(idx, str):
202 202 idx = self.series.index(idx)
203 203 pushable, why = self.pushable(idx)
204 204 if all_patches and pushable:
205 205 if why is None:
206 206 write(_('allowing %s - no guards in effect\n') %
207 207 self.series[idx])
208 208 else:
209 209 if not why:
210 210 write(_('allowing %s - no matching negative guards\n') %
211 211 self.series[idx])
212 212 else:
213 213 write(_('allowing %s - guarded by %r\n') %
214 214 (self.series[idx], why))
215 215 if not pushable:
216 216 if why:
217 217 write(_('skipping %s - guarded by %r\n') %
218 218 (self.series[idx], why))
219 219 else:
220 220 write(_('skipping %s - no matching guards\n') %
221 221 self.series[idx])
222 222
223 223 def save_dirty(self):
224 224 def write_list(items, path):
225 225 fp = self.opener(path, 'w')
226 226 for i in items:
227 227 print >> fp, i
228 228 fp.close()
229 229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232 232
233 233 def readheaders(self, patch):
234 234 def eatdiff(lines):
235 235 while lines:
236 236 l = lines[-1]
237 237 if (l.startswith("diff -") or
238 238 l.startswith("Index:") or
239 239 l.startswith("===========")):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243 def eatempty(lines):
244 244 while lines:
245 245 l = lines[-1]
246 246 if re.match('\s*$', l):
247 247 del lines[-1]
248 248 else:
249 249 break
250 250
251 251 pf = self.join(patch)
252 252 message = []
253 253 comments = []
254 254 user = None
255 255 date = None
256 256 format = None
257 257 subject = None
258 258 diffstart = 0
259 259
260 260 for line in file(pf):
261 261 line = line.rstrip()
262 262 if line.startswith('diff --git'):
263 263 diffstart = 2
264 264 break
265 265 if diffstart:
266 266 if line.startswith('+++ '):
267 267 diffstart = 2
268 268 break
269 269 if line.startswith("--- "):
270 270 diffstart = 1
271 271 continue
272 272 elif format == "hgpatch":
273 273 # parse values when importing the result of an hg export
274 274 if line.startswith("# User "):
275 275 user = line[7:]
276 276 elif line.startswith("# Date "):
277 277 date = line[7:]
278 278 elif not line.startswith("# ") and line:
279 279 message.append(line)
280 280 format = None
281 281 elif line == '# HG changeset patch':
282 282 format = "hgpatch"
283 283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 284 line.startswith("subject: "))):
285 285 subject = line[9:]
286 286 format = "tag"
287 287 elif (format != "tagdone" and (line.startswith("From: ") or
288 288 line.startswith("from: "))):
289 289 user = line[6:]
290 290 format = "tag"
291 291 elif format == "tag" and line == "":
292 292 # when looking for tags (subject: from: etc) they
293 293 # end once you find a blank line in the source
294 294 format = "tagdone"
295 295 elif message or line:
296 296 message.append(line)
297 297 comments.append(line)
298 298
299 299 eatdiff(message)
300 300 eatdiff(comments)
301 301 eatempty(message)
302 302 eatempty(comments)
303 303
304 304 # make sure message isn't empty
305 305 if format and format.startswith("tag") and subject:
306 306 message.insert(0, "")
307 307 message.insert(0, subject)
308 308 return (message, comments, user, date, diffstart > 1)
309 309
310 310 def removeundo(self, repo):
311 311 undo = repo.sjoin('undo')
312 312 if not os.path.exists(undo):
313 313 return
314 314 try:
315 315 os.unlink(undo)
316 316 except OSError, inst:
317 317 self.ui.warn('error removing undo: %s\n' % str(inst))
318 318
319 319 def printdiff(self, repo, node1, node2=None, files=None,
320 320 fp=None, changes=None, opts={}):
321 321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322 322
323 323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 324 fp=fp, changes=changes, opts=self.diffopts())
325 325
326 326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
327 327 # first try just applying the patch
328 328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 329 strict=True, merge=rev, wlock=wlock)
330 330
331 331 if err == 0:
332 332 return (err, n)
333 333
334 334 if n is None:
335 335 raise util.Abort(_("apply failed for patch %s") % patch)
336 336
337 337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 338
339 339 # apply failed, strip away that rev and merge.
340 340 hg.clean(repo, head, wlock=wlock)
341 341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
342 342
343 343 ctx = repo.changectx(rev)
344 344 ret = hg.merge(repo, rev, wlock=wlock)
345 345 if ret:
346 346 raise util.Abort(_("update returned %d") % ret)
347 347 n = repo.commit(None, ctx.description(), ctx.user(),
348 348 force=1, wlock=wlock)
349 349 if n == None:
350 350 raise util.Abort(_("repo commit failed"))
351 351 try:
352 352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 353 except:
354 354 raise util.Abort(_("unable to read %s") % patch)
355 355
356 356 patchf = self.opener(patch, "w")
357 357 if comments:
358 358 comments = "\n".join(comments) + '\n\n'
359 359 patchf.write(comments)
360 360 self.printdiff(repo, head, n, fp=patchf)
361 361 patchf.close()
362 362 self.removeundo(repo)
363 363 return (0, n)
364 364
365 365 def qparents(self, repo, rev=None):
366 366 if rev is None:
367 367 (p1, p2) = repo.dirstate.parents()
368 368 if p2 == revlog.nullid:
369 369 return p1
370 370 if len(self.applied) == 0:
371 371 return None
372 372 return revlog.bin(self.applied[-1].rev)
373 373 pp = repo.changelog.parents(rev)
374 374 if pp[1] != revlog.nullid:
375 375 arevs = [ x.rev for x in self.applied ]
376 376 p0 = revlog.hex(pp[0])
377 377 p1 = revlog.hex(pp[1])
378 378 if p0 in arevs:
379 379 return pp[0]
380 380 if p1 in arevs:
381 381 return pp[1]
382 382 return pp[0]
383 383
384 384 def mergepatch(self, repo, mergeq, series, wlock):
385 385 if len(self.applied) == 0:
386 386 # each of the patches merged in will have two parents. This
387 387 # can confuse the qrefresh, qdiff, and strip code because it
388 388 # needs to know which parent is actually in the patch queue.
389 389 # so, we insert a merge marker with only one parent. This way
390 390 # the first patch in the queue is never a merge patch
391 391 #
392 392 pname = ".hg.patches.merge.marker"
393 393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
394 394 wlock=wlock)
395 395 self.removeundo(repo)
396 396 self.applied.append(statusentry(revlog.hex(n), pname))
397 397 self.applied_dirty = 1
398 398
399 399 head = self.qparents(repo)
400 400
401 401 for patch in series:
402 402 patch = mergeq.lookup(patch, strict=True)
403 403 if not patch:
404 404 self.ui.warn("patch %s does not exist\n" % patch)
405 405 return (1, None)
406 406 pushable, reason = self.pushable(patch)
407 407 if not pushable:
408 408 self.explain_pushable(patch, all_patches=True)
409 409 continue
410 410 info = mergeq.isapplied(patch)
411 411 if not info:
412 412 self.ui.warn("patch %s is not applied\n" % patch)
413 413 return (1, None)
414 414 rev = revlog.bin(info[1])
415 415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
416 416 if head:
417 417 self.applied.append(statusentry(revlog.hex(head), patch))
418 418 self.applied_dirty = 1
419 419 if err:
420 420 return (err, head)
421 421 self.save_dirty()
422 422 return (0, head)
423 423
424 424 def patch(self, repo, patchfile):
425 425 '''Apply patchfile to the working directory.
426 426 patchfile: file name of patch'''
427 427 files = {}
428 428 try:
429 429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 430 files=files)
431 431 except Exception, inst:
432 432 self.ui.note(str(inst) + '\n')
433 433 if not self.ui.verbose:
434 434 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 435 return (False, files, False)
436 436
437 437 return (True, files, fuzz)
438 438
439 439 def apply(self, repo, series, list=False, update_status=True,
440 440 strict=False, patchdir=None, merge=None, wlock=None,
441 441 all_files={}):
442 442 if not wlock:
443 443 wlock = repo.wlock()
444 444 lock = repo.lock()
445 445 tr = repo.transaction()
446 446 try:
447 447 ret = self._apply(tr, repo, series, list, update_status,
448 448 strict, patchdir, merge, wlock,
449 449 lock=lock, all_files=all_files)
450 450 tr.close()
451 451 self.save_dirty()
452 452 return ret
453 453 except:
454 454 try:
455 455 tr.abort()
456 456 finally:
457 457 repo.invalidate()
458 458 repo.dirstate.invalidate()
459 459 raise
460 460
461 461 def _apply(self, tr, repo, series, list=False, update_status=True,
462 462 strict=False, patchdir=None, merge=None, wlock=None,
463 463 lock=None, all_files={}):
464 464 # TODO unify with commands.py
465 465 if not patchdir:
466 466 patchdir = self.path
467 467 err = 0
468 468 n = None
469 469 for patchname in series:
470 470 pushable, reason = self.pushable(patchname)
471 471 if not pushable:
472 472 self.explain_pushable(patchname, all_patches=True)
473 473 continue
474 474 self.ui.warn("applying %s\n" % patchname)
475 475 pf = os.path.join(patchdir, patchname)
476 476
477 477 try:
478 478 message, comments, user, date, patchfound = self.readheaders(patchname)
479 479 except:
480 480 self.ui.warn("Unable to read %s\n" % patchname)
481 481 err = 1
482 482 break
483 483
484 484 if not message:
485 485 message = "imported patch %s\n" % patchname
486 486 else:
487 487 if list:
488 488 message.append("\nimported patch %s" % patchname)
489 489 message = '\n'.join(message)
490 490
491 491 (patcherr, files, fuzz) = self.patch(repo, pf)
492 492 all_files.update(files)
493 493 patcherr = not patcherr
494 494
495 495 if merge and files:
496 496 # Mark as removed/merged and update dirstate parent info
497 497 removed = []
498 498 merged = []
499 499 for f in files:
500 500 if os.path.exists(repo.wjoin(f)):
501 501 merged.append(f)
502 502 else:
503 503 removed.append(f)
504 504 for f in removed:
505 505 repo.dirstate.remove(f)
506 506 for f in merged:
507 507 repo.dirstate.merge(f)
508 508 p1, p2 = repo.dirstate.parents()
509 509 repo.dirstate.setparents(p1, merge)
510 510 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
511 511 n = repo.commit(files, message, user, date, force=1, lock=lock,
512 512 wlock=wlock)
513 513
514 514 if n == None:
515 515 raise util.Abort(_("repo commit failed"))
516 516
517 517 if update_status:
518 518 self.applied.append(statusentry(revlog.hex(n), patchname))
519 519
520 520 if patcherr:
521 521 if not patchfound:
522 522 self.ui.warn("patch %s is empty\n" % patchname)
523 523 err = 0
524 524 else:
525 525 self.ui.warn("patch failed, rejects left in working dir\n")
526 526 err = 1
527 527 break
528 528
529 529 if fuzz and strict:
530 530 self.ui.warn("fuzz found when applying patch, stopping\n")
531 531 err = 1
532 532 break
533 533 self.removeundo(repo)
534 534 return (err, n)
535 535
536 536 def delete(self, repo, patches, opts):
537 537 if not patches and not opts.get('rev'):
538 538 raise util.Abort(_('qdelete requires at least one revision or '
539 539 'patch name'))
540 540
541 541 realpatches = []
542 542 for patch in patches:
543 543 patch = self.lookup(patch, strict=True)
544 544 info = self.isapplied(patch)
545 545 if info:
546 546 raise util.Abort(_("cannot delete applied patch %s") % patch)
547 547 if patch not in self.series:
548 548 raise util.Abort(_("patch %s not in series file") % patch)
549 549 realpatches.append(patch)
550 550
551 551 appliedbase = 0
552 552 if opts.get('rev'):
553 553 if not self.applied:
554 554 raise util.Abort(_('no patches applied'))
555 555 revs = cmdutil.revrange(repo, opts['rev'])
556 556 if len(revs) > 1 and revs[0] > revs[1]:
557 557 revs.reverse()
558 558 for rev in revs:
559 559 if appliedbase >= len(self.applied):
560 560 raise util.Abort(_("revision %d is not managed") % rev)
561 561
562 562 base = revlog.bin(self.applied[appliedbase].rev)
563 563 node = repo.changelog.node(rev)
564 564 if node != base:
565 565 raise util.Abort(_("cannot delete revision %d above "
566 566 "applied patches") % rev)
567 567 realpatches.append(self.applied[appliedbase].name)
568 568 appliedbase += 1
569 569
570 570 if not opts.get('keep'):
571 571 r = self.qrepo()
572 572 if r:
573 573 r.remove(realpatches, True)
574 574 else:
575 575 for p in realpatches:
576 576 os.unlink(self.join(p))
577 577
578 578 if appliedbase:
579 579 del self.applied[:appliedbase]
580 580 self.applied_dirty = 1
581 581 indices = [self.find_series(p) for p in realpatches]
582 582 indices.sort()
583 583 for i in indices[-1::-1]:
584 584 del self.full_series[i]
585 585 self.parse_series()
586 586 self.series_dirty = 1
587 587
588 588 def check_toppatch(self, repo):
589 589 if len(self.applied) > 0:
590 590 top = revlog.bin(self.applied[-1].rev)
591 591 pp = repo.dirstate.parents()
592 592 if top not in pp:
593 593 raise util.Abort(_("queue top not at same revision as working directory"))
594 594 return top
595 595 return None
596 596 def check_localchanges(self, repo, force=False, refresh=True):
597 597 m, a, r, d = repo.status()[:4]
598 598 if m or a or r or d:
599 599 if not force:
600 600 if refresh:
601 601 raise util.Abort(_("local changes found, refresh first"))
602 602 else:
603 603 raise util.Abort(_("local changes found"))
604 604 return m, a, r, d
605 605
606 606 def new(self, repo, patch, *pats, **opts):
607 607 msg = opts.get('msg')
608 608 force = opts.get('force')
609 609 if os.path.exists(self.join(patch)):
610 610 raise util.Abort(_('patch "%s" already exists') % patch)
611 611 if opts.get('include') or opts.get('exclude') or pats:
612 612 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
613 613 m, a, r, d = repo.status(files=fns, match=match)[:4]
614 614 else:
615 615 m, a, r, d = self.check_localchanges(repo, force)
616 616 commitfiles = m + a + r
617 617 self.check_toppatch(repo)
618 618 wlock = repo.wlock()
619 619 insert = self.full_series_end()
620 620 if msg:
621 621 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
622 622 else:
623 623 n = repo.commit(commitfiles,
624 624 "[mq]: %s" % patch, force=True, wlock=wlock)
625 625 if n == None:
626 626 raise util.Abort(_("repo commit failed"))
627 627 self.full_series[insert:insert] = [patch]
628 628 self.applied.append(statusentry(revlog.hex(n), patch))
629 629 self.parse_series()
630 630 self.series_dirty = 1
631 631 self.applied_dirty = 1
632 632 p = self.opener(patch, "w")
633 633 if msg:
634 634 msg = msg + "\n"
635 635 p.write(msg)
636 636 p.close()
637 637 wlock = None
638 638 r = self.qrepo()
639 639 if r: r.add([patch])
640 640 if commitfiles:
641 641 self.refresh(repo, short=True)
642 642 self.removeundo(repo)
643 643
644 644 def strip(self, repo, rev, update=True, backup="all", wlock=None):
645 645 if not wlock:
646 646 wlock = repo.wlock()
647 647 lock = repo.lock()
648 648
649 649 if update:
650 650 self.check_localchanges(repo, refresh=False)
651 651 urev = self.qparents(repo, rev)
652 652 hg.clean(repo, urev, wlock=wlock)
653 653 repo.dirstate.write()
654 654
655 655 self.removeundo(repo)
656 656 repair.strip(self.ui, repo, rev, backup)
657 657
658 658 def isapplied(self, patch):
659 659 """returns (index, rev, patch)"""
660 660 for i in xrange(len(self.applied)):
661 661 a = self.applied[i]
662 662 if a.name == patch:
663 663 return (i, a.rev, a.name)
664 664 return None
665 665
666 666 # if the exact patch name does not exist, we try a few
667 667 # variations. If strict is passed, we try only #1
668 668 #
669 669 # 1) a number to indicate an offset in the series file
670 670 # 2) a unique substring of the patch name was given
671 671 # 3) patchname[-+]num to indicate an offset in the series file
672 672 def lookup(self, patch, strict=False):
673 673 patch = patch and str(patch)
674 674
675 675 def partial_name(s):
676 676 if s in self.series:
677 677 return s
678 678 matches = [x for x in self.series if s in x]
679 679 if len(matches) > 1:
680 680 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
681 681 for m in matches:
682 682 self.ui.warn(' %s\n' % m)
683 683 return None
684 684 if matches:
685 685 return matches[0]
686 686 if len(self.series) > 0 and len(self.applied) > 0:
687 687 if s == 'qtip':
688 688 return self.series[self.series_end(True)-1]
689 689 if s == 'qbase':
690 690 return self.series[0]
691 691 return None
692 692 if patch == None:
693 693 return None
694 694
695 695 # we don't want to return a partial match until we make
696 696 # sure the file name passed in does not exist (checked below)
697 697 res = partial_name(patch)
698 698 if res and res == patch:
699 699 return res
700 700
701 701 if not os.path.isfile(self.join(patch)):
702 702 try:
703 703 sno = int(patch)
704 704 except(ValueError, OverflowError):
705 705 pass
706 706 else:
707 707 if sno < len(self.series):
708 708 return self.series[sno]
709 709 if not strict:
710 710 # return any partial match made above
711 711 if res:
712 712 return res
713 713 minus = patch.rfind('-')
714 714 if minus >= 0:
715 715 res = partial_name(patch[:minus])
716 716 if res:
717 717 i = self.series.index(res)
718 718 try:
719 719 off = int(patch[minus+1:] or 1)
720 720 except(ValueError, OverflowError):
721 721 pass
722 722 else:
723 723 if i - off >= 0:
724 724 return self.series[i - off]
725 725 plus = patch.rfind('+')
726 726 if plus >= 0:
727 727 res = partial_name(patch[:plus])
728 728 if res:
729 729 i = self.series.index(res)
730 730 try:
731 731 off = int(patch[plus+1:] or 1)
732 732 except(ValueError, OverflowError):
733 733 pass
734 734 else:
735 735 if i + off < len(self.series):
736 736 return self.series[i + off]
737 737 raise util.Abort(_("patch %s not in series") % patch)
738 738
739 739 def push(self, repo, patch=None, force=False, list=False,
740 740 mergeq=None, wlock=None):
741 741 if not wlock:
742 742 wlock = repo.wlock()
743 743 patch = self.lookup(patch)
744 744 # Suppose our series file is: A B C and the current 'top' patch is B.
745 745 # qpush C should be performed (moving forward)
746 746 # qpush B is a NOP (no change)
747 747 # qpush A is an error (can't go backwards with qpush)
748 748 if patch:
749 749 info = self.isapplied(patch)
750 750 if info:
751 751 if info[0] < len(self.applied) - 1:
752 752 raise util.Abort(_("cannot push to a previous patch: %s") %
753 753 patch)
754 754 if info[0] < len(self.series) - 1:
755 755 self.ui.warn(_('qpush: %s is already at the top\n') % patch)
756 756 else:
757 757 self.ui.warn(_('all patches are currently applied\n'))
758 758 return
759 759
760 760 # Following the above example, starting at 'top' of B:
761 761 # qpush should be performed (pushes C), but a subsequent qpush without
762 762 # an argument is an error (nothing to apply). This allows a loop
763 763 # of "...while hg qpush..." to work as it detects an error when done
764 764 if self.series_end() == len(self.series):
765 765 self.ui.warn(_('patch series already fully applied\n'))
766 766 return 1
767 767 if not force:
768 768 self.check_localchanges(repo)
769 769
770 770 self.applied_dirty = 1;
771 771 start = self.series_end()
772 772 if start > 0:
773 773 self.check_toppatch(repo)
774 774 if not patch:
775 775 patch = self.series[start]
776 776 end = start + 1
777 777 else:
778 778 end = self.series.index(patch, start) + 1
779 779 s = self.series[start:end]
780 780 all_files = {}
781 781 try:
782 782 if mergeq:
783 783 ret = self.mergepatch(repo, mergeq, s, wlock)
784 784 else:
785 785 ret = self.apply(repo, s, list, wlock=wlock,
786 786 all_files=all_files)
787 787 except:
788 788 self.ui.warn(_('cleaning up working directory...'))
789 789 node = repo.dirstate.parents()[0]
790 790 hg.revert(repo, node, None, wlock)
791 791 unknown = repo.status(wlock=wlock)[4]
792 792 # only remove unknown files that we know we touched or
793 793 # created while patching
794 794 for f in unknown:
795 795 if f in all_files:
796 796 util.unlink(repo.wjoin(f))
797 797 self.ui.warn(_('done\n'))
798 798 raise
799 799 top = self.applied[-1].name
800 800 if ret[0]:
801 801 self.ui.write("Errors during apply, please fix and refresh %s\n" %
802 802 top)
803 803 else:
804 804 self.ui.write("Now at: %s\n" % top)
805 805 return ret[0]
806 806
807 807 def pop(self, repo, patch=None, force=False, update=True, all=False,
808 808 wlock=None):
809 809 def getfile(f, rev):
810 810 t = repo.file(f).read(rev)
811 811 repo.wfile(f, "w").write(t)
812 812
813 813 if not wlock:
814 814 wlock = repo.wlock()
815 815 if patch:
816 816 # index, rev, patch
817 817 info = self.isapplied(patch)
818 818 if not info:
819 819 patch = self.lookup(patch)
820 820 info = self.isapplied(patch)
821 821 if not info:
822 822 raise util.Abort(_("patch %s is not applied") % patch)
823 823
824 824 if len(self.applied) == 0:
825 825 # Allow qpop -a to work repeatedly,
826 826 # but not qpop without an argument
827 827 self.ui.warn(_("no patches applied\n"))
828 828 return not all
829 829
830 830 if not update:
831 831 parents = repo.dirstate.parents()
832 832 rr = [ revlog.bin(x.rev) for x in self.applied ]
833 833 for p in parents:
834 834 if p in rr:
835 835 self.ui.warn("qpop: forcing dirstate update\n")
836 836 update = True
837 837
838 838 if not force and update:
839 839 self.check_localchanges(repo)
840 840
841 841 self.applied_dirty = 1;
842 842 end = len(self.applied)
843 843 if not patch:
844 844 if all:
845 845 popi = 0
846 846 else:
847 847 popi = len(self.applied) - 1
848 848 else:
849 849 popi = info[0] + 1
850 850 if popi >= end:
851 851 self.ui.warn("qpop: %s is already at the top\n" % patch)
852 852 return
853 853 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
854 854
855 855 start = info[0]
856 856 rev = revlog.bin(info[1])
857 857
858 858 # we know there are no local changes, so we can make a simplified
859 859 # form of hg.update.
860 860 if update:
861 861 top = self.check_toppatch(repo)
862 862 qp = self.qparents(repo, rev)
863 863 changes = repo.changelog.read(qp)
864 864 mmap = repo.manifest.read(changes[0])
865 865 m, a, r, d, u = repo.status(qp, top)[:5]
866 866 if d:
867 867 raise util.Abort("deletions found between repo revs")
868 868 for f in m:
869 869 getfile(f, mmap[f])
870 870 for f in r:
871 871 getfile(f, mmap[f])
872 872 util.set_exec(repo.wjoin(f), mmap.execf(f))
873 873 for f in m + r:
874 874 repo.dirstate.normal(f)
875 875 for f in a:
876 876 try:
877 877 os.unlink(repo.wjoin(f))
878 878 except OSError, e:
879 879 if e.errno != errno.ENOENT:
880 880 raise
881 881 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
882 882 except: pass
883 883 repo.dirstate.forget(f)
884 884 repo.dirstate.setparents(qp, revlog.nullid)
885 885 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
886 886 del self.applied[start:end]
887 887 if len(self.applied):
888 888 self.ui.write("Now at: %s\n" % self.applied[-1].name)
889 889 else:
890 890 self.ui.write("Patch queue now empty\n")
891 891
892 892 def diff(self, repo, pats, opts):
893 893 top = self.check_toppatch(repo)
894 894 if not top:
895 895 self.ui.write("No patches applied\n")
896 896 return
897 897 qp = self.qparents(repo, top)
898 898 if opts.get('git'):
899 899 self.diffopts().git = True
900 900 self.printdiff(repo, qp, files=pats, opts=opts)
901 901
902 902 def refresh(self, repo, pats=None, **opts):
903 903 if len(self.applied) == 0:
904 904 self.ui.write("No patches applied\n")
905 905 return 1
906 906 wlock = repo.wlock()
907 907 self.check_toppatch(repo)
908 908 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
909 909 top = revlog.bin(top)
910 910 cparents = repo.changelog.parents(top)
911 911 patchparent = self.qparents(repo, top)
912 912 message, comments, user, date, patchfound = self.readheaders(patchfn)
913 913
914 914 patchf = self.opener(patchfn, 'r+')
915 915
916 916 # if the patch was a git patch, refresh it as a git patch
917 917 for line in patchf:
918 918 if line.startswith('diff --git'):
919 919 self.diffopts().git = True
920 920 break
921 921 patchf.seek(0)
922 922 patchf.truncate()
923 923
924 924 msg = opts.get('msg', '').rstrip()
925 925 if msg:
926 926 if comments:
927 927 # Remove existing message.
928 928 ci = 0
929 929 subj = None
930 930 for mi in xrange(len(message)):
931 931 if comments[ci].lower().startswith('subject: '):
932 932 subj = comments[ci][9:]
933 933 while message[mi] != comments[ci] and message[mi] != subj:
934 934 ci += 1
935 935 del comments[ci]
936 936 comments.append(msg)
937 937 if comments:
938 938 comments = "\n".join(comments) + '\n\n'
939 939 patchf.write(comments)
940 940
941 941 if opts.get('git'):
942 942 self.diffopts().git = True
943 943 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
944 944 tip = repo.changelog.tip()
945 945 if top == tip:
946 946 # if the top of our patch queue is also the tip, there is an
947 947 # optimization here. We update the dirstate in place and strip
948 948 # off the tip commit. Then just commit the current directory
949 949 # tree. We can also send repo.commit the list of files
950 950 # changed to speed up the diff
951 951 #
952 952 # in short mode, we only diff the files included in the
953 953 # patch already
954 954 #
955 955 # this should really read:
956 956 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
957 957 # but we do it backwards to take advantage of manifest/chlog
958 958 # caching against the next repo.status call
959 959 #
960 960 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
961 961 changes = repo.changelog.read(tip)
962 962 man = repo.manifest.read(changes[0])
963 963 aaa = aa[:]
964 964 if opts.get('short'):
965 965 filelist = mm + aa + dd
966 966 match = dict.fromkeys(filelist).__contains__
967 967 else:
968 968 filelist = None
969 969 match = util.always
970 970 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
971 971
972 972 # we might end up with files that were added between tip and
973 973 # the dirstate parent, but then changed in the local dirstate.
974 974 # in this case, we want them to only show up in the added section
975 975 for x in m:
976 976 if x not in aa:
977 977 mm.append(x)
978 978 # we might end up with files added by the local dirstate that
979 979 # were deleted by the patch. In this case, they should only
980 980 # show up in the changed section.
981 981 for x in a:
982 982 if x in dd:
983 983 del dd[dd.index(x)]
984 984 mm.append(x)
985 985 else:
986 986 aa.append(x)
987 987 # make sure any files deleted in the local dirstate
988 988 # are not in the add or change column of the patch
989 989 forget = []
990 990 for x in d + r:
991 991 if x in aa:
992 992 del aa[aa.index(x)]
993 993 forget.append(x)
994 994 continue
995 995 elif x in mm:
996 996 del mm[mm.index(x)]
997 997 dd.append(x)
998 998
999 999 m = util.unique(mm)
1000 1000 r = util.unique(dd)
1001 1001 a = util.unique(aa)
1002 1002 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1003 1003 filelist = util.unique(c[0] + c[1] + c[2])
1004 1004 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1005 1005 fp=patchf, changes=c, opts=self.diffopts())
1006 1006 patchf.close()
1007 1007
1008 1008 repo.dirstate.setparents(*cparents)
1009 1009 copies = {}
1010 1010 for dst in a:
1011 1011 src = repo.dirstate.copied(dst)
1012 1012 if src is None:
1013 1013 continue
1014 1014 copies.setdefault(src, []).append(dst)
1015 1015 repo.dirstate.add(dst)
1016 1016 # remember the copies between patchparent and tip
1017 1017 # this may be slow, so don't do it if we're not tracking copies
1018 1018 if self.diffopts().git:
1019 1019 for dst in aaa:
1020 1020 f = repo.file(dst)
1021 1021 src = f.renamed(man[dst])
1022 1022 if src:
1023 1023 copies[src[0]] = copies.get(dst, [])
1024 1024 if dst in a:
1025 1025 copies[src[0]].append(dst)
1026 1026 # we can't copy a file created by the patch itself
1027 1027 if dst in copies:
1028 1028 del copies[dst]
1029 1029 for src, dsts in copies.iteritems():
1030 1030 for dst in dsts:
1031 1031 repo.dirstate.copy(src, dst)
1032 1032 for f in r:
1033 1033 repo.dirstate.remove(f)
1034 1034 # if the patch excludes a modified file, mark that file with mtime=0
1035 1035 # so status can see it.
1036 1036 mm = []
1037 1037 for i in xrange(len(m)-1, -1, -1):
1038 1038 if not matchfn(m[i]):
1039 1039 mm.append(m[i])
1040 1040 del m[i]
1041 1041 for f in m:
1042 1042 repo.dirstate.normal(f)
1043 1043 for f in mm:
1044 1044 repo.dirstate.normaldirty(f)
1045 1045 for f in forget:
1046 1046 repo.dirstate.forget(f)
1047 1047
1048 1048 if not msg:
1049 1049 if not message:
1050 1050 message = "[mq]: %s\n" % patchfn
1051 1051 else:
1052 1052 message = "\n".join(message)
1053 1053 else:
1054 1054 message = msg
1055 1055
1056 1056 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1057 1057 n = repo.commit(filelist, message, changes[1], match=matchfn,
1058 1058 force=1, wlock=wlock)
1059 1059 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1060 1060 self.applied_dirty = 1
1061 1061 self.removeundo(repo)
1062 1062 else:
1063 1063 self.printdiff(repo, patchparent, fp=patchf)
1064 1064 patchf.close()
1065 1065 added = repo.status()[1]
1066 1066 for a in added:
1067 1067 f = repo.wjoin(a)
1068 1068 try:
1069 1069 os.unlink(f)
1070 1070 except OSError, e:
1071 1071 if e.errno != errno.ENOENT:
1072 1072 raise
1073 1073 try: os.removedirs(os.path.dirname(f))
1074 1074 except: pass
1075 1075 # forget the file copies in the dirstate
1076 1076 # push should readd the files later on
1077 1077 repo.dirstate.forget(a)
1078 1078 self.pop(repo, force=True, wlock=wlock)
1079 1079 self.push(repo, force=True, wlock=wlock)
1080 1080
1081 1081 def init(self, repo, create=False):
1082 1082 if not create and os.path.isdir(self.path):
1083 1083 raise util.Abort(_("patch queue directory already exists"))
1084 1084 try:
1085 1085 os.mkdir(self.path)
1086 1086 except OSError, inst:
1087 1087 if inst.errno != errno.EEXIST or not create:
1088 1088 raise
1089 1089 if create:
1090 1090 return self.qrepo(create=True)
1091 1091
1092 1092 def unapplied(self, repo, patch=None):
1093 1093 if patch and patch not in self.series:
1094 1094 raise util.Abort(_("patch %s is not in series file") % patch)
1095 1095 if not patch:
1096 1096 start = self.series_end()
1097 1097 else:
1098 1098 start = self.series.index(patch) + 1
1099 1099 unapplied = []
1100 1100 for i in xrange(start, len(self.series)):
1101 1101 pushable, reason = self.pushable(i)
1102 1102 if pushable:
1103 1103 unapplied.append((i, self.series[i]))
1104 1104 self.explain_pushable(i)
1105 1105 return unapplied
1106 1106
1107 1107 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1108 1108 summary=False):
1109 1109 def displayname(patchname):
1110 1110 if summary:
1111 1111 msg = self.readheaders(patchname)[0]
1112 1112 msg = msg and ': ' + msg[0] or ': '
1113 1113 else:
1114 1114 msg = ''
1115 1115 return '%s%s' % (patchname, msg)
1116 1116
1117 1117 applied = dict.fromkeys([p.name for p in self.applied])
1118 1118 if length is None:
1119 1119 length = len(self.series) - start
1120 1120 if not missing:
1121 1121 for i in xrange(start, start+length):
1122 1122 patch = self.series[i]
1123 1123 if patch in applied:
1124 1124 stat = 'A'
1125 1125 elif self.pushable(i)[0]:
1126 1126 stat = 'U'
1127 1127 else:
1128 1128 stat = 'G'
1129 1129 pfx = ''
1130 1130 if self.ui.verbose:
1131 1131 pfx = '%d %s ' % (i, stat)
1132 1132 elif status and status != stat:
1133 1133 continue
1134 1134 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1135 1135 else:
1136 1136 msng_list = []
1137 1137 for root, dirs, files in os.walk(self.path):
1138 1138 d = root[len(self.path) + 1:]
1139 1139 for f in files:
1140 1140 fl = os.path.join(d, f)
1141 1141 if (fl not in self.series and
1142 1142 fl not in (self.status_path, self.series_path,
1143 1143 self.guards_path)
1144 1144 and not fl.startswith('.')):
1145 1145 msng_list.append(fl)
1146 1146 msng_list.sort()
1147 1147 for x in msng_list:
1148 1148 pfx = self.ui.verbose and ('D ') or ''
1149 1149 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1150 1150
1151 1151 def issaveline(self, l):
1152 1152 if l.name == '.hg.patches.save.line':
1153 1153 return True
1154 1154
1155 1155 def qrepo(self, create=False):
1156 1156 if create or os.path.isdir(self.join(".hg")):
1157 1157 return hg.repository(self.ui, path=self.path, create=create)
1158 1158
1159 1159 def restore(self, repo, rev, delete=None, qupdate=None):
1160 1160 c = repo.changelog.read(rev)
1161 1161 desc = c[4].strip()
1162 1162 lines = desc.splitlines()
1163 1163 i = 0
1164 1164 datastart = None
1165 1165 series = []
1166 1166 applied = []
1167 1167 qpp = None
1168 1168 for i in xrange(0, len(lines)):
1169 1169 if lines[i] == 'Patch Data:':
1170 1170 datastart = i + 1
1171 1171 elif lines[i].startswith('Dirstate:'):
1172 1172 l = lines[i].rstrip()
1173 1173 l = l[10:].split(' ')
1174 1174 qpp = [ hg.bin(x) for x in l ]
1175 1175 elif datastart != None:
1176 1176 l = lines[i].rstrip()
1177 1177 se = statusentry(l)
1178 1178 file_ = se.name
1179 1179 if se.rev:
1180 1180 applied.append(se)
1181 1181 else:
1182 1182 series.append(file_)
1183 1183 if datastart == None:
1184 1184 self.ui.warn("No saved patch data found\n")
1185 1185 return 1
1186 1186 self.ui.warn("restoring status: %s\n" % lines[0])
1187 1187 self.full_series = series
1188 1188 self.applied = applied
1189 1189 self.parse_series()
1190 1190 self.series_dirty = 1
1191 1191 self.applied_dirty = 1
1192 1192 heads = repo.changelog.heads()
1193 1193 if delete:
1194 1194 if rev not in heads:
1195 1195 self.ui.warn("save entry has children, leaving it alone\n")
1196 1196 else:
1197 1197 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1198 1198 pp = repo.dirstate.parents()
1199 1199 if rev in pp:
1200 1200 update = True
1201 1201 else:
1202 1202 update = False
1203 1203 self.strip(repo, rev, update=update, backup='strip')
1204 1204 if qpp:
1205 1205 self.ui.warn("saved queue repository parents: %s %s\n" %
1206 1206 (hg.short(qpp[0]), hg.short(qpp[1])))
1207 1207 if qupdate:
1208 1208 print "queue directory updating"
1209 1209 r = self.qrepo()
1210 1210 if not r:
1211 1211 self.ui.warn("Unable to load queue repository\n")
1212 1212 return 1
1213 1213 hg.clean(r, qpp[0])
1214 1214
1215 1215 def save(self, repo, msg=None):
1216 1216 if len(self.applied) == 0:
1217 1217 self.ui.warn("save: no patches applied, exiting\n")
1218 1218 return 1
1219 1219 if self.issaveline(self.applied[-1]):
1220 1220 self.ui.warn("status is already saved\n")
1221 1221 return 1
1222 1222
1223 1223 ar = [ ':' + x for x in self.full_series ]
1224 1224 if not msg:
1225 1225 msg = "hg patches saved state"
1226 1226 else:
1227 1227 msg = "hg patches: " + msg.rstrip('\r\n')
1228 1228 r = self.qrepo()
1229 1229 if r:
1230 1230 pp = r.dirstate.parents()
1231 1231 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1232 1232 msg += "\n\nPatch Data:\n"
1233 1233 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1234 1234 "\n".join(ar) + '\n' or "")
1235 1235 n = repo.commit(None, text, user=None, force=1)
1236 1236 if not n:
1237 1237 self.ui.warn("repo commit failed\n")
1238 1238 return 1
1239 1239 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1240 1240 self.applied_dirty = 1
1241 1241 self.removeundo(repo)
1242 1242
1243 1243 def full_series_end(self):
1244 1244 if len(self.applied) > 0:
1245 1245 p = self.applied[-1].name
1246 1246 end = self.find_series(p)
1247 1247 if end == None:
1248 1248 return len(self.full_series)
1249 1249 return end + 1
1250 1250 return 0
1251 1251
1252 1252 def series_end(self, all_patches=False):
1253 1253 """If all_patches is False, return the index of the next pushable patch
1254 1254 in the series, or the series length. If all_patches is True, return the
1255 1255 index of the first patch past the last applied one.
1256 1256 """
1257 1257 end = 0
1258 1258 def next(start):
1259 1259 if all_patches:
1260 1260 return start
1261 1261 i = start
1262 1262 while i < len(self.series):
1263 1263 p, reason = self.pushable(i)
1264 1264 if p:
1265 1265 break
1266 1266 self.explain_pushable(i)
1267 1267 i += 1
1268 1268 return i
1269 1269 if len(self.applied) > 0:
1270 1270 p = self.applied[-1].name
1271 1271 try:
1272 1272 end = self.series.index(p)
1273 1273 except ValueError:
1274 1274 return 0
1275 1275 return next(end + 1)
1276 1276 return next(end)
1277 1277
1278 1278 def appliedname(self, index):
1279 1279 pname = self.applied[index].name
1280 1280 if not self.ui.verbose:
1281 1281 p = pname
1282 1282 else:
1283 1283 p = str(self.series.index(pname)) + " " + pname
1284 1284 return p
1285 1285
1286 1286 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1287 1287 force=None, git=False):
1288 1288 def checkseries(patchname):
1289 1289 if patchname in self.series:
1290 1290 raise util.Abort(_('patch %s is already in the series file')
1291 1291 % patchname)
1292 1292 def checkfile(patchname):
1293 1293 if not force and os.path.exists(self.join(patchname)):
1294 1294 raise util.Abort(_('patch "%s" already exists')
1295 1295 % patchname)
1296 1296
1297 1297 if rev:
1298 1298 if files:
1299 1299 raise util.Abort(_('option "-r" not valid when importing '
1300 1300 'files'))
1301 1301 rev = cmdutil.revrange(repo, rev)
1302 1302 rev.sort(lambda x, y: cmp(y, x))
1303 1303 if (len(files) > 1 or len(rev) > 1) and patchname:
1304 1304 raise util.Abort(_('option "-n" not valid when importing multiple '
1305 1305 'patches'))
1306 1306 i = 0
1307 1307 added = []
1308 1308 if rev:
1309 1309 # If mq patches are applied, we can only import revisions
1310 1310 # that form a linear path to qbase.
1311 1311 # Otherwise, they should form a linear path to a head.
1312 1312 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1313 1313 if len(heads) > 1:
1314 1314 raise util.Abort(_('revision %d is the root of more than one '
1315 1315 'branch') % rev[-1])
1316 1316 if self.applied:
1317 1317 base = revlog.hex(repo.changelog.node(rev[0]))
1318 1318 if base in [n.rev for n in self.applied]:
1319 1319 raise util.Abort(_('revision %d is already managed')
1320 1320 % rev[0])
1321 1321 if heads != [revlog.bin(self.applied[-1].rev)]:
1322 1322 raise util.Abort(_('revision %d is not the parent of '
1323 1323 'the queue') % rev[0])
1324 1324 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1325 1325 lastparent = repo.changelog.parentrevs(base)[0]
1326 1326 else:
1327 1327 if heads != [repo.changelog.node(rev[0])]:
1328 1328 raise util.Abort(_('revision %d has unmanaged children')
1329 1329 % rev[0])
1330 1330 lastparent = None
1331 1331
1332 1332 if git:
1333 1333 self.diffopts().git = True
1334 1334
1335 1335 for r in rev:
1336 1336 p1, p2 = repo.changelog.parentrevs(r)
1337 1337 n = repo.changelog.node(r)
1338 1338 if p2 != revlog.nullrev:
1339 1339 raise util.Abort(_('cannot import merge revision %d') % r)
1340 1340 if lastparent and lastparent != r:
1341 1341 raise util.Abort(_('revision %d is not the parent of %d')
1342 1342 % (r, lastparent))
1343 1343 lastparent = p1
1344 1344
1345 1345 if not patchname:
1346 1346 patchname = normname('%d.diff' % r)
1347 1347 checkseries(patchname)
1348 1348 checkfile(patchname)
1349 1349 self.full_series.insert(0, patchname)
1350 1350
1351 1351 patchf = self.opener(patchname, "w")
1352 1352 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1353 1353 patchf.close()
1354 1354
1355 1355 se = statusentry(revlog.hex(n), patchname)
1356 1356 self.applied.insert(0, se)
1357 1357
1358 1358 added.append(patchname)
1359 1359 patchname = None
1360 1360 self.parse_series()
1361 1361 self.applied_dirty = 1
1362 1362
1363 1363 for filename in files:
1364 1364 if existing:
1365 1365 if filename == '-':
1366 1366 raise util.Abort(_('-e is incompatible with import from -'))
1367 1367 if not patchname:
1368 1368 patchname = normname(filename)
1369 1369 if not os.path.isfile(self.join(patchname)):
1370 1370 raise util.Abort(_("patch %s does not exist") % patchname)
1371 1371 else:
1372 1372 try:
1373 1373 if filename == '-':
1374 1374 if not patchname:
1375 1375 raise util.Abort(_('need --name to import a patch from -'))
1376 1376 text = sys.stdin.read()
1377 1377 else:
1378 1378 text = file(filename).read()
1379 1379 except IOError:
1380 1380 raise util.Abort(_("unable to read %s") % patchname)
1381 1381 if not patchname:
1382 1382 patchname = normname(os.path.basename(filename))
1383 1383 checkfile(patchname)
1384 1384 patchf = self.opener(patchname, "w")
1385 1385 patchf.write(text)
1386 1386 checkseries(patchname)
1387 1387 index = self.full_series_end() + i
1388 1388 self.full_series[index:index] = [patchname]
1389 1389 self.parse_series()
1390 1390 self.ui.warn("adding %s to series file\n" % patchname)
1391 1391 i += 1
1392 1392 added.append(patchname)
1393 1393 patchname = None
1394 1394 self.series_dirty = 1
1395 1395 qrepo = self.qrepo()
1396 1396 if qrepo:
1397 1397 qrepo.add(added)
1398 1398
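The --rev branch above enforces a simple shape, sketched here with hypothetical revision numbers: when patches are already applied, the imported revisions must extend the queue downwards in a straight line.
#   8 -- 9 -- 10 -- 11 -- 12        10 = qbase, 12 = qtip (hypothetical numbers)
#             applied patches ^
#   'qimport --rev 8:9' is accepted: 9 is the first parent of qbase (10) and 8 is
#   the first parent of 9.  A merge revision, an already-managed revision, or a
#   revision with a second descendant branch is refused.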
1399 1399 def delete(ui, repo, *patches, **opts):
1400 1400 """remove patches from queue
1401 1401
1402 1402 The patches must not be applied, unless they are arguments to
1403 1403 the --rev parameter. At least one patch or revision is required.
1404 1404
1405 1405 With --rev, mq will stop managing the named revisions (converting
1406 1406 them to regular mercurial changesets). The patches must be applied
1407 1407 and at the base of the stack. This option is useful when the patches
1408 1408 have been applied upstream.
1409 1409
1410 1410 With --keep, the patch files are preserved in the patch directory."""
1411 1411 q = repo.mq
1412 1412 q.delete(repo, patches, opts)
1413 1413 q.save_dirty()
1414 1414 return 0
1415 1415
1416 1416 def applied(ui, repo, patch=None, **opts):
1417 1417 """print the patches already applied"""
1418 1418 q = repo.mq
1419 1419 if patch:
1420 1420 if patch not in q.series:
1421 1421 raise util.Abort(_("patch %s is not in series file") % patch)
1422 1422 end = q.series.index(patch) + 1
1423 1423 else:
1424 1424 end = q.series_end(True)
1425 1425 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1426 1426
1427 1427 def unapplied(ui, repo, patch=None, **opts):
1428 1428 """print the patches not yet applied"""
1429 1429 q = repo.mq
1430 1430 if patch:
1431 1431 if patch not in q.series:
1432 1432 raise util.Abort(_("patch %s is not in series file") % patch)
1433 1433 start = q.series.index(patch) + 1
1434 1434 else:
1435 1435 start = q.series_end(True)
1436 1436 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1437 1437
1438 1438 def qimport(ui, repo, *filename, **opts):
1439 1439 """import a patch
1440 1440
1441 1441 The patch will have the same name as its source file unless you
1442 1442 give it a new one with --name.
1443 1443
1444 1444 You can register an existing patch inside the patch directory
1445 1445 with the --existing flag.
1446 1446
1447 1447 With --force, an existing patch of the same name will be overwritten.
1448 1448
1449 1449 An existing changeset may be placed under mq control with --rev
1450 1450 (e.g. qimport --rev tip -n patch will place tip under mq control).
1451 1451 With --git, patches imported with --rev will use the git diff
1452 1452 format.
1453 1453 """
1454 1454 q = repo.mq
1455 1455 q.qimport(repo, filename, patchname=opts['name'],
1456 1456 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1457 1457 git=opts['git'])
1458 1458 q.save_dirty()
1459 1459 return 0
1460 1460
1461 1461 def init(ui, repo, **opts):
1462 1462 """init a new queue repository
1463 1463
1464 1464 The queue repository is unversioned by default. If -c is
1465 1465 specified, qinit will create a separate nested repository
1466 1466 for patches (qinit -c may also be run later to convert
1467 1467 an unversioned patch repository into a versioned one).
1468 1468 You can use qcommit to commit changes to this queue repository."""
1469 1469 q = repo.mq
1470 1470 r = q.init(repo, create=opts['create_repo'])
1471 1471 q.save_dirty()
1472 1472 if r:
1473 1473 if not os.path.exists(r.wjoin('.hgignore')):
1474 1474 fp = r.wopener('.hgignore', 'w')
1475 1475 fp.write('syntax: glob\n')
1476 1476 fp.write('status\n')
1477 1477 fp.write('guards\n')
1478 1478 fp.close()
1479 1479 if not os.path.exists(r.wjoin('series')):
1480 1480 r.wopener('series', 'w').close()
1481 1481 r.add(['.hgignore', 'series'])
1482 1482 commands.add(ui, r)
1483 1483 return 0
1484 1484
1485 1485 def clone(ui, source, dest=None, **opts):
1486 1486 '''clone main and patch repository at same time
1487 1487
1488 1488 If source is local, destination will have no patches applied. If
1489 1489 source is remote, this command cannot check whether patches are
1490 1490 applied in source, so it cannot guarantee that patches are not
1491 1491 applied in destination. If you clone a remote repository, make
1492 1492 sure beforehand that it has no patches applied.
1493 1493
1494 1494 The source patch repository is looked for in <src>/.hg/patches by
1495 1495 default. Use -p <url> to change this.
1496 1496
1497 1497 The patch directory must be a nested mercurial repository, as
1498 1498 would be created by qinit -c.
1499 1499 '''
1500 1500 cmdutil.setremoteconfig(ui, opts)
1501 1501 if dest is None:
1502 1502 dest = hg.defaultdest(source)
1503 1503 sr = hg.repository(ui, ui.expandpath(source))
1504 1504 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1505 1505 try:
1506 1506 pr = hg.repository(ui, patchdir)
1507 1507 except hg.RepoError:
1508 1508 raise util.Abort(_('versioned patch repository not found'
1509 1509 ' (see qinit -c)'))
1510 1510 qbase, destrev = None, None
1511 1511 if sr.local():
1512 1512 if sr.mq.applied:
1513 1513 qbase = revlog.bin(sr.mq.applied[0].rev)
1514 1514 if not hg.islocal(dest):
1515 1515 heads = dict.fromkeys(sr.heads())
1516 1516 for h in sr.heads(qbase):
1517 1517 del heads[h]
1518 1518 destrev = heads.keys()
1519 1519 destrev.append(sr.changelog.parents(qbase)[0])
1520 1520 ui.note(_('cloning main repo\n'))
1521 1521 sr, dr = hg.clone(ui, sr.url(), dest,
1522 1522 pull=opts['pull'],
1523 1523 rev=destrev,
1524 1524 update=False,
1525 1525 stream=opts['uncompressed'])
1526 1526 ui.note(_('cloning patch repo\n'))
1527 1527 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1528 1528 dr.url() + '/.hg/patches',
1529 1529 pull=opts['pull'],
1530 1530 update=not opts['noupdate'],
1531 1531 stream=opts['uncompressed'])
1532 1532 if dr.local():
1533 1533 if qbase:
1534 1534 ui.note(_('stripping applied patches from destination repo\n'))
1535 1535 dr.mq.strip(dr, qbase, update=False, backup=None)
1536 1536 if not opts['noupdate']:
1537 1537 ui.note(_('updating destination repo\n'))
1538 1538 hg.update(dr, dr.changelog.tip())
1539 1539
1540 1540 def commit(ui, repo, *pats, **opts):
1541 1541 """commit changes in the queue repository"""
1542 1542 q = repo.mq
1543 1543 r = q.qrepo()
1544 1544 if not r: raise util.Abort('no queue repository')
1545 1545 commands.commit(r.ui, r, *pats, **opts)
1546 1546
1547 1547 def series(ui, repo, **opts):
1548 1548 """print the entire series file"""
1549 1549 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1550 1550 return 0
1551 1551
1552 1552 def top(ui, repo, **opts):
1553 1553 """print the name of the current patch"""
1554 1554 q = repo.mq
1555 1555 t = q.applied and q.series_end(True) or 0
1556 1556 if t:
1557 1557 return q.qseries(repo, start=t-1, length=1, status='A',
1558 1558 summary=opts.get('summary'))
1559 1559 else:
1560 1560 ui.write("No patches applied\n")
1561 1561 return 1
1562 1562
1563 1563 def next(ui, repo, **opts):
1564 1564 """print the name of the next patch"""
1565 1565 q = repo.mq
1566 1566 end = q.series_end()
1567 1567 if end == len(q.series):
1568 1568 ui.write("All patches applied\n")
1569 1569 return 1
1570 1570 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1571 1571
1572 1572 def prev(ui, repo, **opts):
1573 1573 """print the name of the previous patch"""
1574 1574 q = repo.mq
1575 1575 l = len(q.applied)
1576 1576 if l == 1:
1577 1577 ui.write("Only one patch applied\n")
1578 1578 return 1
1579 1579 if not l:
1580 1580 ui.write("No patches applied\n")
1581 1581 return 1
1582 1582 return q.qseries(repo, start=l-2, length=1, status='A',
1583 1583 summary=opts.get('summary'))
1584 1584
1585 1585 def new(ui, repo, patch, *args, **opts):
1586 1586 """create a new patch
1587 1587
1588 1588 qnew creates a new patch on top of the currently-applied patch
1589 1589 (if any). It will refuse to run if there are any outstanding
1590 1590 changes unless -f is specified, in which case the patch will
1591 1591 be initialised with them. You may also use -I, -X, and/or a list of
1592 1592 files after the patch name to add only changes to matching files
1593 1593 to the new patch, leaving the rest as uncommitted modifications.
1594 1594
1595 1595 -e, -m or -l set the patch header as well as the commit message.
1596 1596 If none is specified, the patch header is empty and the
1597 1597 commit message is '[mq]: PATCH'"""
1598 1598 q = repo.mq
1599 1599 message = cmdutil.logmessage(opts)
1600 1600 if opts['edit']:
1601 1601 message = ui.edit(message, ui.username())
1602 1602 opts['msg'] = message
1603 1603 q.new(repo, patch, *args, **opts)
1604 1604 q.save_dirty()
1605 1605 return 0
1606 1606
1607 1607 def refresh(ui, repo, *pats, **opts):
1608 1608 """update the current patch
1609 1609
1610 1610 If any file patterns are provided, the refreshed patch will contain only
1611 1611 the modifications that match those patterns; the remaining modifications
1612 1612 will remain in the working directory.
1613 1613
1614 1614 hg add/remove/copy/rename work as usual, though you might want to use
1615 1615 git-style patches (--git or [diff] git=1) to track copies and renames.
1616 1616 """
1617 1617 q = repo.mq
1618 1618 message = cmdutil.logmessage(opts)
1619 1619 if opts['edit']:
1620 1620 if message:
1621 1621 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1622 1622 patch = q.applied[-1].name
1623 1623 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1624 1624 message = ui.edit('\n'.join(message), user or ui.username())
1625 1625 ret = q.refresh(repo, pats, msg=message, **opts)
1626 1626 q.save_dirty()
1627 1627 return ret
1628 1628
1629 1629 def diff(ui, repo, *pats, **opts):
1630 1630 """diff of the current patch"""
1631 1631 repo.mq.diff(repo, pats, opts)
1632 1632 return 0
1633 1633
1634 1634 def fold(ui, repo, *files, **opts):
1635 1635 """fold the named patches into the current patch
1636 1636
1637 1637 Patches must not yet be applied. Each patch will be successively
1638 1638 applied to the current patch in the order given. If all the
1639 1639 patches apply successfully, the current patch will be refreshed
1640 1640 with the new cumulative patch, and the folded patches will
1641 1641 be deleted. With -k/--keep, the folded patch files will not
1642 1642 be removed afterwards.
1643 1643
1644 1644 The header for each folded patch will be concatenated with
1645 1645 the current patch header, separated by a line of '* * *'."""
1646 1646
1647 1647 q = repo.mq
1648 1648
1649 1649 if not files:
1650 1650 raise util.Abort(_('qfold requires at least one patch name'))
1651 1651 if not q.check_toppatch(repo):
1652 1652 raise util.Abort(_('No patches applied'))
1653 1653
1654 1654 message = cmdutil.logmessage(opts)
1655 1655 if opts['edit']:
1656 1656 if message:
1657 1657 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1658 1658
1659 1659 parent = q.lookup('qtip')
1660 1660 patches = []
1661 1661 messages = []
1662 1662 for f in files:
1663 1663 p = q.lookup(f)
1664 1664 if p in patches or p == parent:
1665 1665 ui.warn(_('Skipping already folded patch %s\n') % p)
1666 1666 if q.isapplied(p):
1667 1667 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1668 1668 patches.append(p)
1669 1669
1670 1670 for p in patches:
1671 1671 if not message:
1672 1672 messages.append(q.readheaders(p)[0])
1673 1673 pf = q.join(p)
1674 1674 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1675 1675 if not patchsuccess:
1676 1676 raise util.Abort(_('Error folding patch %s') % p)
1677 1677 patch.updatedir(ui, repo, files)
1678 1678
1679 1679 if not message:
1680 1680 message, comments, user = q.readheaders(parent)[0:3]
1681 1681 for msg in messages:
1682 1682 message.append('* * *')
1683 1683 message.extend(msg)
1684 1684 message = '\n'.join(message)
1685 1685
1686 1686 if opts['edit']:
1687 1687 message = ui.edit(message, user or ui.username())
1688 1688
1689 1689 q.refresh(repo, msg=message)
1690 1690 q.delete(repo, patches, opts)
1691 1691 q.save_dirty()
1692 1692
1693 1693 def goto(ui, repo, patch, **opts):
1694 1694 '''push or pop patches until named patch is at top of stack'''
1695 1695 q = repo.mq
1696 1696 patch = q.lookup(patch)
1697 1697 if q.isapplied(patch):
1698 1698 ret = q.pop(repo, patch, force=opts['force'])
1699 1699 else:
1700 1700 ret = q.push(repo, patch, force=opts['force'])
1701 1701 q.save_dirty()
1702 1702 return ret
1703 1703
1704 1704 def guard(ui, repo, *args, **opts):
1705 1705 '''set or print guards for a patch
1706 1706
1707 1707 Guards control whether a patch can be pushed. A patch with no
1708 1708 guards is always pushed. A patch with a positive guard ("+foo") is
1709 1709 pushed only if the qselect command has activated it. A patch with
1710 1710 a negative guard ("-foo") is never pushed if the qselect command
1711 1711 has activated it.
1712 1712
1713 1713 With no arguments, print the currently active guards.
1714 1714 With arguments, set guards for the named patch.
1715 1715
1716 1716 To set a negative guard "-foo" on topmost patch ("--" is needed so
1717 1717 hg will not interpret "-foo" as an option):
1718 1718 hg qguard -- -foo
1719 1719
1720 1720 To set guards on another patch:
1721 1721 hg qguard other.patch +2.6.17 -stable
1722 1722 '''
1723 1723 def status(idx):
1724 1724 guards = q.series_guards[idx] or ['unguarded']
1725 1725 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1726 1726 q = repo.mq
1727 1727 patch = None
1728 1728 args = list(args)
1729 1729 if opts['list']:
1730 1730 if args or opts['none']:
1731 1731 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1732 1732 for i in xrange(len(q.series)):
1733 1733 status(i)
1734 1734 return
1735 1735 if not args or args[0][0:1] in '-+':
1736 1736 if not q.applied:
1737 1737 raise util.Abort(_('no patches applied'))
1738 1738 patch = q.applied[-1].name
1739 1739 if patch is None and args[0][0:1] not in '-+':
1740 1740 patch = args.pop(0)
1741 1741 if patch is None:
1742 1742 raise util.Abort(_('no patch to work with'))
1743 1743 if args or opts['none']:
1744 1744 idx = q.find_series(patch)
1745 1745 if idx is None:
1746 1746 raise util.Abort(_('no patch named %s') % patch)
1747 1747 q.set_guards(idx, args)
1748 1748 q.save_dirty()
1749 1749 else:
1750 1750 status(q.series.index(q.lookup(patch)))
1751 1751
1752 1752 def header(ui, repo, patch=None):
1753 1753 """Print the header of the topmost or specified patch"""
1754 1754 q = repo.mq
1755 1755
1756 1756 if patch:
1757 1757 patch = q.lookup(patch)
1758 1758 else:
1759 1759 if not q.applied:
1760 1760 ui.write('No patches applied\n')
1761 1761 return 1
1762 1762 patch = q.lookup('qtip')
1763 1763 message = repo.mq.readheaders(patch)[0]
1764 1764
1765 1765 ui.write('\n'.join(message) + '\n')
1766 1766
1767 1767 def lastsavename(path):
1768 1768 (directory, base) = os.path.split(path)
1769 1769 names = os.listdir(directory)
1770 1770 namere = re.compile("%s.([0-9]+)" % base)
1771 1771 maxindex = None
1772 1772 maxname = None
1773 1773 for f in names:
1774 1774 m = namere.match(f)
1775 1775 if m:
1776 1776 index = int(m.group(1))
1777 1777 if maxindex == None or index > maxindex:
1778 1778 maxindex = index
1779 1779 maxname = f
1780 1780 if maxname:
1781 1781 return (os.path.join(directory, maxname), maxindex)
1782 1782 return (None, None)
1783 1783
1784 1784 def savename(path):
1785 1785 (last, index) = lastsavename(path)
1786 1786 if last is None:
1787 1787 index = 0
1788 1788 newpath = path + ".%d" % (index + 1)
1789 1789 return newpath
1790 1790
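For example, with a hypothetical queue directory that already contains two saved copies:
#   .hg/patches.1  and  .hg/patches.2  exist on disk
#   lastsavename('.hg/patches')  -> ('.hg/patches.2', 2)
#   savename('.hg/patches')      -> '.hg/patches.3'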
1791 1791 def push(ui, repo, patch=None, **opts):
1792 1792 """push the next patch onto the stack"""
1793 1793 q = repo.mq
1794 1794 mergeq = None
1795 1795
1796 1796 if opts['all']:
1797 1797 if not q.series:
1798 1798 ui.warn(_('no patches in series\n'))
1799 1799 return 0
1800 1800 patch = q.series[-1]
1801 1801 if opts['merge']:
1802 1802 if opts['name']:
1803 1803 newpath = opts['name']
1804 1804 else:
1805 1805 newpath, i = lastsavename(q.path)
1806 1806 if not newpath:
1807 1807 ui.warn("no saved queues found, please use -n\n")
1808 1808 return 1
1809 1809 mergeq = queue(ui, repo.join(""), newpath)
1810 1810 ui.warn("merging with queue at: %s\n" % mergeq.path)
1811 1811 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1812 1812 mergeq=mergeq)
1813 1813 return ret
1814 1814
1815 1815 def pop(ui, repo, patch=None, **opts):
1816 1816 """pop the current patch off the stack"""
1817 1817 localupdate = True
1818 1818 if opts['name']:
1819 1819 q = queue(ui, repo.join(""), repo.join(opts['name']))
1820 1820 ui.warn('using patch queue: %s\n' % q.path)
1821 1821 localupdate = False
1822 1822 else:
1823 1823 q = repo.mq
1824 1824 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1825 1825 all=opts['all'])
1826 1826 q.save_dirty()
1827 1827 return ret
1828 1828
1829 1829 def rename(ui, repo, patch, name=None, **opts):
1830 1830 """rename a patch
1831 1831
1832 1832 With one argument, renames the current patch to PATCH1.
1833 1833 With two arguments, renames PATCH1 to PATCH2."""
1834 1834
1835 1835 q = repo.mq
1836 1836
1837 1837 if not name:
1838 1838 name = patch
1839 1839 patch = None
1840 1840
1841 1841 if patch:
1842 1842 patch = q.lookup(patch)
1843 1843 else:
1844 1844 if not q.applied:
1845 1845 ui.write(_('No patches applied\n'))
1846 1846 return
1847 1847 patch = q.lookup('qtip')
1848 1848 absdest = q.join(name)
1849 1849 if os.path.isdir(absdest):
1850 1850 name = normname(os.path.join(name, os.path.basename(patch)))
1851 1851 absdest = q.join(name)
1852 1852 if os.path.exists(absdest):
1853 1853 raise util.Abort(_('%s already exists') % absdest)
1854 1854
1855 1855 if name in q.series:
1856 1856 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1857 1857
1858 1858 if ui.verbose:
1859 1859 ui.write('Renaming %s to %s\n' % (patch, name))
1860 1860 i = q.find_series(patch)
1861 1861 guards = q.guard_re.findall(q.full_series[i])
1862 1862 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1863 1863 q.parse_series()
1864 1864 q.series_dirty = 1
1865 1865
1866 1866 info = q.isapplied(patch)
1867 1867 if info:
1868 1868 q.applied[info[0]] = statusentry(info[1], name)
1869 1869 q.applied_dirty = 1
1870 1870
1871 1871 util.rename(q.join(patch), absdest)
1872 1872 r = q.qrepo()
1873 1873 if r:
1874 1874 wlock = r.wlock()
1875 if r.dirstate.state(name) == 'r':
1875 if r.dirstate[name] == 'r':
1876 1876 r.undelete([name], wlock)
1877 1877 r.copy(patch, name, wlock)
1878 1878 r.remove([patch], False, wlock)
1879 1879
1880 1880 q.save_dirty()
1881 1881
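The changed line in qrename above (dirstate[name] == 'r' replacing dirstate.state(name) == 'r') is the pattern this changeset applies throughout: dirstate queries go through the container protocol instead of the old state() method. A minimal sketch of the correspondence, assuming the usual one-letter states ('n' normal, 'a' added, 'r' removed, 'm' merged, '?' unknown):
#   old spelling                       new spelling
#   dirstate.state(f) == '?'     <->   f not in dirstate
#   dirstate.state(f) != '?'     <->   f in dirstate
#   dirstate.state(f) == 'r'     <->   dirstate[f] == 'r'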
1882 1882 def restore(ui, repo, rev, **opts):
1883 1883 """restore the queue state saved by a rev"""
1884 1884 rev = repo.lookup(rev)
1885 1885 q = repo.mq
1886 1886 q.restore(repo, rev, delete=opts['delete'],
1887 1887 qupdate=opts['update'])
1888 1888 q.save_dirty()
1889 1889 return 0
1890 1890
1891 1891 def save(ui, repo, **opts):
1892 1892 """save current queue state"""
1893 1893 q = repo.mq
1894 1894 message = cmdutil.logmessage(opts)
1895 1895 ret = q.save(repo, msg=message)
1896 1896 if ret:
1897 1897 return ret
1898 1898 q.save_dirty()
1899 1899 if opts['copy']:
1900 1900 path = q.path
1901 1901 if opts['name']:
1902 1902 newpath = os.path.join(q.basepath, opts['name'])
1903 1903 if os.path.exists(newpath):
1904 1904 if not os.path.isdir(newpath):
1905 1905 raise util.Abort(_('destination %s exists and is not '
1906 1906 'a directory') % newpath)
1907 1907 if not opts['force']:
1908 1908 raise util.Abort(_('destination %s exists, '
1909 1909 'use -f to force') % newpath)
1910 1910 else:
1911 1911 newpath = savename(path)
1912 1912 ui.warn("copy %s to %s\n" % (path, newpath))
1913 1913 util.copyfiles(path, newpath)
1914 1914 if opts['empty']:
1915 1915 try:
1916 1916 os.unlink(q.join(q.status_path))
1917 1917 except:
1918 1918 pass
1919 1919 return 0
1920 1920
1921 1921 def strip(ui, repo, rev, **opts):
1922 1922 """strip a revision and all later revs on the same branch"""
1923 1923 rev = repo.lookup(rev)
1924 1924 backup = 'all'
1925 1925 if opts['backup']:
1926 1926 backup = 'strip'
1927 1927 elif opts['nobackup']:
1928 1928 backup = 'none'
1929 1929 update = repo.dirstate.parents()[0] != revlog.nullid
1930 1930 repo.mq.strip(repo, rev, backup=backup, update=update)
1931 1931 return 0
1932 1932
1933 1933 def select(ui, repo, *args, **opts):
1934 1934 '''set or print guarded patches to push
1935 1935
1936 1936 Use the qguard command to set or print guards on patch, then use
1937 1937 qselect to tell mq which guards to use. A patch will be pushed if it
1938 1938 has no guards or any positive guards match the currently selected guard,
1939 1939 but will not be pushed if any negative guards match the current guard.
1940 1940 For example:
1941 1941
1942 1942 qguard foo.patch -stable (negative guard)
1943 1943 qguard bar.patch +stable (positive guard)
1944 1944 qselect stable
1945 1945
1946 1946 This activates the "stable" guard. mq will skip foo.patch (because
1947 1947 it has a negative match) but push bar.patch (because it
1948 1948 has a positive match).
1949 1949
1950 1950 With no arguments, prints the currently active guards.
1951 1951 With one argument, sets the active guard.
1952 1952
1953 1953 Use -n/--none to deactivate guards (no other arguments needed).
1954 1954 When no guards are active, patches with positive guards are skipped
1955 1955 and patches with negative guards are pushed.
1956 1956
1957 1957 qselect can change the guards on applied patches. It does not pop
1958 1958 guarded patches by default. Use --pop to pop back to the last applied
1959 1959 patch that is not guarded. Use --reapply (which implies --pop) to push
1960 1960 back to the current patch afterwards, but skip guarded patches.
1961 1961
1962 1962 Use -s/--series to print a list of all guards in the series file (no
1963 1963 other arguments needed). Use -v for more information.'''
1964 1964
1965 1965 q = repo.mq
1966 1966 guards = q.active()
1967 1967 if args or opts['none']:
1968 1968 old_unapplied = q.unapplied(repo)
1969 1969 old_guarded = [i for i in xrange(len(q.applied)) if
1970 1970 not q.pushable(i)[0]]
1971 1971 q.set_active(args)
1972 1972 q.save_dirty()
1973 1973 if not args:
1974 1974 ui.status(_('guards deactivated\n'))
1975 1975 if not opts['pop'] and not opts['reapply']:
1976 1976 unapplied = q.unapplied(repo)
1977 1977 guarded = [i for i in xrange(len(q.applied))
1978 1978 if not q.pushable(i)[0]]
1979 1979 if len(unapplied) != len(old_unapplied):
1980 1980 ui.status(_('number of unguarded, unapplied patches has '
1981 1981 'changed from %d to %d\n') %
1982 1982 (len(old_unapplied), len(unapplied)))
1983 1983 if len(guarded) != len(old_guarded):
1984 1984 ui.status(_('number of guarded, applied patches has changed '
1985 1985 'from %d to %d\n') %
1986 1986 (len(old_guarded), len(guarded)))
1987 1987 elif opts['series']:
1988 1988 guards = {}
1989 1989 noguards = 0
1990 1990 for gs in q.series_guards:
1991 1991 if not gs:
1992 1992 noguards += 1
1993 1993 for g in gs:
1994 1994 guards.setdefault(g, 0)
1995 1995 guards[g] += 1
1996 1996 if ui.verbose:
1997 1997 guards['NONE'] = noguards
1998 1998 guards = guards.items()
1999 1999 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2000 2000 if guards:
2001 2001 ui.note(_('guards in series file:\n'))
2002 2002 for guard, count in guards:
2003 2003 ui.note('%2d ' % count)
2004 2004 ui.write(guard, '\n')
2005 2005 else:
2006 2006 ui.note(_('no guards in series file\n'))
2007 2007 else:
2008 2008 if guards:
2009 2009 ui.note(_('active guards:\n'))
2010 2010 for g in guards:
2011 2011 ui.write(g, '\n')
2012 2012 else:
2013 2013 ui.write(_('no active guards\n'))
2014 2014 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2015 2015 popped = False
2016 2016 if opts['pop'] or opts['reapply']:
2017 2017 for i in xrange(len(q.applied)):
2018 2018 pushable, reason = q.pushable(i)
2019 2019 if not pushable:
2020 2020 ui.status(_('popping guarded patches\n'))
2021 2021 popped = True
2022 2022 if i == 0:
2023 2023 q.pop(repo, all=True)
2024 2024 else:
2025 2025 q.pop(repo, i-1)
2026 2026 break
2027 2027 if popped:
2028 2028 try:
2029 2029 if reapply:
2030 2030 ui.status(_('reapplying unguarded patches\n'))
2031 2031 q.push(repo, reapply)
2032 2032 finally:
2033 2033 q.save_dirty()
2034 2034
2035 2035 def reposetup(ui, repo):
2036 2036 class mqrepo(repo.__class__):
2037 2037 def abort_if_wdir_patched(self, errmsg, force=False):
2038 2038 if self.mq.applied and not force:
2039 2039 parent = revlog.hex(self.dirstate.parents()[0])
2040 2040 if parent in [s.rev for s in self.mq.applied]:
2041 2041 raise util.Abort(errmsg)
2042 2042
2043 2043 def commit(self, *args, **opts):
2044 2044 if len(args) >= 6:
2045 2045 force = args[5]
2046 2046 else:
2047 2047 force = opts.get('force')
2048 2048 self.abort_if_wdir_patched(
2049 2049 _('cannot commit over an applied mq patch'),
2050 2050 force)
2051 2051
2052 2052 return super(mqrepo, self).commit(*args, **opts)
2053 2053
2054 2054 def push(self, remote, force=False, revs=None):
2055 2055 if self.mq.applied and not force and not revs:
2056 2056 raise util.Abort(_('source has mq patches applied'))
2057 2057 return super(mqrepo, self).push(remote, force, revs)
2058 2058
2059 2059 def tags(self):
2060 2060 if self.tagscache:
2061 2061 return self.tagscache
2062 2062
2063 2063 tagscache = super(mqrepo, self).tags()
2064 2064
2065 2065 q = self.mq
2066 2066 if not q.applied:
2067 2067 return tagscache
2068 2068
2069 2069 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2070 2070 mqtags.append((mqtags[-1][0], 'qtip'))
2071 2071 mqtags.append((mqtags[0][0], 'qbase'))
2072 2072 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2073 2073 for patch in mqtags:
2074 2074 if patch[1] in tagscache:
2075 2075 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2076 2076 else:
2077 2077 tagscache[patch[1]] = patch[0]
2078 2078
2079 2079 return tagscache
2080 2080
2081 2081 def _branchtags(self):
2082 2082 q = self.mq
2083 2083 if not q.applied:
2084 2084 return super(mqrepo, self)._branchtags()
2085 2085
2086 2086 self.branchcache = {} # avoid recursion in changectx
2087 2087 cl = self.changelog
2088 2088 partial, last, lrev = self._readbranchcache()
2089 2089
2090 2090 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2091 2091 start = lrev + 1
2092 2092 if start < qbase:
2093 2093 # update the cache (excluding the patches) and save it
2094 2094 self._updatebranchcache(partial, lrev+1, qbase)
2095 2095 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2096 2096 start = qbase
2097 2097 # if start = qbase, the cache is as updated as it should be.
2098 2098 # if start > qbase, the cache includes (part of) the patches.
2099 2099 # we might as well use it, but we won't save it.
2100 2100
2101 2101 # update the cache up to the tip
2102 2102 self._updatebranchcache(partial, start, cl.count())
2103 2103
2104 2104 return partial
2105 2105
2106 2106 if repo.local():
2107 2107 repo.__class__ = mqrepo
2108 2108 repo.mq = queue(ui, repo.join(""))
2109 2109
2110 2110 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2111 2111
2112 2112 cmdtable = {
2113 2113 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2114 2114 "qclone":
2115 2115 (clone,
2116 2116 [('', 'pull', None, _('use pull protocol to copy metadata')),
2117 2117 ('U', 'noupdate', None, _('do not update the new working directories')),
2118 2118 ('', 'uncompressed', None,
2119 2119 _('use uncompressed transfer (fast over LAN)')),
2120 2120 ('e', 'ssh', '', _('specify ssh command to use')),
2121 2121 ('p', 'patches', '', _('location of source patch repo')),
2122 2122 ('', 'remotecmd', '',
2123 2123 _('specify hg command to run on the remote side'))],
2124 2124 _('hg qclone [OPTION]... SOURCE [DEST]')),
2125 2125 "qcommit|qci":
2126 2126 (commit,
2127 2127 commands.table["^commit|ci"][1],
2128 2128 _('hg qcommit [OPTION]... [FILE]...')),
2129 2129 "^qdiff":
2130 2130 (diff,
2131 2131 [('g', 'git', None, _('use git extended diff format')),
2132 2132 ('I', 'include', [], _('include names matching the given patterns')),
2133 2133 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2134 2134 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2135 2135 "qdelete|qremove|qrm":
2136 2136 (delete,
2137 2137 [('k', 'keep', None, _('keep patch file')),
2138 2138 ('r', 'rev', [], _('stop managing a revision'))],
2139 2139 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2140 2140 'qfold':
2141 2141 (fold,
2142 2142 [('e', 'edit', None, _('edit patch header')),
2143 2143 ('k', 'keep', None, _('keep folded patch files')),
2144 2144 ] + commands.commitopts,
2145 2145 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2146 2146 'qgoto':
2147 2147 (goto,
2148 2148 [('f', 'force', None, _('overwrite any local changes'))],
2149 2149 _('hg qgoto [OPTION]... PATCH')),
2150 2150 'qguard':
2151 2151 (guard,
2152 2152 [('l', 'list', None, _('list all patches and guards')),
2153 2153 ('n', 'none', None, _('drop all guards'))],
2154 2154 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2155 2155 'qheader': (header, [], _('hg qheader [PATCH]')),
2156 2156 "^qimport":
2157 2157 (qimport,
2158 2158 [('e', 'existing', None, 'import file in patch dir'),
2159 2159 ('n', 'name', '', 'patch file name'),
2160 2160 ('f', 'force', None, 'overwrite existing files'),
2161 2161 ('r', 'rev', [], 'place existing revisions under mq control'),
2162 2162 ('g', 'git', None, _('use git extended diff format'))],
2163 2163 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2164 2164 "^qinit":
2165 2165 (init,
2166 2166 [('c', 'create-repo', None, 'create queue repository')],
2167 2167 _('hg qinit [-c]')),
2168 2168 "qnew":
2169 2169 (new,
2170 2170 [('e', 'edit', None, _('edit commit message')),
2171 2171 ('f', 'force', None, _('import uncommitted changes into patch')),
2172 2172 ('I', 'include', [], _('include names matching the given patterns')),
2173 2173 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2174 2174 ] + commands.commitopts,
2175 2175 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2176 2176 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2177 2177 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2178 2178 "^qpop":
2179 2179 (pop,
2180 2180 [('a', 'all', None, _('pop all patches')),
2181 2181 ('n', 'name', '', _('queue name to pop')),
2182 2182 ('f', 'force', None, _('forget any local changes'))],
2183 2183 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2184 2184 "^qpush":
2185 2185 (push,
2186 2186 [('f', 'force', None, _('apply if the patch has rejects')),
2187 2187 ('l', 'list', None, _('list patch name in commit text')),
2188 2188 ('a', 'all', None, _('apply all patches')),
2189 2189 ('m', 'merge', None, _('merge from another queue')),
2190 2190 ('n', 'name', '', _('merge queue name'))],
2191 2191 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2192 2192 "^qrefresh":
2193 2193 (refresh,
2194 2194 [('e', 'edit', None, _('edit commit message')),
2195 2195 ('g', 'git', None, _('use git extended diff format')),
2196 2196 ('s', 'short', None, _('refresh only files already in the patch')),
2197 2197 ('I', 'include', [], _('include names matching the given patterns')),
2198 2198 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2199 2199 ] + commands.commitopts,
2200 2200 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2201 2201 'qrename|qmv':
2202 2202 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2203 2203 "qrestore":
2204 2204 (restore,
2205 2205 [('d', 'delete', None, _('delete save entry')),
2206 2206 ('u', 'update', None, _('update queue working dir'))],
2207 2207 _('hg qrestore [-d] [-u] REV')),
2208 2208 "qsave":
2209 2209 (save,
2210 2210 [('c', 'copy', None, _('copy patch directory')),
2211 2211 ('n', 'name', '', _('copy directory name')),
2212 2212 ('e', 'empty', None, _('clear queue status file')),
2213 2213 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2214 2214 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2215 2215 "qselect":
2216 2216 (select,
2217 2217 [('n', 'none', None, _('disable all guards')),
2218 2218 ('s', 'series', None, _('list all guards in series file')),
2219 2219 ('', 'pop', None, _('pop to before first guarded applied patch')),
2220 2220 ('', 'reapply', None, _('pop, then reapply patches'))],
2221 2221 _('hg qselect [OPTION]... [GUARD]...')),
2222 2222 "qseries":
2223 2223 (series,
2224 2224 [('m', 'missing', None, _('print patches not in series')),
2225 2225 ] + seriesopts,
2226 2226 _('hg qseries [-ms]')),
2227 2227 "^strip":
2228 2228 (strip,
2229 2229 [('f', 'force', None, _('force multi-head removal')),
2230 2230 ('b', 'backup', None, _('bundle unrelated changesets')),
2231 2231 ('n', 'nobackup', None, _('no backups'))],
2232 2232 _('hg strip [-f] [-b] [-n] REV')),
2233 2233 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2234 2234 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2235 2235 }
@@ -1,1278 +1,1278 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
11 11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
12 12 import fancyopts, revlog, version, extensions, hook
13 13
14 14 revrangesep = ':'
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18 class AmbiguousCommand(Exception):
19 19 """Exception raised if command shortcut matches more than one command."""
20 20 class ParseError(Exception):
21 21 """Exception raised on errors in parsing the command line."""
22 22
23 23 def runcatch(ui, args, argv0=None):
24 24 def catchterm(*args):
25 25 raise util.SignalInterrupt
26 26
27 27 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
28 28 num = getattr(signal, name, None)
29 29 if num: signal.signal(num, catchterm)
30 30
31 31 try:
32 32 try:
33 33 # enter the debugger before command execution
34 34 if '--debugger' in args:
35 35 pdb.set_trace()
36 36 try:
37 37 return dispatch(ui, args, argv0=argv0)
38 38 finally:
39 39 ui.flush()
40 40 except:
41 41 # enter the debugger when we hit an exception
42 42 if '--debugger' in args:
43 43 pdb.post_mortem(sys.exc_info()[2])
44 44 ui.print_exc()
45 45 raise
46 46
47 47 except ParseError, inst:
48 48 if inst.args[0]:
49 49 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
50 50 commands.help_(ui, inst.args[0])
51 51 else:
52 52 ui.warn(_("hg: %s\n") % inst.args[1])
53 53 commands.help_(ui, 'shortlist')
54 54 except AmbiguousCommand, inst:
55 55 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
56 56 (inst.args[0], " ".join(inst.args[1])))
57 57 except UnknownCommand, inst:
58 58 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
59 59 commands.help_(ui, 'shortlist')
60 60 except hg.RepoError, inst:
61 61 ui.warn(_("abort: %s!\n") % inst)
62 62 except lock.LockHeld, inst:
63 63 if inst.errno == errno.ETIMEDOUT:
64 64 reason = _('timed out waiting for lock held by %s') % inst.locker
65 65 else:
66 66 reason = _('lock held by %s') % inst.locker
67 67 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
68 68 except lock.LockUnavailable, inst:
69 69 ui.warn(_("abort: could not lock %s: %s\n") %
70 70 (inst.desc or inst.filename, inst.strerror))
71 71 except revlog.RevlogError, inst:
72 72 ui.warn(_("abort: %s!\n") % inst)
73 73 except util.SignalInterrupt:
74 74 ui.warn(_("killed!\n"))
75 75 except KeyboardInterrupt:
76 76 try:
77 77 ui.warn(_("interrupted!\n"))
78 78 except IOError, inst:
79 79 if inst.errno == errno.EPIPE:
80 80 if ui.debugflag:
81 81 ui.warn(_("\nbroken pipe\n"))
82 82 else:
83 83 raise
84 84 except socket.error, inst:
85 85 ui.warn(_("abort: %s\n") % inst[1])
86 86 except IOError, inst:
87 87 if hasattr(inst, "code"):
88 88 ui.warn(_("abort: %s\n") % inst)
89 89 elif hasattr(inst, "reason"):
90 90 try: # usually it is in the form (errno, strerror)
91 91 reason = inst.reason.args[1]
92 92 except: # it might be anything, for example a string
93 93 reason = inst.reason
94 94 ui.warn(_("abort: error: %s\n") % reason)
95 95 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
96 96 if ui.debugflag:
97 97 ui.warn(_("broken pipe\n"))
98 98 elif getattr(inst, "strerror", None):
99 99 if getattr(inst, "filename", None):
100 100 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
101 101 else:
102 102 ui.warn(_("abort: %s\n") % inst.strerror)
103 103 else:
104 104 raise
105 105 except OSError, inst:
106 106 if getattr(inst, "filename", None):
107 107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
108 108 else:
109 109 ui.warn(_("abort: %s\n") % inst.strerror)
110 110 except util.UnexpectedOutput, inst:
111 111 ui.warn(_("abort: %s") % inst[0])
112 112 if not isinstance(inst[1], basestring):
113 113 ui.warn(" %r\n" % (inst[1],))
114 114 elif not inst[1]:
115 115 ui.warn(_(" empty string\n"))
116 116 else:
117 117 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
118 118 except ImportError, inst:
119 119 m = str(inst).split()[-1]
120 120 ui.warn(_("abort: could not import module %s!\n" % m))
121 121 if m in "mpatch bdiff".split():
122 122 ui.warn(_("(did you forget to compile extensions?)\n"))
123 123 elif m in "zlib".split():
124 124 ui.warn(_("(is your Python install correct?)\n"))
125 125
126 126 except util.Abort, inst:
127 127 ui.warn(_("abort: %s\n") % inst)
128 128 except SystemExit, inst:
129 129 # Commands shouldn't sys.exit directly, but give a return code.
130 130 # Just in case, catch this and pass the exit code to the caller.
131 131 return inst.code
132 132 except:
133 133 ui.warn(_("** unknown exception encountered, details follow\n"))
134 134 ui.warn(_("** report bug details to "
135 135 "http://www.selenic.com/mercurial/bts\n"))
136 136 ui.warn(_("** or mercurial@selenic.com\n"))
137 137 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
138 138 % version.get_version())
139 139 raise
140 140
141 141 return -1
142 142
143 143 def findpossible(ui, cmd):
144 144 """
145 145 Return cmd -> (aliases, command table entry)
146 146 for each matching command.
147 147 Return debug commands (or their aliases) only if no normal command matches.
148 148 """
149 149 choice = {}
150 150 debugchoice = {}
151 151 for e in commands.table.keys():
152 152 aliases = e.lstrip("^").split("|")
153 153 found = None
154 154 if cmd in aliases:
155 155 found = cmd
156 156 elif not ui.config("ui", "strict"):
157 157 for a in aliases:
158 158 if a.startswith(cmd):
159 159 found = a
160 160 break
161 161 if found is not None:
162 162 if aliases[0].startswith("debug") or found.startswith("debug"):
163 163 debugchoice[found] = (aliases, commands.table[e])
164 164 else:
165 165 choice[found] = (aliases, commands.table[e])
166 166
167 167 if not choice and debugchoice:
168 168 choice = debugchoice
169 169
170 170 return choice
171 171
172 172 def findcmd(ui, cmd):
173 173 """Return (aliases, command table entry) for command string."""
174 174 choice = findpossible(ui, cmd)
175 175
176 176 if choice.has_key(cmd):
177 177 return choice[cmd]
178 178
179 179 if len(choice) > 1:
180 180 clist = choice.keys()
181 181 clist.sort()
182 182 raise AmbiguousCommand(cmd, clist)
183 183
184 184 if choice:
185 185 return choice.values()[0]
186 186
187 187 raise UnknownCommand(cmd)
188 188
189 189 def findrepo():
190 190 p = os.getcwd()
191 191 while not os.path.isdir(os.path.join(p, ".hg")):
192 192 oldp, p = p, os.path.dirname(p)
193 193 if p == oldp:
194 194 return None
195 195
196 196 return p
197 197
198 198 def parse(ui, args):
199 199 options = {}
200 200 cmdoptions = {}
201 201
202 202 try:
203 203 args = fancyopts.fancyopts(args, commands.globalopts, options)
204 204 except fancyopts.getopt.GetoptError, inst:
205 205 raise ParseError(None, inst)
206 206
207 207 if args:
208 208 cmd, args = args[0], args[1:]
209 209 aliases, i = findcmd(ui, cmd)
210 210 cmd = aliases[0]
211 211 defaults = ui.config("defaults", cmd)
212 212 if defaults:
213 213 args = shlex.split(defaults) + args
214 214 c = list(i[1])
215 215 else:
216 216 cmd = None
217 217 c = []
218 218
219 219 # combine global options into local
220 220 for o in commands.globalopts:
221 221 c.append((o[0], o[1], options[o[1]], o[3]))
222 222
223 223 try:
224 224 args = fancyopts.fancyopts(args, c, cmdoptions)
225 225 except fancyopts.getopt.GetoptError, inst:
226 226 raise ParseError(cmd, inst)
227 227
228 228 # separate global options back out
229 229 for o in commands.globalopts:
230 230 n = o[1]
231 231 options[n] = cmdoptions[n]
232 232 del cmdoptions[n]
233 233
234 234 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
235 235
236 236 def parseconfig(config):
237 237 """parse the --config options from the command line"""
238 238 parsed = []
239 239 for cfg in config:
240 240 try:
241 241 name, value = cfg.split('=', 1)
242 242 section, name = name.split('.', 1)
243 243 if not section or not name:
244 244 raise IndexError
245 245 parsed.append((section, name, value))
246 246 except (IndexError, ValueError):
247 247 raise util.Abort(_('malformed --config option: %s') % cfg)
248 248 return parsed
249 249
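A couple of illustrative input/output pairs for parseconfig (the option values are made up):
#   parseconfig(['ui.username=alice', 'diff.git=1'])
#       -> [('ui', 'username', 'alice'), ('diff', 'git', '1')]
#   parseconfig(['ui.username'])     -> util.Abort: malformed --config option  (no '=')
#   parseconfig(['username=alice'])  -> util.Abort: malformed --config option  (no section)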
250 250 def earlygetopt(aliases, args):
251 251 """Return list of values for an option (or aliases).
252 252
253 253 The values are listed in the order they appear in args.
254 254 The options and values are removed from args.
255 255 """
256 256 try:
257 257 argcount = args.index("--")
258 258 except ValueError:
259 259 argcount = len(args)
260 260 shortopts = [opt for opt in aliases if len(opt) == 2]
261 261 values = []
262 262 pos = 0
263 263 while pos < argcount:
264 264 if args[pos] in aliases:
265 265 if pos + 1 >= argcount:
266 266 # ignore and let getopt report an error if there is no value
267 267 break
268 268 del args[pos]
269 269 values.append(args.pop(pos))
270 270 argcount -= 2
271 271 elif args[pos][:2] in shortopts:
272 272 # short option can have no following space, e.g. hg log -Rfoo
273 273 values.append(args.pop(pos)[2:])
274 274 argcount -= 1
275 275 else:
276 276 pos += 1
277 277 return values
278 278
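A worked example of the early scan (the argument vector is hypothetical); note that matched options and their values are consumed from args in place:
#   args = ['--cwd', '/tmp/repo', 'log', '-Rfoo']
#   earlygetopt(['--cwd'], args)                         -> ['/tmp/repo']   args: ['log', '-Rfoo']
#   earlygetopt(['-R', '--repository', '--repo'], args)  -> ['foo']         args: ['log']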
279 279 def dispatch(ui, args, argv0=None):
280 280 # remember how to call 'hg' before changing the working dir
281 281 util.set_hgexecutable(argv0)
282 282
283 283 # read --config before doing anything else
284 284 # (e.g. to change trust settings for reading .hg/hgrc)
285 285 config = earlygetopt(['--config'], args)
286 286 if config:
287 287 ui.updateopts(config=parseconfig(config))
288 288
289 289 # check for cwd
290 290 cwd = earlygetopt(['--cwd'], args)
291 291 if cwd:
292 292 os.chdir(cwd[-1])
293 293
294 294 # read the local repository .hgrc into a local ui object
295 295 path = findrepo() or ""
296 296 if not path:
297 297 lui = ui
298 298 if path:
299 299 try:
300 300 lui = commands.ui.ui(parentui=ui)
301 301 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
302 302 except IOError:
303 303 pass
304 304
305 305 # now we can expand paths, even ones in .hg/hgrc
306 306 rpath = earlygetopt(["-R", "--repository", "--repo"], args)
307 307 if rpath:
308 308 path = lui.expandpath(rpath[-1])
309 309 lui = commands.ui.ui(parentui=ui)
310 310 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
311 311
312 312 extensions.loadall(lui)
313 313 # check for fallback encoding
314 314 fallback = lui.config('ui', 'fallbackencoding')
315 315 if fallback:
316 316 util._fallbackencoding = fallback
317 317
318 318 fullargs = args
319 319 cmd, func, args, options, cmdoptions = parse(ui, args)
320 320
321 321 if options["config"]:
322 322 raise util.Abort(_("Option --config may not be abbreviated!"))
323 323 if options["cwd"]:
324 324 raise util.Abort(_("Option --cwd may not be abbreviated!"))
325 325 if options["repository"]:
326 326 raise util.Abort(_(
327 327 "Option -R has to be separated from other options (i.e. not -qR) "
328 328 "and --repository may only be abbreviated as --repo!"))
329 329
330 330 if options["encoding"]:
331 331 util._encoding = options["encoding"]
332 332 if options["encodingmode"]:
333 333 util._encodingmode = options["encodingmode"]
334 334 if options["time"]:
335 335 def get_times():
336 336 t = os.times()
337 337 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
338 338 t = (t[0], t[1], t[2], t[3], time.clock())
339 339 return t
340 340 s = get_times()
341 341 def print_time():
342 342 t = get_times()
343 343 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
344 344 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
345 345 atexit.register(print_time)
346 346
347 347 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
348 348 not options["noninteractive"], options["traceback"])
349 349
350 350 if options['help']:
351 351 return commands.help_(ui, cmd, options['version'])
352 352 elif options['version']:
353 353 return commands.version_(ui)
354 354 elif not cmd:
355 355 return commands.help_(ui, 'shortlist')
356 356
357 357 repo = None
358 358 if cmd not in commands.norepo.split():
359 359 try:
360 360 repo = hg.repository(ui, path=path)
361 361 ui = repo.ui
362 362 if not repo.local():
363 363 raise util.Abort(_("repository '%s' is not local") % path)
364 364 except hg.RepoError:
365 365 if cmd not in commands.optionalrepo.split():
366 366 if not path:
367 367 raise hg.RepoError(_("There is no Mercurial repository here"
368 368 " (.hg not found)"))
369 369 raise
370 370 d = lambda: func(ui, repo, *args, **cmdoptions)
371 371 else:
372 372 d = lambda: func(ui, *args, **cmdoptions)
373 373
374 374 # run pre-hook, and abort if it fails
375 375 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
376 376 if ret:
377 377 return ret
378 378 ret = runcommand(ui, options, cmd, d)
379 379 # run post-hook, passing command result
380 380 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
381 381 result = ret)
382 382 return ret
383 383
384 384 def runcommand(ui, options, cmd, cmdfunc):
385 385 def checkargs():
386 386 try:
387 387 return cmdfunc()
388 388 except TypeError, inst:
389 389 # was this an argument error?
390 390 tb = traceback.extract_tb(sys.exc_info()[2])
391 391 if len(tb) != 2: # no
392 392 raise
393 393 raise ParseError(cmd, _("invalid arguments"))
394 394
395 395 if options['profile']:
396 396 import hotshot, hotshot.stats
397 397 prof = hotshot.Profile("hg.prof")
398 398 try:
399 399 try:
400 400 return prof.runcall(checkargs)
401 401 except:
402 402 try:
403 403 ui.warn(_('exception raised - generating '
404 404 'profile anyway\n'))
405 405 except:
406 406 pass
407 407 raise
408 408 finally:
409 409 prof.close()
410 410 stats = hotshot.stats.load("hg.prof")
411 411 stats.strip_dirs()
412 412 stats.sort_stats('time', 'calls')
413 413 stats.print_stats(40)
414 414 elif options['lsprof']:
415 415 try:
416 416 from mercurial import lsprof
417 417 except ImportError:
418 418 raise util.Abort(_(
419 419 'lsprof not available - install from '
420 420 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
421 421 p = lsprof.Profiler()
422 422 p.enable(subcalls=True)
423 423 try:
424 424 return checkargs()
425 425 finally:
426 426 p.disable()
427 427 stats = lsprof.Stats(p.getstats())
428 428 stats.sort()
429 429 stats.pprint(top=10, file=sys.stderr, climit=5)
430 430 else:
431 431 return checkargs()
432 432
433 433 def bail_if_changed(repo):
434 434 modified, added, removed, deleted = repo.status()[:4]
435 435 if modified or added or removed or deleted:
436 436 raise util.Abort(_("outstanding uncommitted changes"))
437 437
438 438 def logmessage(opts):
439 439 """ get the log message according to -m and -l option """
440 440 message = opts['message']
441 441 logfile = opts['logfile']
442 442
443 443 if message and logfile:
444 444 raise util.Abort(_('options --message and --logfile are mutually '
445 445 'exclusive'))
446 446 if not message and logfile:
447 447 try:
448 448 if logfile == '-':
449 449 message = sys.stdin.read()
450 450 else:
451 451 message = open(logfile).read()
452 452 except IOError, inst:
453 453 raise util.Abort(_("can't read commit message '%s': %s") %
454 454 (logfile, inst.strerror))
455 455 return message
456 456
457 457 def setremoteconfig(ui, opts):
458 458 "copy remote options to ui tree"
459 459 if opts.get('ssh'):
460 460 ui.setconfig("ui", "ssh", opts['ssh'])
461 461 if opts.get('remotecmd'):
462 462 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
463 463
464 464 def parseurl(url, revs):
465 465 '''parse url#branch, returning url, branch + revs'''
466 466
467 467 if '#' not in url:
468 468 return url, (revs or None)
469 469
470 470 url, rev = url.split('#', 1)
471 471 return url, revs + [rev]
472 472
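For example (URL and branch name are made up):
#   parseurl('http://example.com/repo', [])         -> ('http://example.com/repo', None)
#   parseurl('http://example.com/repo#stable', [])  -> ('http://example.com/repo', ['stable'])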
473 473 def revpair(repo, revs):
474 474 '''return pair of nodes, given list of revisions. second item can
475 475 be None, meaning use working dir.'''
476 476
477 477 def revfix(repo, val, defval):
478 478 if not val and val != 0 and defval is not None:
479 479 val = defval
480 480 return repo.lookup(val)
481 481
482 482 if not revs:
483 483 return repo.dirstate.parents()[0], None
484 484 end = None
485 485 if len(revs) == 1:
486 486 if revrangesep in revs[0]:
487 487 start, end = revs[0].split(revrangesep, 1)
488 488 start = revfix(repo, start, 0)
489 489 end = revfix(repo, end, repo.changelog.count() - 1)
490 490 else:
491 491 start = revfix(repo, revs[0], None)
492 492 elif len(revs) == 2:
493 493 if revrangesep in revs[0] or revrangesep in revs[1]:
494 494 raise util.Abort(_('too many revisions specified'))
495 495 start = revfix(repo, revs[0], None)
496 496 end = revfix(repo, revs[1], None)
497 497 else:
498 498 raise util.Abort(_('too many revisions specified'))
499 499 return start, end
500 500
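Illustrative calls (revision numbers are hypothetical); the results are binary node ids, not revision numbers:
#   revpair(repo, [])           -> (first parent of the working dir, None)
#   revpair(repo, ['2:5'])      -> (node of rev 2, node of rev 5)
#   revpair(repo, ['2', '5'])   -> (node of rev 2, node of rev 5)
#   revpair(repo, ['1', '2:5']) -> util.Abort: too many revisions specified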
501 501 def revrange(repo, revs):
502 502 """Yield revision as strings from a list of revision specifications."""
503 503
504 504 def revfix(repo, val, defval):
505 505 if not val and val != 0 and defval is not None:
506 506 return defval
507 507 return repo.changelog.rev(repo.lookup(val))
508 508
509 509 seen, l = {}, []
510 510 for spec in revs:
511 511 if revrangesep in spec:
512 512 start, end = spec.split(revrangesep, 1)
513 513 start = revfix(repo, start, 0)
514 514 end = revfix(repo, end, repo.changelog.count() - 1)
515 515 step = start > end and -1 or 1
516 516 for rev in xrange(start, end+step, step):
517 517 if rev in seen:
518 518 continue
519 519 seen[rev] = 1
520 520 l.append(rev)
521 521 else:
522 522 rev = revfix(repo, spec, None)
523 523 if rev in seen:
524 524 continue
525 525 seen[rev] = 1
526 526 l.append(rev)
527 527
528 528 return l
529 529
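With a hypothetical ten-changeset repository, the specifications below expand as follows:
#   revrange(repo, ['2:4', '7'])  -> [2, 3, 4, 7]
#   revrange(repo, ['4:2'])       -> [4, 3, 2]          (descending ranges step by -1)
#   revrange(repo, [':'])         -> [0, 1, ..., 9]     (open ends default to 0 and tip)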
530 530 def make_filename(repo, pat, node,
531 531 total=None, seqno=None, revwidth=None, pathname=None):
532 532 node_expander = {
533 533 'H': lambda: hex(node),
534 534 'R': lambda: str(repo.changelog.rev(node)),
535 535 'h': lambda: short(node),
536 536 }
537 537 expander = {
538 538 '%': lambda: '%',
539 539 'b': lambda: os.path.basename(repo.root),
540 540 }
541 541
542 542 try:
543 543 if node:
544 544 expander.update(node_expander)
545 545 if node:
546 546 expander['r'] = (lambda:
547 547 str(repo.changelog.rev(node)).zfill(revwidth or 0))
548 548 if total is not None:
549 549 expander['N'] = lambda: str(total)
550 550 if seqno is not None:
551 551 expander['n'] = lambda: str(seqno)
552 552 if total is not None and seqno is not None:
553 553 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
554 554 if pathname is not None:
555 555 expander['s'] = lambda: os.path.basename(pathname)
556 556 expander['d'] = lambda: os.path.dirname(pathname) or '.'
557 557 expander['p'] = lambda: pathname
558 558
559 559 newname = []
560 560 patlen = len(pat)
561 561 i = 0
562 562 while i < patlen:
563 563 c = pat[i]
564 564 if c == '%':
565 565 i += 1
566 566 c = pat[i]
567 567 c = expander[c]()
568 568 newname.append(c)
569 569 i += 1
570 570 return ''.join(newname)
571 571 except KeyError, inst:
572 572 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
573 573 inst.args[0])
574 574
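A couple of illustrative expansions, assuming a repository rooted at /work/myrepo and a node whose short hash is 1234567890ab at revision 5 (all values hypothetical):
#   make_filename(repo, '%b-%R.patch', node)       -> 'myrepo-5.patch'
#   make_filename(repo, '%h/%n-of-%N.diff', node,
#                 total=12, seqno=3)               -> '1234567890ab/03-of-12.diff'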
575 575 def make_file(repo, pat, node=None,
576 576 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
577 577 if not pat or pat == '-':
578 578 return 'w' in mode and sys.stdout or sys.stdin
579 579 if hasattr(pat, 'write') and 'w' in mode:
580 580 return pat
581 581 if hasattr(pat, 'read') and 'r' in mode:
582 582 return pat
583 583 return open(make_filename(repo, pat, node, total, seqno, revwidth,
584 584 pathname),
585 585 mode)
586 586
587 587 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
588 588 cwd = repo.getcwd()
589 589 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
590 590 opts.get('exclude'), globbed=globbed,
591 591 default=default)
592 592
593 593 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
594 594 default=None):
595 595 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
596 596 default=default)
597 597 exact = dict.fromkeys(files)
598 598 cwd = repo.getcwd()
599 599 for src, fn in repo.walk(node=node, files=files, match=matchfn,
600 600 badmatch=badmatch):
601 601 yield src, fn, repo.pathto(fn, cwd), fn in exact
602 602
603 603 def findrenames(repo, added=None, removed=None, threshold=0.5):
604 604 '''find renamed files -- yields (before, after, score) tuples'''
605 605 if added is None or removed is None:
606 606 added, removed = repo.status()[1:3]
607 607 ctx = repo.changectx()
608 608 for a in added:
609 609 aa = repo.wread(a)
610 610 bestname, bestscore = None, threshold
611 611 for r in removed:
612 612 rr = ctx.filectx(r).data()
613 613
614 614 # bdiff.blocks() returns blocks of matching lines
615 615 # count the number of bytes in each
616 616 equal = 0
617 617 alines = mdiff.splitnewlines(aa)
618 618 matches = bdiff.blocks(aa, rr)
619 619 for x1,x2,y1,y2 in matches:
620 620 for line in alines[x1:x2]:
621 621 equal += len(line)
622 622
623 623 lengths = len(aa) + len(rr)
624 624 if lengths:
625 625 myscore = equal*2.0 / lengths
626 626 if myscore >= bestscore:
627 627 bestname, bestscore = r, myscore
628 628 if bestname:
629 629 yield bestname, a, bestscore
630 630
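findrenames scores each added/removed pair as matched-bytes * 2 / (len(added) + len(removed)), so identical contents score 1.0 and a pair is reported as a rename once the score reaches the threshold (0.5 by default, or the value passed down from addremove -s). A rough self-contained approximation of that score, using difflib on lines in place of Mercurial's internal bdiff, so the exact numbers can differ:

    import difflib

    def similarity(added, removed):
        """Approximate rename score: matched bytes * 2.0 / total bytes."""
        alines = added.splitlines(True)
        rlines = removed.splitlines(True)
        matcher = difflib.SequenceMatcher(None, alines, rlines)
        equal = 0
        for i, j, n in matcher.get_matching_blocks():
            equal += sum(len(line) for line in alines[i:i + n])
        total = len(added) + len(removed)
        if not total:
            return 0.0
        return equal * 2.0 / total

    print(similarity("one\ntwo\nthree\nfour\n", "one\ntwo\nthree\n"))
    # ~0.85, above the default 0.5 threshold, so it would be reported as a rename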
631 631 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
632 632 similarity=None):
633 633 if dry_run is None:
634 634 dry_run = opts.get('dry_run')
635 635 if similarity is None:
636 636 similarity = float(opts.get('similarity') or 0)
637 637 add, remove = [], []
638 638 mapping = {}
639 639 for src, abs, rel, exact in walk(repo, pats, opts):
640 640 target = repo.wjoin(abs)
641 if src == 'f' and repo.dirstate.state(abs) == '?':
641 if src == 'f' and abs not in repo.dirstate:
642 642 add.append(abs)
643 643 mapping[abs] = rel, exact
644 644 if repo.ui.verbose or not exact:
645 645 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
646 if repo.dirstate.state(abs) != 'r' and not util.lexists(target):
646 if repo.dirstate[abs] != 'r' and not util.lexists(target):
647 647 remove.append(abs)
648 648 mapping[abs] = rel, exact
649 649 if repo.ui.verbose or not exact:
650 650 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
651 651 if not dry_run:
652 652 repo.add(add, wlock=wlock)
653 653 repo.remove(remove, wlock=wlock)
654 654 if similarity > 0:
655 655 for old, new, score in findrenames(repo, add, remove, similarity):
656 656 oldrel, oldexact = mapping[old]
657 657 newrel, newexact = mapping[new]
658 658 if repo.ui.verbose or not oldexact or not newexact:
659 659 repo.ui.status(_('recording removal of %s as rename to %s '
660 660 '(%d%% similar)\n') %
661 661 (oldrel, newrel, score * 100))
662 662 if not dry_run:
663 663 repo.copy(old, new, wlock=wlock)
664 664
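The two hunks above (and the matching ones later in commands.py) lean on the dirstate state letters: 'n' normal/tracked, 'a' added, 'r' removed, 'm' merged, '?' untracked. With the new __contains__/__getitem__, membership replaces the old state(f) == '?' test, and indexing appears to fall back to '?' for unknown paths. A tiny sketch with a hypothetical dict-backed dirstate:

    class FakeDirstate(object):
        """Hypothetical stand-in for the new mapping-style dirstate API."""
        def __init__(self, states):
            self._map = states                 # {path: 'n' | 'a' | 'r' | 'm'}
        def __contains__(self, f):
            return f in self._map
        def __getitem__(self, f):
            return self._map.get(f, '?')       # assumed fallback for untracked files

    ds = FakeDirstate({'tracked.py': 'n', 'gone.py': 'r'})
    assert 'new.py' not in ds                  # old spelling: ds.state('new.py') == '?'
    assert ds['gone.py'] == 'r'                # old spelling: ds.state('gone.py') == 'r'
    assert ds['new.py'] == '?'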
665 665 def service(opts, parentfn=None, initfn=None, runfn=None):
666 666 '''Run a command as a service.'''
667 667
668 668 if opts['daemon'] and not opts['daemon_pipefds']:
669 669 rfd, wfd = os.pipe()
670 670 args = sys.argv[:]
671 671 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
672 672 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
673 673 args[0], args)
674 674 os.close(wfd)
675 675 os.read(rfd, 1)
676 676 if parentfn:
677 677 return parentfn(pid)
678 678 else:
679 679 os._exit(0)
680 680
681 681 if initfn:
682 682 initfn()
683 683
684 684 if opts['pid_file']:
685 685 fp = open(opts['pid_file'], 'w')
686 686 fp.write(str(os.getpid()) + '\n')
687 687 fp.close()
688 688
689 689 if opts['daemon_pipefds']:
690 690 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
691 691 os.close(rfd)
692 692 try:
693 693 os.setsid()
694 694 except AttributeError:
695 695 pass
696 696 os.write(wfd, 'y')
697 697 os.close(wfd)
698 698 sys.stdout.flush()
699 699 sys.stderr.flush()
700 700 fd = os.open(util.nulldev, os.O_RDWR)
701 701 if fd != 0: os.dup2(fd, 0)
702 702 if fd != 1: os.dup2(fd, 1)
703 703 if fd != 2: os.dup2(fd, 2)
704 704 if fd not in (0, 1, 2): os.close(fd)
705 705
706 706 if runfn:
707 707 return runfn()
708 708
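service() daemonizes by re-executing itself with --daemon-pipefds and blocking the parent on a pipe; the detached child writes a single byte only after initfn and the pid file have run, so the parent never returns before the service is ready. A minimal POSIX sketch of the same readiness handshake, using fork() instead of re-execution so it stays self-contained (daemonize, setup and run are illustrative names, not Mercurial APIs):

    import os

    def daemonize(setup, run):
        rfd, wfd = os.pipe()
        pid = os.fork()
        if pid:                        # parent: block until the child signals readiness
            os.close(wfd)
            os.read(rfd, 1)
            os.close(rfd)
            return pid
        os.close(rfd)                  # child: detach, finish setup, then release
        os.setsid()                    # the waiting parent with a one-byte write
        setup()
        os.write(wfd, b'y')
        os.close(wfd)
        try:
            run()
        finally:
            os._exit(0)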
709 709 class changeset_printer(object):
710 710 '''show changeset information when templating not requested.'''
711 711
712 712 def __init__(self, ui, repo, patch, buffered):
713 713 self.ui = ui
714 714 self.repo = repo
715 715 self.buffered = buffered
716 716 self.patch = patch
717 717 self.header = {}
718 718 self.hunk = {}
719 719 self.lastheader = None
720 720
721 721 def flush(self, rev):
722 722 if rev in self.header:
723 723 h = self.header[rev]
724 724 if h != self.lastheader:
725 725 self.lastheader = h
726 726 self.ui.write(h)
727 727 del self.header[rev]
728 728 if rev in self.hunk:
729 729 self.ui.write(self.hunk[rev])
730 730 del self.hunk[rev]
731 731 return 1
732 732 return 0
733 733
734 734 def show(self, rev=0, changenode=None, copies=(), **props):
735 735 if self.buffered:
736 736 self.ui.pushbuffer()
737 737 self._show(rev, changenode, copies, props)
738 738 self.hunk[rev] = self.ui.popbuffer()
739 739 else:
740 740 self._show(rev, changenode, copies, props)
741 741
742 742 def _show(self, rev, changenode, copies, props):
743 743 '''show a single changeset or file revision'''
744 744 log = self.repo.changelog
745 745 if changenode is None:
746 746 changenode = log.node(rev)
747 747 elif not rev:
748 748 rev = log.rev(changenode)
749 749
750 750 if self.ui.quiet:
751 751 self.ui.write("%d:%s\n" % (rev, short(changenode)))
752 752 return
753 753
754 754 changes = log.read(changenode)
755 755 date = util.datestr(changes[2])
756 756 extra = changes[5]
757 757 branch = extra.get("branch")
758 758
759 759 hexfunc = self.ui.debugflag and hex or short
760 760
761 761 parents = [(p, hexfunc(log.node(p)))
762 762 for p in self._meaningful_parentrevs(log, rev)]
763 763
764 764 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
765 765
766 766 # don't show the default branch name
767 767 if branch != 'default':
768 768 branch = util.tolocal(branch)
769 769 self.ui.write(_("branch: %s\n") % branch)
770 770 for tag in self.repo.nodetags(changenode):
771 771 self.ui.write(_("tag: %s\n") % tag)
772 772 for parent in parents:
773 773 self.ui.write(_("parent: %d:%s\n") % parent)
774 774
775 775 if self.ui.debugflag:
776 776 self.ui.write(_("manifest: %d:%s\n") %
777 777 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
778 778 self.ui.write(_("user: %s\n") % changes[1])
779 779 self.ui.write(_("date: %s\n") % date)
780 780
781 781 if self.ui.debugflag:
782 782 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
783 783 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
784 784 files):
785 785 if value:
786 786 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
787 787 elif changes[3] and self.ui.verbose:
788 788 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
789 789 if copies and self.ui.verbose:
790 790 copies = ['%s (%s)' % c for c in copies]
791 791 self.ui.write(_("copies: %s\n") % ' '.join(copies))
792 792
793 793 if extra and self.ui.debugflag:
794 794 extraitems = extra.items()
795 795 extraitems.sort()
796 796 for key, value in extraitems:
797 797 self.ui.write(_("extra: %s=%s\n")
798 798 % (key, value.encode('string_escape')))
799 799
800 800 description = changes[4].strip()
801 801 if description:
802 802 if self.ui.verbose:
803 803 self.ui.write(_("description:\n"))
804 804 self.ui.write(description)
805 805 self.ui.write("\n\n")
806 806 else:
807 807 self.ui.write(_("summary: %s\n") %
808 808 description.splitlines()[0])
809 809 self.ui.write("\n")
810 810
811 811 self.showpatch(changenode)
812 812
813 813 def showpatch(self, node):
814 814 if self.patch:
815 815 prev = self.repo.changelog.parents(node)[0]
816 816 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
817 817 opts=patch.diffopts(self.ui))
818 818 self.ui.write("\n")
819 819
820 820 def _meaningful_parentrevs(self, log, rev):
821 821 """Return list of meaningful (or all if debug) parentrevs for rev.
822 822
823 823 For merges (two non-nullrev revisions) both parents are meaningful.
824 824 Otherwise the first parent revision is considered meaningful if it
825 825 is not the preceding revision.
826 826 """
827 827 parents = log.parentrevs(rev)
828 828 if not self.ui.debugflag and parents[1] == nullrev:
829 829 if parents[0] >= rev - 1:
830 830 parents = []
831 831 else:
832 832 parents = [parents[0]]
833 833 return parents
834 834
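Put concretely: for a non-merge changeset the parent is printed only when it is not simply the preceding revision. A self-contained restatement of the rule above (nullrev written as -1, the --debug shortcut omitted):

    def meaningful_parents(parentrevs, rev):
        p1, p2 = parentrevs
        if p2 == -1:                   # not a merge
            if p1 >= rev - 1:
                return []              # linear history: the parent is implied
            return [p1]
        return [p1, p2]                # merge: both parents matter

    assert meaningful_parents((6, -1), 7) == []
    assert meaningful_parents((3, -1), 7) == [3]
    assert meaningful_parents((3, 5), 7) == [3, 5]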
835 835
836 836 class changeset_templater(changeset_printer):
837 837 '''format changeset information.'''
838 838
839 839 def __init__(self, ui, repo, patch, mapfile, buffered):
840 840 changeset_printer.__init__(self, ui, repo, patch, buffered)
841 841 filters = templater.common_filters.copy()
842 842 filters['formatnode'] = (ui.debugflag and (lambda x: x)
843 843 or (lambda x: x[:12]))
844 844 self.t = templater.templater(mapfile, filters,
845 845 cache={
846 846 'parent': '{rev}:{node|formatnode} ',
847 847 'manifest': '{rev}:{node|formatnode}',
848 848 'filecopy': '{name} ({source})'})
849 849
850 850 def use_template(self, t):
851 851 '''set template string to use'''
852 852 self.t.cache['changeset'] = t
853 853
854 854 def _show(self, rev, changenode, copies, props):
855 855 '''show a single changeset or file revision'''
856 856 log = self.repo.changelog
857 857 if changenode is None:
858 858 changenode = log.node(rev)
859 859 elif not rev:
860 860 rev = log.rev(changenode)
861 861
862 862 changes = log.read(changenode)
863 863
864 864 def showlist(name, values, plural=None, **args):
865 865 '''expand set of values.
866 866 name is name of key in template map.
867 867 values is list of strings or dicts.
868 868 plural is plural of name, if not simply name + 's'.
869 869
870 870 expansion works like this, given name 'foo'.
871 871
872 872 if values is empty, expand 'no_foos'.
873 873
874 874 if 'foo' not in template map, return values as a string,
875 875 joined by space.
876 876
877 877 expand 'start_foos'.
878 878
879 879 for each value, expand 'foo'. if 'last_foo' in template
880 880 map, expand it instead of 'foo' for last key.
881 881
882 882 expand 'end_foos'.
883 883 '''
884 884 if plural: names = plural
885 885 else: names = name + 's'
886 886 if not values:
887 887 noname = 'no_' + names
888 888 if noname in self.t:
889 889 yield self.t(noname, **args)
890 890 return
891 891 if name not in self.t:
892 892 if isinstance(values[0], str):
893 893 yield ' '.join(values)
894 894 else:
895 895 for v in values:
896 896 yield dict(v, **args)
897 897 return
898 898 startname = 'start_' + names
899 899 if startname in self.t:
900 900 yield self.t(startname, **args)
901 901 vargs = args.copy()
902 902 def one(v, tag=name):
903 903 try:
904 904 vargs.update(v)
905 905 except (AttributeError, ValueError):
906 906 try:
907 907 for a, b in v:
908 908 vargs[a] = b
909 909 except ValueError:
910 910 vargs[name] = v
911 911 return self.t(tag, **vargs)
912 912 lastname = 'last_' + name
913 913 if lastname in self.t:
914 914 last = values.pop()
915 915 else:
916 916 last = None
917 917 for v in values:
918 918 yield one(v)
919 919 if last is not None:
920 920 yield one(last, tag=lastname)
921 921 endname = 'end_' + names
922 922 if endname in self.t:
923 923 yield self.t(endname, **args)
924 924
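A compact restatement of the expansion order the showlist docstring describes, for name 'tag' and assuming the map defines all of the optional no_tags, start_tags, last_tag and end_tags templates (the tag(...) strings below are placeholders for the expanded templates):

    def expand_tags(values):
        if not values:
            return ['no_tags']
        out = ['start_tags']
        out += ['tag(%s)' % v for v in values[:-1]]
        out += ['last_tag(%s)' % values[-1], 'end_tags']
        return out

    assert expand_tags([]) == ['no_tags']
    assert expand_tags(['tip']) == ['start_tags', 'last_tag(tip)', 'end_tags']
    assert expand_tags(['tip', 'v1.0']) == \
           ['start_tags', 'tag(tip)', 'last_tag(v1.0)', 'end_tags']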
925 925 def showbranches(**args):
926 926 branch = changes[5].get("branch")
927 927 if branch != 'default':
928 928 branch = util.tolocal(branch)
929 929 return showlist('branch', [branch], plural='branches', **args)
930 930
931 931 def showparents(**args):
932 932 parents = [[('rev', p), ('node', hex(log.node(p)))]
933 933 for p in self._meaningful_parentrevs(log, rev)]
934 934 return showlist('parent', parents, **args)
935 935
936 936 def showtags(**args):
937 937 return showlist('tag', self.repo.nodetags(changenode), **args)
938 938
939 939 def showextras(**args):
940 940 extras = changes[5].items()
941 941 extras.sort()
942 942 for key, value in extras:
943 943 args = args.copy()
944 944 args.update(dict(key=key, value=value))
945 945 yield self.t('extra', **args)
946 946
947 947 def showcopies(**args):
948 948 c = [{'name': x[0], 'source': x[1]} for x in copies]
949 949 return showlist('file_copy', c, plural='file_copies', **args)
950 950
951 951 if self.ui.debugflag:
952 952 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
953 953 def showfiles(**args):
954 954 return showlist('file', files[0], **args)
955 955 def showadds(**args):
956 956 return showlist('file_add', files[1], **args)
957 957 def showdels(**args):
958 958 return showlist('file_del', files[2], **args)
959 959 def showmanifest(**args):
960 960 args = args.copy()
961 961 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
962 962 node=hex(changes[0])))
963 963 return self.t('manifest', **args)
964 964 else:
965 965 def showfiles(**args):
966 966 return showlist('file', changes[3], **args)
967 967 showadds = ''
968 968 showdels = ''
969 969 showmanifest = ''
970 970
971 971 defprops = {
972 972 'author': changes[1],
973 973 'branches': showbranches,
974 974 'date': changes[2],
975 975 'desc': changes[4].strip(),
976 976 'file_adds': showadds,
977 977 'file_dels': showdels,
978 978 'files': showfiles,
979 979 'file_copies': showcopies,
980 980 'manifest': showmanifest,
981 981 'node': hex(changenode),
982 982 'parents': showparents,
983 983 'rev': rev,
984 984 'tags': showtags,
985 985 'extras': showextras,
986 986 }
987 987 props = props.copy()
988 988 props.update(defprops)
989 989
990 990 try:
991 991 if self.ui.debugflag and 'header_debug' in self.t:
992 992 key = 'header_debug'
993 993 elif self.ui.quiet and 'header_quiet' in self.t:
994 994 key = 'header_quiet'
995 995 elif self.ui.verbose and 'header_verbose' in self.t:
996 996 key = 'header_verbose'
997 997 elif 'header' in self.t:
998 998 key = 'header'
999 999 else:
1000 1000 key = ''
1001 1001 if key:
1002 1002 h = templater.stringify(self.t(key, **props))
1003 1003 if self.buffered:
1004 1004 self.header[rev] = h
1005 1005 else:
1006 1006 self.ui.write(h)
1007 1007 if self.ui.debugflag and 'changeset_debug' in self.t:
1008 1008 key = 'changeset_debug'
1009 1009 elif self.ui.quiet and 'changeset_quiet' in self.t:
1010 1010 key = 'changeset_quiet'
1011 1011 elif self.ui.verbose and 'changeset_verbose' in self.t:
1012 1012 key = 'changeset_verbose'
1013 1013 else:
1014 1014 key = 'changeset'
1015 1015 self.ui.write(templater.stringify(self.t(key, **props)))
1016 1016 self.showpatch(changenode)
1017 1017 except KeyError, inst:
1018 1018 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
1019 1019 inst.args[0]))
1020 1020 except SyntaxError, inst:
1021 1021 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
1022 1022
1023 1023 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
1024 1024 """show one changeset using template or regular display.
1025 1025
1026 1026 Display format will be the first non-empty hit of:
1027 1027 1. option 'template'
1028 1028 2. option 'style'
1029 1029 3. [ui] setting 'logtemplate'
1030 1030 4. [ui] setting 'style'
1031 1031 If all of these values are either unset or the empty string,
1032 1032 regular display via changeset_printer() is done.
1033 1033 """
1034 1034 # options
1035 1035 patch = False
1036 1036 if opts.get('patch'):
1037 1037 patch = matchfn or util.always
1038 1038
1039 1039 tmpl = opts.get('template')
1040 1040 mapfile = None
1041 1041 if tmpl:
1042 1042 tmpl = templater.parsestring(tmpl, quoted=False)
1043 1043 else:
1044 1044 mapfile = opts.get('style')
1045 1045 # ui settings
1046 1046 if not mapfile:
1047 1047 tmpl = ui.config('ui', 'logtemplate')
1048 1048 if tmpl:
1049 1049 tmpl = templater.parsestring(tmpl)
1050 1050 else:
1051 1051 mapfile = ui.config('ui', 'style')
1052 1052
1053 1053 if tmpl or mapfile:
1054 1054 if mapfile:
1055 1055 if not os.path.split(mapfile)[0]:
1056 1056 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1057 1057 or templater.templatepath(mapfile))
1058 1058 if mapname: mapfile = mapname
1059 1059 try:
1060 1060 t = changeset_templater(ui, repo, patch, mapfile, buffered)
1061 1061 except SyntaxError, inst:
1062 1062 raise util.Abort(inst.args[0])
1063 1063 if tmpl: t.use_template(tmpl)
1064 1064 return t
1065 1065 return changeset_printer(ui, repo, patch, buffered)
1066 1066
1067 1067 def finddate(ui, repo, date):
1068 1068 """Find the tipmost changeset that matches the given date spec"""
1069 1069 df = util.matchdate(date + " to " + date)
1070 1070 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1071 1071 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
1072 1072 results = {}
1073 1073 for st, rev, fns in changeiter:
1074 1074 if st == 'add':
1075 1075 d = get(rev)[2]
1076 1076 if df(d[0]):
1077 1077 results[rev] = d
1078 1078 elif st == 'iter':
1079 1079 if rev in results:
1080 1080 ui.status("Found revision %s from %s\n" %
1081 1081 (rev, util.datestr(results[rev])))
1082 1082 return str(rev)
1083 1083
1084 1084 raise util.Abort(_("revision matching date not found"))
1085 1085
1086 1086 def walkchangerevs(ui, repo, pats, change, opts):
1087 1087 '''Iterate over files and the revs they changed in.
1088 1088
1089 1089 Callers most commonly need to iterate backwards over the history
1090 1090 they are interested in. Doing so has awful (quadratic-looking)
1091 1091 performance, so we use iterators in a "windowed" way.
1092 1092
1093 1093 We walk a window of revisions in the desired order. Within the
1094 1094 window, we first walk forwards to gather data, then in the desired
1095 1095 order (usually backwards) to display it.
1096 1096
1097 1097 This function returns an (iterator, matchfn) tuple. The iterator
1098 1098 yields 3-tuples. They will be of one of the following forms:
1099 1099
1100 1100 "window", incrementing, lastrev: stepping through a window,
1101 1101 positive if walking forwards through revs, last rev in the
1102 1102 sequence iterated over - use to reset state for the current window
1103 1103
1104 1104 "add", rev, fns: out-of-order traversal of the given file names
1105 1105 fns, which changed during revision rev - use to gather data for
1106 1106 possible display
1107 1107
1108 1108 "iter", rev, None: in-order traversal of the revs earlier iterated
1109 1109 over with "add" - use to display data'''
1110 1110
1111 1111 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1112 1112 if start < end:
1113 1113 while start < end:
1114 1114 yield start, min(windowsize, end-start)
1115 1115 start += windowsize
1116 1116 if windowsize < sizelimit:
1117 1117 windowsize *= 2
1118 1118 else:
1119 1119 while start > end:
1120 1120 yield start, min(windowsize, start-end-1)
1121 1121 start -= windowsize
1122 1122 if windowsize < sizelimit:
1123 1123 windowsize *= 2
1124 1124
1125 1125 files, matchfn, anypats = matchpats(repo, pats, opts)
1126 1126 follow = opts.get('follow') or opts.get('follow_first')
1127 1127
1128 1128 if repo.changelog.count() == 0:
1129 1129 return [], matchfn
1130 1130
1131 1131 if follow:
1132 1132 defrange = '%s:0' % repo.changectx().rev()
1133 1133 else:
1134 1134 defrange = 'tip:0'
1135 1135 revs = revrange(repo, opts['rev'] or [defrange])
1136 1136 wanted = {}
1137 1137 slowpath = anypats or opts.get('removed')
1138 1138 fncache = {}
1139 1139
1140 1140 if not slowpath and not files:
1141 1141 # No files, no patterns. Display all revs.
1142 1142 wanted = dict.fromkeys(revs)
1143 1143 copies = []
1144 1144 if not slowpath:
1145 1145 # Only files, no patterns. Check the history of each file.
1146 1146 def filerevgen(filelog, node):
1147 1147 cl_count = repo.changelog.count()
1148 1148 if node is None:
1149 1149 last = filelog.count() - 1
1150 1150 else:
1151 1151 last = filelog.rev(node)
1152 1152 for i, window in increasing_windows(last, nullrev):
1153 1153 revs = []
1154 1154 for j in xrange(i - window, i + 1):
1155 1155 n = filelog.node(j)
1156 1156 revs.append((filelog.linkrev(n),
1157 1157 follow and filelog.renamed(n)))
1158 1158 revs.reverse()
1159 1159 for rev in revs:
1160 1160 # only yield revs for which we have the changelog entry; missing
1161 1161 # entries can occur while doing "hg log" during a pull or commit
1162 1162 if rev[0] < cl_count:
1163 1163 yield rev
1164 1164 def iterfiles():
1165 1165 for filename in files:
1166 1166 yield filename, None
1167 1167 for filename_node in copies:
1168 1168 yield filename_node
1169 1169 minrev, maxrev = min(revs), max(revs)
1170 1170 for file_, node in iterfiles():
1171 1171 filelog = repo.file(file_)
1172 1172 # A zero count may be a directory or deleted file, so
1173 1173 # try to find matching entries on the slow path.
1174 1174 if filelog.count() == 0:
1175 1175 slowpath = True
1176 1176 break
1177 1177 for rev, copied in filerevgen(filelog, node):
1178 1178 if rev <= maxrev:
1179 1179 if rev < minrev:
1180 1180 break
1181 1181 fncache.setdefault(rev, [])
1182 1182 fncache[rev].append(file_)
1183 1183 wanted[rev] = 1
1184 1184 if follow and copied:
1185 1185 copies.append(copied)
1186 1186 if slowpath:
1187 1187 if follow:
1188 1188 raise util.Abort(_('can only follow copies/renames for explicit '
1189 1189 'file names'))
1190 1190
1191 1191 # The slow path checks files modified in every changeset.
1192 1192 def changerevgen():
1193 1193 for i, window in increasing_windows(repo.changelog.count()-1,
1194 1194 nullrev):
1195 1195 for j in xrange(i - window, i + 1):
1196 1196 yield j, change(j)[3]
1197 1197
1198 1198 for rev, changefiles in changerevgen():
1199 1199 matches = filter(matchfn, changefiles)
1200 1200 if matches:
1201 1201 fncache[rev] = matches
1202 1202 wanted[rev] = 1
1203 1203
1204 1204 class followfilter:
1205 1205 def __init__(self, onlyfirst=False):
1206 1206 self.startrev = nullrev
1207 1207 self.roots = []
1208 1208 self.onlyfirst = onlyfirst
1209 1209
1210 1210 def match(self, rev):
1211 1211 def realparents(rev):
1212 1212 if self.onlyfirst:
1213 1213 return repo.changelog.parentrevs(rev)[0:1]
1214 1214 else:
1215 1215 return filter(lambda x: x != nullrev,
1216 1216 repo.changelog.parentrevs(rev))
1217 1217
1218 1218 if self.startrev == nullrev:
1219 1219 self.startrev = rev
1220 1220 return True
1221 1221
1222 1222 if rev > self.startrev:
1223 1223 # forward: all descendants
1224 1224 if not self.roots:
1225 1225 self.roots.append(self.startrev)
1226 1226 for parent in realparents(rev):
1227 1227 if parent in self.roots:
1228 1228 self.roots.append(rev)
1229 1229 return True
1230 1230 else:
1231 1231 # backwards: all parents
1232 1232 if not self.roots:
1233 1233 self.roots.extend(realparents(self.startrev))
1234 1234 if rev in self.roots:
1235 1235 self.roots.remove(rev)
1236 1236 self.roots.extend(realparents(rev))
1237 1237 return True
1238 1238
1239 1239 return False
1240 1240
1241 1241 # it might be worthwhile to do this in the iterator if the rev range
1242 1242 # is descending and the prune args are all within that range
1243 1243 for rev in opts.get('prune', ()):
1244 1244 rev = repo.changelog.rev(repo.lookup(rev))
1245 1245 ff = followfilter()
1246 1246 stop = min(revs[0], revs[-1])
1247 1247 for x in xrange(rev, stop-1, -1):
1248 1248 if ff.match(x) and x in wanted:
1249 1249 del wanted[x]
1250 1250
1251 1251 def iterate():
1252 1252 if follow and not files:
1253 1253 ff = followfilter(onlyfirst=opts.get('follow_first'))
1254 1254 def want(rev):
1255 1255 if ff.match(rev) and rev in wanted:
1256 1256 return True
1257 1257 return False
1258 1258 else:
1259 1259 def want(rev):
1260 1260 return rev in wanted
1261 1261
1262 1262 for i, window in increasing_windows(0, len(revs)):
1263 1263 yield 'window', revs[0] < revs[-1], revs[-1]
1264 1264 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1265 1265 srevs = list(nrevs)
1266 1266 srevs.sort()
1267 1267 for rev in srevs:
1268 1268 fns = fncache.get(rev)
1269 1269 if not fns:
1270 1270 def fns_generator():
1271 1271 for f in change(rev)[3]:
1272 1272 if matchfn(f):
1273 1273 yield f
1274 1274 fns = fns_generator()
1275 1275 yield 'add', rev, fns
1276 1276 for rev in nrevs:
1277 1277 yield 'iter', rev, None
1278 1278 return iterate(), matchfn
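A self-contained sketch of how a caller consumes the ('window', 'add', 'iter') stream documented above: windows grow by doubling (8, 16, ... capped at 512) via increasing_windows(), data is gathered out of order during the 'add' phase, and results are emitted in the requested order during the 'iter' phase, with 'window' marking where per-window state may be reset. The generator below fakes a single backward window over revisions 5..3 purely for illustration:

    def fake_changeiter():
        yield 'window', False, 3               # incrementing=False, last rev in window
        for rev in (3, 4, 5):                  # forward pass: gather
            yield 'add', rev, ['file%d' % rev]
        for rev in (5, 4, 3):                  # requested (backward) pass: display
            yield 'iter', rev, None

    gathered = {}
    for st, rev, fns in fake_changeiter():
        if st == 'window':
            gathered.clear()                   # reset per-window state
        elif st == 'add':
            gathered[rev] = list(fns)          # collect out of order
        elif st == 'iter':
            print('rev %d touched %s' % (rev, ', '.join(gathered[rev])))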
@@ -1,3163 +1,3164 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, urllib, shlex, stat
12 12 import ui, hg, util, revlog, bundlerepo, extensions
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import errno, version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 # Commands start here, listed alphabetically
18 18
19 19 def add(ui, repo, *pats, **opts):
20 20 """add the specified files on the next commit
21 21
22 22 Schedule files to be version controlled and added to the repository.
23 23
24 24 The files will be added to the repository at the next commit. To
25 25 undo an add before that, see hg revert.
26 26
27 27 If no names are given, add all files in the repository.
28 28 """
29 29
30 30 names = []
31 31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
32 32 if exact:
33 33 if ui.verbose:
34 34 ui.status(_('adding %s\n') % rel)
35 35 names.append(abs)
36 elif repo.dirstate.state(abs) == '?':
36 elif abs not in repo.dirstate:
37 37 ui.status(_('adding %s\n') % rel)
38 38 names.append(abs)
39 39 if not opts.get('dry_run'):
40 40 repo.add(names)
41 41
42 42 def addremove(ui, repo, *pats, **opts):
43 43 """add all new files, delete all missing files
44 44
45 45 Add all new files and remove all missing files from the repository.
46 46
47 47 New files are ignored if they match any of the patterns in .hgignore. As
48 48 with add, these changes take effect at the next commit.
49 49
50 50 Use the -s option to detect renamed files. With a parameter > 0,
51 51 this compares every removed file with every added file and records
52 52 those similar enough as renames. This option takes a percentage
53 53 between 0 (disabled) and 100 (files must be identical) as its
54 54 parameter. Detecting renamed files this way can be expensive.
55 55 """
56 56 sim = float(opts.get('similarity') or 0)
57 57 if sim < 0 or sim > 100:
58 58 raise util.Abort(_('similarity must be between 0 and 100'))
59 59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
60 60
61 61 def annotate(ui, repo, *pats, **opts):
62 62 """show changeset information per file line
63 63
64 64 List changes in files, showing the revision id responsible for each line
65 65
66 66 This command is useful to discover who made a change or when a change took
67 67 place.
68 68
69 69 Without the -a option, annotate will avoid processing files it
70 70 detects as binary. With -a, annotate will generate an annotation
71 71 anyway, probably with undesirable results.
72 72 """
73 73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
74 74
75 75 if not pats:
76 76 raise util.Abort(_('at least one file name or pattern required'))
77 77
78 78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
79 79 ('number', lambda x: str(x[0].rev())),
80 80 ('changeset', lambda x: short(x[0].node())),
81 81 ('date', getdate),
82 82 ('follow', lambda x: x[0].path()),
83 83 ]
84 84
85 85 if (not opts['user'] and not opts['changeset'] and not opts['date']
86 86 and not opts['follow']):
87 87 opts['number'] = 1
88 88
89 89 linenumber = opts.get('line_number') is not None
90 90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
91 91 raise util.Abort(_('at least one of -n/-c is required for -l'))
92 92
93 93 funcmap = [func for op, func in opmap if opts.get(op)]
94 94 if linenumber:
95 95 lastfunc = funcmap[-1]
96 96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
97 97
98 98 ctx = repo.changectx(opts['rev'])
99 99
100 100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
101 101 node=ctx.node()):
102 102 fctx = ctx.filectx(abs)
103 103 if not opts['text'] and util.binary(fctx.data()):
104 104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
105 105 continue
106 106
107 107 lines = fctx.annotate(follow=opts.get('follow'),
108 108 linenumber=linenumber)
109 109 pieces = []
110 110
111 111 for f in funcmap:
112 112 l = [f(n) for n, dummy in lines]
113 113 if l:
114 114 m = max(map(len, l))
115 115 pieces.append(["%*s" % (m, x) for x in l])
116 116
117 117 if pieces:
118 118 for p, l in zip(zip(*pieces), lines):
119 119 ui.write("%s: %s" % (" ".join(p), l[1]))
120 120
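The pieces/zip dance above right-aligns each requested column ('%*s' pads every entry to the column's widest value) and then regroups the padded columns line by line. A tiny standalone illustration with made-up user and revision columns:

    import sys

    lines = ["first line\n", "second line\n"]
    columns = [["mpm", "benoit"], ["12", "3"]]            # user, rev (hypothetical)

    pieces = []
    for col in columns:
        width = max(map(len, col))
        pieces.append(["%*s" % (width, x) for x in col])  # right-align per column

    for p, text in zip(zip(*pieces), lines):
        sys.stdout.write("%s: %s" % (" ".join(p), text))
    #    mpm 12: first line
    # benoit  3: second line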
121 121 def archive(ui, repo, dest, **opts):
122 122 '''create unversioned archive of a repository revision
123 123
124 124 By default, the revision used is the parent of the working
125 125 directory; use "-r" to specify a different revision.
126 126
127 127 To specify the type of archive to create, use "-t". Valid
128 128 types are:
129 129
130 130 "files" (default): a directory full of files
131 131 "tar": tar archive, uncompressed
132 132 "tbz2": tar archive, compressed using bzip2
133 133 "tgz": tar archive, compressed using gzip
134 134 "uzip": zip archive, uncompressed
135 135 "zip": zip archive, compressed using deflate
136 136
137 137 The exact name of the destination archive or directory is given
138 138 using a format string; see "hg help export" for details.
139 139
140 140 Each member added to an archive file has a directory prefix
141 141 prepended. Use "-p" to specify a format string for the prefix.
142 142 The default is the basename of the archive, with suffixes removed.
143 143 '''
144 144
145 145 ctx = repo.changectx(opts['rev'])
146 146 if not ctx:
147 147 raise util.Abort(_('repository has no revisions'))
148 148 node = ctx.node()
149 149 dest = cmdutil.make_filename(repo, dest, node)
150 150 if os.path.realpath(dest) == repo.root:
151 151 raise util.Abort(_('repository root cannot be destination'))
152 152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
153 153 kind = opts.get('type') or 'files'
154 154 prefix = opts['prefix']
155 155 if dest == '-':
156 156 if kind == 'files':
157 157 raise util.Abort(_('cannot archive plain files to stdout'))
158 158 dest = sys.stdout
159 159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
160 160 prefix = cmdutil.make_filename(repo, prefix, node)
161 161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
162 162 matchfn, prefix)
163 163
164 164 def backout(ui, repo, node=None, rev=None, **opts):
165 165 '''reverse effect of earlier changeset
166 166
167 167 Commit the backed out changes as a new changeset. The new
168 168 changeset is a child of the backed out changeset.
169 169
170 170 If you back out a changeset other than the tip, a new head is
171 171 created. This head is the parent of the working directory. If
172 172 you back out an old changeset, your working directory will appear
173 173 old after the backout. You should merge the backout changeset
174 174 with another head.
175 175
176 176 The --merge option remembers the parent of the working directory
177 177 before starting the backout, then merges the new head with that
178 178 changeset afterwards. This saves you from doing the merge by
179 179 hand. The result of this merge is not committed, as for a normal
180 180 merge.'''
181 181 if rev and node:
182 182 raise util.Abort(_("please specify just one revision"))
183 183
184 184 if not rev:
185 185 rev = node
186 186
187 187 if not rev:
188 188 raise util.Abort(_("please specify a revision to backout"))
189 189
190 190 cmdutil.bail_if_changed(repo)
191 191 op1, op2 = repo.dirstate.parents()
192 192 if op2 != nullid:
193 193 raise util.Abort(_('outstanding uncommitted merge'))
194 194 node = repo.lookup(rev)
195 195 p1, p2 = repo.changelog.parents(node)
196 196 if p1 == nullid:
197 197 raise util.Abort(_('cannot back out a change with no parents'))
198 198 if p2 != nullid:
199 199 if not opts['parent']:
200 200 raise util.Abort(_('cannot back out a merge changeset without '
201 201 '--parent'))
202 202 p = repo.lookup(opts['parent'])
203 203 if p not in (p1, p2):
204 204 raise util.Abort(_('%s is not a parent of %s') %
205 205 (short(p), short(node)))
206 206 parent = p
207 207 else:
208 208 if opts['parent']:
209 209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
210 210 parent = p1
211 211 hg.clean(repo, node, show_stats=False)
212 212 revert_opts = opts.copy()
213 213 revert_opts['date'] = None
214 214 revert_opts['all'] = True
215 215 revert_opts['rev'] = hex(parent)
216 216 revert(ui, repo, **revert_opts)
217 217 commit_opts = opts.copy()
218 218 commit_opts['addremove'] = False
219 219 if not commit_opts['message'] and not commit_opts['logfile']:
220 220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
221 221 commit_opts['force_editor'] = True
222 222 commit(ui, repo, **commit_opts)
223 223 def nice(node):
224 224 return '%d:%s' % (repo.changelog.rev(node), short(node))
225 225 ui.status(_('changeset %s backs out changeset %s\n') %
226 226 (nice(repo.changelog.tip()), nice(node)))
227 227 if op1 != node:
228 228 if opts['merge']:
229 229 ui.status(_('merging with changeset %s\n') % nice(op1))
230 230 hg.merge(repo, hex(op1))
231 231 else:
232 232 ui.status(_('the backout changeset is a new head - '
233 233 'do not forget to merge\n'))
234 234 ui.status(_('(use "backout --merge" '
235 235 'if you want to auto-merge)\n'))
236 236
237 237 def branch(ui, repo, label=None, **opts):
238 238 """set or show the current branch name
239 239
240 240 With no argument, show the current branch name. With one argument,
241 241 set the working directory branch name (the branch does not exist in
242 242 the repository until the next commit).
243 243
244 244 Unless --force is specified, branch will not let you set a
245 245 branch name that shadows an existing branch.
246 246 """
247 247
248 248 if label:
249 249 if not opts.get('force') and label in repo.branchtags():
250 250 if label not in [p.branch() for p in repo.workingctx().parents()]:
251 251 raise util.Abort(_('a branch of the same name already exists'
252 252 ' (use --force to override)'))
253 253 repo.dirstate.setbranch(util.fromlocal(label))
254 254 ui.status(_('marked working directory as branch %s\n') % label)
255 255 else:
256 256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
257 257
258 258 def branches(ui, repo, active=False):
259 259 """list repository named branches
260 260
261 261 List the repository's named branches, indicating which ones are
262 262 inactive. If active is specified, only show active branches.
263 263
264 264 A branch is considered active if it contains unmerged heads.
265 265 """
266 266 b = repo.branchtags()
267 267 heads = dict.fromkeys(repo.heads(), 1)
268 268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
269 269 l.sort()
270 270 l.reverse()
271 271 for ishead, r, n, t in l:
272 272 if active and not ishead:
273 273 # If we're only displaying active branches, abort the loop on
274 274 # encountering the first inactive head
275 275 break
276 276 else:
277 277 hexfunc = ui.debugflag and hex or short
278 278 if ui.quiet:
279 279 ui.write("%s\n" % t)
280 280 else:
281 281 spaces = " " * (30 - util.locallen(t))
282 282 # The code only gets here if inactive branches are being
283 283 # displayed or the branch is active.
284 284 isinactive = ((not ishead) and " (inactive)") or ''
285 285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
286 286
287 287 def bundle(ui, repo, fname, dest=None, **opts):
288 288 """create a changegroup file
289 289
290 290 Generate a compressed changegroup file collecting changesets not
291 291 found in the other repository.
292 292
293 293 If no destination repository is specified the destination is assumed
294 294 to have all the nodes specified by one or more --base parameters.
295 295
296 296 The bundle file can then be transferred using conventional means and
297 297 applied to another repository with the unbundle or pull command.
298 298 This is useful when direct push and pull are not available or when
299 299 exporting an entire repository is undesirable.
300 300
301 301 Applying bundles preserves all changeset contents including
302 302 permissions, copy/rename information, and revision history.
303 303 """
304 304 revs = opts.get('rev') or None
305 305 if revs:
306 306 revs = [repo.lookup(rev) for rev in revs]
307 307 base = opts.get('base')
308 308 if base:
309 309 if dest:
310 310 raise util.Abort(_("--base is incompatible with specifying "
311 311 "a destination"))
312 312 base = [repo.lookup(rev) for rev in base]
313 313 # create the right base
314 314 # XXX: nodesbetween / changegroup* should be "fixed" instead
315 315 o = []
316 316 has = {nullid: None}
317 317 for n in base:
318 318 has.update(repo.changelog.reachable(n))
319 319 if revs:
320 320 visit = list(revs)
321 321 else:
322 322 visit = repo.changelog.heads()
323 323 seen = {}
324 324 while visit:
325 325 n = visit.pop(0)
326 326 parents = [p for p in repo.changelog.parents(n) if p not in has]
327 327 if len(parents) == 0:
328 328 o.insert(0, n)
329 329 else:
330 330 for p in parents:
331 331 if p not in seen:
332 332 seen[p] = 1
333 333 visit.append(p)
334 334 else:
335 335 cmdutil.setremoteconfig(ui, opts)
336 336 dest, revs = cmdutil.parseurl(
337 337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
338 338 other = hg.repository(ui, dest)
339 339 o = repo.findoutgoing(other, force=opts['force'])
340 340
341 341 if revs:
342 342 cg = repo.changegroupsubset(o, revs, 'bundle')
343 343 else:
344 344 cg = repo.changegroup(o, 'bundle')
345 345 changegroup.writebundle(cg, fname, "HG10BZ")
346 346
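The --base handling above walks parent links from the requested heads and keeps, as changegroup roots, exactly those nodes whose parents are all already reachable from a --base node. A toy version over a linear history a-b-c-d, with plain strings standing in for changelog nodes and 'a' already present on the receiving side:

    parents = {'d': ['c'], 'c': ['b'], 'b': ['a'], 'a': []}
    has = {'a': None}                  # everything reachable from the --base nodes
    visit, seen, o = ['d'], {}, []
    while visit:
        n = visit.pop(0)
        missing = [p for p in parents[n] if p not in has]
        if not missing:
            o.insert(0, n)             # all parents known remotely: an outgoing root
        else:
            for p in missing:
                if p not in seen:
                    seen[p] = 1
                    visit.append(p)

    assert o == ['b']                  # 'b' is the root the changegroup is built from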
347 347 def cat(ui, repo, file1, *pats, **opts):
348 348 """output the current or given revision of files
349 349
350 350 Print the specified files as they were at the given revision.
351 351 If no revision is given, the parent of the working directory is used,
352 352 or tip if no revision is checked out.
353 353
354 354 Output may be to a file, in which case the name of the file is
355 355 given using a format string. The formatting rules are the same as
356 356 for the export command, with the following additions:
357 357
358 358 %s basename of file being printed
359 359 %d dirname of file being printed, or '.' if in repo root
360 360 %p root-relative path name of file being printed
361 361 """
362 362 ctx = repo.changectx(opts['rev'])
363 363 err = 1
364 364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
365 365 ctx.node()):
366 366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
367 367 fp.write(ctx.filectx(abs).data())
368 368 err = 0
369 369 return err
370 370
371 371 def clone(ui, source, dest=None, **opts):
372 372 """make a copy of an existing repository
373 373
374 374 Create a copy of an existing repository in a new directory.
375 375
376 376 If no destination directory name is specified, it defaults to the
377 377 basename of the source.
378 378
379 379 The location of the source is added to the new repository's
380 380 .hg/hgrc file, as the default to be used for future pulls.
381 381
382 382 For efficiency, hardlinks are used for cloning whenever the source
383 383 and destination are on the same filesystem (note this applies only
384 384 to the repository data, not to the checked out files). Some
385 385 filesystems, such as AFS, implement hardlinking incorrectly, but
386 386 do not report errors. In these cases, use the --pull option to
387 387 avoid hardlinking.
388 388
389 389 You can safely clone repositories and checked out files using full
390 390 hardlinks with
391 391
392 392 $ cp -al REPO REPOCLONE
393 393
394 394 which is the fastest way to clone. However, the operation is not
395 395 atomic (making sure REPO is not modified during the operation is
396 396 up to you) and you have to make sure your editor breaks hardlinks
397 397 (Emacs and most Linux Kernel tools do so).
398 398
399 399 If you use the -r option to clone up to a specific revision, no
400 400 subsequent revisions will be present in the cloned repository.
401 401 This option implies --pull, even on local repositories.
402 402
403 403 See pull for valid source format details.
404 404
405 405 It is possible to specify an ssh:// URL as the destination, but no
406 406 .hg/hgrc and working directory will be created on the remote side.
407 407 Look at the help text for the pull command for important details
408 408 about ssh:// URLs.
409 409 """
410 410 cmdutil.setremoteconfig(ui, opts)
411 411 hg.clone(ui, source, dest,
412 412 pull=opts['pull'],
413 413 stream=opts['uncompressed'],
414 414 rev=opts['rev'],
415 415 update=not opts['noupdate'])
416 416
417 417 def commit(ui, repo, *pats, **opts):
418 418 """commit the specified files or all outstanding changes
419 419
420 420 Commit changes to the given files into the repository.
421 421
422 422 If a list of files is omitted, all changes reported by "hg status"
423 423 will be committed.
424 424
425 425 If no commit message is specified, the editor configured in your hgrc
426 426 or in the EDITOR environment variable is started to enter a message.
427 427 """
428 428 message = cmdutil.logmessage(opts)
429 429
430 430 if opts['addremove']:
431 431 cmdutil.addremove(repo, pats, opts)
432 432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
433 433 if pats:
434 434 status = repo.status(files=fns, match=match)
435 435 modified, added, removed, deleted, unknown = status[:5]
436 436 files = modified + added + removed
437 437 slist = None
438 438 for f in fns:
439 439 if f == '.':
440 440 continue
441 441 if f not in files:
442 442 rf = repo.wjoin(f)
443 443 try:
444 444 mode = os.lstat(rf)[stat.ST_MODE]
445 445 except OSError:
446 446 raise util.Abort(_("file %s not found!") % rf)
447 447 if stat.S_ISDIR(mode):
448 448 name = f + '/'
449 449 if slist is None:
450 450 slist = list(files)
451 451 slist.sort()
452 452 i = bisect.bisect(slist, name)
453 453 if i >= len(slist) or not slist[i].startswith(name):
454 454 raise util.Abort(_("no match under directory %s!")
455 455 % rf)
456 456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
457 457 raise util.Abort(_("can't commit %s: "
458 458 "unsupported file type!") % rf)
459 elif repo.dirstate.state(f) == '?':
459 elif f not in repo.dirstate:
460 460 raise util.Abort(_("file %s not tracked!") % rf)
461 461 else:
462 462 files = []
463 463 try:
464 464 repo.commit(files, message, opts['user'], opts['date'], match,
465 465 force_editor=opts.get('force_editor'))
466 466 except ValueError, inst:
467 467 raise util.Abort(str(inst))
468 468
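The directory check above exploits the sorted file list: anything tracked under directory f must sort immediately after the probe string 'f/', so a single bisect plus one startswith answers whether the commit matches anything below that directory. A standalone check of the same trick:

    import bisect

    slist = sorted(['README', 'src/main.py', 'src/util.py', 'tests/test_util.py'])

    def has_match_under(directory):
        name = directory + '/'
        i = bisect.bisect(slist, name)
        return i < len(slist) and slist[i].startswith(name)

    assert has_match_under('src')          # 'src/main.py' sorts right after 'src/'
    assert not has_match_under('docs')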
469 469 def docopy(ui, repo, pats, opts, wlock):
470 470 # called with the repo lock held
471 471 #
472 472 # hgsep => pathname that uses "/" to separate directories
473 473 # ossep => pathname that uses os.sep to separate directories
474 474 cwd = repo.getcwd()
475 475 errors = 0
476 476 copied = []
477 477 targets = {}
478 478
479 479 # abs: hgsep
480 480 # rel: ossep
481 481 # return: hgsep
482 482 def okaytocopy(abs, rel, exact):
483 483 reasons = {'?': _('is not managed'),
484 484 'r': _('has been marked for remove')}
485 state = repo.dirstate.state(abs)
485 state = repo.dirstate[abs]
486 486 reason = reasons.get(state)
487 487 if reason:
488 488 if exact:
489 489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 490 else:
491 491 if state == 'a':
492 492 origsrc = repo.dirstate.copied(abs)
493 493 if origsrc is not None:
494 494 return origsrc
495 495 return abs
496 496
497 497 # origsrc: hgsep
498 498 # abssrc: hgsep
499 499 # relsrc: ossep
500 500 # otarget: ossep
501 501 def copy(origsrc, abssrc, relsrc, otarget, exact):
502 502 abstarget = util.canonpath(repo.root, cwd, otarget)
503 503 reltarget = repo.pathto(abstarget, cwd)
504 504 prevsrc = targets.get(abstarget)
505 505 src = repo.wjoin(abssrc)
506 506 target = repo.wjoin(abstarget)
507 507 if prevsrc is not None:
508 508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
509 509 (reltarget, repo.pathto(abssrc, cwd),
510 510 repo.pathto(prevsrc, cwd)))
511 511 return
512 512 if (not opts['after'] and os.path.exists(target) or
513 opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
513 opts['after'] and repo.dirstate[abstarget] in 'mn'):
514 514 if not opts['force']:
515 515 ui.warn(_('%s: not overwriting - file exists\n') %
516 516 reltarget)
517 517 return
518 518 if not opts['after'] and not opts.get('dry_run'):
519 519 os.unlink(target)
520 520 if opts['after']:
521 521 if not os.path.exists(target):
522 522 return
523 523 else:
524 524 targetdir = os.path.dirname(target) or '.'
525 525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
526 526 os.makedirs(targetdir)
527 527 try:
528 restore = repo.dirstate.state(abstarget) == 'r'
528 restore = repo.dirstate[abstarget] == 'r'
529 529 if restore and not opts.get('dry_run'):
530 530 repo.undelete([abstarget], wlock)
531 531 try:
532 532 if not opts.get('dry_run'):
533 533 util.copyfile(src, target)
534 534 restore = False
535 535 finally:
536 536 if restore:
537 537 repo.remove([abstarget], wlock=wlock)
538 538 except IOError, inst:
539 539 if inst.errno == errno.ENOENT:
540 540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
541 541 else:
542 542 ui.warn(_('%s: cannot copy - %s\n') %
543 543 (relsrc, inst.strerror))
544 544 errors += 1
545 545 return
546 546 if ui.verbose or not exact:
547 547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
548 548 targets[abstarget] = abssrc
549 549 if abstarget != origsrc:
550 if repo.dirstate.state(origsrc) == 'a':
550 if repo.dirstate[origsrc] == 'a':
551 551 if not ui.quiet:
552 552 ui.warn(_("%s has not been committed yet, so no copy "
553 553 "data will be stored for %s.\n")
554 554 % (repo.pathto(origsrc, cwd), reltarget))
555 555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
556 556 repo.add([abstarget], wlock)
557 557 elif not opts.get('dry_run'):
558 558 repo.copy(origsrc, abstarget, wlock)
559 559 copied.append((abssrc, relsrc, exact))
560 560
561 561 # pat: ossep
562 562 # dest ossep
563 563 # srcs: list of (hgsep, hgsep, ossep, bool)
564 564 # return: function that takes hgsep and returns ossep
565 565 def targetpathfn(pat, dest, srcs):
566 566 if os.path.isdir(pat):
567 567 abspfx = util.canonpath(repo.root, cwd, pat)
568 568 abspfx = util.localpath(abspfx)
569 569 if destdirexists:
570 570 striplen = len(os.path.split(abspfx)[0])
571 571 else:
572 572 striplen = len(abspfx)
573 573 if striplen:
574 574 striplen += len(os.sep)
575 575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
576 576 elif destdirexists:
577 577 res = lambda p: os.path.join(dest,
578 578 os.path.basename(util.localpath(p)))
579 579 else:
580 580 res = lambda p: dest
581 581 return res
582 582
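A worked example (assuming POSIX path separators) of the striplen arithmetic above: copying source directory 'a/b' into an existing destination directory keeps the last source component, so everything up to and including 'a/' is stripped before joining onto dest:

    import os

    abspfx = 'a/b'                                   # canonicalized source directory
    destdirexists = True
    if destdirexists:
        striplen = len(os.path.split(abspfx)[0])     # len('a') == 1
    else:
        striplen = len(abspfx)
    if striplen:
        striplen += len(os.sep)                      # also drop the separator

    res = lambda p: os.path.join('dest', p[striplen:])
    assert res('a/b/f.txt') == 'dest/b/f.txt'        # the 'b/' component is kept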
583 583 # pat: ossep
584 584 # dest ossep
585 585 # srcs: list of (hgsep, hgsep, ossep, bool)
586 586 # return: function that takes hgsep and returns ossep
587 587 def targetpathafterfn(pat, dest, srcs):
588 588 if util.patkind(pat, None)[0]:
589 589 # a mercurial pattern
590 590 res = lambda p: os.path.join(dest,
591 591 os.path.basename(util.localpath(p)))
592 592 else:
593 593 abspfx = util.canonpath(repo.root, cwd, pat)
594 594 if len(abspfx) < len(srcs[0][0]):
595 595 # A directory. Either the target path contains the last
596 596 # component of the source path or it does not.
597 597 def evalpath(striplen):
598 598 score = 0
599 599 for s in srcs:
600 600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
601 601 if os.path.exists(t):
602 602 score += 1
603 603 return score
604 604
605 605 abspfx = util.localpath(abspfx)
606 606 striplen = len(abspfx)
607 607 if striplen:
608 608 striplen += len(os.sep)
609 609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
610 610 score = evalpath(striplen)
611 611 striplen1 = len(os.path.split(abspfx)[0])
612 612 if striplen1:
613 613 striplen1 += len(os.sep)
614 614 if evalpath(striplen1) > score:
615 615 striplen = striplen1
616 616 res = lambda p: os.path.join(dest,
617 617 util.localpath(p)[striplen:])
618 618 else:
619 619 # a file
620 620 if destdirexists:
621 621 res = lambda p: os.path.join(dest,
622 622 os.path.basename(util.localpath(p)))
623 623 else:
624 624 res = lambda p: dest
625 625 return res
626 626
627 627
628 628 pats = util.expand_glob(pats)
629 629 if not pats:
630 630 raise util.Abort(_('no source or destination specified'))
631 631 if len(pats) == 1:
632 632 raise util.Abort(_('no destination specified'))
633 633 dest = pats.pop()
634 634 destdirexists = os.path.isdir(dest)
635 635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
636 636 raise util.Abort(_('with multiple sources, destination must be an '
637 637 'existing directory'))
638 638 if opts['after']:
639 639 tfn = targetpathafterfn
640 640 else:
641 641 tfn = targetpathfn
642 642 copylist = []
643 643 for pat in pats:
644 644 srcs = []
645 645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
646 646 globbed=True):
647 647 origsrc = okaytocopy(abssrc, relsrc, exact)
648 648 if origsrc:
649 649 srcs.append((origsrc, abssrc, relsrc, exact))
650 650 if not srcs:
651 651 continue
652 652 copylist.append((tfn(pat, dest, srcs), srcs))
653 653 if not copylist:
654 654 raise util.Abort(_('no files to copy'))
655 655
656 656 for targetpath, srcs in copylist:
657 657 for origsrc, abssrc, relsrc, exact in srcs:
658 658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
659 659
660 660 if errors:
661 661 ui.warn(_('(consider using --after)\n'))
662 662 return errors, copied
663 663
664 664 def copy(ui, repo, *pats, **opts):
665 665 """mark files as copied for the next commit
666 666
667 667 Mark dest as having copies of source files. If dest is a
668 668 directory, copies are put in that directory. If dest is a file,
669 669 there can only be one source.
670 670
671 671 By default, this command copies the contents of files as they
672 672 stand in the working directory. If invoked with --after, the
673 673 operation is recorded, but no copying is performed.
674 674
675 675 This command takes effect in the next commit. To undo a copy
676 676 before that, see hg revert.
677 677 """
678 678 wlock = repo.wlock(0)
679 679 errs, copied = docopy(ui, repo, pats, opts, wlock)
680 680 return errs
681 681
682 682 def debugancestor(ui, index, rev1, rev2):
683 683 """find the ancestor revision of two revisions in a given index"""
684 684 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
685 685 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
686 686 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
687 687
688 688 def debugcomplete(ui, cmd='', **opts):
689 689 """returns the completion list associated with the given command"""
690 690
691 691 if opts['options']:
692 692 options = []
693 693 otables = [globalopts]
694 694 if cmd:
695 695 aliases, entry = cmdutil.findcmd(ui, cmd)
696 696 otables.append(entry[1])
697 697 for t in otables:
698 698 for o in t:
699 699 if o[0]:
700 700 options.append('-%s' % o[0])
701 701 options.append('--%s' % o[1])
702 702 ui.write("%s\n" % "\n".join(options))
703 703 return
704 704
705 705 clist = cmdutil.findpossible(ui, cmd).keys()
706 706 clist.sort()
707 707 ui.write("%s\n" % "\n".join(clist))
708 708
709 709 def debugrebuildstate(ui, repo, rev=""):
710 710 """rebuild the dirstate as it would look like for the given revision"""
711 711 if rev == "":
712 712 rev = repo.changelog.tip()
713 713 ctx = repo.changectx(rev)
714 714 files = ctx.manifest()
715 715 wlock = repo.wlock()
716 716 repo.dirstate.rebuild(rev, files)
717 717
718 718 def debugcheckstate(ui, repo):
719 719 """validate the correctness of the current dirstate"""
720 720 parent1, parent2 = repo.dirstate.parents()
721 dc = repo.dirstate
722 721 m1 = repo.changectx(parent1).manifest()
723 722 m2 = repo.changectx(parent2).manifest()
724 723 errors = 0
725 for f in dc:
726 state = repo.dirstate.state(f)
724 for f in repo.dirstate:
725 state = repo.dirstate[f]
727 726 if state in "nr" and f not in m1:
728 727 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
729 728 errors += 1
730 729 if state in "a" and f in m1:
731 730 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
732 731 errors += 1
733 732 if state in "m" and f not in m1 and f not in m2:
734 733 ui.warn(_("%s in state %s, but not in either manifest\n") %
735 734 (f, state))
736 735 errors += 1
737 736 for f in m1:
738 state = repo.dirstate.state(f)
737 state = repo.dirstate[f]
739 738 if state not in "nrm":
740 739 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
741 740 errors += 1
742 741 if errors:
743 742 error = _(".hg/dirstate inconsistent with current parent's manifest")
744 743 raise util.Abort(error)
745 744
746 745 def showconfig(ui, repo, *values, **opts):
747 746 """show combined config settings from all hgrc files
748 747
749 748 With no args, print names and values of all config items.
750 749
751 750 With one arg of the form section.name, print just the value of
752 751 that config item.
753 752
754 753 With multiple args, print names and values of all config items
755 754 with matching section names."""
756 755
757 756 untrusted = bool(opts.get('untrusted'))
758 757 if values:
759 758 if len([v for v in values if '.' in v]) > 1:
760 759 raise util.Abort(_('only one config item permitted'))
761 760 for section, name, value in ui.walkconfig(untrusted=untrusted):
762 761 sectname = section + '.' + name
763 762 if values:
764 763 for v in values:
765 764 if v == section:
766 765 ui.write('%s=%s\n' % (sectname, value))
767 766 elif v == sectname:
768 767 ui.write(value, '\n')
769 768 else:
770 769 ui.write('%s=%s\n' % (sectname, value))
771 770
772 771 def debugsetparents(ui, repo, rev1, rev2=None):
773 772 """manually set the parents of the current working directory
774 773
775 774 This is useful for writing repository conversion tools, but should
776 775 be used with care.
777 776 """
778 777
779 778 if not rev2:
780 779 rev2 = hex(nullid)
781 780
782 781 wlock = repo.wlock()
783 782 try:
784 783 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
785 784 finally:
786 785 wlock.release()
787 786
788 787 def debugstate(ui, repo):
789 788 """show the contents of the current dirstate"""
790 dc = repo.dirstate
791 for file_ in dc:
789 dc = repo.dirstate._map
790 k = dc.keys()
791 k.sort()
792 for file_ in k:
792 793 if dc[file_][3] == -1:
793 794 # Pad or slice to locale representation
794 795 locale_len = len(time.strftime("%x %X", time.localtime(0)))
795 796 timestr = 'unset'
796 797 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
797 798 else:
798 799 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
799 800 ui.write("%c %3o %10d %s %s\n"
800 801 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
801 802 timestr, file_))
802 803 for f in repo.dirstate.copies():
803 804 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
804 805
805 806 def debugdata(ui, file_, rev):
806 807 """dump the contents of a data file revision"""
807 808 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
808 809 try:
809 810 ui.write(r.revision(r.lookup(rev)))
810 811 except KeyError:
811 812 raise util.Abort(_('invalid revision identifier %s') % rev)
812 813
813 814 def debugdate(ui, date, range=None, **opts):
814 815 """parse and display a date"""
815 816 if opts["extended"]:
816 817 d = util.parsedate(date, util.extendeddateformats)
817 818 else:
818 819 d = util.parsedate(date)
819 820 ui.write("internal: %s %s\n" % d)
820 821 ui.write("standard: %s\n" % util.datestr(d))
821 822 if range:
822 823 m = util.matchdate(range)
823 824 ui.write("match: %s\n" % m(d[0]))
824 825
825 826 def debugindex(ui, file_):
826 827 """dump the contents of an index file"""
827 828 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
828 829 ui.write(" rev offset length base linkrev" +
829 830 " nodeid p1 p2\n")
830 831 for i in xrange(r.count()):
831 832 node = r.node(i)
832 833 pp = r.parents(node)
833 834 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
834 835 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
835 836 short(node), short(pp[0]), short(pp[1])))
836 837
837 838 def debugindexdot(ui, file_):
838 839 """dump an index DAG as a .dot file"""
839 840 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
840 841 ui.write("digraph G {\n")
841 842 for i in xrange(r.count()):
842 843 node = r.node(i)
843 844 pp = r.parents(node)
844 845 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
845 846 if pp[1] != nullid:
846 847 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
847 848 ui.write("}\n")
848 849
849 850 def debuginstall(ui):
850 851 '''test Mercurial installation'''
851 852
852 853 def writetemp(contents):
853 854 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
854 855 f = os.fdopen(fd, "wb")
855 856 f.write(contents)
856 857 f.close()
857 858 return name
858 859
859 860 problems = 0
860 861
861 862 # encoding
862 863 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
863 864 try:
864 865 util.fromlocal("test")
865 866 except util.Abort, inst:
866 867 ui.write(" %s\n" % inst)
867 868 ui.write(_(" (check that your locale is properly set)\n"))
868 869 problems += 1
869 870
870 871 # compiled modules
871 872 ui.status(_("Checking extensions...\n"))
872 873 try:
873 874 import bdiff, mpatch, base85
874 875 except Exception, inst:
875 876 ui.write(" %s\n" % inst)
876 877 ui.write(_(" One or more extensions could not be found"))
877 878 ui.write(_(" (check that you compiled the extensions)\n"))
878 879 problems += 1
879 880
880 881 # templates
881 882 ui.status(_("Checking templates...\n"))
882 883 try:
883 884 import templater
884 885 t = templater.templater(templater.templatepath("map-cmdline.default"))
885 886 except Exception, inst:
886 887 ui.write(" %s\n" % inst)
887 888 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
888 889 problems += 1
889 890
890 891 # patch
891 892 ui.status(_("Checking patch...\n"))
892 893 patcher = ui.config('ui', 'patch')
893 894 patcher = ((patcher and util.find_exe(patcher)) or
894 895 util.find_exe('gpatch') or
895 896 util.find_exe('patch'))
896 897 if not patcher:
897 898 ui.write(_(" Can't find patch or gpatch in PATH\n"))
898 899 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
899 900 problems += 1
900 901 else:
901 902 # actually attempt a patch here
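# The self-test below is straightforward: write a four-line sample file,
# build a unified diff that inserts one line, feed that diff to the external
# patch tool found above, and then verify both the reported file list and
# the resulting file contents.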
902 903 a = "1\n2\n3\n4\n"
903 904 b = "1\n2\n3\ninsert\n4\n"
904 905 fa = writetemp(a)
905 906 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
906 907 fd = writetemp(d)
907 908
908 909 files = {}
909 910 try:
910 911 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
911 912 except util.Abort, e:
912 913 ui.write(_(" patch call failed:\n"))
913 914 ui.write(" " + str(e) + "\n")
914 915 problems += 1
915 916 else:
916 917 if list(files) != [os.path.basename(fa)]:
917 918 ui.write(_(" unexpected patch output!"))
918 919 ui.write(_(" (you may have an incompatible version of patch)\n"))
919 920 problems += 1
920 921 a = file(fa).read()
921 922 if a != b:
922 923 ui.write(_(" patch test failed!"))
923 924 ui.write(_(" (you may have an incompatible version of patch)\n"))
924 925 problems += 1
925 926
926 927 os.unlink(fa)
927 928 os.unlink(fd)
928 929
929 930 # merge helper
930 931 ui.status(_("Checking merge helper...\n"))
931 932 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
932 933 or "hgmerge")
933 934 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
934 935 if not cmdpath:
935 936 if cmd == 'hgmerge':
936 937 ui.write(_(" No merge helper set and can't find default"
937 938 " hgmerge script in PATH\n"))
938 939 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
939 940 else:
940 941 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
941 942 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
942 943 problems += 1
943 944 else:
944 945 # actually attempt a merge here
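# Similar self-test for the merge helper: "local" has one line inserted,
# "other" has a line prepended, and a correct three-way merge against the
# common base must leave "begin\n1\n2\n3\ninsert\n4\n" in the local file.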
945 946 fa = writetemp("1\n2\n3\n4\n")
946 947 fl = writetemp("1\n2\n3\ninsert\n4\n")
947 948 fr = writetemp("begin\n1\n2\n3\n4\n")
948 949 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
949 950 if r:
950 951 ui.write(_(" Got unexpected merge error %d!\n") % r)
951 952 problems += 1
952 953 m = file(fl).read()
953 954 if m != "begin\n1\n2\n3\ninsert\n4\n":
954 955 ui.write(_(" Got unexpected merge results!\n"))
955 956 ui.write(_(" (your merge helper may have the"
956 957 " wrong argument order)\n"))
957 958 ui.write(_(" Result: %r\n") % m)
958 959 problems += 1
959 960 os.unlink(fa)
960 961 os.unlink(fl)
961 962 os.unlink(fr)
962 963
963 964 # editor
964 965 ui.status(_("Checking commit editor...\n"))
965 966 editor = (os.environ.get("HGEDITOR") or
966 967 ui.config("ui", "editor") or
967 968 os.environ.get("EDITOR", "vi"))
968 969 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
969 970 if not cmdpath:
970 971 if editor == 'vi':
971 972 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
972 973 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
973 974 else:
974 975 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
975 976 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
976 977 problems += 1
977 978
978 979 # check username
979 980 ui.status(_("Checking username...\n"))
980 981 user = os.environ.get("HGUSER")
981 982 if user is None:
982 983 user = ui.config("ui", "username")
983 984 if user is None:
984 985 user = os.environ.get("EMAIL")
985 986 if not user:
986 987 ui.warn(" ")
987 988 ui.username()
988 989 ui.write(_(" (specify a username in your .hgrc file)\n"))
989 990
990 991 if not problems:
991 992 ui.status(_("No problems detected\n"))
992 993 else:
993 994 ui.write(_("%s problems detected,"
994 995 " please check your install!\n") % problems)
995 996
996 997 return problems
997 998
998 999 def debugrename(ui, repo, file1, *pats, **opts):
999 1000 """dump rename information"""
1000 1001
1001 1002 ctx = repo.changectx(opts.get('rev', 'tip'))
1002 1003 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1003 1004 ctx.node()):
1004 1005 m = ctx.filectx(abs).renamed()
1005 1006 if m:
1006 1007 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1007 1008 else:
1008 1009 ui.write(_("%s not renamed\n") % rel)
1009 1010
1010 1011 def debugwalk(ui, repo, *pats, **opts):
1011 1012 """show how files match on given patterns"""
1012 1013 items = list(cmdutil.walk(repo, pats, opts))
1013 1014 if not items:
1014 1015 return
1015 1016 fmt = '%%s %%-%ds %%-%ds %%s' % (
1016 1017 max([len(abs) for (src, abs, rel, exact) in items]),
1017 1018 max([len(rel) for (src, abs, rel, exact) in items]))
1018 1019 for src, abs, rel, exact in items:
1019 1020 line = fmt % (src, abs, rel, exact and 'exact' or '')
1020 1021 ui.write("%s\n" % line.rstrip())
1021 1022
1022 1023 def diff(ui, repo, *pats, **opts):
1023 1024 """diff repository (or selected files)
1024 1025
1025 1026 Show differences between revisions for the specified files.
1026 1027
1027 1028 Differences between files are shown using the unified diff format.
1028 1029
1029 1030 NOTE: diff may generate unexpected results for merges, as it will
1030 1031 default to comparing against the working directory's first parent
1031 1032 changeset if no revisions are specified.
1032 1033
1033 1034 When two revision arguments are given, then changes are shown
1034 1035 between those revisions. If only one revision is specified then
1035 1036 that revision is compared to the working directory, and, when no
1036 1037 revisions are specified, the working directory files are compared
1037 1038 to its parent.
1038 1039
1039 1040 Without the -a option, diff will avoid generating diffs of files
1040 1041 it detects as binary. With -a, diff will generate a diff anyway,
1041 1042 probably with undesirable results.
1042 1043 """
1043 1044 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1044 1045
1045 1046 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1046 1047
1047 1048 patch.diff(repo, node1, node2, fns, match=matchfn,
1048 1049 opts=patch.diffopts(ui, opts))
1049 1050
1050 1051 def export(ui, repo, *changesets, **opts):
1051 1052 """dump the header and diffs for one or more changesets
1052 1053
1053 1054 Print the changeset header and diffs for one or more revisions.
1054 1055
1055 1056 The information shown in the changeset header is: author,
1056 1057 changeset hash, parent(s) and commit comment.
1057 1058
1058 1059 NOTE: export may generate unexpected diff output for merge changesets,
1059 1060 as it will compare the merge changeset against its first parent only.
1060 1061
1061 1062 Output may be to a file, in which case the name of the file is
1062 1063 given using a format string. The formatting rules are as follows:
1063 1064
1064 1065 %% literal "%" character
1065 1066 %H changeset hash (40 bytes of hexadecimal)
1066 1067 %N number of patches being generated
1067 1068 %R changeset revision number
1068 1069 %b basename of the exporting repository
1069 1070 %h short-form changeset hash (12 bytes of hexadecimal)
1070 1071 %n zero-padded sequence number, starting at 1
1071 1072 %r zero-padded changeset revision number
1072 1073
1073 1074 Without the -a option, export will avoid generating diffs of files
1074 1075 it detects as binary. With -a, export will generate a diff anyway,
1075 1076 probably with undesirable results.
1076 1077
1077 1078 With the --switch-parent option, the diff will be against the second
1078 1079 parent. It can be useful to review a merge.
1079 1080 """
1080 1081 if not changesets:
1081 1082 raise util.Abort(_("export requires at least one changeset"))
1082 1083 revs = cmdutil.revrange(repo, changesets)
1083 1084 if len(revs) > 1:
1084 1085 ui.note(_('exporting patches:\n'))
1085 1086 else:
1086 1087 ui.note(_('exporting patch:\n'))
1087 1088 patch.export(repo, revs, template=opts['output'],
1088 1089 switch_parent=opts['switch_parent'],
1089 1090 opts=patch.diffopts(ui, opts))
1090 1091
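# The %-keys documented in the export help above are plain string
# substitutions. The following standalone sketch (a hypothetical helper, not
# Mercurial's own code) shows one way such an output template could be
# expanded; the zero-padding width used for %n and %r is an assumption here.
def expand_export_name(template, node_hex, rev, seqno, total, repobase):
    width = len(str(total))
    subs = {
        '%': '%',                        # literal "%"
        'H': node_hex,                   # full 40-character changeset hash
        'h': node_hex[:12],              # short-form changeset hash
        'R': str(rev),                   # changeset revision number
        'r': str(rev).zfill(width),      # zero-padded revision number
        'n': str(seqno).zfill(width),    # zero-padded sequence number
        'N': str(total),                 # number of patches being generated
        'b': repobase,                   # basename of the exporting repository
    }
    out, i = [], 0
    while i < len(template):
        if template[i] == '%' and i + 1 < len(template):
            out.append(subs.get(template[i + 1], '%' + template[i + 1]))
            i += 2
        else:
            out.append(template[i])
            i += 1
    return ''.join(out)

# expand_export_name('%b-%R-%h.patch', '0123456789ab' + 28 * '0', 42, 1, 3, 'myrepo')
# -> 'myrepo-42-0123456789ab.patch'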
1091 1092 def grep(ui, repo, pattern, *pats, **opts):
1092 1093 """search for a pattern in specified files and revisions
1093 1094
1094 1095 Search revisions of files for a regular expression.
1095 1096
1096 1097 This command behaves differently than Unix grep. It only accepts
1097 1098 Python/Perl regexps. It searches repository history, not the
1098 1099 working directory. It always prints the revision number in which
1099 1100 a match appears.
1100 1101
1101 1102 By default, grep only prints output for the first revision of a
1102 1103 file in which it finds a match. To get it to print every revision
1103 1104 that contains a change in match status ("-" for a match that
1104 1105 becomes a non-match, or "+" for a non-match that becomes a match),
1105 1106 use the --all flag.
1106 1107 """
1107 1108 reflags = 0
1108 1109 if opts['ignore_case']:
1109 1110 reflags |= re.I
1110 1111 try:
1111 1112 regexp = re.compile(pattern, reflags)
1112 1113 except Exception, inst:
1113 1114 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1114 1115 return None
1115 1116 sep, eol = ':', '\n'
1116 1117 if opts['print0']:
1117 1118 sep = eol = '\0'
1118 1119
1119 1120 fcache = {}
1120 1121 def getfile(fn):
1121 1122 if fn not in fcache:
1122 1123 fcache[fn] = repo.file(fn)
1123 1124 return fcache[fn]
1124 1125
1125 1126 def matchlines(body):
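# Generator: scan the file body with the compiled regexp and yield one
# (line number, match start column, match end column, line text) tuple per
# match, tracking the line number incrementally rather than re-splitting
# the body for every hit.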
1126 1127 begin = 0
1127 1128 linenum = 0
1128 1129 while True:
1129 1130 match = regexp.search(body, begin)
1130 1131 if not match:
1131 1132 break
1132 1133 mstart, mend = match.span()
1133 1134 linenum += body.count('\n', begin, mstart) + 1
1134 1135 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1135 1136 lend = body.find('\n', mend)
1136 1137 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1137 1138 begin = lend + 1
1138 1139
1139 1140 class linestate(object):
1140 1141 def __init__(self, line, linenum, colstart, colend):
1141 1142 self.line = line
1142 1143 self.linenum = linenum
1143 1144 self.colstart = colstart
1144 1145 self.colend = colend
1145 1146
1146 1147 def __eq__(self, other):
1147 1148 return self.line == other.line
1148 1149
1149 1150 matches = {}
1150 1151 copies = {}
1151 1152 def grepbody(fn, rev, body):
1152 1153 matches[rev].setdefault(fn, [])
1153 1154 m = matches[rev][fn]
1154 1155 for lnum, cstart, cend, line in matchlines(body):
1155 1156 s = linestate(line, lnum, cstart, cend)
1156 1157 m.append(s)
1157 1158
1158 1159 def difflinestates(a, b):
1159 1160 sm = difflib.SequenceMatcher(None, a, b)
1160 1161 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1161 1162 if tag == 'insert':
1162 1163 for i in xrange(blo, bhi):
1163 1164 yield ('+', b[i])
1164 1165 elif tag == 'delete':
1165 1166 for i in xrange(alo, ahi):
1166 1167 yield ('-', a[i])
1167 1168 elif tag == 'replace':
1168 1169 for i in xrange(alo, ahi):
1169 1170 yield ('-', a[i])
1170 1171 for i in xrange(blo, bhi):
1171 1172 yield ('+', b[i])
1172 1173
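# A standalone illustration (a hypothetical helper, not used by grep itself)
# of the SequenceMatcher logic in difflinestates above, with plain strings
# standing in for linestate objects; it relies on the difflib module already
# used just above.
def _plus_minus_example(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    out = []
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            out.extend([('-', l) for l in a[alo:ahi]])
        if tag in ('insert', 'replace'):
            out.extend([('+', l) for l in b[blo:bhi]])
    return out
# _plus_minus_example(['foo', 'bar'], ['foo', 'baz']) == [('-', 'bar'), ('+', 'baz')]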
1173 1174 prev = {}
1174 1175 def display(fn, rev, states, prevstates):
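# Emit output for one file's matches. Columns: file name, revision number,
# then optionally the line number (--line-number), a +/- change marker
# (--all), the committing user (--user), and the matching line text; with
# --files-with-matches each (file, rev) pair is printed at most once.
# Columns are joined with the separator selected for --print0.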
1175 1176 found = False
1176 1177 filerevmatches = {}
1177 1178 r = prev.get(fn, -1)
1178 1179 if opts['all']:
1179 1180 iter = difflinestates(states, prevstates)
1180 1181 else:
1181 1182 iter = [('', l) for l in prevstates]
1182 1183 for change, l in iter:
1183 1184 cols = [fn, str(r)]
1184 1185 if opts['line_number']:
1185 1186 cols.append(str(l.linenum))
1186 1187 if opts['all']:
1187 1188 cols.append(change)
1188 1189 if opts['user']:
1189 1190 cols.append(ui.shortuser(get(r)[1]))
1190 1191 if opts['files_with_matches']:
1191 1192 c = (fn, r)
1192 1193 if c in filerevmatches:
1193 1194 continue
1194 1195 filerevmatches[c] = 1
1195 1196 else:
1196 1197 cols.append(l.line)
1197 1198 ui.write(sep.join(cols), eol)
1198 1199 found = True
1199 1200 return found
1200 1201
1201 1202 fstate = {}
1202 1203 skip = {}
1203 1204 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1204 1205 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1205 1206 found = False
1206 1207 follow = opts.get('follow')
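# walkchangerevs yields events in windows: 'window' resets the per-window
# match cache, 'add' collects regexp matches (and, with --follow, rename
# sources) for every file touched by a revision, and 'iter' walks the window
# again in display order, comparing each file's matches with the state seen
# previously, so that by default only the first revision in which a file
# matches is printed, while --all reports every change in match status.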
1207 1208 for st, rev, fns in changeiter:
1208 1209 if st == 'window':
1209 1210 matches.clear()
1210 1211 elif st == 'add':
1211 1212 mf = repo.changectx(rev).manifest()
1212 1213 matches[rev] = {}
1213 1214 for fn in fns:
1214 1215 if fn in skip:
1215 1216 continue
1216 1217 fstate.setdefault(fn, {})
1217 1218 try:
1218 1219 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1219 1220 if follow:
1220 1221 copied = getfile(fn).renamed(mf[fn])
1221 1222 if copied:
1222 1223 copies.setdefault(rev, {})[fn] = copied[0]
1223 1224 except KeyError:
1224 1225 pass
1225 1226 elif st == 'iter':
1226 1227 states = matches[rev].items()
1227 1228 states.sort()
1228 1229 for fn, m in states:
1229 1230 copy = copies.get(rev, {}).get(fn)
1230 1231 if fn in skip:
1231 1232 if copy:
1232 1233 skip[copy] = True
1233 1234 continue
1234 1235 if fn in prev or fstate[fn]:
1235 1236 r = display(fn, rev, m, fstate[fn])
1236 1237 found = found or r
1237 1238 if r and not opts['all']:
1238 1239 skip[fn] = True
1239 1240 if copy:
1240 1241 skip[copy] = True
1241 1242 fstate[fn] = m
1242 1243 if copy:
1243 1244 fstate[copy] = m
1244 1245 prev[fn] = rev
1245 1246
1246 1247 fstate = fstate.items()
1247 1248 fstate.sort()
1248 1249 for fn, state in fstate:
1249 1250 if fn in skip:
1250 1251 continue
1251 1252 if fn not in copies.get(prev[fn], {}):
1252 1253 found = display(fn, rev, {}, state) or found
1253 1254 return (not found and 1) or 0
1254 1255
1255 1256 def heads(ui, repo, *branchrevs, **opts):
1256 1257 """show current repository heads or show branch heads
1257 1258
1258 1259 With no arguments, show all repository head changesets.
1259 1260
1260 1261 If branch or revision names are given, this will show the heads of
1261 1262 the specified branches or the branches those revisions are tagged
1262 1263 with.
1263 1264
1264 1265 Repository "heads" are changesets that don't have child
1265 1266 changesets. They are where development generally takes place and
1266 1267 are the usual targets for update and merge operations.
1267 1268
1268 1269 Branch heads are changesets that have a given branch tag, but have
1269 1270 no child changesets with that tag. They are usually where
1270 1271 development on the given branch takes place.
1271 1272 """
1272 1273 if opts['rev']:
1273 1274 start = repo.lookup(opts['rev'])
1274 1275 else:
1275 1276 start = None
1276 1277 if not branchrevs:
1277 1278 # Assume we're looking repo-wide heads if no revs were specified.
1278 1279 heads = repo.heads(start)
1279 1280 else:
1280 1281 heads = []
1281 1282 visitedset = util.set()
1282 1283 for branchrev in branchrevs:
1283 1284 branch = repo.changectx(branchrev).branch()
1284 1285 if branch in visitedset:
1285 1286 continue
1286 1287 visitedset.add(branch)
1287 1288 bheads = repo.branchheads(branch, start)
1288 1289 if not bheads:
1289 1290 if branch != branchrev:
1290 1291 ui.warn(_("no changes on branch %s containing %s are "
1291 1292 "reachable from %s\n")
1292 1293 % (branch, branchrev, opts['rev']))
1293 1294 else:
1294 1295 ui.warn(_("no changes on branch %s are reachable from %s\n")
1295 1296 % (branch, opts['rev']))
1296 1297 heads.extend(bheads)
1297 1298 if not heads:
1298 1299 return 1
1299 1300 displayer = cmdutil.show_changeset(ui, repo, opts)
1300 1301 for n in heads:
1301 1302 displayer.show(changenode=n)
1302 1303
1303 1304 def help_(ui, name=None, with_version=False):
1304 1305 """show help for a command, extension, or list of commands
1305 1306
1306 1307 With no arguments, print a list of commands and short help.
1307 1308
1308 1309 Given a command name, print help for that command.
1309 1310
1310 1311 Given an extension name, print help for that extension, and the
1311 1312 commands it provides."""
1312 1313 option_lists = []
1313 1314
1314 1315 def addglobalopts(aliases):
1315 1316 if ui.verbose:
1316 1317 option_lists.append((_("global options:"), globalopts))
1317 1318 if name == 'shortlist':
1318 1319 option_lists.append((_('use "hg help" for the full list '
1319 1320 'of commands'), ()))
1320 1321 else:
1321 1322 if name == 'shortlist':
1322 1323 msg = _('use "hg help" for the full list of commands '
1323 1324 'or "hg -v" for details')
1324 1325 elif aliases:
1325 1326 msg = _('use "hg -v help%s" to show aliases and '
1326 1327 'global options') % (name and " " + name or "")
1327 1328 else:
1328 1329 msg = _('use "hg -v help %s" to show global options') % name
1329 1330 option_lists.append((msg, ()))
1330 1331
1331 1332 def helpcmd(name):
1332 1333 if with_version:
1333 1334 version_(ui)
1334 1335 ui.write('\n')
1335 1336 aliases, i = cmdutil.findcmd(ui, name)
1336 1337 # synopsis
1337 1338 ui.write("%s\n\n" % i[2])
1338 1339
1339 1340 # description
1340 1341 doc = i[0].__doc__
1341 1342 if not doc:
1342 1343 doc = _("(No help text available)")
1343 1344 if ui.quiet:
1344 1345 doc = doc.splitlines(0)[0]
1345 1346 ui.write("%s\n" % doc.rstrip())
1346 1347
1347 1348 if not ui.quiet:
1348 1349 # aliases
1349 1350 if len(aliases) > 1:
1350 1351 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1351 1352
1352 1353 # options
1353 1354 if i[1]:
1354 1355 option_lists.append((_("options:\n"), i[1]))
1355 1356
1356 1357 addglobalopts(False)
1357 1358
1358 1359 def helplist(select=None):
1359 1360 h = {}
1360 1361 cmds = {}
1361 1362 for c, e in table.items():
1362 1363 f = c.split("|", 1)[0]
1363 1364 if select and not select(f):
1364 1365 continue
1365 1366 if name == "shortlist" and not f.startswith("^"):
1366 1367 continue
1367 1368 f = f.lstrip("^")
1368 1369 if not ui.debugflag and f.startswith("debug"):
1369 1370 continue
1370 1371 doc = e[0].__doc__
1371 1372 if not doc:
1372 1373 doc = _("(No help text available)")
1373 1374 h[f] = doc.splitlines(0)[0].rstrip()
1374 1375 cmds[f] = c.lstrip("^")
1375 1376
1376 1377 fns = h.keys()
1377 1378 fns.sort()
1378 1379 m = max(map(len, fns))
1379 1380 for f in fns:
1380 1381 if ui.verbose:
1381 1382 commands = cmds[f].replace("|",", ")
1382 1383 ui.write(" %s:\n %s\n"%(commands, h[f]))
1383 1384 else:
1384 1385 ui.write(' %-*s %s\n' % (m, f, h[f]))
1385 1386
1386 1387 if not ui.quiet:
1387 1388 addglobalopts(True)
1388 1389
1389 1390 def helptopic(name):
1390 1391 v = None
1391 1392 for i in help.helptable:
1392 1393 l = i.split('|')
1393 1394 if name in l:
1394 1395 v = i
1395 1396 header = l[-1]
1396 1397 if not v:
1397 1398 raise cmdutil.UnknownCommand(name)
1398 1399
1399 1400 # description
1400 1401 doc = help.helptable[v]
1401 1402 if not doc:
1402 1403 doc = _("(No help text available)")
1403 1404 if callable(doc):
1404 1405 doc = doc()
1405 1406
1406 1407 ui.write("%s\n" % header)
1407 1408 ui.write("%s\n" % doc.rstrip())
1408 1409
1409 1410 def helpext(name):
1410 1411 try:
1411 1412 mod = extensions.find(name)
1412 1413 except KeyError:
1413 1414 raise cmdutil.UnknownCommand(name)
1414 1415
1415 1416 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1416 1417 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1417 1418 for d in doc[1:]:
1418 1419 ui.write(d, '\n')
1419 1420
1420 1421 ui.status('\n')
1421 1422
1422 1423 try:
1423 1424 ct = mod.cmdtable
1424 1425 except AttributeError:
1425 1426 ct = None
1426 1427 if not ct:
1427 1428 ui.status(_('no commands defined\n'))
1428 1429 return
1429 1430
1430 1431 ui.status(_('list of commands:\n\n'))
1431 1432 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1432 1433 helplist(modcmds.has_key)
1433 1434
1434 1435 if name and name != 'shortlist':
1435 1436 i = None
1436 1437 for f in (helpcmd, helptopic, helpext):
1437 1438 try:
1438 1439 f(name)
1439 1440 i = None
1440 1441 break
1441 1442 except cmdutil.UnknownCommand, inst:
1442 1443 i = inst
1443 1444 if i:
1444 1445 raise i
1445 1446
1446 1447 else:
1447 1448 # program name
1448 1449 if ui.verbose or with_version:
1449 1450 version_(ui)
1450 1451 else:
1451 1452 ui.status(_("Mercurial Distributed SCM\n"))
1452 1453 ui.status('\n')
1453 1454
1454 1455 # list of commands
1455 1456 if name == "shortlist":
1456 1457 ui.status(_('basic commands:\n\n'))
1457 1458 else:
1458 1459 ui.status(_('list of commands:\n\n'))
1459 1460
1460 1461 helplist()
1461 1462
1462 1463 # list all option lists
1463 1464 opt_output = []
1464 1465 for title, options in option_lists:
1465 1466 opt_output.append(("\n%s" % title, None))
1466 1467 for shortopt, longopt, default, desc in options:
1467 1468 if "DEPRECATED" in desc and not ui.verbose: continue
1468 1469 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1469 1470 longopt and " --%s" % longopt),
1470 1471 "%s%s" % (desc,
1471 1472 default
1472 1473 and _(" (default: %s)") % default
1473 1474 or "")))
1474 1475
1475 1476 if opt_output:
1476 1477 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1477 1478 for first, second in opt_output:
1478 1479 if second:
1479 1480 ui.write(" %-*s %s\n" % (opts_len, first, second))
1480 1481 else:
1481 1482 ui.write("%s\n" % first)
1482 1483
1483 1484 def identify(ui, repo, source=None,
1484 1485 rev=None, num=None, id=None, branch=None, tags=None):
1485 1486 """identify the working copy or specified revision
1486 1487
1487 1488 With no revision, print a summary of the current state of the repo.
1488 1489
1489 1490 With a path, do a lookup in another repository.
1490 1491
1491 1492 This summary identifies the repository state using one or two parent
1492 1493 hash identifiers, followed by a "+" if there are uncommitted changes
1493 1494 in the working directory, a list of tags for this revision and a branch
1494 1495 name for non-default branches.
1495 1496 """
1496 1497
1497 1498 hexfunc = ui.debugflag and hex or short
1498 1499 default = not (num or id or branch or tags)
1499 1500 output = []
1500 1501
1501 1502 if source:
1502 1503 source, revs = cmdutil.parseurl(ui.expandpath(source), [])
1503 1504 srepo = hg.repository(ui, source)
1504 1505 if not rev and revs:
1505 1506 rev = revs[0]
1506 1507 if not rev:
1507 1508 rev = "tip"
1508 1509 if num or branch or tags:
1509 1510 raise util.Abort(
1510 1511 "can't query remote revision number, branch, or tags")
1511 1512 output = [hexfunc(srepo.lookup(rev))]
1512 1513 elif not rev:
1513 1514 ctx = repo.workingctx()
1514 1515 parents = ctx.parents()
1515 1516 changed = False
1516 1517 if default or id or num:
1517 1518 changed = ctx.files() + ctx.deleted()
1518 1519 if default or id:
1519 1520 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1520 1521 (changed) and "+" or "")]
1521 1522 if num:
1522 1523 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1523 1524 (changed) and "+" or ""))
1524 1525 else:
1525 1526 ctx = repo.changectx(rev)
1526 1527 if default or id:
1527 1528 output = [hexfunc(ctx.node())]
1528 1529 if num:
1529 1530 output.append(str(ctx.rev()))
1530 1531
1531 1532 if not source and default and not ui.quiet:
1532 1533 b = util.tolocal(ctx.branch())
1533 1534 if b != 'default':
1534 1535 output.append("(%s)" % b)
1535 1536
1536 1537 # multiple tags for a single parent separated by '/'
1537 1538 t = "/".join(ctx.tags())
1538 1539 if t:
1539 1540 output.append(t)
1540 1541
1541 1542 if branch:
1542 1543 output.append(util.tolocal(ctx.branch()))
1543 1544
1544 1545 if tags:
1545 1546 output.extend(ctx.tags())
1546 1547
1547 1548 ui.write("%s\n" % ' '.join(output))
1548 1549
1549 1550 def import_(ui, repo, patch1, *patches, **opts):
1550 1551 """import an ordered set of patches
1551 1552
1552 1553 Import a list of patches and commit them individually.
1553 1554
1554 1555 If there are outstanding changes in the working directory, import
1555 1556 will abort unless given the -f flag.
1556 1557
1557 1558 You can import a patch straight from a mail message. Even patches
1558 1559 as attachments work (body part must be type text/plain or
1559 1560 text/x-patch to be used). The From and Subject headers of the email
1560 1561 message are used as the default committer and commit message. All
1561 1562 text/plain body parts before the first diff are added to the commit
1562 1563 message.
1563 1564
1564 1565 If the imported patch was generated by hg export, the user and description
1565 1566 from the patch override values from the message headers and body. Values
1566 1567 given on the command line with -m and -u override these.
1567 1568
1568 1569 If --exact is specified, import will set the working directory
1569 1570 to the parent of each patch before applying it, and will abort
1570 1571 if the resulting changeset has a different ID than the one
1571 1572 recorded in the patch. This may happen due to character set
1572 1573 problems or other deficiencies in the text patch format.
1573 1574
1574 1575 To read a patch from standard input, use patch name "-".
1575 1576 """
1576 1577 patches = (patch1,) + patches
1577 1578
1578 1579 if opts.get('exact') or not opts['force']:
1579 1580 cmdutil.bail_if_changed(repo)
1580 1581
1581 1582 d = opts["base"]
1582 1583 strip = opts["strip"]
1583 1584
1584 1585 wlock = repo.wlock()
1585 1586 lock = repo.lock()
1586 1587
1587 1588 for p in patches:
1588 1589 pf = os.path.join(d, p)
1589 1590
1590 1591 if pf == '-':
1591 1592 ui.status(_("applying patch from stdin\n"))
1592 1593 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
1593 1594 else:
1594 1595 ui.status(_("applying %s\n") % p)
1595 1596 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf, 'rb'))
1596 1597
1597 1598 if tmpname is None:
1598 1599 raise util.Abort(_('no diffs found'))
1599 1600
1600 1601 try:
1601 1602 cmdline_message = cmdutil.logmessage(opts)
1602 1603 if cmdline_message:
1603 1604 # pickup the cmdline msg
1604 1605 message = cmdline_message
1605 1606 elif message:
1606 1607 # pickup the patch msg
1607 1608 message = message.strip()
1608 1609 else:
1609 1610 # launch the editor
1610 1611 message = None
1611 1612 ui.debug(_('message:\n%s\n') % message)
1612 1613
1613 1614 wp = repo.workingctx().parents()
1614 1615 if opts.get('exact'):
1615 1616 if not nodeid or not p1:
1616 1617 raise util.Abort(_('not a mercurial patch'))
1617 1618 p1 = repo.lookup(p1)
1618 1619 p2 = repo.lookup(p2 or hex(nullid))
1619 1620
1620 1621 if p1 != wp[0].node():
1621 1622 hg.clean(repo, p1, wlock=wlock)
1622 1623 repo.dirstate.setparents(p1, p2)
1623 1624 elif p2:
1624 1625 try:
1625 1626 p1 = repo.lookup(p1)
1626 1627 p2 = repo.lookup(p2)
1627 1628 if p1 == wp[0].node():
1628 1629 repo.dirstate.setparents(p1, p2)
1629 1630 except hg.RepoError:
1630 1631 pass
1631 1632 if opts.get('exact') or opts.get('import_branch'):
1632 1633 repo.dirstate.setbranch(branch or 'default')
1633 1634
1634 1635 files = {}
1635 1636 try:
1636 1637 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1637 1638 files=files)
1638 1639 finally:
1639 1640 files = patch.updatedir(ui, repo, files, wlock=wlock)
1640 1641 n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1641 1642 if opts.get('exact'):
1642 1643 if hex(n) != nodeid:
1643 1644 repo.rollback(wlock=wlock, lock=lock)
1644 1645 raise util.Abort(_('patch is damaged or loses information'))
1645 1646 finally:
1646 1647 os.unlink(tmpname)
1647 1648
1648 1649 def incoming(ui, repo, source="default", **opts):
1649 1650 """show new changesets found in source
1650 1651
1651 1652 Show new changesets found in the specified path/URL or the default
1652 1653 pull location. These are the changesets that would be pulled if a pull
1653 1654 was requested.
1654 1655
1655 1656 For remote repositories, using --bundle avoids downloading the changesets
1656 1657 twice if the incoming command is followed by a pull.
1657 1658
1658 1659 See pull for valid source format details.
1659 1660 """
1660 1661 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1661 1662 cmdutil.setremoteconfig(ui, opts)
1662 1663
1663 1664 other = hg.repository(ui, source)
1664 1665 ui.status(_('comparing with %s\n') % source)
1665 1666 if revs:
1666 1667 if 'lookup' in other.capabilities:
1667 1668 revs = [other.lookup(rev) for rev in revs]
1668 1669 else:
1669 1670 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1670 1671 raise util.Abort(error)
1671 1672 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1672 1673 if not incoming:
1673 1674 try:
1674 1675 os.unlink(opts["bundle"])
1675 1676 except:
1676 1677 pass
1677 1678 ui.status(_("no changes found\n"))
1678 1679 return 1
1679 1680
1680 1681 cleanup = None
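# If a bundle file was requested (or the source is not local), the incoming
# changegroup is written out as a bundle -- compressed for local sources,
# uncompressed otherwise -- and, for remote sources, re-opened through
# bundlerepo so the changesets can be displayed; temporary bundles are
# deleted in the finally block below.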
1681 1682 try:
1682 1683 fname = opts["bundle"]
1683 1684 if fname or not other.local():
1684 1685 # create a bundle (uncompressed if other repo is not local)
1685 1686 if revs is None:
1686 1687 cg = other.changegroup(incoming, "incoming")
1687 1688 else:
1688 1689 if 'changegroupsubset' not in other.capabilities:
1689 1690 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1690 1691 cg = other.changegroupsubset(incoming, revs, 'incoming')
1691 1692 bundletype = other.local() and "HG10BZ" or "HG10UN"
1692 1693 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1693 1694 # keep written bundle?
1694 1695 if opts["bundle"]:
1695 1696 cleanup = None
1696 1697 if not other.local():
1697 1698 # use the created uncompressed bundlerepo
1698 1699 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1699 1700
1700 1701 o = other.changelog.nodesbetween(incoming, revs)[0]
1701 1702 if opts['newest_first']:
1702 1703 o.reverse()
1703 1704 displayer = cmdutil.show_changeset(ui, other, opts)
1704 1705 for n in o:
1705 1706 parents = [p for p in other.changelog.parents(n) if p != nullid]
1706 1707 if opts['no_merges'] and len(parents) == 2:
1707 1708 continue
1708 1709 displayer.show(changenode=n)
1709 1710 finally:
1710 1711 if hasattr(other, 'close'):
1711 1712 other.close()
1712 1713 if cleanup:
1713 1714 os.unlink(cleanup)
1714 1715
1715 1716 def init(ui, dest=".", **opts):
1716 1717 """create a new repository in the given directory
1717 1718
1718 1719 Initialize a new repository in the given directory. If the given
1719 1720 directory does not exist, it is created.
1720 1721
1721 1722 If no directory is given, the current directory is used.
1722 1723
1723 1724 It is possible to specify an ssh:// URL as the destination.
1724 1725 Look at the help text for the pull command for important details
1725 1726 about ssh:// URLs.
1726 1727 """
1727 1728 cmdutil.setremoteconfig(ui, opts)
1728 1729 hg.repository(ui, dest, create=1)
1729 1730
1730 1731 def locate(ui, repo, *pats, **opts):
1731 1732 """locate files matching specific patterns
1732 1733
1733 1734 Print all files under Mercurial control whose names match the
1734 1735 given patterns.
1735 1736
1736 1737 This command searches the entire repository by default. To search
1737 1738 just the current directory and its subdirectories, use
1738 1739 "--include .".
1739 1740
1740 1741 If no patterns are given to match, this command prints all file
1741 1742 names.
1742 1743
1743 1744 If you want to feed the output of this command into the "xargs"
1744 1745 command, use the "-0" option to both this command and "xargs".
1745 1746 This will avoid the problem of "xargs" treating single filenames
1746 1747 that contain white space as multiple filenames.
1747 1748 """
1748 1749 end = opts['print0'] and '\0' or '\n'
1749 1750 rev = opts['rev']
1750 1751 if rev:
1751 1752 node = repo.lookup(rev)
1752 1753 else:
1753 1754 node = None
1754 1755
1755 1756 ret = 1
1756 1757 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1757 1758 badmatch=util.always,
1758 1759 default='relglob'):
1759 1760 if src == 'b':
1760 1761 continue
1761 if not node and repo.dirstate.state(abs) == '?':
1762 if not node and abs not in repo.dirstate:
1762 1763 continue
1763 1764 if opts['fullpath']:
1764 1765 ui.write(os.path.join(repo.root, abs), end)
1765 1766 else:
1766 1767 ui.write(((pats and rel) or abs), end)
1767 1768 ret = 0
1768 1769
1769 1770 return ret
1770 1771
1771 1772 def log(ui, repo, *pats, **opts):
1772 1773 """show revision history of entire repository or files
1773 1774
1774 1775 Print the revision history of the specified files or the entire
1775 1776 project.
1776 1777
1777 1778 File history is shown without following rename or copy history of
1778 1779 files. Use -f/--follow with a file name to follow history across
1779 1780 renames and copies. --follow without a file name will only show
1780 1781 ancestors or descendants of the starting revision. --follow-first
1781 1782 only follows the first parent of merge revisions.
1782 1783
1783 1784 If no revision range is specified, the default is tip:0 unless
1784 1785 --follow is set, in which case the working directory parent is
1785 1786 used as the starting revision.
1786 1787
1787 1788 By default this command outputs: changeset id and hash, tags,
1788 1789 non-trivial parents, user, date and time, and a summary for each
1789 1790 commit. When the -v/--verbose switch is used, the list of changed
1790 1791 files and full commit message is shown.
1791 1792
1792 1793 NOTE: log -p may generate unexpected diff output for merge
1793 1794 changesets, as it will compare the merge changeset against its
1794 1795 first parent only. Also, the files: list will only reflect files
1795 1796 that are different from BOTH parents.
1796 1797
1797 1798 """
1798 1799
1799 1800 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1800 1801 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1801 1802
1802 1803 if opts['limit']:
1803 1804 try:
1804 1805 limit = int(opts['limit'])
1805 1806 except ValueError:
1806 1807 raise util.Abort(_('limit must be a positive integer'))
1807 1808 if limit <= 0: raise util.Abort(_('limit must be positive'))
1808 1809 else:
1809 1810 limit = sys.maxint
1810 1811 count = 0
1811 1812
1812 1813 if opts['copies'] and opts['rev']:
1813 1814 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1814 1815 else:
1815 1816 endrev = repo.changelog.count()
1816 1817 rcache = {}
1817 1818 ncache = {}
1818 1819 dcache = []
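# rcache: fn -> {linkrev: rename info}; ncache: fn -> {filenode: rename info};
# dcache caches the manifest delta of the most recently inspected changeset.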
1819 1820 def getrenamed(fn, rev, man):
1820 1821 '''looks up all renames for a file (up to endrev) the first
1821 1822 time the file is given. It indexes on the changerev and only
1822 1823 parses the manifest if linkrev != changerev.
1823 1824 Returns rename info for fn at changerev rev.'''
1824 1825 if fn not in rcache:
1825 1826 rcache[fn] = {}
1826 1827 ncache[fn] = {}
1827 1828 fl = repo.file(fn)
1828 1829 for i in xrange(fl.count()):
1829 1830 node = fl.node(i)
1830 1831 lr = fl.linkrev(node)
1831 1832 renamed = fl.renamed(node)
1832 1833 rcache[fn][lr] = renamed
1833 1834 if renamed:
1834 1835 ncache[fn][node] = renamed
1835 1836 if lr >= endrev:
1836 1837 break
1837 1838 if rev in rcache[fn]:
1838 1839 return rcache[fn][rev]
1839 1840 mr = repo.manifest.rev(man)
1840 1841 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1841 1842 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1842 1843 if not dcache or dcache[0] != man:
1843 1844 dcache[:] = [man, repo.manifest.readdelta(man)]
1844 1845 if fn in dcache[1]:
1845 1846 return ncache[fn].get(dcache[1][fn])
1846 1847 return None
1847 1848
1848 1849 df = False
1849 1850 if opts["date"]:
1850 1851 df = util.matchdate(opts["date"])
1851 1852
1852 1853 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1853 1854 for st, rev, fns in changeiter:
1854 1855 if st == 'add':
1855 1856 changenode = repo.changelog.node(rev)
1856 1857 parents = [p for p in repo.changelog.parentrevs(rev)
1857 1858 if p != nullrev]
1858 1859 if opts['no_merges'] and len(parents) == 2:
1859 1860 continue
1860 1861 if opts['only_merges'] and len(parents) != 2:
1861 1862 continue
1862 1863
1863 1864 if df:
1864 1865 changes = get(rev)
1865 1866 if not df(changes[2][0]):
1866 1867 continue
1867 1868
1868 1869 if opts['keyword']:
1869 1870 changes = get(rev)
1870 1871 miss = 0
1871 1872 for k in [kw.lower() for kw in opts['keyword']]:
1872 1873 if not (k in changes[1].lower() or
1873 1874 k in changes[4].lower() or
1874 1875 k in " ".join(changes[3]).lower()):
1875 1876 miss = 1
1876 1877 break
1877 1878 if miss:
1878 1879 continue
1879 1880
1880 1881 copies = []
1881 1882 if opts.get('copies') and rev:
1882 1883 mf = get(rev)[0]
1883 1884 for fn in get(rev)[3]:
1884 1885 rename = getrenamed(fn, rev, mf)
1885 1886 if rename:
1886 1887 copies.append((fn, rename[0]))
1887 1888 displayer.show(rev, changenode, copies=copies)
1888 1889 elif st == 'iter':
1889 1890 if count == limit: break
1890 1891 if displayer.flush(rev):
1891 1892 count += 1
1892 1893
1893 1894 def manifest(ui, repo, rev=None):
1894 1895 """output the current or given revision of the project manifest
1895 1896
1896 1897 Print a list of version controlled files for the given revision.
1897 1898 If no revision is given, the parent of the working directory is used,
1898 1899 or tip if no revision is checked out.
1899 1900
1900 1901 The manifest is the list of files being version controlled.
1902 1903
1903 1904 With -v flag, print file permissions. With --debug flag, print
1904 1905 file revision hashes.
1905 1906 """
1906 1907
1907 1908 m = repo.changectx(rev).manifest()
1908 1909 files = m.keys()
1909 1910 files.sort()
1910 1911
1911 1912 for f in files:
1912 1913 if ui.debugflag:
1913 1914 ui.write("%40s " % hex(m[f]))
1914 1915 if ui.verbose:
1915 1916 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1916 1917 ui.write("%s\n" % f)
1917 1918
1918 1919 def merge(ui, repo, node=None, force=None, rev=None):
1919 1920 """merge working directory with another revision
1920 1921
1921 1922 Merge the contents of the current working directory and the
1922 1923 requested revision. Files that changed between either parent are
1923 1924 marked as changed for the next commit and a commit must be
1924 1925 performed before any further updates are allowed.
1925 1926
1926 1927 If no revision is specified, the working directory's parent is a
1927 1928 head revision, and the repository contains exactly one other head,
1928 1929 then the other head is merged with by default. Otherwise, an explicit
1929 1930 revision to merge with must be provided.
1930 1931 """
1931 1932
1932 1933 if rev and node:
1933 1934 raise util.Abort(_("please specify just one revision"))
1934 1935
1935 1936 if not node:
1936 1937 node = rev
1937 1938
1938 1939 if not node:
1939 1940 heads = repo.heads()
1940 1941 if len(heads) > 2:
1941 1942 raise util.Abort(_('repo has %d heads - '
1942 1943 'please merge with an explicit rev') %
1943 1944 len(heads))
1944 1945 if len(heads) == 1:
1945 1946 raise util.Abort(_('there is nothing to merge - '
1946 1947 'use "hg update" instead'))
1947 1948 parent = repo.dirstate.parents()[0]
1948 1949 if parent not in heads:
1949 1950 raise util.Abort(_('working dir not at a head rev - '
1950 1951 'use "hg update" or merge with an explicit rev'))
1951 1952 node = parent == heads[0] and heads[-1] or heads[0]
1952 1953 return hg.merge(repo, node, force=force)
1953 1954
1954 1955 def outgoing(ui, repo, dest=None, **opts):
1955 1956 """show changesets not found in destination
1956 1957
1957 1958 Show changesets not found in the specified destination repository or
1958 1959 the default push location. These are the changesets that would be pushed
1959 1960 if a push was requested.
1960 1961
1961 1962 See pull for valid destination format details.
1962 1963 """
1963 1964 dest, revs = cmdutil.parseurl(
1964 1965 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1965 1966 cmdutil.setremoteconfig(ui, opts)
1966 1967 if revs:
1967 1968 revs = [repo.lookup(rev) for rev in revs]
1968 1969
1969 1970 other = hg.repository(ui, dest)
1970 1971 ui.status(_('comparing with %s\n') % dest)
1971 1972 o = repo.findoutgoing(other, force=opts['force'])
1972 1973 if not o:
1973 1974 ui.status(_("no changes found\n"))
1974 1975 return 1
1975 1976 o = repo.changelog.nodesbetween(o, revs)[0]
1976 1977 if opts['newest_first']:
1977 1978 o.reverse()
1978 1979 displayer = cmdutil.show_changeset(ui, repo, opts)
1979 1980 for n in o:
1980 1981 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1981 1982 if opts['no_merges'] and len(parents) == 2:
1982 1983 continue
1983 1984 displayer.show(changenode=n)
1984 1985
1985 1986 def parents(ui, repo, file_=None, **opts):
1986 1987 """show the parents of the working dir or revision
1987 1988
1988 1989 Print the working directory's parent revisions. If a
1989 1990 revision is given via --rev, the parent of that revision
1990 1991 will be printed. If a file argument is given, the revision in
1991 1992 which the file was last changed (before the working directory
1992 1993 revision or the argument to --rev if given) is printed.
1993 1994 """
1994 1995 rev = opts.get('rev')
1995 1996 if file_:
1996 1997 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1997 1998 if anypats or len(files) != 1:
1998 1999 raise util.Abort(_('can only specify an explicit file name'))
1999 2000 ctx = repo.filectx(files[0], changeid=rev)
2000 2001 elif rev:
2001 2002 ctx = repo.changectx(rev)
2002 2003 else:
2003 2004 ctx = repo.workingctx()
2004 2005 p = [cp.node() for cp in ctx.parents()]
2005 2006
2006 2007 displayer = cmdutil.show_changeset(ui, repo, opts)
2007 2008 for n in p:
2008 2009 if n != nullid:
2009 2010 displayer.show(changenode=n)
2010 2011
2011 2012 def paths(ui, repo, search=None):
2012 2013 """show definition of symbolic path names
2013 2014
2014 2015 Show definition of symbolic path name NAME. If no name is given, show
2015 2016 definition of available names.
2016 2017
2017 2018 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2018 2019 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2019 2020 """
2020 2021 if search:
2021 2022 for name, path in ui.configitems("paths"):
2022 2023 if name == search:
2023 2024 ui.write("%s\n" % path)
2024 2025 return
2025 2026 ui.warn(_("not found!\n"))
2026 2027 return 1
2027 2028 else:
2028 2029 for name, path in ui.configitems("paths"):
2029 2030 ui.write("%s = %s\n" % (name, path))
2030 2031
2031 2032 def postincoming(ui, repo, modheads, optupdate, wasempty):
2032 2033 if modheads == 0:
2033 2034 return
2034 2035 if optupdate:
2035 2036 if wasempty:
2036 2037 return hg.update(repo, repo.lookup('default'))
2037 2038 elif modheads == 1:
2038 2039 return hg.update(repo, repo.changelog.tip()) # update
2039 2040 else:
2040 2041 ui.status(_("not updating, since new heads added\n"))
2041 2042 if modheads > 1:
2042 2043 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2043 2044 else:
2044 2045 ui.status(_("(run 'hg update' to get a working copy)\n"))
2045 2046
2046 2047 def pull(ui, repo, source="default", **opts):
2047 2048 """pull changes from the specified source
2048 2049
2049 2050 Pull changes from a remote repository to a local one.
2050 2051
2051 2052 This finds all changes from the repository at the specified path
2052 2053 or URL and adds them to the local repository. By default, this
2053 2054 does not update the copy of the project in the working directory.
2054 2055
2055 2056 Valid URLs are of the form:
2056 2057
2057 2058 local/filesystem/path (or file://local/filesystem/path)
2058 2059 http://[user@]host[:port]/[path]
2059 2060 https://[user@]host[:port]/[path]
2060 2061 ssh://[user@]host[:port]/[path]
2061 2062 static-http://host[:port]/[path]
2062 2063
2063 2064 Paths in the local filesystem can either point to Mercurial
2064 2065 repositories or to bundle files (as created by 'hg bundle' or
2065 2066 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2066 2067 allows access to a Mercurial repository where you simply use a web
2067 2068 server to publish the .hg directory as static content.
2068 2069
2069 2070 An optional identifier after # indicates a particular branch, tag,
2070 2071 or changeset to pull.
2071 2072
2072 2073 Some notes about using SSH with Mercurial:
2073 2074 - SSH requires an accessible shell account on the destination machine
2074 2075 and a copy of hg in the remote path, or one specified with remotecmd.
2075 2076 - path is relative to the remote user's home directory by default.
2076 2077 Use an extra slash at the start of a path to specify an absolute path:
2077 2078 ssh://example.com//tmp/repository
2078 2079 - Mercurial doesn't use its own compression via SSH; the right thing
2079 2080 to do is to configure it in your ~/.ssh/config, e.g.:
2080 2081 Host *.mylocalnetwork.example.com
2081 2082 Compression no
2082 2083 Host *
2083 2084 Compression yes
2084 2085 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2085 2086 with the --ssh command line option.
2086 2087 """
2087 2088 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2088 2089 cmdutil.setremoteconfig(ui, opts)
2089 2090
2090 2091 other = hg.repository(ui, source)
2091 2092 ui.status(_('pulling from %s\n') % (source))
2092 2093 if revs:
2093 2094 if 'lookup' in other.capabilities:
2094 2095 revs = [other.lookup(rev) for rev in revs]
2095 2096 else:
2096 2097 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2097 2098 raise util.Abort(error)
2098 2099
2099 2100 wasempty = repo.changelog.count() == 0
2100 2101 modheads = repo.pull(other, heads=revs, force=opts['force'])
2101 2102 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2102 2103
2103 2104 def push(ui, repo, dest=None, **opts):
2104 2105 """push changes to the specified destination
2105 2106
2106 2107 Push changes from the local repository to the given destination.
2107 2108
2108 2109 This is the symmetrical operation for pull. It helps to move
2109 2110 changes from the current repository to a different one. If the
2110 2111 destination is local, this is identical to a pull in that directory
2111 2112 from the current one.
2112 2113
2113 2114 By default, push will refuse to run if it detects the result would
2114 2115 increase the number of remote heads. This generally indicates that
2115 2116 the client has forgotten to sync and merge before pushing.
2116 2117
2117 2118 Valid URLs are of the form:
2118 2119
2119 2120 local/filesystem/path (or file://local/filesystem/path)
2120 2121 ssh://[user@]host[:port]/[path]
2121 2122 http://[user@]host[:port]/[path]
2122 2123 https://[user@]host[:port]/[path]
2123 2124
2124 2125 An optional identifier after # indicates a particular branch, tag,
2125 2126 or changeset to push.
2126 2127
2127 2128 Look at the help text for the pull command for important details
2128 2129 about ssh:// URLs.
2129 2130
2130 2131 Pushing to http:// and https:// URLs is only possible if this
2131 2132 feature is explicitly enabled on the remote Mercurial server.
2132 2133 """
2133 2134 dest, revs = cmdutil.parseurl(
2134 2135 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2135 2136 cmdutil.setremoteconfig(ui, opts)
2136 2137
2137 2138 other = hg.repository(ui, dest)
2138 2139 ui.status('pushing to %s\n' % (dest))
2139 2140 if revs:
2140 2141 revs = [repo.lookup(rev) for rev in revs]
2141 2142 r = repo.push(other, opts['force'], revs=revs)
2142 2143 return r == 0
2143 2144
2144 2145 def rawcommit(ui, repo, *pats, **opts):
2145 2146 """raw commit interface (DEPRECATED)
2146 2147
2147 2148 (DEPRECATED)
2148 2149 Low-level commit, for use in helper scripts.
2149 2150
2150 2151 This command is not intended to be used by normal users, as it is
2151 2152 primarily useful for importing from other SCMs.
2152 2153
2153 2154 This command is now deprecated and will be removed in a future
2154 2155 release; please use debugsetparents and commit instead.
2155 2156 """
2156 2157
2157 2158 ui.warn(_("(the rawcommit command is deprecated)\n"))
2158 2159
2159 2160 message = cmdutil.logmessage(opts)
2160 2161
2161 2162 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2162 2163 if opts['files']:
2163 2164 files += open(opts['files']).read().splitlines()
2164 2165
2165 2166 parents = [repo.lookup(p) for p in opts['parent']]
2166 2167
2167 2168 try:
2168 2169 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2169 2170 except ValueError, inst:
2170 2171 raise util.Abort(str(inst))
2171 2172
2172 2173 def recover(ui, repo):
2173 2174 """roll back an interrupted transaction
2174 2175
2175 2176 Recover from an interrupted commit or pull.
2176 2177
2177 2178 This command tries to fix the repository status after an interrupted
2178 2179 operation. It should only be necessary when Mercurial suggests it.
2179 2180 """
2180 2181 if repo.recover():
2181 2182 return hg.verify(repo)
2182 2183 return 1
2183 2184
2184 2185 def remove(ui, repo, *pats, **opts):
2185 2186 """remove the specified files on the next commit
2186 2187
2187 2188 Schedule the indicated files for removal from the repository.
2188 2189
2189 2190 This only removes files from the current branch, not from the
2190 2191 entire project history. If the files still exist in the working
2191 2192 directory, they will be deleted from it. If invoked with --after,
2192 2193 files are marked as removed, but not actually unlinked unless --force
2193 2194 is also given. Without exact file names, --after will only mark
2194 2195 files as removed if they are no longer in the working directory.
2195 2196
2196 2197 This command schedules the files to be removed at the next commit.
2197 2198 To undo a remove before that, see hg revert.
2198 2199
2199 2200 Modified files and added files are not removed by default. To
2200 2201 remove them, use the -f/--force option.
2201 2202 """
2202 2203 names = []
2203 2204 if not opts['after'] and not pats:
2204 2205 raise util.Abort(_('no files specified'))
2205 2206 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2206 2207 exact = dict.fromkeys(files)
2207 2208 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2208 2209 modified, added, removed, deleted, unknown = mardu
2209 2210 remove, forget = [], []
2210 2211 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2211 2212 reason = None
2212 2213 if abs in modified and not opts['force']:
2213 2214 reason = _('is modified (use -f to force removal)')
2214 2215 elif abs in added:
2215 2216 if opts['force']:
2216 2217 forget.append(abs)
2217 2218 continue
2218 2219 reason = _('has been marked for add (use -f to force removal)')
2219 elif repo.dirstate.state(abs) == '?':
2220 elif abs not in repo.dirstate:
2220 2221 reason = _('is not managed')
2221 2222 elif opts['after'] and not exact and abs not in deleted:
2222 2223 continue
2223 2224 elif abs in removed:
2224 2225 continue
2225 2226 if reason:
2226 2227 if exact:
2227 2228 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2228 2229 else:
2229 2230 if ui.verbose or not exact:
2230 2231 ui.status(_('removing %s\n') % rel)
2231 2232 remove.append(abs)
2232 2233 repo.forget(forget)
2233 2234 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2234 2235
2235 2236 def rename(ui, repo, *pats, **opts):
2236 2237 """rename files; equivalent of copy + remove
2237 2238
2238 2239 Mark dest as copies of sources; mark sources for deletion. If
2239 2240 dest is a directory, copies are put in that directory. If dest is
2240 2241 a file, there can only be one source.
2241 2242
2242 2243 By default, this command copies the contents of files as they
2243 2244 stand in the working directory. If invoked with --after, the
2244 2245 operation is recorded, but no copying is performed.
2245 2246
2246 2247 This command takes effect in the next commit. To undo a rename
2247 2248 before that, see hg revert.
2248 2249 """
2249 2250 wlock = repo.wlock(0)
2250 2251 errs, copied = docopy(ui, repo, pats, opts, wlock)
2251 2252 names = []
2252 2253 for abs, rel, exact in copied:
2253 2254 if ui.verbose or not exact:
2254 2255 ui.status(_('removing %s\n') % rel)
2255 2256 names.append(abs)
2256 2257 if not opts.get('dry_run'):
2257 2258 repo.remove(names, True, wlock=wlock)
2258 2259 return errs
2259 2260
2260 2261 def revert(ui, repo, *pats, **opts):
2261 2262 """revert files or dirs to their states as of some revision
2262 2263
2263 2264 With no revision specified, revert the named files or directories
2264 2265 to the contents they had in the parent of the working directory.
2265 2266 This restores the contents of the affected files to an unmodified
2266 2267 state and unschedules adds, removes, copies, and renames. If the
2267 2268 working directory has two parents, you must explicitly specify the
2268 2269 revision to revert to.
2269 2270
2270 2271 Modified files are saved with a .orig suffix before reverting.
2271 2272 To disable these backups, use --no-backup.
2272 2273
2273 2274 Using the -r option, revert the given files or directories to their
2274 2275 contents as of a specific revision. This can be helpful to "roll
2275 2276 back" some or all of a change that should not have been committed.
2276 2277
2277 2278 Revert modifies the working directory. It does not commit any
2278 2279 changes, or change the parent of the working directory. If you
2279 2280 revert to a revision other than the parent of the working
2280 2281 directory, the reverted files will thus appear modified
2281 2282 afterwards.
2282 2283
2283 2284 If a file has been deleted, it is restored. If the executable
2284 2285 mode of a file was changed, it is reset.
2285 2286
2286 2287 If names are given, all files matching the names are reverted.
2287 2288
2288 2289 If no arguments are given, no files are reverted.
2289 2290 """
2290 2291
2291 2292 if opts["date"]:
2292 2293 if opts["rev"]:
2293 2294 raise util.Abort(_("you can't specify a revision and a date"))
2294 2295 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2295 2296
2296 2297 if not pats and not opts['all']:
2297 2298 raise util.Abort(_('no files or directories specified; '
2298 2299 'use --all to revert the whole repo'))
2299 2300
2300 2301 parent, p2 = repo.dirstate.parents()
2301 2302 if not opts['rev'] and p2 != nullid:
2302 2303 raise util.Abort(_('uncommitted merge - please provide a '
2303 2304 'specific revision'))
2304 2305 ctx = repo.changectx(opts['rev'])
2305 2306 node = ctx.node()
2306 2307 mf = ctx.manifest()
2307 2308 if node == parent:
2308 2309 pmf = mf
2309 2310 else:
2310 2311 pmf = None
2311 2312
2312 2313 wlock = repo.wlock()
2313 2314
2314 2315 # need all matching names in dirstate and manifest of target rev,
2315 2316 # so have to walk both. do not print errors if files exist in one
2316 2317 # but not other.
2317 2318
2318 2319 names = {}
2319 2320 target_only = {}
2320 2321
2321 2322 # walk dirstate.
2322 2323
2323 2324 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2324 2325 badmatch=mf.has_key):
2325 2326 names[abs] = (rel, exact)
2326 2327 if src == 'b':
2327 2328 target_only[abs] = True
2328 2329
2329 2330 # walk target manifest.
2330 2331
2331 2332 def badmatch(path):
2332 2333 if path in names:
2333 2334 return True
2334 2335 path_ = path + '/'
2335 2336 for f in names:
2336 2337 if f.startswith(path_):
2337 2338 return True
2338 2339 return False
2339 2340
2340 2341 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2341 2342 badmatch=badmatch):
2342 2343 if abs in names or src == 'b':
2343 2344 continue
2344 2345 names[abs] = (rel, exact)
2345 2346 target_only[abs] = True
2346 2347
2347 2348 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2348 2349 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2349 2350
2350 2351 revert = ([], _('reverting %s\n'))
2351 2352 add = ([], _('adding %s\n'))
2352 2353 remove = ([], _('removing %s\n'))
2353 2354 forget = ([], _('forgetting %s\n'))
2354 2355 undelete = ([], _('undeleting %s\n'))
2355 2356 update = {}
2356 2357
2357 2358 disptable = (
2358 2359 # dispatch table:
2359 2360 # file state
2360 2361 # action if in target manifest
2361 2362 # action if not in target manifest
2362 2363 # make backup if in target manifest
2363 2364 # make backup if not in target manifest
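# e.g. (modified, revert, remove, True, True) reads: a locally modified
# file is reverted if it exists in the target manifest, removed if it does
# not, and a .orig backup is made in either case.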
2364 2365 (modified, revert, remove, True, True),
2365 2366 (added, revert, forget, True, False),
2366 2367 (removed, undelete, None, False, False),
2367 2368 (deleted, revert, remove, False, False),
2368 2369 (unknown, add, None, True, False),
2369 2370 (target_only, add, None, False, False),
2370 2371 )
2371 2372
2372 2373 entries = names.items()
2373 2374 entries.sort()
2374 2375
2375 2376 for abs, (rel, exact) in entries:
2376 2377 mfentry = mf.get(abs)
2377 2378 target = repo.wjoin(abs)
2378 2379 def handle(xlist, dobackup):
2379 2380 xlist[0].append(abs)
2380 2381 update[abs] = 1
2381 2382 if dobackup and not opts['no_backup'] and util.lexists(target):
2382 2383 bakname = "%s.orig" % rel
2383 2384 ui.note(_('saving current version of %s as %s\n') %
2384 2385 (rel, bakname))
2385 2386 if not opts.get('dry_run'):
2386 2387 util.copyfile(target, bakname)
2387 2388 if ui.verbose or not exact:
2388 2389 ui.status(xlist[1] % rel)
2389 2390 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2390 2391 if abs not in table: continue
2391 2392 # file has changed in dirstate
2392 2393 if mfentry:
2393 2394 handle(hitlist, backuphit)
2394 2395 elif misslist is not None:
2395 2396 handle(misslist, backupmiss)
2396 2397 else:
2397 2398 if exact: ui.warn(_('file not managed: %s\n') % rel)
2398 2399 break
2399 2400 else:
2400 2401 # file has not changed in dirstate
2401 2402 if node == parent:
2402 2403 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2403 2404 continue
2404 2405 if pmf is None:
2405 2406 # only need parent manifest in this unlikely case,
2406 2407 # so do not read by default
2407 2408 pmf = repo.changectx(parent).manifest()
2408 2409 if abs in pmf:
2409 2410 if mfentry:
2410 2411 # if version of file is same in parent and target
2411 2412 # manifests, do nothing
2412 2413 if pmf[abs] != mfentry:
2413 2414 handle(revert, False)
2414 2415 else:
2415 2416 handle(remove, False)
2416 2417
2417 2418 if not opts.get('dry_run'):
2418 2419 for f in forget[0]:
2419 2420 repo.dirstate.forget(f)
2420 2421 r = hg.revert(repo, node, update.has_key, wlock)
2421 2422 for f in add[0]:
2422 2423 repo.dirstate.add(f)
2423 2424 for f in undelete[0]:
2424 2425 repo.dirstate.normal(f)
2425 2426 for f in remove[0]:
2426 2427 repo.dirstate.remove(f)
2427 2428 return r
2428 2429
2429 2430 def rollback(ui, repo):
2430 2431 """roll back the last transaction in this repository
2431 2432
2432 2433 Roll back the last transaction in this repository, restoring the
2433 2434 project to its state prior to the transaction.
2434 2435
2435 2436 Transactions are used to encapsulate the effects of all commands
2436 2437 that create new changesets or propagate existing changesets into a
2437 2438 repository. For example, the following commands are transactional,
2438 2439 and their effects can be rolled back:
2439 2440
2440 2441 commit
2441 2442 import
2442 2443 pull
2443 2444 push (with this repository as destination)
2444 2445 unbundle
2445 2446
2446 2447 This command should be used with care. There is only one level of
2447 2448 rollback, and there is no way to undo a rollback. It will also
2448 2449 restore the dirstate to its state at the time of the last transaction,
2449 2450 which may discard subsequent dirstate changes.
2450 2451
2451 2452 This command is not intended for use on public repositories. Once
2452 2453 changes are visible for pull by other users, rolling a transaction
2453 2454 back locally is ineffective (someone else may already have pulled
2454 2455 the changes). Furthermore, a race is possible with readers of the
2455 2456 repository; for example an in-progress pull from the repository
2456 2457 may fail if a rollback is performed.
2457 2458 """
2458 2459 repo.rollback()
2459 2460
2460 2461 def root(ui, repo):
2461 2462 """print the root (top) of the current working dir
2462 2463
2463 2464 Print the root directory of the current repository.
2464 2465 """
2465 2466 ui.write(repo.root + "\n")
2466 2467
2467 2468 def serve(ui, repo, **opts):
2468 2469 """export the repository via HTTP
2469 2470
2470 2471 Start a local HTTP repository browser and pull server.
2471 2472
2472 2473 By default, the server logs accesses to stdout and errors to
2473 2474 stderr. Use the "-A" and "-E" options to log to files.
2474 2475 """
2475 2476
2476 2477 if opts["stdio"]:
2477 2478 if repo is None:
2478 2479 raise hg.RepoError(_("There is no Mercurial repository here"
2479 2480 " (.hg not found)"))
2480 2481 s = sshserver.sshserver(ui, repo)
2481 2482 s.serve_forever()
2482 2483
2483 2484 parentui = ui.parentui or ui
2484 2485 optlist = ("name templates style address port ipv6"
2485 2486 " accesslog errorlog webdir_conf certificate")
2486 2487 for o in optlist.split():
2487 2488 if opts[o]:
2488 2489 parentui.setconfig("web", o, str(opts[o]))
2489 2490 if repo.ui != parentui:
2490 2491 repo.ui.setconfig("web", o, str(opts[o]))
2491 2492
2492 2493 if repo is None and not ui.config("web", "webdir_conf"):
2493 2494 raise hg.RepoError(_("There is no Mercurial repository here"
2494 2495 " (.hg not found)"))
2495 2496
2496 2497 class service:
2497 2498 def init(self):
2498 2499 util.set_signal_handler()
2499 2500 try:
2500 2501 self.httpd = hgweb.server.create_server(parentui, repo)
2501 2502 except socket.error, inst:
2502 2503 raise util.Abort(_('cannot start server: ') + inst.args[1])
2503 2504
2504 2505 if not ui.verbose: return
2505 2506
2506 2507 if self.httpd.port != 80:
2507 2508 ui.status(_('listening at http://%s:%d/\n') %
2508 2509 (self.httpd.addr, self.httpd.port))
2509 2510 else:
2510 2511 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2511 2512
2512 2513 def run(self):
2513 2514 self.httpd.serve_forever()
2514 2515
2515 2516 service = service()
2516 2517
2517 2518 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2518 2519
2519 2520 def status(ui, repo, *pats, **opts):
2520 2521 """show changed files in the working directory
2521 2522
2522 2523 Show status of files in the repository. If names are given, only
2523 2524 files that match are shown. Files that are clean or ignored, are
2524 2525 not listed unless -c (clean), -i (ignored) or -A is given.
2525 2526
2526 2527 NOTE: status may appear to disagree with diff if permissions have
2527 2528 changed or a merge has occurred. The standard diff format does not
2528 2529 report permission changes and diff only reports changes relative
2529 2530 to one merge parent.
2530 2531
2531 2532 If one revision is given, it is used as the base revision.
2532 2533 If two revisions are given, the difference between them is shown.
2533 2534
2534 2535 The codes used to show the status of files are:
2535 2536 M = modified
2536 2537 A = added
2537 2538 R = removed
2538 2539 C = clean
2539 2540 ! = deleted, but still tracked
2540 2541 ? = not tracked
2541 2542 I = ignored (not shown by default)
2542 2543 = the previously added file was copied from here
2543 2544 """
2544 2545
2545 2546 all = opts['all']
2546 2547 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2547 2548
2548 2549 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2549 2550 cwd = (pats and repo.getcwd()) or ''
2550 2551 modified, added, removed, deleted, unknown, ignored, clean = [
2551 2552 n for n in repo.status(node1=node1, node2=node2, files=files,
2552 2553 match=matchfn,
2553 2554 list_ignored=all or opts['ignored'],
2554 2555 list_clean=all or opts['clean'])]
2555 2556
2556 2557 changetypes = (('modified', 'M', modified),
2557 2558 ('added', 'A', added),
2558 2559 ('removed', 'R', removed),
2559 2560 ('deleted', '!', deleted),
2560 2561 ('unknown', '?', unknown),
2561 2562 ('ignored', 'I', ignored))
2562 2563
2563 2564 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2564 2565
2565 2566 end = opts['print0'] and '\0' or '\n'
2566 2567
2567 2568 for opt, char, changes in ([ct for ct in explicit_changetypes
2568 2569 if all or opts[ct[0]]]
2569 2570 or changetypes):
2570 2571 if opts['no_status']:
2571 2572 format = "%%s%s" % end
2572 2573 else:
2573 2574 format = "%s %%s%s" % (char, end)
2574 2575
2575 2576 for f in changes:
2576 2577 ui.write(format % repo.pathto(f, cwd))
2577 2578 if ((all or opts.get('copies')) and not opts.get('no_status')):
2578 2579 copied = repo.dirstate.copied(f)
2579 2580 if copied:
2580 2581 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2581 2582
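A brief sketch of the same information at the API level, assuming repo is an open repository; the seven lists returned mirror the codes documented in the status docstring above (ignored and clean are only populated when requested):

    mod, added, removed, deleted, unknown, ignored, clean = repo.status(
        list_ignored=True, list_clean=True)
    for f in mod:
        print 'M', f          # same "M" code the command prints
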
2582 2583 def tag(ui, repo, name, rev_=None, **opts):
2583 2584 """add a tag for the current or given revision
2584 2585
2585 2586 Name a particular revision using <name>.
2586 2587
2587 2588 Tags are used to name particular revisions of the repository and are
2588 2589 very useful to compare different revisions, to go back to significant
2589 2590 earlier versions or to mark branch points as releases, etc.
2590 2591
2591 2592 If no revision is given, the parent of the working directory is used,
2592 2593 or tip if no revision is checked out.
2593 2594
2594 2595 To facilitate version control, distribution, and merging of tags,
2595 2596 they are stored as a file named ".hgtags" which is managed
2596 2597 similarly to other project files and can be hand-edited if
2597 2598 necessary. The file '.hg/localtags' is used for local tags (not
2598 2599 shared among repositories).
2599 2600 """
2600 2601 if name in ['tip', '.', 'null']:
2601 2602 raise util.Abort(_("the name '%s' is reserved") % name)
2602 2603 if rev_ is not None:
2603 2604 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2604 2605 "please use 'hg tag [-r REV] NAME' instead\n"))
2605 2606 if opts['rev']:
2606 2607 raise util.Abort(_("use only one form to specify the revision"))
2607 2608 if opts['rev'] and opts['remove']:
2608 2609 raise util.Abort(_("--rev and --remove are incompatible"))
2609 2610 if opts['rev']:
2610 2611 rev_ = opts['rev']
2611 2612 message = opts['message']
2612 2613 if opts['remove']:
2613 2614 if not name in repo.tags():
2614 2615 raise util.Abort(_('tag %s does not exist') % name)
2615 2616 rev_ = nullid
2616 2617 if not message:
2617 2618 message = _('Removed tag %s') % name
2618 2619 elif name in repo.tags() and not opts['force']:
2619 2620 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2620 2621 % name)
2621 2622 if not rev_ and repo.dirstate.parents()[1] != nullid:
2622 2623 raise util.Abort(_('uncommitted merge - please provide a '
2623 2624 'specific revision'))
2624 2625 r = repo.changectx(rev_).node()
2625 2626
2626 2627 if not message:
2627 2628 message = _('Added tag %s for changeset %s') % (name, short(r))
2628 2629
2629 2630 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2630 2631
2631 2632 def tags(ui, repo):
2632 2633 """list repository tags
2633 2634
2634 2635 List the repository tags.
2635 2636
2636 2637 This lists both regular and local tags.
2637 2638 """
2638 2639
2639 2640 l = repo.tagslist()
2640 2641 l.reverse()
2641 2642 hexfunc = ui.debugflag and hex or short
2642 2643 for t, n in l:
2643 2644 try:
2644 2645 hn = hexfunc(n)
2645 2646 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2646 2647 except revlog.LookupError:
2647 2648 r = " ?:%s" % hn
2648 2649 if ui.quiet:
2649 2650 ui.write("%s\n" % t)
2650 2651 else:
2651 2652 spaces = " " * (30 - util.locallen(t))
2652 2653 ui.write("%s%s %s\n" % (t, spaces, r))
2653 2654
2654 2655 def tip(ui, repo, **opts):
2655 2656 """show the tip revision
2656 2657
2657 2658 Show the tip revision.
2658 2659 """
2659 2660 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2660 2661
2661 2662 def unbundle(ui, repo, fname1, *fnames, **opts):
2662 2663 """apply one or more changegroup files
2663 2664
2664 2665 Apply one or more compressed changegroup files generated by the
2665 2666 bundle command.
2666 2667 """
2667 2668 fnames = (fname1,) + fnames
2668 2669 result = None
2669 2670 wasempty = repo.changelog.count() == 0
2670 2671 for fname in fnames:
2671 2672 if os.path.exists(fname):
2672 2673 f = open(fname, "rb")
2673 2674 else:
2674 2675 f = urllib.urlopen(fname)
2675 2676 gen = changegroup.readbundle(f, fname)
2676 2677 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2677 2678
2678 2679 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2679 2680
2680 2681 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2681 2682 """update working directory
2682 2683
2683 2684 Update the working directory to the specified revision, or the
2684 2685 tip of the current branch if none is specified.
2685 2686
2686 2687 If there are no outstanding changes in the working directory and
2687 2688 there is a linear relationship between the current version and the
2688 2689 requested version, the result is the requested version.
2689 2690
2690 2691 To merge the working directory with another revision, use the
2691 2692 merge command.
2692 2693
2693 2694 By default, update will refuse to run if doing so would require
2694 2695 discarding local changes.
2695 2696 """
2696 2697 if rev and node:
2697 2698 raise util.Abort(_("please specify just one revision"))
2698 2699
2699 2700 if not rev:
2700 2701 rev = node
2701 2702
2702 2703 if date:
2703 2704 if rev:
2704 2705 raise util.Abort(_("you can't specify a revision and a date"))
2705 2706 rev = cmdutil.finddate(ui, repo, date)
2706 2707
2707 2708 if clean:
2708 2709 return hg.clean(repo, rev)
2709 2710 else:
2710 2711 return hg.update(repo, rev)
2711 2712
2712 2713 def verify(ui, repo):
2713 2714 """verify the integrity of the repository
2714 2715
2715 2716 Verify the integrity of the current repository.
2716 2717
2717 2718 This will perform an extensive check of the repository's
2718 2719 integrity, validating the hashes and checksums of each entry in
2719 2720 the changelog, manifest, and tracked files, as well as the
2720 2721 integrity of their crosslinks and indices.
2721 2722 """
2722 2723 return hg.verify(repo)
2723 2724
2724 2725 def version_(ui):
2725 2726 """output version and copyright information"""
2726 2727 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2727 2728 % version.get_version())
2728 2729 ui.status(_(
2729 2730 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2730 2731 "This is free software; see the source for copying conditions. "
2731 2732 "There is NO\nwarranty; "
2732 2733 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2733 2734 ))
2734 2735
2735 2736 # Command options and aliases are listed here, alphabetically
2736 2737
2737 2738 globalopts = [
2738 2739 ('R', 'repository', '',
2739 2740 _('repository root directory or symbolic path name')),
2740 2741 ('', 'cwd', '', _('change working directory')),
2741 2742 ('y', 'noninteractive', None,
2742 2743 _('do not prompt, assume \'yes\' for any required answers')),
2743 2744 ('q', 'quiet', None, _('suppress output')),
2744 2745 ('v', 'verbose', None, _('enable additional output')),
2745 2746 ('', 'config', [], _('set/override config option')),
2746 2747 ('', 'debug', None, _('enable debugging output')),
2747 2748 ('', 'debugger', None, _('start debugger')),
2748 2749 ('', 'encoding', util._encoding, _('set the charset encoding')),
2749 2750 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2750 2751 ('', 'lsprof', None, _('print improved command execution profile')),
2751 2752 ('', 'traceback', None, _('print traceback on exception')),
2752 2753 ('', 'time', None, _('time how long the command takes')),
2753 2754 ('', 'profile', None, _('print command execution profile')),
2754 2755 ('', 'version', None, _('output version information and exit')),
2755 2756 ('h', 'help', None, _('display help and exit')),
2756 2757 ]
2757 2758
2758 2759 dryrunopts = [('n', 'dry-run', None,
2759 2760 _('do not perform actions, just print output'))]
2760 2761
2761 2762 remoteopts = [
2762 2763 ('e', 'ssh', '', _('specify ssh command to use')),
2763 2764 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2764 2765 ]
2765 2766
2766 2767 walkopts = [
2767 2768 ('I', 'include', [], _('include names matching the given patterns')),
2768 2769 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2769 2770 ]
2770 2771
2771 2772 commitopts = [
2772 2773 ('m', 'message', '', _('use <text> as commit message')),
2773 2774 ('l', 'logfile', '', _('read commit message from <file>')),
2774 2775 ]
2775 2776
2776 2777 table = {
2777 2778 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2778 2779 "addremove":
2779 2780 (addremove,
2780 2781 [('s', 'similarity', '',
2781 2782 _('guess renamed files by similarity (0<=s<=100)')),
2782 2783 ] + walkopts + dryrunopts,
2783 2784 _('hg addremove [OPTION]... [FILE]...')),
2784 2785 "^annotate":
2785 2786 (annotate,
2786 2787 [('r', 'rev', '', _('annotate the specified revision')),
2787 2788 ('f', 'follow', None, _('follow file copies and renames')),
2788 2789 ('a', 'text', None, _('treat all files as text')),
2789 2790 ('u', 'user', None, _('list the author')),
2790 2791 ('d', 'date', None, _('list the date')),
2791 2792 ('n', 'number', None, _('list the revision number (default)')),
2792 2793 ('c', 'changeset', None, _('list the changeset')),
2793 2794 ('l', 'line-number', None,
2794 2795 _('show line number at the first appearance'))
2795 2796 ] + walkopts,
2796 2797 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2797 2798 "archive":
2798 2799 (archive,
2799 2800 [('', 'no-decode', None, _('do not pass files through decoders')),
2800 2801 ('p', 'prefix', '', _('directory prefix for files in archive')),
2801 2802 ('r', 'rev', '', _('revision to distribute')),
2802 2803 ('t', 'type', '', _('type of distribution to create')),
2803 2804 ] + walkopts,
2804 2805 _('hg archive [OPTION]... DEST')),
2805 2806 "backout":
2806 2807 (backout,
2807 2808 [('', 'merge', None,
2808 2809 _('merge with old dirstate parent after backout')),
2809 2810 ('d', 'date', '', _('record datecode as commit date')),
2810 2811 ('', 'parent', '', _('parent to choose when backing out merge')),
2811 2812 ('u', 'user', '', _('record user as committer')),
2812 2813 ('r', 'rev', '', _('revision to backout')),
2813 2814 ] + walkopts + commitopts,
2814 2815 _('hg backout [OPTION]... [-r] REV')),
2815 2816 "branch":
2816 2817 (branch,
2817 2818 [('f', 'force', None,
2818 2819 _('set branch name even if it shadows an existing branch'))],
2819 2820 _('hg branch [NAME]')),
2820 2821 "branches":
2821 2822 (branches,
2822 2823 [('a', 'active', False,
2823 2824 _('show only branches that have unmerged heads'))],
2824 2825 _('hg branches [-a]')),
2825 2826 "bundle":
2826 2827 (bundle,
2827 2828 [('f', 'force', None,
2828 2829 _('run even when remote repository is unrelated')),
2829 2830 ('r', 'rev', [],
2830 2831 _('a changeset you would like to bundle')),
2831 2832 ('', 'base', [],
2832 2833 _('a base changeset to specify instead of a destination')),
2833 2834 ] + remoteopts,
2834 2835 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2835 2836 "cat":
2836 2837 (cat,
2837 2838 [('o', 'output', '', _('print output to file with formatted name')),
2838 2839 ('r', 'rev', '', _('print the given revision')),
2839 2840 ] + walkopts,
2840 2841 _('hg cat [OPTION]... FILE...')),
2841 2842 "^clone":
2842 2843 (clone,
2843 2844 [('U', 'noupdate', None, _('do not update the new working directory')),
2844 2845 ('r', 'rev', [],
2845 2846 _('a changeset you would like to have after cloning')),
2846 2847 ('', 'pull', None, _('use pull protocol to copy metadata')),
2847 2848 ('', 'uncompressed', None,
2848 2849 _('use uncompressed transfer (fast over LAN)')),
2849 2850 ] + remoteopts,
2850 2851 _('hg clone [OPTION]... SOURCE [DEST]')),
2851 2852 "^commit|ci":
2852 2853 (commit,
2853 2854 [('A', 'addremove', None,
2854 2855 _('mark new/missing files as added/removed before committing')),
2855 2856 ('d', 'date', '', _('record datecode as commit date')),
2856 2857 ('u', 'user', '', _('record user as committer')),
2857 2858 ] + walkopts + commitopts,
2858 2859 _('hg commit [OPTION]... [FILE]...')),
2859 2860 "copy|cp":
2860 2861 (copy,
2861 2862 [('A', 'after', None, _('record a copy that has already occurred')),
2862 2863 ('f', 'force', None,
2863 2864 _('forcibly copy over an existing managed file')),
2864 2865 ] + walkopts + dryrunopts,
2865 2866 _('hg copy [OPTION]... [SOURCE]... DEST')),
2866 2867 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2867 2868 "debugcomplete":
2868 2869 (debugcomplete,
2869 2870 [('o', 'options', None, _('show the command options'))],
2870 2871 _('debugcomplete [-o] CMD')),
2871 2872 "debuginstall": (debuginstall, [], _('debuginstall')),
2872 2873 "debugrebuildstate":
2873 2874 (debugrebuildstate,
2874 2875 [('r', 'rev', '', _('revision to rebuild to'))],
2875 2876 _('debugrebuildstate [-r REV] [REV]')),
2876 2877 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2877 2878 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2878 2879 "debugstate": (debugstate, [], _('debugstate')),
2879 2880 "debugdate":
2880 2881 (debugdate,
2881 2882 [('e', 'extended', None, _('try extended date formats'))],
2882 2883 _('debugdate [-e] DATE [RANGE]')),
2883 2884 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2884 2885 "debugindex": (debugindex, [], _('debugindex FILE')),
2885 2886 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2886 2887 "debugrename":
2887 2888 (debugrename,
2888 2889 [('r', 'rev', '', _('revision to debug'))],
2889 2890 _('debugrename [-r REV] FILE')),
2890 2891 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2891 2892 "^diff":
2892 2893 (diff,
2893 2894 [('r', 'rev', [], _('revision')),
2894 2895 ('a', 'text', None, _('treat all files as text')),
2895 2896 ('p', 'show-function', None,
2896 2897 _('show which function each change is in')),
2897 2898 ('g', 'git', None, _('use git extended diff format')),
2898 2899 ('', 'nodates', None, _("don't include dates in diff headers")),
2899 2900 ('w', 'ignore-all-space', None,
2900 2901 _('ignore white space when comparing lines')),
2901 2902 ('b', 'ignore-space-change', None,
2902 2903 _('ignore changes in the amount of white space')),
2903 2904 ('B', 'ignore-blank-lines', None,
2904 2905 _('ignore changes whose lines are all blank')),
2905 2906 ] + walkopts,
2906 2907 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2907 2908 "^export":
2908 2909 (export,
2909 2910 [('o', 'output', '', _('print output to file with formatted name')),
2910 2911 ('a', 'text', None, _('treat all files as text')),
2911 2912 ('g', 'git', None, _('use git extended diff format')),
2912 2913 ('', 'nodates', None, _("don't include dates in diff headers")),
2913 2914 ('', 'switch-parent', None, _('diff against the second parent'))],
2914 2915 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2915 2916 "grep":
2916 2917 (grep,
2917 2918 [('0', 'print0', None, _('end fields with NUL')),
2918 2919 ('', 'all', None, _('print all revisions that match')),
2919 2920 ('f', 'follow', None,
2920 2921 _('follow changeset history, or file history across copies and renames')),
2921 2922 ('i', 'ignore-case', None, _('ignore case when matching')),
2922 2923 ('l', 'files-with-matches', None,
2923 2924 _('print only filenames and revs that match')),
2924 2925 ('n', 'line-number', None, _('print matching line numbers')),
2925 2926 ('r', 'rev', [], _('search in given revision range')),
2926 2927 ('u', 'user', None, _('print user who committed change')),
2927 2928 ] + walkopts,
2928 2929 _('hg grep [OPTION]... PATTERN [FILE]...')),
2929 2930 "heads":
2930 2931 (heads,
2931 2932 [('', 'style', '', _('display using template map file')),
2932 2933 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2933 2934 ('', 'template', '', _('display with template'))],
2934 2935 _('hg heads [-r REV] [REV]...')),
2935 2936 "help": (help_, [], _('hg help [COMMAND]')),
2936 2937 "identify|id":
2937 2938 (identify,
2938 2939 [('r', 'rev', '', _('identify the specified rev')),
2939 2940 ('n', 'num', None, _('show local revision number')),
2940 2941 ('i', 'id', None, _('show global revision id')),
2941 2942 ('b', 'branch', None, _('show branch')),
2942 2943 ('t', 'tags', None, _('show tags'))],
2943 2944 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2944 2945 "import|patch":
2945 2946 (import_,
2946 2947 [('p', 'strip', 1,
2947 2948 _('directory strip option for patch. This has the same\n'
2948 2949 'meaning as the corresponding patch option')),
2949 2950 ('b', 'base', '', _('base path')),
2950 2951 ('f', 'force', None,
2951 2952 _('skip check for outstanding uncommitted changes')),
2952 2953 ('', 'exact', None,
2953 2954 _('apply patch to the nodes from which it was generated')),
2954 2955 ('', 'import-branch', None,
2955 2956 _('use any branch information in patch (implied by --exact)'))] + commitopts,
2956 2957 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2957 2958 "incoming|in": (incoming,
2958 2959 [('M', 'no-merges', None, _('do not show merges')),
2959 2960 ('f', 'force', None,
2960 2961 _('run even when remote repository is unrelated')),
2961 2962 ('', 'style', '', _('display using template map file')),
2962 2963 ('n', 'newest-first', None, _('show newest record first')),
2963 2964 ('', 'bundle', '', _('file to store the bundles into')),
2964 2965 ('p', 'patch', None, _('show patch')),
2965 2966 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2966 2967 ('', 'template', '', _('display with template')),
2967 2968 ] + remoteopts,
2968 2969 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2969 2970 ' [--bundle FILENAME] [SOURCE]')),
2970 2971 "^init":
2971 2972 (init,
2972 2973 remoteopts,
2973 2974 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2974 2975 "locate":
2975 2976 (locate,
2976 2977 [('r', 'rev', '', _('search the repository as it stood at rev')),
2977 2978 ('0', 'print0', None,
2978 2979 _('end filenames with NUL, for use with xargs')),
2979 2980 ('f', 'fullpath', None,
2980 2981 _('print complete paths from the filesystem root')),
2981 2982 ] + walkopts,
2982 2983 _('hg locate [OPTION]... [PATTERN]...')),
2983 2984 "^log|history":
2984 2985 (log,
2985 2986 [('f', 'follow', None,
2986 2987 _('follow changeset history, or file history across copies and renames')),
2987 2988 ('', 'follow-first', None,
2988 2989 _('only follow the first parent of merge changesets')),
2989 2990 ('d', 'date', '', _('show revs matching date spec')),
2990 2991 ('C', 'copies', None, _('show copied files')),
2991 2992 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2992 2993 ('l', 'limit', '', _('limit number of changes displayed')),
2993 2994 ('r', 'rev', [], _('show the specified revision or range')),
2994 2995 ('', 'removed', None, _('include revs where files were removed')),
2995 2996 ('M', 'no-merges', None, _('do not show merges')),
2996 2997 ('', 'style', '', _('display using template map file')),
2997 2998 ('m', 'only-merges', None, _('show only merges')),
2998 2999 ('p', 'patch', None, _('show patch')),
2999 3000 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3000 3001 ('', 'template', '', _('display with template')),
3001 3002 ] + walkopts,
3002 3003 _('hg log [OPTION]... [FILE]')),
3003 3004 "manifest": (manifest, [], _('hg manifest [REV]')),
3004 3005 "^merge":
3005 3006 (merge,
3006 3007 [('f', 'force', None, _('force a merge with outstanding changes')),
3007 3008 ('r', 'rev', '', _('revision to merge')),
3008 3009 ],
3009 3010 _('hg merge [-f] [[-r] REV]')),
3010 3011 "outgoing|out": (outgoing,
3011 3012 [('M', 'no-merges', None, _('do not show merges')),
3012 3013 ('f', 'force', None,
3013 3014 _('run even when remote repository is unrelated')),
3014 3015 ('p', 'patch', None, _('show patch')),
3015 3016 ('', 'style', '', _('display using template map file')),
3016 3017 ('r', 'rev', [], _('a specific revision you would like to push')),
3017 3018 ('n', 'newest-first', None, _('show newest record first')),
3018 3019 ('', 'template', '', _('display with template')),
3019 3020 ] + remoteopts,
3020 3021 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3021 3022 "^parents":
3022 3023 (parents,
3023 3024 [('r', 'rev', '', _('show parents from the specified rev')),
3024 3025 ('', 'style', '', _('display using template map file')),
3025 3026 ('', 'template', '', _('display with template'))],
3026 3027 _('hg parents [-r REV] [FILE]')),
3027 3028 "paths": (paths, [], _('hg paths [NAME]')),
3028 3029 "^pull":
3029 3030 (pull,
3030 3031 [('u', 'update', None,
3031 3032 _('update to new tip if changesets were pulled')),
3032 3033 ('f', 'force', None,
3033 3034 _('run even when remote repository is unrelated')),
3034 3035 ('r', 'rev', [],
3035 3036 _('a specific revision up to which you would like to pull')),
3036 3037 ] + remoteopts,
3037 3038 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3038 3039 "^push":
3039 3040 (push,
3040 3041 [('f', 'force', None, _('force push')),
3041 3042 ('r', 'rev', [], _('a specific revision you would like to push')),
3042 3043 ] + remoteopts,
3043 3044 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3044 3045 "debugrawcommit|rawcommit":
3045 3046 (rawcommit,
3046 3047 [('p', 'parent', [], _('parent')),
3047 3048 ('d', 'date', '', _('date code')),
3048 3049 ('u', 'user', '', _('user')),
3049 3050 ('F', 'files', '', _('file list'))
3050 3051 ] + commitopts,
3051 3052 _('hg debugrawcommit [OPTION]... [FILE]...')),
3052 3053 "recover": (recover, [], _('hg recover')),
3053 3054 "^remove|rm":
3054 3055 (remove,
3055 3056 [('A', 'after', None, _('record remove that has already occurred')),
3056 3057 ('f', 'force', None, _('remove file even if modified')),
3057 3058 ] + walkopts,
3058 3059 _('hg remove [OPTION]... FILE...')),
3059 3060 "rename|mv":
3060 3061 (rename,
3061 3062 [('A', 'after', None, _('record a rename that has already occurred')),
3062 3063 ('f', 'force', None,
3063 3064 _('forcibly copy over an existing managed file')),
3064 3065 ] + walkopts + dryrunopts,
3065 3066 _('hg rename [OPTION]... SOURCE... DEST')),
3066 3067 "^revert":
3067 3068 (revert,
3068 3069 [('a', 'all', None, _('revert all changes when no arguments given')),
3069 3070 ('d', 'date', '', _('tipmost revision matching date')),
3070 3071 ('r', 'rev', '', _('revision to revert to')),
3071 3072 ('', 'no-backup', None, _('do not save backup copies of files')),
3072 3073 ] + walkopts + dryrunopts,
3073 3074 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3074 3075 "rollback": (rollback, [], _('hg rollback')),
3075 3076 "root": (root, [], _('hg root')),
3076 3077 "showconfig|debugconfig":
3077 3078 (showconfig,
3078 3079 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3079 3080 _('showconfig [-u] [NAME]...')),
3080 3081 "^serve":
3081 3082 (serve,
3082 3083 [('A', 'accesslog', '', _('name of access log file to write to')),
3083 3084 ('d', 'daemon', None, _('run server in background')),
3084 3085 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3085 3086 ('E', 'errorlog', '', _('name of error log file to write to')),
3086 3087 ('p', 'port', 0, _('port to use (default: 8000)')),
3087 3088 ('a', 'address', '', _('address to use')),
3088 3089 ('n', 'name', '',
3089 3090 _('name to show in web pages (default: working dir)')),
3090 3091 ('', 'webdir-conf', '', _('name of the webdir config file'
3091 3092 ' (serve more than one repo)')),
3092 3093 ('', 'pid-file', '', _('name of file to write process ID to')),
3093 3094 ('', 'stdio', None, _('for remote clients')),
3094 3095 ('t', 'templates', '', _('web templates to use')),
3095 3096 ('', 'style', '', _('template style to use')),
3096 3097 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3097 3098 ('', 'certificate', '', _('SSL certificate file'))],
3098 3099 _('hg serve [OPTION]...')),
3099 3100 "^status|st":
3100 3101 (status,
3101 3102 [('A', 'all', None, _('show status of all files')),
3102 3103 ('m', 'modified', None, _('show only modified files')),
3103 3104 ('a', 'added', None, _('show only added files')),
3104 3105 ('r', 'removed', None, _('show only removed files')),
3105 3106 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3106 3107 ('c', 'clean', None, _('show only files without changes')),
3107 3108 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3108 3109 ('i', 'ignored', None, _('show only ignored files')),
3109 3110 ('n', 'no-status', None, _('hide status prefix')),
3110 3111 ('C', 'copies', None, _('show source of copied files')),
3111 3112 ('0', 'print0', None,
3112 3113 _('end filenames with NUL, for use with xargs')),
3113 3114 ('', 'rev', [], _('show difference from revision')),
3114 3115 ] + walkopts,
3115 3116 _('hg status [OPTION]... [FILE]...')),
3116 3117 "tag":
3117 3118 (tag,
3118 3119 [('f', 'force', None, _('replace existing tag')),
3119 3120 ('l', 'local', None, _('make the tag local')),
3120 3121 ('m', 'message', '', _('message for tag commit log entry')),
3121 3122 ('d', 'date', '', _('record datecode as commit date')),
3122 3123 ('u', 'user', '', _('record user as committer')),
3123 3124 ('r', 'rev', '', _('revision to tag')),
3124 3125 ('', 'remove', None, _('remove a tag'))],
3125 3126 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3126 3127 "tags": (tags, [], _('hg tags')),
3127 3128 "tip":
3128 3129 (tip,
3129 3130 [('', 'style', '', _('display using template map file')),
3130 3131 ('p', 'patch', None, _('show patch')),
3131 3132 ('', 'template', '', _('display with template'))],
3132 3133 _('hg tip [-p]')),
3133 3134 "unbundle":
3134 3135 (unbundle,
3135 3136 [('u', 'update', None,
3136 3137 _('update to new tip if changesets were unbundled'))],
3137 3138 _('hg unbundle [-u] FILE...')),
3138 3139 "^update|up|checkout|co":
3139 3140 (update,
3140 3141 [('C', 'clean', None, _('overwrite locally modified files')),
3141 3142 ('d', 'date', '', _('tipmost revision matching date')),
3142 3143 ('r', 'rev', '', _('revision'))],
3143 3144 _('hg update [-C] [-d DATE] [[-r] REV]')),
3144 3145 "verify": (verify, [], _('hg verify')),
3145 3146 "version": (version_, [], _('hg version')),
3146 3147 }
3147 3148
3148 3149 extensions.commandtable = table
3149 3150
3150 3151 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3151 3152 " debugindex debugindexdot debugdate debuginstall")
3152 3153 optionalrepo = ("paths serve showconfig")
3153 3154
3154 3155 def dispatch(args, argv0=None):
3155 3156 try:
3156 3157 u = ui.ui(traceback='--traceback' in args)
3157 3158 except util.Abort, inst:
3158 3159 sys.stderr.write(_("abort: %s\n") % inst)
3159 3160 return -1
3160 3161 return cmdutil.runcatch(u, args, argv0=argv0)
3161 3162
3162 3163 def run():
3163 3164 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
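The dispatch()/run() pair above is also the natural entry point for driving hg in-process: build the argument list yourself and let cmdutil.runcatch handle errors. A minimal sketch, assuming the mercurial package is importable; the choice of the 'version' command is arbitrary (it needs no repository):

    from mercurial import commands

    # run "hg version" in-process; dispatch returns the command's exit status
    status = commands.dispatch(['version'])
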
@@ -1,507 +1,502 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 from node import *
11 11 from i18n import _
12 12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 13 import cStringIO
14 14
15 15 _unknown = ('?', 0, 0, 0)
16 16 _format = ">cllll"
17 17
18 18 class dirstate(object):
19 19
20 20 def __init__(self, opener, ui, root):
21 21 self._opener = opener
22 22 self._root = root
23 23 self._dirty = False
24 24 self._ui = ui
25 25
26 26 def __getattr__(self, name):
27 27 if name == '_map':
28 28 self._read()
29 29 return self._map
30 30 elif name == '_copymap':
31 31 self._read()
32 32 return self._copymap
33 33 elif name == '_branch':
34 34 try:
35 35 self._branch = (self._opener("branch").read().strip()
36 36 or "default")
37 37 except IOError:
38 38 self._branch = "default"
39 39 return self._branch
40 40 elif name == '_pl':
41 41 self._pl = [nullid, nullid]
42 42 try:
43 43 st = self._opener("dirstate").read(40)
44 44 if len(st) == 40:
45 45 self._pl = st[:20], st[20:40]
46 46 except IOError, err:
47 47 if err.errno != errno.ENOENT: raise
48 48 return self._pl
49 49 elif name == '_dirs':
50 50 self._dirs = {}
51 51 for f in self._map:
52 52 self._incpath(f)
53 53 return self._dirs
54 54 elif name == '_ignore':
55 55 files = [self._join('.hgignore')]
56 56 for name, path in self._ui.configitems("ui"):
57 57 if name == 'ignore' or name.startswith('ignore.'):
58 58 files.append(os.path.expanduser(path))
59 59 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
60 60 return self._ignore
61 61 elif name == '_slash':
62 62 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
63 63 return self._slash
64 64 else:
65 65 raise AttributeError, name
66 66
67 67 def _join(self, f):
68 68 return os.path.join(self._root, f)
69 69
70 70 def getcwd(self):
71 71 cwd = os.getcwd()
72 72 if cwd == self._root: return ''
73 73 # self._root ends with a path separator if self._root is '/' or 'C:\'
74 74 rootsep = self._root
75 75 if not rootsep.endswith(os.sep):
76 76 rootsep += os.sep
77 77 if cwd.startswith(rootsep):
78 78 return cwd[len(rootsep):]
79 79 else:
80 80 # we're outside the repo. return an absolute path.
81 81 return cwd
82 82
83 83 def pathto(self, f, cwd=None):
84 84 if cwd is None:
85 85 cwd = self.getcwd()
86 86 path = util.pathto(self._root, cwd, f)
87 87 if self._slash:
88 88 return path.replace(os.sep, '/')
89 89 return path
90 90
91 def __del__(self):
92 self.write()
93
94 91 def __getitem__(self, key):
95 return self._map[key]
92 ''' current states:
93 n normal
94 m needs merging
95 r marked for removal
96 a marked for addition
97 ? not tracked'''
98 return self._map.get(key, ("?",))[0]
96 99
97 100 def __contains__(self, key):
98 101 return key in self._map
99 102
100 103 def __iter__(self):
101 104 a = self._map.keys()
102 105 a.sort()
103 106 for x in a:
104 107 yield x
105 108
106 109 def parents(self):
107 110 return self._pl
108 111
109 112 def branch(self):
110 113 return self._branch
111 114
112 115 def setparents(self, p1, p2=nullid):
113 116 self._dirty = True
114 117 self._pl = p1, p2
115 118
116 119 def setbranch(self, branch):
117 120 self._branch = branch
118 121 self._opener("branch", "w").write(branch + '\n')
119 122
120 def state(self, key):
121 ''' current states:
122 n normal
123 m needs merging
124 r marked for removal
125 a marked for addition'''
126 return self._map.get(key, ("?",))[0]
127
128 123 def _read(self):
129 124 self._map = {}
130 125 self._copymap = {}
131 126 self._pl = [nullid, nullid]
132 127 try:
133 128 st = self._opener("dirstate").read()
134 129 except IOError, err:
135 130 if err.errno != errno.ENOENT: raise
136 131 return
137 132 if not st:
138 133 return
139 134
140 135 self._pl = [st[:20], st[20: 40]]
141 136
142 137 # deref fields so they will be local in loop
143 138 dmap = self._map
144 139 copymap = self._copymap
145 140 unpack = struct.unpack
146 141
147 142 pos = 40
148 143 e_size = struct.calcsize(_format)
149 144
150 145 while pos < len(st):
151 146 newpos = pos + e_size
152 147 e = unpack(_format, st[pos:newpos])
153 148 l = e[4]
154 149 pos = newpos
155 150 newpos = pos + l
156 151 f = st[pos:newpos]
157 152 if '\0' in f:
158 153 f, c = f.split('\0')
159 154 copymap[f] = c
160 155 dmap[f] = e[:4]
161 156 pos = newpos
162 157
163 158 def invalidate(self):
164 159 for a in "_map _copymap _branch _pl _dirs _ignore".split():
165 160 if hasattr(self, a):
166 161 self.__delattr__(a)
167 162 self._dirty = False
168 163
169 164 def copy(self, source, dest):
170 165 self._dirty = True
171 166 self._copymap[dest] = source
172 167
173 168 def copied(self, file):
174 169 return self._copymap.get(file, None)
175 170
176 171 def copies(self):
177 172 return self._copymap
178 173
179 174 def _incpath(self, path):
180 175 for c in strutil.findall(path, '/'):
181 176 pc = path[:c]
182 177 self._dirs.setdefault(pc, 0)
183 178 self._dirs[pc] += 1
184 179
185 180 def _decpath(self, path):
186 181 for c in strutil.findall(path, '/'):
187 182 pc = path[:c]
188 183 self._dirs.setdefault(pc, 0)
189 184 self._dirs[pc] -= 1
190 185
191 186 def _incpathcheck(self, f):
192 187 if '\r' in f or '\n' in f:
193 188 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
194 189 # shadows
195 190 if f in self._dirs:
196 191 raise util.Abort(_('directory named %r already in dirstate') % f)
197 192 for c in strutil.rfindall(f, '/'):
198 193 d = f[:c]
199 194 if d in self._dirs:
200 195 break
201 196 if d in self._map:
202 197 raise util.Abort(_('file named %r already in dirstate') % d)
203 198 self._incpath(f)
204 199
205 200 def normal(self, f):
206 201 'mark a file normal'
207 202 self._dirty = True
208 203 s = os.lstat(self._join(f))
209 204 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
210 205 if self._copymap.has_key(f):
211 206 del self._copymap[f]
212 207
213 208 def normaldirty(self, f):
214 209 'mark a file normal, but possibly dirty'
215 210 self._dirty = True
216 211 s = os.lstat(self._join(f))
217 212 self._map[f] = ('n', s.st_mode, -1, -1)
218 213 if f in self._copymap:
219 214 del self._copymap[f]
220 215
221 216 def add(self, f):
222 217 'mark a file added'
223 218 self._dirty = True
224 219 self._incpathcheck(f)
225 220 s = os.lstat(self._join(f))
226 221 self._map[f] = ('a', s.st_mode, s.st_size, s.st_mtime)
227 222 if f in self._copymap:
228 223 del self._copymap[f]
229 224
230 225 def remove(self, f):
231 226 'mark a file removed'
232 227 self._dirty = True
233 228 self._map[f] = ('r', 0, 0, 0)
234 229 self._decpath(f)
235 230 if f in self._copymap:
236 231 del self._copymap[f]
237 232
238 233 def merge(self, f):
239 234 'mark a file merged'
240 235 self._dirty = True
241 236 s = os.lstat(self._join(f))
242 237 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
243 238 if f in self._copymap:
244 239 del self._copymap[f]
245 240
246 241 def forget(self, f):
247 242 'forget a file'
248 243 self._dirty = True
249 244 try:
250 245 del self._map[f]
251 246 self._decpath(f)
252 247 except KeyError:
253 248 self._ui.warn(_("not in dirstate: %s!\n") % f)
254 249
255 250 def rebuild(self, parent, files):
256 251 self.invalidate()
257 252 for f in files:
258 253 if files.execf(f):
259 254 self._map[f] = ('n', 0777, -1, 0)
260 255 else:
261 256 self._map[f] = ('n', 0666, -1, 0)
262 257 self._pl = (parent, nullid)
263 258 self._dirty = True
264 259
265 260 def write(self):
266 261 if not self._dirty:
267 262 return
268 263 cs = cStringIO.StringIO()
269 264 cs.write("".join(self._pl))
270 265 for f, e in self._map.iteritems():
271 266 c = self.copied(f)
272 267 if c:
273 268 f = f + "\0" + c
274 269 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
275 270 cs.write(e)
276 271 cs.write(f)
277 272 st = self._opener("dirstate", "w", atomictemp=True)
278 273 st.write(cs.getvalue())
279 274 st.rename()
280 275 self._dirty = False
281 276
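For reference, each record that write() emits above (and _read() parses) is the ">cllll" header (state, mode, size, mtime, name length) followed by the file name, with a copy source appended after a NUL byte. A minimal round-trip sketch with hypothetical values:

    import struct

    e = ('n', 0666, 1234, 1178000000)          # state, mode, size, mtime
    name = 'dst.c\0src.c'                      # a copied file: dest NUL source
    rec = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(name)) + name

    hdr = struct.calcsize(">cllll")
    state, mode, size, mtime, flen = struct.unpack(">cllll", rec[:hdr])
    assert (state, rec[hdr:hdr + flen].split('\0')) == ('n', ['dst.c', 'src.c'])
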
282 277 def filterfiles(self, files):
283 278 ret = {}
284 279 unknown = []
285 280
286 281 for x in files:
287 282 if x == '.':
288 283 return self._map.copy()
289 284 if x not in self._map:
290 285 unknown.append(x)
291 286 else:
292 287 ret[x] = self._map[x]
293 288
294 289 if not unknown:
295 290 return ret
296 291
297 292 b = self._map.keys()
298 293 b.sort()
299 294 blen = len(b)
300 295
301 296 for x in unknown:
302 297 bs = bisect.bisect(b, "%s%s" % (x, '/'))
303 298 while bs < blen:
304 299 s = b[bs]
305 300 if len(s) > len(x) and s.startswith(x):
306 301 ret[s] = self._map[s]
307 302 else:
308 303 break
309 304 bs += 1
310 305 return ret
311 306
312 307 def _supported(self, f, st, verbose=False):
313 308 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
314 309 return True
315 310 if verbose:
316 311 kind = 'unknown'
317 312 if stat.S_ISCHR(st.st_mode): kind = _('character device')
318 313 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
319 314 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
320 315 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
321 316 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
322 317 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
323 318 % (self.pathto(f), kind))
324 319 return False
325 320
326 321 def walk(self, files=None, match=util.always, badmatch=None):
327 322 # filter out the stat
328 323 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
329 324 yield src, f
330 325
331 326 def statwalk(self, files=None, match=util.always, ignored=False,
332 327 badmatch=None, directories=False):
333 328 '''
334 329 walk recursively through the directory tree, finding all files
335 330 matched by the match function
336 331
337 332 results are yielded in a tuple (src, filename, st), where src
338 333 is one of:
339 334 'f' the file was found in the directory tree
340 335 'd' the file is a directory of the tree
341 336 'm' the file was only in the dirstate and not in the tree
342 337 'b' file was not found and matched badmatch
343 338
344 339 and st is the stat result if the file was found in the directory.
345 340 '''
346 341
347 342 # walk all files by default
348 343 if not files:
349 344 files = ['.']
350 345 dc = self._map.copy()
351 346 else:
352 347 files = util.unique(files)
353 348 dc = self.filterfiles(files)
354 349
355 350 def imatch(file_):
356 351 if file_ not in dc and self._ignore(file_):
357 352 return False
358 353 return match(file_)
359 354
360 355 ignore = self._ignore
361 356 if ignored:
362 357 imatch = match
363 358 ignore = util.never
364 359
365 360 # self._root may end with a path separator when self._root == '/'
366 361 common_prefix_len = len(self._root)
367 362 if not self._root.endswith(os.sep):
368 363 common_prefix_len += 1
369 364 # recursion free walker, faster than os.walk.
370 365 def findfiles(s):
371 366 work = [s]
372 367 if directories:
373 368 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
374 369 while work:
375 370 top = work.pop()
376 371 names = os.listdir(top)
377 372 names.sort()
378 373 # nd is the top of the repository dir tree
379 374 nd = util.normpath(top[common_prefix_len:])
380 375 if nd == '.':
381 376 nd = ''
382 377 else:
383 378 # do not recurse into a repo contained in this
384 379 # one. use bisect to find the .hg directory so speed
385 380 # is good on big directories.
386 381 hg = bisect.bisect_left(names, '.hg')
387 382 if hg < len(names) and names[hg] == '.hg':
388 383 if os.path.isdir(os.path.join(top, '.hg')):
389 384 continue
390 385 for f in names:
391 386 np = util.pconvert(os.path.join(nd, f))
392 387 if seen(np):
393 388 continue
394 389 p = os.path.join(top, f)
395 390 # don't trip over symlinks
396 391 st = os.lstat(p)
397 392 if stat.S_ISDIR(st.st_mode):
398 393 if not ignore(np):
399 394 work.append(p)
400 395 if directories:
401 396 yield 'd', np, st
402 397 if imatch(np) and np in dc:
403 398 yield 'm', np, st
404 399 elif imatch(np):
405 400 if self._supported(np, st):
406 401 yield 'f', np, st
407 402 elif np in dc:
408 403 yield 'm', np, st
409 404
410 405 known = {'.hg': 1}
411 406 def seen(fn):
412 407 if fn in known: return True
413 408 known[fn] = 1
414 409
415 410 # step one, find all files that match our criteria
416 411 files.sort()
417 412 for ff in files:
418 413 nf = util.normpath(ff)
419 414 f = self._join(ff)
420 415 try:
421 416 st = os.lstat(f)
422 417 except OSError, inst:
423 418 found = False
424 419 for fn in dc:
425 420 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
426 421 found = True
427 422 break
428 423 if not found:
429 424 if inst.errno != errno.ENOENT or not badmatch:
430 425 self._ui.warn('%s: %s\n' %
431 426 (self.pathto(ff), inst.strerror))
432 427 elif badmatch and badmatch(ff) and imatch(nf):
433 428 yield 'b', ff, None
434 429 continue
435 430 if stat.S_ISDIR(st.st_mode):
436 431 cmp1 = (lambda x, y: cmp(x[1], y[1]))
437 432 sorted_ = [ x for x in findfiles(f) ]
438 433 sorted_.sort(cmp1)
439 434 for e in sorted_:
440 435 yield e
441 436 else:
442 437 if not seen(nf) and match(nf):
443 438 if self._supported(ff, st, verbose=True):
444 439 yield 'f', nf, st
445 440 elif ff in dc:
446 441 yield 'm', nf, st
447 442
448 443 # step two, run through anything left in the dc hash and yield
449 444 # if we haven't already seen it
450 445 ks = dc.keys()
451 446 ks.sort()
452 447 for k in ks:
453 448 if not seen(k) and imatch(k):
454 449 yield 'm', k, None
455 450
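A hedged usage sketch of the (src, filename, st) protocol documented in statwalk's docstring above, assuming ds is a dirstate instance; status() below is essentially a richer version of this loop:

    found, missing = [], []
    for src, f, st in ds.statwalk():
        if src == 'f':            # found in the working tree; st is os.lstat(f)
            found.append(f)
        elif src == 'm':          # only in the dirstate, not in the tree
            missing.append(f)     # st may be None for these entries
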
456 451 def status(self, files=None, match=util.always, list_ignored=False,
457 452 list_clean=False):
458 453 lookup, modified, added, unknown, ignored = [], [], [], [], []
459 454 removed, deleted, clean = [], [], []
460 455
461 456 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
462 457 try:
463 type_, mode, size, time = self[fn]
458 type_, mode, size, time = self._map[fn]
464 459 except KeyError:
465 460 if list_ignored and self._ignore(fn):
466 461 ignored.append(fn)
467 462 else:
468 463 unknown.append(fn)
469 464 continue
470 465 if src == 'm':
471 466 nonexistent = True
472 467 if not st:
473 468 try:
474 469 st = os.lstat(self._join(fn))
475 470 except OSError, inst:
476 471 if inst.errno != errno.ENOENT:
477 472 raise
478 473 st = None
479 474 # We need to re-check that it is a valid file
480 475 if st and self._supported(fn, st):
481 476 nonexistent = False
482 477 # XXX: what to do with files no longer present in the fs
483 478 # that are not removed in the dirstate?
484 479 if nonexistent and type_ in "nm":
485 480 deleted.append(fn)
486 481 continue
487 482 # check the common case first
488 483 if type_ == 'n':
489 484 if not st:
490 485 st = os.lstat(self._join(fn))
491 486 if (size >= 0 and (size != st.st_size
492 487 or (mode ^ st.st_mode) & 0100)
493 488 or fn in self._copymap):
494 489 modified.append(fn)
495 490 elif time != int(st.st_mtime):
496 491 lookup.append(fn)
497 492 elif list_clean:
498 493 clean.append(fn)
499 494 elif type_ == 'm':
500 495 modified.append(fn)
501 496 elif type_ == 'a':
502 497 added.append(fn)
503 498 elif type_ == 'r':
504 499 removed.append(fn)
505 500
506 501 return (lookup, modified, added, removed, deleted, unknown, ignored,
507 502 clean)
@@ -1,1949 +1,1949 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util, extensions, hook
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = ('lookup', 'changegroupsubset')
17 17 supported = ('revlogv1', 'store')
18 18
19 19 def __del__(self):
20 20 self.transhandle = None
21 21 def __init__(self, parentui, path=None, create=0):
22 22 repo.repository.__init__(self)
23 23 self.path = path
24 24 self.root = os.path.realpath(path)
25 25 self.path = os.path.join(self.root, ".hg")
26 26 self.origroot = path
27 27 self.opener = util.opener(self.path)
28 28 self.wopener = util.opener(self.root)
29 29
30 30 if not os.path.isdir(self.path):
31 31 if create:
32 32 if not os.path.exists(path):
33 33 os.mkdir(path)
34 34 os.mkdir(self.path)
35 35 requirements = ["revlogv1"]
36 36 if parentui.configbool('format', 'usestore', True):
37 37 os.mkdir(os.path.join(self.path, "store"))
38 38 requirements.append("store")
39 39 # create an invalid changelog
40 40 self.opener("00changelog.i", "a").write(
41 41 '\0\0\0\2' # represents revlogv2
42 42 ' dummy changelog to prevent using the old repo layout'
43 43 )
44 44 reqfile = self.opener("requires", "w")
45 45 for r in requirements:
46 46 reqfile.write("%s\n" % r)
47 47 reqfile.close()
48 48 else:
49 49 raise repo.RepoError(_("repository %s not found") % path)
50 50 elif create:
51 51 raise repo.RepoError(_("repository %s already exists") % path)
52 52 else:
53 53 # find requirements
54 54 try:
55 55 requirements = self.opener("requires").read().splitlines()
56 56 except IOError, inst:
57 57 if inst.errno != errno.ENOENT:
58 58 raise
59 59 requirements = []
60 60 # check them
61 61 for r in requirements:
62 62 if r not in self.supported:
63 63 raise repo.RepoError(_("requirement '%s' not supported") % r)
64 64
65 65 # setup store
66 66 if "store" in requirements:
67 67 self.encodefn = util.encodefilename
68 68 self.decodefn = util.decodefilename
69 69 self.spath = os.path.join(self.path, "store")
70 70 else:
71 71 self.encodefn = lambda x: x
72 72 self.decodefn = lambda x: x
73 73 self.spath = self.path
74 74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
75 75
76 76 self.ui = ui.ui(parentui=parentui)
77 77 try:
78 78 self.ui.readconfig(self.join("hgrc"), self.root)
79 79 extensions.loadall(self.ui)
80 80 except IOError:
81 81 pass
82 82
83 83 self.tagscache = None
84 84 self.branchcache = None
85 85 self.nodetagscache = None
86 86 self.filterpats = {}
87 87 self.transhandle = None
88 88
89 89 def __getattr__(self, name):
90 90 if name == 'changelog':
91 91 self.changelog = changelog.changelog(self.sopener)
92 92 self.sopener.defversion = self.changelog.version
93 93 return self.changelog
94 94 if name == 'manifest':
95 95 self.changelog
96 96 self.manifest = manifest.manifest(self.sopener)
97 97 return self.manifest
98 98 if name == 'dirstate':
99 99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
100 100 return self.dirstate
101 101 else:
102 102 raise AttributeError, name
103 103
104 104 def url(self):
105 105 return 'file:' + self.root
106 106
107 107 def hook(self, name, throw=False, **args):
108 108 return hook.hook(self.ui, self, name, throw, **args)
109 109
110 110 tag_disallowed = ':\r\n'
111 111
112 112 def _tag(self, name, node, message, local, user, date, parent=None,
113 113 extra={}):
114 114 use_dirstate = parent is None
115 115
116 116 for c in self.tag_disallowed:
117 117 if c in name:
118 118 raise util.Abort(_('%r cannot be used in a tag name') % c)
119 119
120 120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
121 121
122 122 def writetag(fp, name, munge, prevtags):
123 123 if prevtags and prevtags[-1] != '\n':
124 124 fp.write('\n')
125 125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
126 126 fp.close()
127 127 self.hook('tag', node=hex(node), tag=name, local=local)
128
128
129 129 prevtags = ''
130 130 if local:
131 131 try:
132 132 fp = self.opener('localtags', 'r+')
133 133 except IOError, err:
134 134 fp = self.opener('localtags', 'a')
135 135 else:
136 136 prevtags = fp.read()
137 137
138 138 # local tags are stored in the current charset
139 139 writetag(fp, name, None, prevtags)
140 140 return
141 141
142 142 if use_dirstate:
143 143 try:
144 144 fp = self.wfile('.hgtags', 'rb+')
145 145 except IOError, err:
146 146 fp = self.wfile('.hgtags', 'ab')
147 147 else:
148 148 prevtags = fp.read()
149 149 else:
150 150 try:
151 151 prevtags = self.filectx('.hgtags', parent).data()
152 152 except revlog.LookupError:
153 153 pass
154 154 fp = self.wfile('.hgtags', 'wb')
155 155
156 156 # committed tags are stored in UTF-8
157 157 writetag(fp, name, util.fromlocal, prevtags)
158 158
159 if use_dirstate and self.dirstate.state('.hgtags') == '?':
159 if use_dirstate and '.hgtags' not in self.dirstate:
160 160 self.add(['.hgtags'])
161 161
162 162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
163 163 extra=extra)
164 164
165 165 self.hook('tag', node=hex(node), tag=name, local=local)
166 166
167 167 return tagnode
168 168
169 169 def tag(self, name, node, message, local, user, date):
170 170 '''tag a revision with a symbolic name.
171 171
172 172 if local is True, the tag is stored in a per-repository file.
173 173 otherwise, it is stored in the .hgtags file, and a new
174 174 changeset is committed with the change.
175 175
176 176 keyword arguments:
177 177
178 178 local: whether to store tag in non-version-controlled file
179 179 (default False)
180 180
181 181 message: commit message to use if committing
182 182
183 183 user: name of user to use if committing
184 184
185 185 date: date tuple to use if committing'''
186 186
187 187 for x in self.status()[:5]:
188 188 if '.hgtags' in x:
189 189 raise util.Abort(_('working copy of .hgtags is changed '
190 190 '(please commit .hgtags manually)'))
191 191
192 192
193 193 self._tag(name, node, message, local, user, date)
194 194
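A hedged usage sketch of the keyword arguments documented in the docstring above; the tag name and message are hypothetical, and passing None for user and date is assumed to fall back to the configured defaults when the change is committed:

    from mercurial.node import short

    node = repo.changelog.tip()
    repo.tag('v1.0', node,
             'Added tag v1.0 for changeset %s' % short(node),
             False, None, None)    # local=False: record the tag in .hgtags
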
195 195 def tags(self):
196 196 '''return a mapping of tag to node'''
197 197 if self.tagscache:
198 198 return self.tagscache
199 199
200 200 globaltags = {}
201 201
202 202 def readtags(lines, fn):
203 203 filetags = {}
204 204 count = 0
205 205
206 206 def warn(msg):
207 207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
208 208
209 209 for l in lines:
210 210 count += 1
211 211 if not l:
212 212 continue
213 213 s = l.split(" ", 1)
214 214 if len(s) != 2:
215 215 warn(_("cannot parse entry"))
216 216 continue
217 217 node, key = s
218 218 key = util.tolocal(key.strip()) # stored in UTF-8
219 219 try:
220 220 bin_n = bin(node)
221 221 except TypeError:
222 222 warn(_("node '%s' is not well formed") % node)
223 223 continue
224 224 if bin_n not in self.changelog.nodemap:
225 225 warn(_("tag '%s' refers to unknown node") % key)
226 226 continue
227 227
228 228 h = []
229 229 if key in filetags:
230 230 n, h = filetags[key]
231 231 h.append(n)
232 232 filetags[key] = (bin_n, h)
233 233
234 234 for k, nh in filetags.items():
235 235 if k not in globaltags:
236 236 globaltags[k] = nh
237 237 continue
238 238 # we prefer the global tag if:
239 239 # it supersedes us OR
240 240 # we supersede each other and it has a higher rank
241 241 # otherwise we win because we're tip-most
242 242 an, ah = nh
243 243 bn, bh = globaltags[k]
244 244 if (bn != an and an in bh and
245 245 (bn not in ah or len(bh) > len(ah))):
246 246 an = bn
247 247 ah.extend([n for n in bh if n not in ah])
248 248 globaltags[k] = an, ah
249 249
250 250 # read the tags file from each head, ending with the tip
251 251 f = None
252 252 for rev, node, fnode in self._hgtagsnodes():
253 253 f = (f and f.filectx(fnode) or
254 254 self.filectx('.hgtags', fileid=fnode))
255 255 readtags(f.data().splitlines(), f)
256 256
257 257 try:
258 258 data = util.fromlocal(self.opener("localtags").read())
259 259 # localtags are stored in the local character set
260 260 # while the internal tag table is stored in UTF-8
261 261 readtags(data.splitlines(), "localtags")
262 262 except IOError:
263 263 pass
264 264
265 265 self.tagscache = {}
266 266 for k,nh in globaltags.items():
267 267 n = nh[0]
268 268 if n != nullid:
269 269 self.tagscache[k] = n
270 270 self.tagscache['tip'] = self.changelog.tip()
271 271
272 272 return self.tagscache
273 273
274 274 def _hgtagsnodes(self):
275 275 heads = self.heads()
276 276 heads.reverse()
277 277 last = {}
278 278 ret = []
279 279 for node in heads:
280 280 c = self.changectx(node)
281 281 rev = c.rev()
282 282 try:
283 283 fnode = c.filenode('.hgtags')
284 284 except revlog.LookupError:
285 285 continue
286 286 ret.append((rev, node, fnode))
287 287 if fnode in last:
288 288 ret[last[fnode]] = None
289 289 last[fnode] = len(ret) - 1
290 290 return [item for item in ret if item]
291 291
292 292 def tagslist(self):
293 293 '''return a list of tags ordered by revision'''
294 294 l = []
295 295 for t, n in self.tags().items():
296 296 try:
297 297 r = self.changelog.rev(n)
298 298 except:
299 299 r = -2 # sort to the beginning of the list if unknown
300 300 l.append((r, t, n))
301 301 l.sort()
302 302 return [(t, n) for r, t, n in l]
303 303
304 304 def nodetags(self, node):
305 305 '''return the tags associated with a node'''
306 306 if not self.nodetagscache:
307 307 self.nodetagscache = {}
308 308 for t, n in self.tags().items():
309 309 self.nodetagscache.setdefault(n, []).append(t)
310 310 return self.nodetagscache.get(node, [])
311 311
312 312 def _branchtags(self):
313 313 partial, last, lrev = self._readbranchcache()
314 314
315 315 tiprev = self.changelog.count() - 1
316 316 if lrev != tiprev:
317 317 self._updatebranchcache(partial, lrev+1, tiprev+1)
318 318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
319 319
320 320 return partial
321 321
322 322 def branchtags(self):
323 323 if self.branchcache is not None:
324 324 return self.branchcache
325 325
326 326 self.branchcache = {} # avoid recursion in changectx
327 327 partial = self._branchtags()
328 328
329 329 # the branch cache is stored on disk as UTF-8, but in the local
330 330 # charset internally
331 331 for k, v in partial.items():
332 332 self.branchcache[util.tolocal(k)] = v
333 333 return self.branchcache
334 334
335 335 def _readbranchcache(self):
336 336 partial = {}
337 337 try:
338 338 f = self.opener("branch.cache")
339 339 lines = f.read().split('\n')
340 340 f.close()
341 341 except (IOError, OSError):
342 342 return {}, nullid, nullrev
343 343
344 344 try:
345 345 last, lrev = lines.pop(0).split(" ", 1)
346 346 last, lrev = bin(last), int(lrev)
347 347 if not (lrev < self.changelog.count() and
348 348 self.changelog.node(lrev) == last): # sanity check
349 349 # invalidate the cache
350 350 raise ValueError('Invalid branch cache: unknown tip')
351 351 for l in lines:
352 352 if not l: continue
353 353 node, label = l.split(" ", 1)
354 354 partial[label.strip()] = bin(node)
355 355 except (KeyboardInterrupt, util.SignalInterrupt):
356 356 raise
357 357 except Exception, inst:
358 358 if self.ui.debugflag:
359 359 self.ui.warn(str(inst), '\n')
360 360 partial, last, lrev = {}, nullid, nullrev
361 361 return partial, last, lrev
362 362
363 363 def _writebranchcache(self, branches, tip, tiprev):
364 364 try:
365 365 f = self.opener("branch.cache", "w", atomictemp=True)
366 366 f.write("%s %s\n" % (hex(tip), tiprev))
367 367 for label, node in branches.iteritems():
368 368 f.write("%s %s\n" % (hex(node), label))
369 369 f.rename()
370 370 except (IOError, OSError):
371 371 pass
372 372
373 373 def _updatebranchcache(self, partial, start, end):
374 374 for r in xrange(start, end):
375 375 c = self.changectx(r)
376 376 b = c.branch()
377 377 partial[b] = c.node()
378 378
379 379 def lookup(self, key):
380 380 if key == '.':
381 381 key, second = self.dirstate.parents()
382 382 if key == nullid:
383 383 raise repo.RepoError(_("no revision checked out"))
384 384 if second != nullid:
385 385 self.ui.warn(_("warning: working directory has two parents, "
386 386 "tag '.' uses the first\n"))
387 387 elif key == 'null':
388 388 return nullid
389 389 n = self.changelog._match(key)
390 390 if n:
391 391 return n
392 392 if key in self.tags():
393 393 return self.tags()[key]
394 394 if key in self.branchtags():
395 395 return self.branchtags()[key]
396 396 n = self.changelog._partialmatch(key)
397 397 if n:
398 398 return n
399 399 raise repo.RepoError(_("unknown revision '%s'") % key)
400 400
401 401 def dev(self):
402 402 return os.lstat(self.path).st_dev
403 403
404 404 def local(self):
405 405 return True
406 406
407 407 def join(self, f):
408 408 return os.path.join(self.path, f)
409 409
410 410 def sjoin(self, f):
411 411 f = self.encodefn(f)
412 412 return os.path.join(self.spath, f)
413 413
414 414 def wjoin(self, f):
415 415 return os.path.join(self.root, f)
416 416
417 417 def file(self, f):
418 418 if f[0] == '/':
419 419 f = f[1:]
420 420 return filelog.filelog(self.sopener, f)
421 421
422 422 def changectx(self, changeid=None):
423 423 return context.changectx(self, changeid)
424 424
425 425 def workingctx(self):
426 426 return context.workingctx(self)
427 427
428 428 def parents(self, changeid=None):
429 429 '''
430 430 get list of changectxs for parents of changeid or working directory
431 431 '''
432 432 if changeid is None:
433 433 pl = self.dirstate.parents()
434 434 else:
435 435 n = self.changelog.lookup(changeid)
436 436 pl = self.changelog.parents(n)
437 437 if pl[1] == nullid:
438 438 return [self.changectx(pl[0])]
439 439 return [self.changectx(pl[0]), self.changectx(pl[1])]
440 440
441 441 def filectx(self, path, changeid=None, fileid=None):
442 442 """changeid can be a changeset revision, node, or tag.
443 443 fileid can be a file revision or node."""
444 444 return context.filectx(self, path, changeid, fileid)
445 445
446 446 def getcwd(self):
447 447 return self.dirstate.getcwd()
448 448
449 449 def pathto(self, f, cwd=None):
450 450 return self.dirstate.pathto(f, cwd)
451 451
452 452 def wfile(self, f, mode='r'):
453 453 return self.wopener(f, mode)
454 454
455 455 def _link(self, f):
456 456 return os.path.islink(self.wjoin(f))
457 457
458 458 def _filter(self, filter, filename, data):
459 459 if filter not in self.filterpats:
460 460 l = []
461 461 for pat, cmd in self.ui.configitems(filter):
462 462 mf = util.matcher(self.root, "", [pat], [], [])[1]
463 463 l.append((mf, cmd))
464 464 self.filterpats[filter] = l
465 465
466 466 for mf, cmd in self.filterpats[filter]:
467 467 if mf(filename):
468 468 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
469 469 data = util.filter(data, cmd)
470 470 break
471 471
472 472 return data
473 473
474 474 def wread(self, filename):
475 475 if self._link(filename):
476 476 data = os.readlink(self.wjoin(filename))
477 477 else:
478 478 data = self.wopener(filename, 'r').read()
479 479 return self._filter("encode", filename, data)
480 480
481 481 def wwrite(self, filename, data, flags):
482 482 data = self._filter("decode", filename, data)
483 483 if "l" in flags:
484 484 self.wopener.symlink(data, filename)
485 485 else:
486 486 try:
487 487 if self._link(filename):
488 488 os.unlink(self.wjoin(filename))
489 489 except OSError:
490 490 pass
491 491 self.wopener(filename, 'w').write(data)
492 492 util.set_exec(self.wjoin(filename), "x" in flags)
493 493
494 494 def wwritedata(self, filename, data):
495 495 return self._filter("decode", filename, data)
496 496
497 497 def transaction(self):
498 498 tr = self.transhandle
499 499 if tr != None and tr.running():
500 500 return tr.nest()
501 501
502 502 # save dirstate for rollback
503 503 try:
504 504 ds = self.opener("dirstate").read()
505 505 except IOError:
506 506 ds = ""
507 507 self.opener("journal.dirstate", "w").write(ds)
508 508
509 509 renames = [(self.sjoin("journal"), self.sjoin("undo")),
510 510 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
511 511 tr = transaction.transaction(self.ui.warn, self.sopener,
512 512 self.sjoin("journal"),
513 513 aftertrans(renames))
514 514 self.transhandle = tr
515 515 return tr
516 516
517 517 def recover(self):
518 518 l = self.lock()
519 519 if os.path.exists(self.sjoin("journal")):
520 520 self.ui.status(_("rolling back interrupted transaction\n"))
521 521 transaction.rollback(self.sopener, self.sjoin("journal"))
522 522 self.invalidate()
523 523 return True
524 524 else:
525 525 self.ui.warn(_("no interrupted transaction available\n"))
526 526 return False
527 527
528 528 def rollback(self, wlock=None, lock=None):
529 529 if not wlock:
530 530 wlock = self.wlock()
531 531 if not lock:
532 532 lock = self.lock()
533 533 if os.path.exists(self.sjoin("undo")):
534 534 self.ui.status(_("rolling back last transaction\n"))
535 535 transaction.rollback(self.sopener, self.sjoin("undo"))
536 536 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
537 537 self.invalidate()
538 538 self.dirstate.invalidate()
539 539 else:
540 540 self.ui.warn(_("no rollback information available\n"))
541 541
542 542 def invalidate(self):
543 543 for a in "changelog manifest".split():
544 544 if hasattr(self, a):
545 545 self.__delattr__(a)
546 546 self.tagscache = None
547 547 self.nodetagscache = None
548 548
549 549 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
550 550 desc=None):
551 551 try:
552 552 l = lock.lock(lockname, 0, releasefn, desc=desc)
553 553 except lock.LockHeld, inst:
554 554 if not wait:
555 555 raise
556 556 self.ui.warn(_("waiting for lock on %s held by %r\n") %
557 557 (desc, inst.locker))
558 558 # default to 600 seconds timeout
559 559 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
560 560 releasefn, desc=desc)
561 561 if acquirefn:
562 562 acquirefn()
563 563 return l
564 564
565 565 def lock(self, wait=1):
566 566 return self.do_lock(self.sjoin("lock"), wait,
567 567 acquirefn=self.invalidate,
568 568 desc=_('repository %s') % self.origroot)
569 569
570 570 def wlock(self, wait=1):
571 571 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
572 572 self.dirstate.invalidate,
573 573 desc=_('working directory of %s') % self.origroot)
574 574
575 575 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
576 576 """
577 577 commit an individual file as part of a larger transaction
578 578 """
579 579
580 580 t = self.wread(fn)
581 581 fl = self.file(fn)
582 582 fp1 = manifest1.get(fn, nullid)
583 583 fp2 = manifest2.get(fn, nullid)
584 584
585 585 meta = {}
586 586 cp = self.dirstate.copied(fn)
587 587 if cp:
588 588 # Mark the new revision of this file as a copy of another
589 589 # file. This copy data will effectively act as a parent
590 590 # of this new revision. If this is a merge, the first
591 591 # parent will be the nullid (meaning "look up the copy data")
592 592 # and the second one will be the other parent. For example:
593 593 #
594 594 # 0 --- 1 --- 3 rev1 changes file foo
595 595 # \ / rev2 renames foo to bar and changes it
596 596 # \- 2 -/ rev3 should have bar with all changes and
597 597 # should record that bar descends from
598 598 # bar in rev2 and foo in rev1
599 599 #
600 600 # this allows this merge to succeed:
601 601 #
602 602 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
603 603 # \ / merging rev3 and rev4 should use bar@rev2
604 604 # \- 2 --- 4 as the merge base
605 605 #
606 606 meta["copy"] = cp
607 607 if not manifest2: # not a branch merge
608 608 meta["copyrev"] = hex(manifest1.get(cp, nullid))
609 609 fp2 = nullid
610 610 elif fp2 != nullid: # copied on remote side
611 611 meta["copyrev"] = hex(manifest1.get(cp, nullid))
612 612 elif fp1 != nullid: # copied on local side, reversed
613 613 meta["copyrev"] = hex(manifest2.get(cp))
614 614 fp2 = fp1
615 615 else: # directory rename
616 616 meta["copyrev"] = hex(manifest1.get(cp, nullid))
617 617 self.ui.debug(_(" %s: copy %s:%s\n") %
618 618 (fn, cp, meta["copyrev"]))
619 619 fp1 = nullid
620 620 elif fp2 != nullid:
621 621 # is one parent an ancestor of the other?
622 622 fpa = fl.ancestor(fp1, fp2)
623 623 if fpa == fp1:
624 624 fp1, fp2 = fp2, nullid
625 625 elif fpa == fp2:
626 626 fp2 = nullid
627 627
628 628 # is the file unmodified from the parent? report existing entry
629 629 if fp2 == nullid and not fl.cmp(fp1, t):
630 630 return fp1
631 631
632 632 changelist.append(fn)
633 633 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
634 634
635 635 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
636 636 if p1 is None:
637 637 p1, p2 = self.dirstate.parents()
638 638 return self.commit(files=files, text=text, user=user, date=date,
639 639 p1=p1, p2=p2, wlock=wlock, extra=extra)
640 640
641 641 def commit(self, files=None, text="", user=None, date=None,
642 642 match=util.always, force=False, lock=None, wlock=None,
643 643 force_editor=False, p1=None, p2=None, extra={}):
644 644
645 645 commit = []
646 646 remove = []
647 647 changed = []
648 648 use_dirstate = (p1 is None) # not rawcommit
649 649 extra = extra.copy()
650 650
651 651 if use_dirstate:
652 652 if files:
653 653 for f in files:
654 s = self.dirstate.state(f)
655 if s in 'nmai':
654 s = self.dirstate[f]
655 if s in 'nma':
656 656 commit.append(f)
657 657 elif s == 'r':
658 658 remove.append(f)
659 659 else:
660 660 self.ui.warn(_("%s not tracked!\n") % f)
661 661 else:
662 662 changes = self.status(match=match)[:5]
663 663 modified, added, removed, deleted, unknown = changes
664 664 commit = modified + added
665 665 remove = removed
666 666 else:
667 667 commit = files
668 668
669 669 if use_dirstate:
670 670 p1, p2 = self.dirstate.parents()
671 671 update_dirstate = True
672 672 else:
673 673 p1, p2 = p1, p2 or nullid
674 674 update_dirstate = (self.dirstate.parents()[0] == p1)
675 675
676 676 c1 = self.changelog.read(p1)
677 677 c2 = self.changelog.read(p2)
678 678 m1 = self.manifest.read(c1[0]).copy()
679 679 m2 = self.manifest.read(c2[0])
680 680
681 681 if use_dirstate:
682 682 branchname = self.workingctx().branch()
683 683 try:
684 684 branchname = branchname.decode('UTF-8').encode('UTF-8')
685 685 except UnicodeDecodeError:
686 686 raise util.Abort(_('branch name not in UTF-8!'))
687 687 else:
688 688 branchname = ""
689 689
690 690 if use_dirstate:
691 691 oldname = c1[5].get("branch") # stored in UTF-8
692 692 if (not commit and not remove and not force and p2 == nullid
693 693 and branchname == oldname):
694 694 self.ui.status(_("nothing changed\n"))
695 695 return None
696 696
697 697 xp1 = hex(p1)
698 698 if p2 == nullid: xp2 = ''
699 699 else: xp2 = hex(p2)
700 700
701 701 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
702 702
703 703 if not wlock:
704 704 wlock = self.wlock()
705 705 if not lock:
706 706 lock = self.lock()
707 707 tr = self.transaction()
708 708
709 709 # check in files
710 710 new = {}
711 711 linkrev = self.changelog.count()
712 712 commit.sort()
713 713 is_exec = util.execfunc(self.root, m1.execf)
714 714 is_link = util.linkfunc(self.root, m1.linkf)
715 715 for f in commit:
716 716 self.ui.note(f + "\n")
717 717 try:
718 718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
719 719 new_exec = is_exec(f)
720 720 new_link = is_link(f)
721 721 if not changed or changed[-1] != f:
722 722 # mention the file in the changelog if some flag changed,
723 723 # even if there was no content change.
724 724 old_exec = m1.execf(f)
725 725 old_link = m1.linkf(f)
726 726 if old_exec != new_exec or old_link != new_link:
727 727 changed.append(f)
728 728 m1.set(f, new_exec, new_link)
729 729 except (OSError, IOError):
730 730 if use_dirstate:
731 731 self.ui.warn(_("trouble committing %s!\n") % f)
732 732 raise
733 733 else:
734 734 remove.append(f)
735 735
736 736 # update manifest
737 737 m1.update(new)
738 738 remove.sort()
739 739 removed = []
740 740
741 741 for f in remove:
742 742 if f in m1:
743 743 del m1[f]
744 744 removed.append(f)
745 745 elif f in m2:
746 746 removed.append(f)
747 747 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
748 748
749 749 # add changeset
750 750 new = new.keys()
751 751 new.sort()
752 752
753 753 user = user or self.ui.username()
754 754 if not text or force_editor:
755 755 edittext = []
756 756 if text:
757 757 edittext.append(text)
758 758 edittext.append("")
759 759 edittext.append("HG: user: %s" % user)
760 760 if p2 != nullid:
761 761 edittext.append("HG: branch merge")
762 762 if branchname:
763 763 edittext.append("HG: branch %s" % util.tolocal(branchname))
764 764 edittext.extend(["HG: changed %s" % f for f in changed])
765 765 edittext.extend(["HG: removed %s" % f for f in removed])
766 766 if not changed and not remove:
767 767 edittext.append("HG: no files changed")
768 768 edittext.append("")
769 769 # run editor in the repository root
770 770 olddir = os.getcwd()
771 771 os.chdir(self.root)
772 772 text = self.ui.edit("\n".join(edittext), user)
773 773 os.chdir(olddir)
774 774
775 775 lines = [line.rstrip() for line in text.rstrip().splitlines()]
776 776 while lines and not lines[0]:
777 777 del lines[0]
778 778 if not lines:
779 779 return None
780 780 text = '\n'.join(lines)
781 781 if branchname:
782 782 extra["branch"] = branchname
783 783 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
784 784 user, date, extra)
785 785 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
786 786 parent2=xp2)
787 787 tr.close()
788 788
789 789 if self.branchcache and "branch" in extra:
790 790 self.branchcache[util.tolocal(extra["branch"])] = n
791 791
792 792 if use_dirstate or update_dirstate:
793 793 self.dirstate.setparents(n)
794 794 if use_dirstate:
795 795 for f in new:
796 796 self.dirstate.normal(f)
797 797 for f in removed:
798 798 self.dirstate.forget(f)
799 799
800 800 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
801 801 return n
802 802
803 803 def walk(self, node=None, files=[], match=util.always, badmatch=None):
804 804 '''
805 805 walk recursively through the directory tree or a given
806 806 changeset, finding all files matched by the match
807 807 function
808 808
809 809 results are yielded in a tuple (src, filename), where src
810 810 is one of:
811 811 'f' the file was found in the directory tree
812 812 'm' the file was only in the dirstate and not in the tree
813 813 'b' the file was not found but matched badmatch
814 814 '''
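# Illustrative usage (an editorial sketch, not part of this changeset):
# listing files found in the working directory via the dirstate walk.
#
#   for src, fn in repo.walk():
#       if src == 'f':
#           print fn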
815 815
816 816 if node:
817 817 fdict = dict.fromkeys(files)
818 818 # for dirstate.walk, files=['.'] means "walk the whole tree".
819 819 # follow that here, too
820 820 fdict.pop('.', None)
821 821 mdict = self.manifest.read(self.changelog.read(node)[0])
822 822 mfiles = mdict.keys()
823 823 mfiles.sort()
824 824 for fn in mfiles:
825 825 for ffn in fdict:
826 826 # match if the file is the exact name or a directory
827 827 if ffn == fn or fn.startswith("%s/" % ffn):
828 828 del fdict[ffn]
829 829 break
830 830 if match(fn):
831 831 yield 'm', fn
832 832 ffiles = fdict.keys()
833 833 ffiles.sort()
834 834 for fn in ffiles:
835 835 if badmatch and badmatch(fn):
836 836 if match(fn):
837 837 yield 'b', fn
838 838 else:
839 839 self.ui.warn(_('%s: No such file in rev %s\n')
840 840 % (self.pathto(fn), short(node)))
841 841 else:
842 842 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
843 843 yield src, fn
844 844
845 845 def status(self, node1=None, node2=None, files=[], match=util.always,
846 846 wlock=None, list_ignored=False, list_clean=False):
847 847 """return status of files between two nodes or node and working directory
848 848
849 849 If node1 is None, use the first dirstate parent instead.
850 850 If node2 is None, compare node1 with working directory.
851 851 """
852 852
853 853 def fcmp(fn, getnode):
854 854 t1 = self.wread(fn)
855 855 return self.file(fn).cmp(getnode(fn), t1)
856 856
857 857 def mfmatches(node):
858 858 change = self.changelog.read(node)
859 859 mf = self.manifest.read(change[0]).copy()
860 860 for fn in mf.keys():
861 861 if not match(fn):
862 862 del mf[fn]
863 863 return mf
864 864
865 865 modified, added, removed, deleted, unknown = [], [], [], [], []
866 866 ignored, clean = [], []
867 867
868 868 compareworking = False
869 869 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
870 870 compareworking = True
871 871
872 872 if not compareworking:
873 873 # read the manifest from node1 before the manifest from node2,
874 874 # so that we'll hit the manifest cache if we're going through
875 875 # all the revisions in parent->child order.
876 876 mf1 = mfmatches(node1)
877 877
878 878 mywlock = False
879 879
880 880 # are we comparing the working directory?
881 881 if not node2:
882 882 (lookup, modified, added, removed, deleted, unknown,
883 883 ignored, clean) = self.dirstate.status(files, match,
884 884 list_ignored, list_clean)
885 885
886 886 # are we comparing working dir against its parent?
887 887 if compareworking:
888 888 if lookup:
889 889 # do a full compare of any files that might have changed
890 890 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
891 891 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
892 892 nullid)
893 893 for f in lookup:
894 894 if fcmp(f, getnode):
895 895 modified.append(f)
896 896 else:
897 897 if list_clean:
898 898 clean.append(f)
899 899 if not wlock and not mywlock:
900 900 mywlock = True
901 901 try:
902 902 wlock = self.wlock(wait=0)
903 903 except lock.LockException:
904 904 pass
905 905 if wlock:
906 906 self.dirstate.normal(f)
907 907 else:
908 908 # we are comparing working dir against non-parent
909 909 # generate a pseudo-manifest for the working dir
910 910 # XXX: create it in dirstate.py ?
911 911 mf2 = mfmatches(self.dirstate.parents()[0])
912 912 is_exec = util.execfunc(self.root, mf2.execf)
913 913 is_link = util.linkfunc(self.root, mf2.linkf)
914 914 for f in lookup + modified + added:
915 915 mf2[f] = ""
916 916 mf2.set(f, is_exec(f), is_link(f))
917 917 for f in removed:
918 918 if f in mf2:
919 919 del mf2[f]
920 920
921 921 if mywlock and wlock:
922 922 wlock.release()
923 923 else:
924 924 # we are comparing two revisions
925 925 mf2 = mfmatches(node2)
926 926
927 927 if not compareworking:
928 928 # flush lists from dirstate before comparing manifests
929 929 modified, added, clean = [], [], []
930 930
931 931 # make sure to sort the files so we talk to the disk in a
932 932 # reasonable order
933 933 mf2keys = mf2.keys()
934 934 mf2keys.sort()
935 935 getnode = lambda fn: mf1.get(fn, nullid)
936 936 for fn in mf2keys:
937 937 if mf1.has_key(fn):
938 938 if (mf1.flags(fn) != mf2.flags(fn) or
939 939 (mf1[fn] != mf2[fn] and
940 940 (mf2[fn] != "" or fcmp(fn, getnode)))):
941 941 modified.append(fn)
942 942 elif list_clean:
943 943 clean.append(fn)
944 944 del mf1[fn]
945 945 else:
946 946 added.append(fn)
947 947
948 948 removed = mf1.keys()
949 949
950 950 # sort and return results:
951 951 for l in modified, added, removed, deleted, unknown, ignored, clean:
952 952 l.sort()
953 953 return (modified, added, removed, deleted, unknown, ignored, clean)
954 954
955 955 def add(self, list, wlock=None):
956 956 if not wlock:
957 957 wlock = self.wlock()
958 958 for f in list:
959 959 p = self.wjoin(f)
960 960 try:
961 961 st = os.lstat(p)
962 962 except:
963 963 self.ui.warn(_("%s does not exist!\n") % f)
964 964 continue
965 965 if st.st_size > 10000000:
966 966 self.ui.warn(_("%s: files over 10MB may cause memory and"
967 967 " performance problems\n"
968 968 "(use 'hg revert %s' to unadd the file)\n")
969 969 % (f, f))
970 970 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
971 971 self.ui.warn(_("%s not added: only files and symlinks "
972 972 "supported currently\n") % f)
973 elif self.dirstate.state(f) in 'an':
973 elif self.dirstate[f] in 'an':
974 974 self.ui.warn(_("%s already tracked!\n") % f)
975 975 else:
976 976 self.dirstate.add(f)
977 977
978 978 def forget(self, list, wlock=None):
979 979 if not wlock:
980 980 wlock = self.wlock()
981 981 for f in list:
982 if self.dirstate.state(f) not in 'ai':
982 if self.dirstate[f] != 'a':
983 983 self.ui.warn(_("%s not added!\n") % f)
984 984 else:
985 985 self.dirstate.forget(f)
986 986
987 987 def remove(self, list, unlink=False, wlock=None):
988 988 if unlink:
989 989 for f in list:
990 990 try:
991 991 util.unlink(self.wjoin(f))
992 992 except OSError, inst:
993 993 if inst.errno != errno.ENOENT:
994 994 raise
995 995 if not wlock:
996 996 wlock = self.wlock()
997 997 for f in list:
998 998 if unlink and os.path.exists(self.wjoin(f)):
999 999 self.ui.warn(_("%s still exists!\n") % f)
1000 elif self.dirstate.state(f) == 'a':
1000 elif self.dirstate[f] == 'a':
1001 1001 self.dirstate.forget(f)
1002 1002 elif f not in self.dirstate:
1003 1003 self.ui.warn(_("%s not tracked!\n") % f)
1004 1004 else:
1005 1005 self.dirstate.remove(f)
1006 1006
1007 1007 def undelete(self, list, wlock=None):
1008 1008 p = self.dirstate.parents()[0]
1009 1009 mn = self.changelog.read(p)[0]
1010 1010 m = self.manifest.read(mn)
1011 1011 if not wlock:
1012 1012 wlock = self.wlock()
1013 1013 for f in list:
1014 if self.dirstate.state(f) not in "r":
1014 if self.dirstate[f] != 'r':
1015 1015 self.ui.warn("%s not removed!\n" % f)
1016 1016 else:
1017 1017 t = self.file(f).read(m[f])
1018 1018 self.wwrite(f, t, m.flags(f))
1019 1019 self.dirstate.normal(f)
1020 1020
1021 1021 def copy(self, source, dest, wlock=None):
1022 1022 p = self.wjoin(dest)
1023 1023 if not (os.path.exists(p) or os.path.islink(p)):
1024 1024 self.ui.warn(_("%s does not exist!\n") % dest)
1025 1025 elif not (os.path.isfile(p) or os.path.islink(p)):
1026 1026 self.ui.warn(_("copy failed: %s is not a file or a "
1027 1027 "symbolic link\n") % dest)
1028 1028 else:
1029 1029 if not wlock:
1030 1030 wlock = self.wlock()
1031 if self.dirstate.state(dest) == '?':
1031 if dest not in self.dirstate:
1032 1032 self.dirstate.add(dest)
1033 1033 self.dirstate.copy(source, dest)
1034 1034
1035 1035 def heads(self, start=None):
1036 1036 heads = self.changelog.heads(start)
1037 1037 # sort the output in rev descending order
1038 1038 heads = [(-self.changelog.rev(h), h) for h in heads]
1039 1039 heads.sort()
1040 1040 return [n for (r, n) in heads]
1041 1041
1042 1042 def branchheads(self, branch, start=None):
1043 1043 branches = self.branchtags()
1044 1044 if branch not in branches:
1045 1045 return []
1046 1046 # The basic algorithm is this:
1047 1047 #
1048 1048 # Start from the branch tip since there are no later revisions that can
1049 1049 # possibly be in this branch, and the tip is a guaranteed head.
1050 1050 #
1051 1051 # Remember the tip's parents as the first ancestors, since these by
1052 1052 # definition are not heads.
1053 1053 #
1054 1054 # Step backwards from the branch tip through all the revisions. We are
1055 1055 # guaranteed by the rules of Mercurial that we will now be visiting the
1056 1056 # nodes in reverse topological order (children before parents).
1057 1057 #
1058 1058 # If a revision is one of the ancestors of a head then we can toss it
1059 1059 # out of the ancestors set (we've already found it and won't be
1060 1060 # visiting it again) and put its parents in the ancestors set.
1061 1061 #
1062 1062 # Otherwise, if a revision is in the branch it's another head, since it
1063 1063 # wasn't in the ancestor list of an existing head. So add it to the
1064 1064 # head list, and add its parents to the ancestor list.
1065 1065 #
1066 1066 # If it is not in the branch ignore it.
1067 1067 #
1068 1068 # Once we have a list of heads, use nodesbetween to filter out all the
1069 1069 # heads that cannot be reached from startrev. There may be a more
1070 1070 # efficient way to do this as part of the previous algorithm.
1071 1071
1072 1072 set = util.set
1073 1073 heads = [self.changelog.rev(branches[branch])]
1074 1074 # Don't care if ancestors contains nullrev or not.
1075 1075 ancestors = set(self.changelog.parentrevs(heads[0]))
1076 1076 for rev in xrange(heads[0] - 1, nullrev, -1):
1077 1077 if rev in ancestors:
1078 1078 ancestors.update(self.changelog.parentrevs(rev))
1079 1079 ancestors.remove(rev)
1080 1080 elif self.changectx(rev).branch() == branch:
1081 1081 heads.append(rev)
1082 1082 ancestors.update(self.changelog.parentrevs(rev))
1083 1083 heads = [self.changelog.node(rev) for rev in heads]
1084 1084 if start is not None:
1085 1085 heads = self.changelog.nodesbetween([start], heads)[2]
1086 1086 return heads
1087 1087
1088 1088 def branches(self, nodes):
1089 1089 if not nodes:
1090 1090 nodes = [self.changelog.tip()]
1091 1091 b = []
1092 1092 for n in nodes:
1093 1093 t = n
1094 1094 while 1:
1095 1095 p = self.changelog.parents(n)
1096 1096 if p[1] != nullid or p[0] == nullid:
1097 1097 b.append((t, n, p[0], p[1]))
1098 1098 break
1099 1099 n = p[0]
1100 1100 return b
1101 1101
1102 1102 def between(self, pairs):
1103 1103 r = []
1104 1104
1105 1105 for top, bottom in pairs:
1106 1106 n, l, i = top, [], 0
1107 1107 f = 1
1108 1108
1109 1109 while n != bottom:
1110 1110 p = self.changelog.parents(n)[0]
1111 1111 if i == f:
1112 1112 l.append(n)
1113 1113 f = f * 2
1114 1114 n = p
1115 1115 i += 1
1116 1116
1117 1117 r.append(l)
1118 1118
1119 1119 return r
1120 1120
1121 1121 def findincoming(self, remote, base=None, heads=None, force=False):
1122 1122 """Return list of roots of the subsets of missing nodes from remote
1123 1123
1124 1124 If base dict is specified, assume that these nodes and their parents
1125 1125 exist on the remote side and that no child of a node of base exists
1126 1126 in both remote and self.
1127 1127 Furthermore, base will be updated to include the nodes that exist
1128 1128 in both self and remote but have no children that exist in both.
1129 1129 If a list of heads is specified, return only nodes which are heads
1130 1130 or ancestors of these heads.
1131 1131
1132 1132 All the ancestors of base are in self and in remote.
1133 1133 All the descendants of the list returned are missing in self.
1134 1134 (and so we know that the rest of the nodes are missing in remote, see
1135 1135 outgoing)
1136 1136 """
1137 1137 m = self.changelog.nodemap
1138 1138 search = []
1139 1139 fetch = {}
1140 1140 seen = {}
1141 1141 seenbranch = {}
1142 1142 if base == None:
1143 1143 base = {}
1144 1144
1145 1145 if not heads:
1146 1146 heads = remote.heads()
1147 1147
1148 1148 if self.changelog.tip() == nullid:
1149 1149 base[nullid] = 1
1150 1150 if heads != [nullid]:
1151 1151 return [nullid]
1152 1152 return []
1153 1153
1154 1154 # assume we're closer to the tip than the root
1155 1155 # and start by examining the heads
1156 1156 self.ui.status(_("searching for changes\n"))
1157 1157
1158 1158 unknown = []
1159 1159 for h in heads:
1160 1160 if h not in m:
1161 1161 unknown.append(h)
1162 1162 else:
1163 1163 base[h] = 1
1164 1164
1165 1165 if not unknown:
1166 1166 return []
1167 1167
1168 1168 req = dict.fromkeys(unknown)
1169 1169 reqcnt = 0
1170 1170
1171 1171 # search through remote branches
1172 1172 # a 'branch' here is a linear segment of history, with four parts:
1173 1173 # head, root, first parent, second parent
1174 1174 # (a branch always has two parents (or none) by definition)
1175 1175 unknown = remote.branches(unknown)
1176 1176 while unknown:
1177 1177 r = []
1178 1178 while unknown:
1179 1179 n = unknown.pop(0)
1180 1180 if n[0] in seen:
1181 1181 continue
1182 1182
1183 1183 self.ui.debug(_("examining %s:%s\n")
1184 1184 % (short(n[0]), short(n[1])))
1185 1185 if n[0] == nullid: # found the end of the branch
1186 1186 pass
1187 1187 elif n in seenbranch:
1188 1188 self.ui.debug(_("branch already found\n"))
1189 1189 continue
1190 1190 elif n[1] and n[1] in m: # do we know the base?
1191 1191 self.ui.debug(_("found incomplete branch %s:%s\n")
1192 1192 % (short(n[0]), short(n[1])))
1193 1193 search.append(n) # schedule branch range for scanning
1194 1194 seenbranch[n] = 1
1195 1195 else:
1196 1196 if n[1] not in seen and n[1] not in fetch:
1197 1197 if n[2] in m and n[3] in m:
1198 1198 self.ui.debug(_("found new changeset %s\n") %
1199 1199 short(n[1]))
1200 1200 fetch[n[1]] = 1 # earliest unknown
1201 1201 for p in n[2:4]:
1202 1202 if p in m:
1203 1203 base[p] = 1 # latest known
1204 1204
1205 1205 for p in n[2:4]:
1206 1206 if p not in req and p not in m:
1207 1207 r.append(p)
1208 1208 req[p] = 1
1209 1209 seen[n[0]] = 1
1210 1210
1211 1211 if r:
1212 1212 reqcnt += 1
1213 1213 self.ui.debug(_("request %d: %s\n") %
1214 1214 (reqcnt, " ".join(map(short, r))))
1215 1215 for p in xrange(0, len(r), 10):
1216 1216 for b in remote.branches(r[p:p+10]):
1217 1217 self.ui.debug(_("received %s:%s\n") %
1218 1218 (short(b[0]), short(b[1])))
1219 1219 unknown.append(b)
1220 1220
1221 1221 # do binary search on the branches we found
1222 1222 while search:
1223 1223 n = search.pop(0)
1224 1224 reqcnt += 1
1225 1225 l = remote.between([(n[0], n[1])])[0]
1226 1226 l.append(n[1])
1227 1227 p = n[0]
1228 1228 f = 1
1229 1229 for i in l:
1230 1230 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1231 1231 if i in m:
1232 1232 if f <= 2:
1233 1233 self.ui.debug(_("found new branch changeset %s\n") %
1234 1234 short(p))
1235 1235 fetch[p] = 1
1236 1236 base[i] = 1
1237 1237 else:
1238 1238 self.ui.debug(_("narrowed branch search to %s:%s\n")
1239 1239 % (short(p), short(i)))
1240 1240 search.append((p, i))
1241 1241 break
1242 1242 p, f = i, f * 2
1243 1243
1244 1244 # sanity check our fetch list
1245 1245 for f in fetch.keys():
1246 1246 if f in m:
1247 1247 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1248 1248
1249 1249 if base.keys() == [nullid]:
1250 1250 if force:
1251 1251 self.ui.warn(_("warning: repository is unrelated\n"))
1252 1252 else:
1253 1253 raise util.Abort(_("repository is unrelated"))
1254 1254
1255 1255 self.ui.debug(_("found new changesets starting at ") +
1256 1256 " ".join([short(f) for f in fetch]) + "\n")
1257 1257
1258 1258 self.ui.debug(_("%d total queries\n") % reqcnt)
1259 1259
1260 1260 return fetch.keys()
1261 1261
1262 1262 def findoutgoing(self, remote, base=None, heads=None, force=False):
1263 1263 """Return list of nodes that are roots of subsets not in remote
1264 1264
1265 1265 If base dict is specified, assume that these nodes and their parents
1266 1266 exist on the remote side.
1267 1267 If a list of heads is specified, return only nodes which are heads
1268 1268 or ancestors of these heads, and return a second element which
1269 1269 contains all remote heads which get new children.
1270 1270 """
1271 1271 if base == None:
1272 1272 base = {}
1273 1273 self.findincoming(remote, base, heads, force=force)
1274 1274
1275 1275 self.ui.debug(_("common changesets up to ")
1276 1276 + " ".join(map(short, base.keys())) + "\n")
1277 1277
1278 1278 remain = dict.fromkeys(self.changelog.nodemap)
1279 1279
1280 1280 # prune everything remote has from the tree
1281 1281 del remain[nullid]
1282 1282 remove = base.keys()
1283 1283 while remove:
1284 1284 n = remove.pop(0)
1285 1285 if n in remain:
1286 1286 del remain[n]
1287 1287 for p in self.changelog.parents(n):
1288 1288 remove.append(p)
1289 1289
1290 1290 # find every node whose parents have been pruned
1291 1291 subset = []
1292 1292 # find every remote head that will get new children
1293 1293 updated_heads = {}
1294 1294 for n in remain:
1295 1295 p1, p2 = self.changelog.parents(n)
1296 1296 if p1 not in remain and p2 not in remain:
1297 1297 subset.append(n)
1298 1298 if heads:
1299 1299 if p1 in heads:
1300 1300 updated_heads[p1] = True
1301 1301 if p2 in heads:
1302 1302 updated_heads[p2] = True
1303 1303
1304 1304 # this is the set of all roots we have to push
1305 1305 if heads:
1306 1306 return subset, updated_heads.keys()
1307 1307 else:
1308 1308 return subset
1309 1309
1310 1310 def pull(self, remote, heads=None, force=False, lock=None):
1311 1311 mylock = False
1312 1312 if not lock:
1313 1313 lock = self.lock()
1314 1314 mylock = True
1315 1315
1316 1316 try:
1317 1317 fetch = self.findincoming(remote, force=force)
1318 1318 if fetch == [nullid]:
1319 1319 self.ui.status(_("requesting all changes\n"))
1320 1320
1321 1321 if not fetch:
1322 1322 self.ui.status(_("no changes found\n"))
1323 1323 return 0
1324 1324
1325 1325 if heads is None:
1326 1326 cg = remote.changegroup(fetch, 'pull')
1327 1327 else:
1328 1328 if 'changegroupsubset' not in remote.capabilities:
1329 1329 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1330 1330 cg = remote.changegroupsubset(fetch, heads, 'pull')
1331 1331 return self.addchangegroup(cg, 'pull', remote.url())
1332 1332 finally:
1333 1333 if mylock:
1334 1334 lock.release()
1335 1335
1336 1336 def push(self, remote, force=False, revs=None):
1337 1337 # there are two ways to push to remote repo:
1338 1338 #
1339 1339 # addchangegroup assumes local user can lock remote
1340 1340 # repo (local filesystem, old ssh servers).
1341 1341 #
1342 1342 # unbundle assumes local user cannot lock remote repo (new ssh
1343 1343 # servers, http servers).
1344 1344
1345 1345 if remote.capable('unbundle'):
1346 1346 return self.push_unbundle(remote, force, revs)
1347 1347 return self.push_addchangegroup(remote, force, revs)
1348 1348
1349 1349 def prepush(self, remote, force, revs):
1350 1350 base = {}
1351 1351 remote_heads = remote.heads()
1352 1352 inc = self.findincoming(remote, base, remote_heads, force=force)
1353 1353
1354 1354 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1355 1355 if revs is not None:
1356 1356 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1357 1357 else:
1358 1358 bases, heads = update, self.changelog.heads()
1359 1359
1360 1360 if not bases:
1361 1361 self.ui.status(_("no changes found\n"))
1362 1362 return None, 1
1363 1363 elif not force:
1364 1364 # check if we're creating new remote heads
1365 1365 # to be a remote head after push, node must be either
1366 1366 # - unknown locally
1367 1367 # - a local outgoing head descended from update
1368 1368 # - a remote head that's known locally and not
1369 1369 # ancestral to an outgoing head
1370 1370
1371 1371 warn = 0
1372 1372
1373 1373 if remote_heads == [nullid]:
1374 1374 warn = 0
1375 1375 elif not revs and len(heads) > len(remote_heads):
1376 1376 warn = 1
1377 1377 else:
1378 1378 newheads = list(heads)
1379 1379 for r in remote_heads:
1380 1380 if r in self.changelog.nodemap:
1381 1381 desc = self.changelog.heads(r, heads)
1382 1382 l = [h for h in heads if h in desc]
1383 1383 if not l:
1384 1384 newheads.append(r)
1385 1385 else:
1386 1386 newheads.append(r)
1387 1387 if len(newheads) > len(remote_heads):
1388 1388 warn = 1
1389 1389
1390 1390 if warn:
1391 1391 self.ui.warn(_("abort: push creates new remote branches!\n"))
1392 1392 self.ui.status(_("(did you forget to merge?"
1393 1393 " use push -f to force)\n"))
1394 1394 return None, 1
1395 1395 elif inc:
1396 1396 self.ui.warn(_("note: unsynced remote changes!\n"))
1397 1397
1398 1398
1399 1399 if revs is None:
1400 1400 cg = self.changegroup(update, 'push')
1401 1401 else:
1402 1402 cg = self.changegroupsubset(update, revs, 'push')
1403 1403 return cg, remote_heads
1404 1404
1405 1405 def push_addchangegroup(self, remote, force, revs):
1406 1406 lock = remote.lock()
1407 1407
1408 1408 ret = self.prepush(remote, force, revs)
1409 1409 if ret[0] is not None:
1410 1410 cg, remote_heads = ret
1411 1411 return remote.addchangegroup(cg, 'push', self.url())
1412 1412 return ret[1]
1413 1413
1414 1414 def push_unbundle(self, remote, force, revs):
1415 1415 # local repo finds heads on server, finds out what revs it
1416 1416 # must push. once revs transferred, if server finds it has
1417 1417 # different heads (someone else won commit/push race), server
1418 1418 # aborts.
1419 1419
1420 1420 ret = self.prepush(remote, force, revs)
1421 1421 if ret[0] is not None:
1422 1422 cg, remote_heads = ret
1423 1423 if force: remote_heads = ['force']
1424 1424 return remote.unbundle(cg, remote_heads, 'push')
1425 1425 return ret[1]
1426 1426
1427 1427 def changegroupinfo(self, nodes):
1428 1428 self.ui.note(_("%d changesets found\n") % len(nodes))
1429 1429 if self.ui.debugflag:
1430 1430 self.ui.debug(_("List of changesets:\n"))
1431 1431 for node in nodes:
1432 1432 self.ui.debug("%s\n" % hex(node))
1433 1433
1434 1434 def changegroupsubset(self, bases, heads, source):
1435 1435 """This function generates a changegroup consisting of all the nodes
1436 1436 that are descendants of any of the bases, and ancestors of any of
1437 1437 the heads.
1438 1438
1439 1439 It is fairly complex as determining which filenodes and which
1440 1440 manifest nodes need to be included for the changeset to be complete
1441 1441 is non-trivial.
1442 1442
1443 1443 Another wrinkle is doing the reverse, figuring out which changeset in
1444 1444 the changegroup a particular filenode or manifestnode belongs to."""
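# Illustrative usage (an editorial sketch, not part of this changeset):
# limiting the outgoing group to selected revisions, as prepush() does
# when 'revs' is given; 'update' and 'revs' are assumed to be lists of
# nodes.
#
#   cg = repo.changegroupsubset(update, revs, 'push')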
1445 1445
1446 1446 self.hook('preoutgoing', throw=True, source=source)
1447 1447
1448 1448 # Set up some initial variables
1449 1449 # Make it easy to refer to self.changelog
1450 1450 cl = self.changelog
1451 1451 # msng is short for missing - compute the list of changesets in this
1452 1452 # changegroup.
1453 1453 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1454 1454 self.changegroupinfo(msng_cl_lst)
1455 1455 # Some bases may turn out to be superfluous, and some heads may be
1456 1456 # too. nodesbetween will return the minimal set of bases and heads
1457 1457 # necessary to re-create the changegroup.
1458 1458
1459 1459 # Known heads are the list of heads that it is assumed the recipient
1460 1460 # of this changegroup will know about.
1461 1461 knownheads = {}
1462 1462 # We assume that all parents of bases are known heads.
1463 1463 for n in bases:
1464 1464 for p in cl.parents(n):
1465 1465 if p != nullid:
1466 1466 knownheads[p] = 1
1467 1467 knownheads = knownheads.keys()
1468 1468 if knownheads:
1469 1469 # Now that we know what heads are known, we can compute which
1470 1470 # changesets are known. The recipient must know about all
1471 1471 # changesets required to reach the known heads from the null
1472 1472 # changeset.
1473 1473 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1474 1474 junk = None
1475 1475 # Transform the list into an ersatz set.
1476 1476 has_cl_set = dict.fromkeys(has_cl_set)
1477 1477 else:
1478 1478 # If there were no known heads, the recipient cannot be assumed to
1479 1479 # know about any changesets.
1480 1480 has_cl_set = {}
1481 1481
1482 1482 # Make it easy to refer to self.manifest
1483 1483 mnfst = self.manifest
1484 1484 # We don't know which manifests are missing yet
1485 1485 msng_mnfst_set = {}
1486 1486 # Nor do we know which filenodes are missing.
1487 1487 msng_filenode_set = {}
1488 1488
1489 1489 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1490 1490 junk = None
1491 1491
1492 1492 # A changeset always belongs to itself, so the changenode lookup
1493 1493 # function for a changenode is identity.
1494 1494 def identity(x):
1495 1495 return x
1496 1496
1497 1497 # A function generating function. Sets up an environment for the
1498 1498 # inner function.
1499 1499 def cmp_by_rev_func(revlog):
1500 1500 # Compare two nodes by their revision number in the environment's
1501 1501 # revision history. Since the revision number both represents the
1502 1502 # most efficient order to read the nodes in, and represents a
1503 1503 # topological sorting of the nodes, this function is often useful.
1504 1504 def cmp_by_rev(a, b):
1505 1505 return cmp(revlog.rev(a), revlog.rev(b))
1506 1506 return cmp_by_rev
1507 1507
1508 1508 # If we determine that a particular file or manifest node must be a
1509 1509 # node that the recipient of the changegroup will already have, we can
1510 1510 # also assume the recipient will have all the parents. This function
1511 1511 # prunes them from the set of missing nodes.
1512 1512 def prune_parents(revlog, hasset, msngset):
1513 1513 haslst = hasset.keys()
1514 1514 haslst.sort(cmp_by_rev_func(revlog))
1515 1515 for node in haslst:
1516 1516 parentlst = [p for p in revlog.parents(node) if p != nullid]
1517 1517 while parentlst:
1518 1518 n = parentlst.pop()
1519 1519 if n not in hasset:
1520 1520 hasset[n] = 1
1521 1521 p = [p for p in revlog.parents(n) if p != nullid]
1522 1522 parentlst.extend(p)
1523 1523 for n in hasset:
1524 1524 msngset.pop(n, None)
1525 1525
1526 1526 # This is a function generating function used to set up an environment
1527 1527 # for the inner function to execute in.
1528 1528 def manifest_and_file_collector(changedfileset):
1529 1529 # This is an information gathering function that gathers
1530 1530 # information from each changeset node that goes out as part of
1531 1531 # the changegroup. The information gathered is a list of which
1532 1532 # manifest nodes are potentially required (the recipient may
1533 1533 # already have them) and total list of all files which were
1534 1534 # changed in any changeset in the changegroup.
1535 1535 #
1536 1536 # We also remember, for each manifest, the first changenode we saw
1537 1537 # it referenced by, so we can later determine which changenode 'owns'
1538 1538 # the manifest.
1539 1539 def collect_manifests_and_files(clnode):
1540 1540 c = cl.read(clnode)
1541 1541 for f in c[3]:
1542 1542 # This is to make sure we only have one instance of each
1543 1543 # filename string for each filename.
1544 1544 changedfileset.setdefault(f, f)
1545 1545 msng_mnfst_set.setdefault(c[0], clnode)
1546 1546 return collect_manifests_and_files
1547 1547
1548 1548 # Figure out which manifest nodes (of the ones we think might be part
1549 1549 # of the changegroup) the recipient must know about and remove them
1550 1550 # from the changegroup.
1551 1551 def prune_manifests():
1552 1552 has_mnfst_set = {}
1553 1553 for n in msng_mnfst_set:
1554 1554 # If a 'missing' manifest thinks it belongs to a changenode
1555 1555 # the recipient is assumed to have, obviously the recipient
1556 1556 # must have that manifest.
1557 1557 linknode = cl.node(mnfst.linkrev(n))
1558 1558 if linknode in has_cl_set:
1559 1559 has_mnfst_set[n] = 1
1560 1560 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1561 1561
1562 1562 # Use the information collected in collect_manifests_and_files to say
1563 1563 # which changenode any manifestnode belongs to.
1564 1564 def lookup_manifest_link(mnfstnode):
1565 1565 return msng_mnfst_set[mnfstnode]
1566 1566
1567 1567 # A function generating function that sets up the initial environment
1568 1568 # for the inner function.
1569 1569 def filenode_collector(changedfiles):
1570 1570 next_rev = [0]
1571 1571 # This gathers information from each manifestnode included in the
1572 1572 # changegroup about which filenodes the manifest node references
1573 1573 # so we can include those in the changegroup too.
1574 1574 #
1575 1575 # It also remembers which changenode each filenode belongs to. It
1576 1576 # does this by assuming that a filenode belongs to the changenode
1577 1577 # that the first manifest referencing it belongs to.
1578 1578 def collect_msng_filenodes(mnfstnode):
1579 1579 r = mnfst.rev(mnfstnode)
1580 1580 if r == next_rev[0]:
1581 1581 # If the last rev we looked at was the one just previous,
1582 1582 # we only need to see a diff.
1583 1583 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1584 1584 # For each line in the delta
1585 1585 for dline in delta.splitlines():
1586 1586 # get the filename and filenode for that line
1587 1587 f, fnode = dline.split('\0')
1588 1588 fnode = bin(fnode[:40])
1589 1589 f = changedfiles.get(f, None)
1590 1590 # And if the file is in the list of files we care
1591 1591 # about.
1592 1592 if f is not None:
1593 1593 # Get the changenode this manifest belongs to
1594 1594 clnode = msng_mnfst_set[mnfstnode]
1595 1595 # Create the set of filenodes for the file if
1596 1596 # there isn't one already.
1597 1597 ndset = msng_filenode_set.setdefault(f, {})
1598 1598 # And set the filenode's changelog node to the
1599 1599 # manifest's if it hasn't been set already.
1600 1600 ndset.setdefault(fnode, clnode)
1601 1601 else:
1602 1602 # Otherwise we need a full manifest.
1603 1603 m = mnfst.read(mnfstnode)
1604 1604 # For every file we care about.
1605 1605 for f in changedfiles:
1606 1606 fnode = m.get(f, None)
1607 1607 # If it's in the manifest
1608 1608 if fnode is not None:
1609 1609 # See comments above.
1610 1610 clnode = msng_mnfst_set[mnfstnode]
1611 1611 ndset = msng_filenode_set.setdefault(f, {})
1612 1612 ndset.setdefault(fnode, clnode)
1613 1613 # Remember the revision we hope to see next.
1614 1614 next_rev[0] = r + 1
1615 1615 return collect_msng_filenodes
1616 1616
1617 1617 # We have a list of filenodes we think we need for a file; let's remove
1618 1618 # all those we know the recipient must have.
1619 1619 def prune_filenodes(f, filerevlog):
1620 1620 msngset = msng_filenode_set[f]
1621 1621 hasset = {}
1622 1622 # If a 'missing' filenode thinks it belongs to a changenode we
1623 1623 # assume the recipient must have, then the recipient must have
1624 1624 # that filenode.
1625 1625 for n in msngset:
1626 1626 clnode = cl.node(filerevlog.linkrev(n))
1627 1627 if clnode in has_cl_set:
1628 1628 hasset[n] = 1
1629 1629 prune_parents(filerevlog, hasset, msngset)
1630 1630
1631 1631 # A function generating function that sets up a context for the
1632 1632 # inner function.
1633 1633 def lookup_filenode_link_func(fname):
1634 1634 msngset = msng_filenode_set[fname]
1635 1635 # Lookup the changenode the filenode belongs to.
1636 1636 def lookup_filenode_link(fnode):
1637 1637 return msngset[fnode]
1638 1638 return lookup_filenode_link
1639 1639
1640 1640 # Now that we have all these utility functions to help out and
1641 1641 # logically divide up the task, generate the group.
1642 1642 def gengroup():
1643 1643 # The set of changed files starts empty.
1644 1644 changedfiles = {}
1645 1645 # Create a changenode group generator that will call our functions
1646 1646 # back to lookup the owning changenode and collect information.
1647 1647 group = cl.group(msng_cl_lst, identity,
1648 1648 manifest_and_file_collector(changedfiles))
1649 1649 for chnk in group:
1650 1650 yield chnk
1651 1651
1652 1652 # The list of manifests has been collected by the generator
1653 1653 # calling our functions back.
1654 1654 prune_manifests()
1655 1655 msng_mnfst_lst = msng_mnfst_set.keys()
1656 1656 # Sort the manifestnodes by revision number.
1657 1657 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1658 1658 # Create a generator for the manifestnodes that calls our lookup
1659 1659 # and data collection functions back.
1660 1660 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1661 1661 filenode_collector(changedfiles))
1662 1662 for chnk in group:
1663 1663 yield chnk
1664 1664
1665 1665 # These are no longer needed, dereference and toss the memory for
1666 1666 # them.
1667 1667 msng_mnfst_lst = None
1668 1668 msng_mnfst_set.clear()
1669 1669
1670 1670 changedfiles = changedfiles.keys()
1671 1671 changedfiles.sort()
1672 1672 # Go through all our files in order sorted by name.
1673 1673 for fname in changedfiles:
1674 1674 filerevlog = self.file(fname)
1675 1675 # Toss out the filenodes that the recipient isn't really
1676 1676 # missing.
1677 1677 if msng_filenode_set.has_key(fname):
1678 1678 prune_filenodes(fname, filerevlog)
1679 1679 msng_filenode_lst = msng_filenode_set[fname].keys()
1680 1680 else:
1681 1681 msng_filenode_lst = []
1682 1682 # If any filenodes are left, generate the group for them,
1683 1683 # otherwise don't bother.
1684 1684 if len(msng_filenode_lst) > 0:
1685 1685 yield changegroup.genchunk(fname)
1686 1686 # Sort the filenodes by their revision #
1687 1687 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1688 1688 # Create a group generator and only pass in a changenode
1689 1689 # lookup function as we need to collect no information
1690 1690 # from filenodes.
1691 1691 group = filerevlog.group(msng_filenode_lst,
1692 1692 lookup_filenode_link_func(fname))
1693 1693 for chnk in group:
1694 1694 yield chnk
1695 1695 if msng_filenode_set.has_key(fname):
1696 1696 # Don't need this anymore, toss it to free memory.
1697 1697 del msng_filenode_set[fname]
1698 1698 # Signal that no more groups are left.
1699 1699 yield changegroup.closechunk()
1700 1700
1701 1701 if msng_cl_lst:
1702 1702 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1703 1703
1704 1704 return util.chunkbuffer(gengroup())
1705 1705
1706 1706 def changegroup(self, basenodes, source):
1707 1707 """Generate a changegroup of all nodes that we have that a recipient
1708 1708 doesn't.
1709 1709
1710 1710 This is much easier than the previous function as we can assume that
1711 1711 the recipient has any changenode we aren't sending them."""
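# Illustrative usage (an editorial sketch, not part of this changeset):
# a full bundle can be requested by passing the null node as the only
# base, which is what pull() effectively does for an empty repository.
#
#   cg = repo.changegroup([nullid], 'pull')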
1712 1712
1713 1713 self.hook('preoutgoing', throw=True, source=source)
1714 1714
1715 1715 cl = self.changelog
1716 1716 nodes = cl.nodesbetween(basenodes, None)[0]
1717 1717 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1718 1718 self.changegroupinfo(nodes)
1719 1719
1720 1720 def identity(x):
1721 1721 return x
1722 1722
1723 1723 def gennodelst(revlog):
1724 1724 for r in xrange(0, revlog.count()):
1725 1725 n = revlog.node(r)
1726 1726 if revlog.linkrev(n) in revset:
1727 1727 yield n
1728 1728
1729 1729 def changed_file_collector(changedfileset):
1730 1730 def collect_changed_files(clnode):
1731 1731 c = cl.read(clnode)
1732 1732 for fname in c[3]:
1733 1733 changedfileset[fname] = 1
1734 1734 return collect_changed_files
1735 1735
1736 1736 def lookuprevlink_func(revlog):
1737 1737 def lookuprevlink(n):
1738 1738 return cl.node(revlog.linkrev(n))
1739 1739 return lookuprevlink
1740 1740
1741 1741 def gengroup():
1742 1742 # construct a list of all changed files
1743 1743 changedfiles = {}
1744 1744
1745 1745 for chnk in cl.group(nodes, identity,
1746 1746 changed_file_collector(changedfiles)):
1747 1747 yield chnk
1748 1748 changedfiles = changedfiles.keys()
1749 1749 changedfiles.sort()
1750 1750
1751 1751 mnfst = self.manifest
1752 1752 nodeiter = gennodelst(mnfst)
1753 1753 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1754 1754 yield chnk
1755 1755
1756 1756 for fname in changedfiles:
1757 1757 filerevlog = self.file(fname)
1758 1758 nodeiter = gennodelst(filerevlog)
1759 1759 nodeiter = list(nodeiter)
1760 1760 if nodeiter:
1761 1761 yield changegroup.genchunk(fname)
1762 1762 lookup = lookuprevlink_func(filerevlog)
1763 1763 for chnk in filerevlog.group(nodeiter, lookup):
1764 1764 yield chnk
1765 1765
1766 1766 yield changegroup.closechunk()
1767 1767
1768 1768 if nodes:
1769 1769 self.hook('outgoing', node=hex(nodes[0]), source=source)
1770 1770
1771 1771 return util.chunkbuffer(gengroup())
1772 1772
1773 1773 def addchangegroup(self, source, srctype, url):
1774 1774 """add changegroup to repo.
1775 1775
1776 1776 return values:
1777 1777 - nothing changed or no source: 0
1778 1778 - more heads than before: 1+added heads (2..n)
1779 1779 - fewer heads than before: -1-removed heads (-2..-n)
1780 1780 - number of heads stays the same: 1
1781 1781 """
1782 1782 def csmap(x):
1783 1783 self.ui.debug(_("add changeset %s\n") % short(x))
1784 1784 return cl.count()
1785 1785
1786 1786 def revmap(x):
1787 1787 return cl.rev(x)
1788 1788
1789 1789 if not source:
1790 1790 return 0
1791 1791
1792 1792 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1793 1793
1794 1794 changesets = files = revisions = 0
1795 1795
1796 1796 tr = self.transaction()
1797 1797
1798 1798 # write changelog data to temp files so concurrent readers will not see
1799 1799 # an inconsistent view
1800 1800 cl = self.changelog
1801 1801 cl.delayupdate()
1802 1802 oldheads = len(cl.heads())
1803 1803
1804 1804 # pull off the changeset group
1805 1805 self.ui.status(_("adding changesets\n"))
1806 1806 cor = cl.count() - 1
1807 1807 chunkiter = changegroup.chunkiter(source)
1808 1808 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1809 1809 raise util.Abort(_("received changelog group is empty"))
1810 1810 cnr = cl.count() - 1
1811 1811 changesets = cnr - cor
1812 1812
1813 1813 # pull off the manifest group
1814 1814 self.ui.status(_("adding manifests\n"))
1815 1815 chunkiter = changegroup.chunkiter(source)
1816 1816 # no need to check for empty manifest group here:
1817 1817 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1818 1818 # no new manifest will be created and the manifest group will
1819 1819 # be empty during the pull
1820 1820 self.manifest.addgroup(chunkiter, revmap, tr)
1821 1821
1822 1822 # process the files
1823 1823 self.ui.status(_("adding file changes\n"))
1824 1824 while 1:
1825 1825 f = changegroup.getchunk(source)
1826 1826 if not f:
1827 1827 break
1828 1828 self.ui.debug(_("adding %s revisions\n") % f)
1829 1829 fl = self.file(f)
1830 1830 o = fl.count()
1831 1831 chunkiter = changegroup.chunkiter(source)
1832 1832 if fl.addgroup(chunkiter, revmap, tr) is None:
1833 1833 raise util.Abort(_("received file revlog group is empty"))
1834 1834 revisions += fl.count() - o
1835 1835 files += 1
1836 1836
1837 1837 # make changelog see real files again
1838 1838 cl.finalize(tr)
1839 1839
1840 1840 newheads = len(self.changelog.heads())
1841 1841 heads = ""
1842 1842 if oldheads and newheads != oldheads:
1843 1843 heads = _(" (%+d heads)") % (newheads - oldheads)
1844 1844
1845 1845 self.ui.status(_("added %d changesets"
1846 1846 " with %d changes to %d files%s\n")
1847 1847 % (changesets, revisions, files, heads))
1848 1848
1849 1849 if changesets > 0:
1850 1850 self.hook('pretxnchangegroup', throw=True,
1851 1851 node=hex(self.changelog.node(cor+1)), source=srctype,
1852 1852 url=url)
1853 1853
1854 1854 tr.close()
1855 1855
1856 1856 if changesets > 0:
1857 1857 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1858 1858 source=srctype, url=url)
1859 1859
1860 1860 for i in xrange(cor + 1, cnr + 1):
1861 1861 self.hook("incoming", node=hex(self.changelog.node(i)),
1862 1862 source=srctype, url=url)
1863 1863
1864 1864 # never return 0 here:
1865 1865 if newheads < oldheads:
1866 1866 return newheads - oldheads - 1
1867 1867 else:
1868 1868 return newheads - oldheads + 1
1869 1869
1870 1870
1871 1871 def stream_in(self, remote):
1872 1872 fp = remote.stream_out()
1873 1873 l = fp.readline()
1874 1874 try:
1875 1875 resp = int(l)
1876 1876 except ValueError:
1877 1877 raise util.UnexpectedOutput(
1878 1878 _('Unexpected response from remote server:'), l)
1879 1879 if resp == 1:
1880 1880 raise util.Abort(_('operation forbidden by server'))
1881 1881 elif resp == 2:
1882 1882 raise util.Abort(_('locking the remote repository failed'))
1883 1883 elif resp != 0:
1884 1884 raise util.Abort(_('the server sent an unknown error code'))
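        # The rest of the stream, as consumed below: one line with
        # "<total_files> <total_bytes>", then for each file a header line of
        # "<name>\0<size>" followed by exactly <size> bytes of raw store data.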
1885 1885 self.ui.status(_('streaming all changes\n'))
1886 1886 l = fp.readline()
1887 1887 try:
1888 1888 total_files, total_bytes = map(int, l.split(' ', 1))
1889 1889 except (ValueError, TypeError):
1890 1890 raise util.UnexpectedOutput(
1891 1891 _('Unexpected response from remote server:'), l)
1892 1892 self.ui.status(_('%d files to transfer, %s of data\n') %
1893 1893 (total_files, util.bytecount(total_bytes)))
1894 1894 start = time.time()
1895 1895 for i in xrange(total_files):
1896 1896 # XXX doesn't support '\n' or '\r' in filenames
1897 1897 l = fp.readline()
1898 1898 try:
1899 1899 name, size = l.split('\0', 1)
1900 1900 size = int(size)
1901 1901 except (ValueError, TypeError):
1902 1902 raise util.UnexpectedOutput(
1903 1903 _('Unexpected response from remote server:'), l)
1904 1904 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1905 1905 ofp = self.sopener(name, 'w')
1906 1906 for chunk in util.filechunkiter(fp, limit=size):
1907 1907 ofp.write(chunk)
1908 1908 ofp.close()
1909 1909 elapsed = time.time() - start
1910 1910 if elapsed <= 0:
1911 1911 elapsed = 0.001
1912 1912 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1913 1913 (util.bytecount(total_bytes), elapsed,
1914 1914 util.bytecount(total_bytes / elapsed)))
1915 1915 self.invalidate()
1916 1916 return len(self.heads()) + 1
1917 1917
1918 1918 def clone(self, remote, heads=[], stream=False):
1919 1919 '''clone remote repository.
1920 1920
1921 1921 keyword arguments:
1922 1922 heads: list of revs to clone (forces use of pull)
1923 1923 stream: use streaming clone if possible'''
1924 1924
1925 1925 # now, all clients that can request uncompressed clones can
1926 1926 # read repo formats supported by all servers that can serve
1927 1927 # them.
1928 1928
1929 1929 # if revlog format changes, client will have to check version
1930 1930 # and format flags on "stream" capability, and use
1931 1931 # uncompressed only if compatible.
1932 1932
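        # Streaming copies whole store files, so it cannot be limited to a
        # subset of heads; an explicit heads list falls back to a regular pull.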
1933 1933 if stream and not heads and remote.capable('stream'):
1934 1934 return self.stream_in(remote)
1935 1935 return self.pull(remote, heads)
1936 1936
1937 1937 # used to avoid circular references so destructors work
1938 1938 def aftertrans(files):
1939 1939 renamefiles = [tuple(t) for t in files]
1940 1940 def a():
1941 1941 for src, dest in renamefiles:
1942 1942 util.rename(src, dest)
1943 1943 return a
1944 1944
1945 1945 def instance(ui, path, create):
1946 1946 return localrepository(ui, util.drop_scheme('file', path), create)
1947 1947
1948 1948 def islocal(path):
1949 1949 return True
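
A minimal usage sketch (illustrative only, assuming the hg.repository() entry point of the same Mercurial release; the repository path is hypothetical):

    from mercurial import ui, hg

    # hg.repository() consults islocal()/instance() above to open a plain
    # filesystem path as a localrepository
    repo = hg.repository(ui.ui(), '/path/to/repo')
    print len(repo.heads())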