##// END OF EJS Templates
Fix bad behaviour when specifying an invalid date (issue700)...
Thomas Arendsen Hein -
r6139:989467e8 default
parent child Browse files
Show More
@@ -1,95 +1,99 b''
1 1 # fetch.py - pull and merge remote changes
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 from mercurial.node import *
10 10 from mercurial import commands, cmdutil, hg, node, util
11 11
12 12 def fetch(ui, repo, source='default', **opts):
13 13 '''Pull changes from a remote repository, merge new changes if needed.
14 14
15 15 This finds all changes from the repository at the specified path
16 16 or URL and adds them to the local repository.
17 17
18 18 If the pulled changes add a new head, the head is automatically
19 19 merged, and the result of the merge is committed. Otherwise, the
20 20 working directory is updated.'''
21 21
22 22 def postincoming(other, modheads):
23 23 if modheads == 0:
24 24 return 0
25 25 if modheads == 1:
26 26 return hg.clean(repo, repo.changelog.tip())
27 27 newheads = repo.heads(parent)
28 28 newchildren = [n for n in repo.heads(parent) if n != parent]
29 29 newparent = parent
30 30 if newchildren:
31 31 newparent = newchildren[0]
32 32 hg.clean(repo, newparent)
33 33 newheads = [n for n in repo.heads() if n != newparent]
34 34 err = False
35 35 if newheads:
36 36 ui.status(_('merging with new head %d:%s\n') %
37 37 (repo.changelog.rev(newheads[0]), short(newheads[0])))
38 38 err = hg.merge(repo, newheads[0], remind=False)
39 39 if not err and len(newheads) > 1:
40 40 ui.status(_('not merging with %d other new heads '
41 41 '(use "hg heads" and "hg merge" to merge them)') %
42 42 (len(newheads) - 1))
43 43 if not err:
44 44 mod, add, rem = repo.status()[:3]
45 45 message = (cmdutil.logmessage(opts) or
46 46 (_('Automated merge with %s') %
47 47 util.removeauth(other.url())))
48 48 n = repo.commit(mod + add + rem, message,
49 49 opts['user'], opts['date'],
50 50 force_editor=opts.get('force_editor'))
51 51 ui.status(_('new changeset %d:%s merges remote changes '
52 52 'with local\n') % (repo.changelog.rev(n),
53 53 short(n)))
54 54 def pull():
55 55 cmdutil.setremoteconfig(ui, opts)
56 56
57 57 other = hg.repository(ui, ui.expandpath(source))
58 58 ui.status(_('pulling from %s\n') %
59 59 util.hidepassword(ui.expandpath(source)))
60 60 revs = None
61 61 if opts['rev'] and not other.local():
62 62 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
63 63 elif opts['rev']:
64 64 revs = [other.lookup(rev) for rev in opts['rev']]
65 65 modheads = repo.pull(other, heads=revs)
66 66 return postincoming(other, modheads)
67 67
68 date = opts.get('date')
69 if date:
70 opts['date'] = util.parsedate(date)
71
68 72 parent, p2 = repo.dirstate.parents()
69 73 if parent != repo.changelog.tip():
70 74 raise util.Abort(_('working dir not at tip '
71 75 '(use "hg update" to check out tip)'))
72 76 if p2 != nullid:
73 77 raise util.Abort(_('outstanding uncommitted merge'))
74 78 wlock = lock = None
75 79 try:
76 80 wlock = repo.wlock()
77 81 lock = repo.lock()
78 82 mod, add, rem = repo.status()[:3]
79 83 if mod or add or rem:
80 84 raise util.Abort(_('outstanding uncommitted changes'))
81 85 if len(repo.heads()) > 1:
82 86 raise util.Abort(_('multiple heads in this repository '
83 87 '(use "hg heads" and "hg merge" to merge)'))
84 88 return pull()
85 89 finally:
86 90 del lock, wlock
87 91
88 92 cmdtable = {
89 93 'fetch':
90 94 (fetch,
91 95 [('r', 'rev', [], _('a specific revision you would like to pull')),
92 96 ('f', 'force-editor', None, _('edit commit message')),
93 97 ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
94 98 _('hg fetch [SOURCE]')),
95 99 }
@@ -1,279 +1,284 b''
1 1 # GnuPG signing extension for Mercurial
2 2 #
3 3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, tempfile, binascii
9 9 from mercurial import util, commands
10 10 from mercurial import node as hgnode
11 11 from mercurial.i18n import _
12 12
13 13 class gpg:
14 14 def __init__(self, path, key=None):
15 15 self.path = path
16 16 self.key = (key and " --local-user \"%s\"" % key) or ""
17 17
18 18 def sign(self, data):
19 19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 20 return util.filter(data, gpgcmd)
21 21
22 22 def verify(self, data, sig):
23 23         """ returns the good and bad signatures"""
24 24 sigfile = datafile = None
25 25 try:
26 26 # create temporary files
27 27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 28 fp = os.fdopen(fd, 'wb')
29 29 fp.write(sig)
30 30 fp.close()
31 31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 32 fp = os.fdopen(fd, 'wb')
33 33 fp.write(data)
34 34 fp.close()
35 35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 37 ret = util.filter("", gpgcmd)
38 38 finally:
39 39 for f in (sigfile, datafile):
40 40 try:
41 41 if f: os.unlink(f)
42 42 except: pass
43 43 keys = []
44 44 key, fingerprint = None, None
45 45 err = ""
46 46 for l in ret.splitlines():
47 47 # see DETAILS in the gnupg documentation
48 48 # filter the logger output
49 49 if not l.startswith("[GNUPG:]"):
50 50 continue
51 51 l = l[9:]
52 52 if l.startswith("ERRSIG"):
53 53 err = _("error while verifying signature")
54 54 break
55 55 elif l.startswith("VALIDSIG"):
56 56 # fingerprint of the primary key
57 57 fingerprint = l.split()[10]
58 58 elif (l.startswith("GOODSIG") or
59 59 l.startswith("EXPSIG") or
60 60 l.startswith("EXPKEYSIG") or
61 61 l.startswith("BADSIG")):
62 62 if key is not None:
63 63 keys.append(key + [fingerprint])
64 64 key = l.split(" ", 2)
65 65 fingerprint = None
66 66 if err:
67 67 return err, []
68 68 if key is not None:
69 69 keys.append(key + [fingerprint])
70 70 return err, keys
71 71
72 72 def newgpg(ui, **opts):
73 73 """create a new gpg instance"""
74 74 gpgpath = ui.config("gpg", "cmd", "gpg")
75 75 gpgkey = opts.get('key')
76 76 if not gpgkey:
77 77 gpgkey = ui.config("gpg", "key", None)
78 78 return gpg(gpgpath, gpgkey)
79 79
80 80 def sigwalk(repo):
81 81 """
82 82     walk over every sig, yielding a pair
83 83 ((node, version, sig), (filename, linenumber))
84 84 """
85 85 def parsefile(fileiter, context):
86 86 ln = 1
87 87 for l in fileiter:
88 88 if not l:
89 89 continue
90 90 yield (l.split(" ", 2), (context, ln))
91 91 ln +=1
92 92
93 93 fl = repo.file(".hgsigs")
94 94 h = fl.heads()
95 95 h.reverse()
96 96 # read the heads
97 97 for r in h:
98 98 fn = ".hgsigs|%s" % hgnode.short(r)
99 99 for item in parsefile(fl.read(r).splitlines(), fn):
100 100 yield item
101 101 try:
102 102 # read local signatures
103 103 fn = "localsigs"
104 104 for item in parsefile(repo.opener(fn), fn):
105 105 yield item
106 106 except IOError:
107 107 pass
108 108
109 109 def getkeys(ui, repo, mygpg, sigdata, context):
110 110     """get the keys that signed the data"""
111 111 fn, ln = context
112 112 node, version, sig = sigdata
113 113 prefix = "%s:%d" % (fn, ln)
114 114 node = hgnode.bin(node)
115 115
116 116 data = node2txt(repo, node, version)
117 117 sig = binascii.a2b_base64(sig)
118 118 err, keys = mygpg.verify(data, sig)
119 119 if err:
120 120 ui.warn("%s:%d %s\n" % (fn, ln , err))
121 121 return None
122 122
123 123 validkeys = []
124 124 # warn for expired key and/or sigs
125 125 for key in keys:
126 126 if key[0] == "BADSIG":
127 127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
128 128 continue
129 129 if key[0] == "EXPSIG":
130 130 ui.write(_("%s Note: Signature has expired"
131 131 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 132 elif key[0] == "EXPKEYSIG":
133 133 ui.write(_("%s Note: This key has expired"
134 134 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 135 validkeys.append((key[1], key[2], key[3]))
136 136 return validkeys
137 137
138 138 def sigs(ui, repo):
139 139 """list signed changesets"""
140 140 mygpg = newgpg(ui)
141 141 revs = {}
142 142
143 143 for data, context in sigwalk(repo):
144 144 node, version, sig = data
145 145 fn, ln = context
146 146 try:
147 147 n = repo.lookup(node)
148 148 except KeyError:
149 149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
150 150 continue
151 151 r = repo.changelog.rev(n)
152 152 keys = getkeys(ui, repo, mygpg, data, context)
153 153 if not keys:
154 154 continue
155 155 revs.setdefault(r, [])
156 156 revs[r].extend(keys)
157 157 nodes = list(revs)
158 158 nodes.reverse()
159 159 for rev in nodes:
160 160 for k in revs[rev]:
161 161 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
162 162 ui.write("%-30s %s\n" % (keystr(ui, k), r))
163 163
164 164 def check(ui, repo, rev):
165 165 """verify all the signatures there may be for a particular revision"""
166 166 mygpg = newgpg(ui)
167 167 rev = repo.lookup(rev)
168 168 hexrev = hgnode.hex(rev)
169 169 keys = []
170 170
171 171 for data, context in sigwalk(repo):
172 172 node, version, sig = data
173 173 if node == hexrev:
174 174 k = getkeys(ui, repo, mygpg, data, context)
175 175 if k:
176 176 keys.extend(k)
177 177
178 178 if not keys:
179 179 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
180 180 return
181 181
182 182 # print summary
183 183 ui.write("%s is signed by:\n" % hgnode.short(rev))
184 184 for key in keys:
185 185 ui.write(" %s\n" % keystr(ui, key))
186 186
187 187 def keystr(ui, key):
188 188 """associate a string to a key (username, comment)"""
189 189 keyid, user, fingerprint = key
190 190 comment = ui.config("gpg", fingerprint, None)
191 191 if comment:
192 192 return "%s (%s)" % (user, comment)
193 193 else:
194 194 return user
195 195
196 196 def sign(ui, repo, *revs, **opts):
197 197 """add a signature for the current or given revision
198 198
199 199 If no revision is given, the parent of the working directory is used,
200 200 or tip if no revision is checked out.
201 201 """
202 202
203 203 mygpg = newgpg(ui, **opts)
204 204 sigver = "0"
205 205 sigmessage = ""
206
207 date = opts.get('date')
208 if date:
209 opts['date'] = util.parsedate(date)
210
206 211 if revs:
207 212 nodes = [repo.lookup(n) for n in revs]
208 213 else:
209 214 nodes = [node for node in repo.dirstate.parents()
210 215 if node != hgnode.nullid]
211 216 if len(nodes) > 1:
212 217 raise util.Abort(_('uncommitted merge - please provide a '
213 218 'specific revision'))
214 219 if not nodes:
215 220 nodes = [repo.changelog.tip()]
216 221
217 222 for n in nodes:
218 223 hexnode = hgnode.hex(n)
219 224 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
220 225 hgnode.short(n)))
221 226 # build data
222 227 data = node2txt(repo, n, sigver)
223 228 sig = mygpg.sign(data)
224 229 if not sig:
225 230 raise util.Abort(_("Error while signing"))
226 231 sig = binascii.b2a_base64(sig)
227 232 sig = sig.replace("\n", "")
228 233 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
229 234
230 235 # write it
231 236 if opts['local']:
232 237 repo.opener("localsigs", "ab").write(sigmessage)
233 238 return
234 239
235 240 for x in repo.status()[:5]:
236 241 if ".hgsigs" in x and not opts["force"]:
237 242 raise util.Abort(_("working copy of .hgsigs is changed "
238 243 "(please commit .hgsigs manually "
239 244 "or use --force)"))
240 245
241 246 repo.wfile(".hgsigs", "ab").write(sigmessage)
242 247
243 248 if '.hgsigs' not in repo.dirstate:
244 249 repo.add([".hgsigs"])
245 250
246 251 if opts["no_commit"]:
247 252 return
248 253
249 254 message = opts['message']
250 255 if not message:
251 256 message = "\n".join([_("Added signature for changeset %s")
252 257 % hgnode.short(n)
253 258 for n in nodes])
254 259 try:
255 260 repo.commit([".hgsigs"], message, opts['user'], opts['date'])
256 261 except ValueError, inst:
257 262 raise util.Abort(str(inst))
258 263
259 264 def node2txt(repo, node, ver):
260 265 """map a manifest into some text"""
261 266 if ver == "0":
262 267 return "%s\n" % hgnode.hex(node)
263 268 else:
264 269 raise util.Abort(_("unknown signature version"))
265 270
266 271 cmdtable = {
267 272 "sign":
268 273 (sign,
269 274 [('l', 'local', None, _('make the signature local')),
270 275 ('f', 'force', None, _('sign even if the sigfile is modified')),
271 276 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
272 277 ('k', 'key', '', _('the key id to sign with')),
273 278 ('m', 'message', '', _('commit message')),
274 279 ] + commands.commitopts2,
275 280 _('hg sign [OPTION]... [REVISION]...')),
276 281 "sigcheck": (check, [], _('hg sigcheck REVISION')),
277 282 "sigs": (sigs, [], _('hg sigs')),
278 283 }
279 284
@@ -1,2347 +1,2351 b''
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 34 from mercurial import repair
35 35 import os, sys, re, errno
36 36
37 37 commands.norepo += " qclone"
38 38
39 39 # Patch names look like unix-file names.
40 40 # They must be joinable with queue directory and result in the patch path.
41 41 normname = util.normpath
42 42
43 43 class statusentry:
44 44 def __init__(self, rev, name=None):
45 45 if not name:
46 46 fields = rev.split(':', 1)
47 47 if len(fields) == 2:
48 48 self.rev, self.name = fields
49 49 else:
50 50 self.rev, self.name = None, None
51 51 else:
52 52 self.rev, self.name = rev, name
53 53
54 54 def __str__(self):
55 55 return self.rev + ':' + self.name
56 56
57 57 class queue:
58 58 def __init__(self, ui, path, patchdir=None):
59 59 self.basepath = path
60 60 self.path = patchdir or os.path.join(path, "patches")
61 61 self.opener = util.opener(self.path)
62 62 self.ui = ui
63 63 self.applied = []
64 64 self.full_series = []
65 65 self.applied_dirty = 0
66 66 self.series_dirty = 0
67 67 self.series_path = "series"
68 68 self.status_path = "status"
69 69 self.guards_path = "guards"
70 70 self.active_guards = None
71 71 self.guards_dirty = False
72 72 self._diffopts = None
73 73
74 74 if os.path.exists(self.join(self.series_path)):
75 75 self.full_series = self.opener(self.series_path).read().splitlines()
76 76 self.parse_series()
77 77
78 78 if os.path.exists(self.join(self.status_path)):
79 79 lines = self.opener(self.status_path).read().splitlines()
80 80 self.applied = [statusentry(l) for l in lines]
81 81
82 82 def diffopts(self):
83 83 if self._diffopts is None:
84 84 self._diffopts = patch.diffopts(self.ui)
85 85 return self._diffopts
86 86
87 87 def join(self, *p):
88 88 return os.path.join(self.path, *p)
89 89
90 90 def find_series(self, patch):
91 91 pre = re.compile("(\s*)([^#]+)")
92 92 index = 0
93 93 for l in self.full_series:
94 94 m = pre.match(l)
95 95 if m:
96 96 s = m.group(2)
97 97 s = s.rstrip()
98 98 if s == patch:
99 99 return index
100 100 index += 1
101 101 return None
102 102
103 103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104 104
105 105 def parse_series(self):
106 106 self.series = []
107 107 self.series_guards = []
108 108 for l in self.full_series:
109 109 h = l.find('#')
110 110 if h == -1:
111 111 patch = l
112 112 comment = ''
113 113 elif h == 0:
114 114 continue
115 115 else:
116 116 patch = l[:h]
117 117 comment = l[h:]
118 118 patch = patch.strip()
119 119 if patch:
120 120 if patch in self.series:
121 121 raise util.Abort(_('%s appears more than once in %s') %
122 122 (patch, self.join(self.series_path)))
123 123 self.series.append(patch)
124 124 self.series_guards.append(self.guard_re.findall(comment))
125 125
126 126 def check_guard(self, guard):
127 127 bad_chars = '# \t\r\n\f'
128 128 first = guard[0]
129 129 for c in '-+':
130 130 if first == c:
131 131 return (_('guard %r starts with invalid character: %r') %
132 132 (guard, c))
133 133 for c in bad_chars:
134 134 if c in guard:
135 135 return _('invalid character in guard %r: %r') % (guard, c)
136 136
137 137 def set_active(self, guards):
138 138 for guard in guards:
139 139 bad = self.check_guard(guard)
140 140 if bad:
141 141 raise util.Abort(bad)
142 142 guards = dict.fromkeys(guards).keys()
143 143 guards.sort()
144 144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 145 self.active_guards = guards
146 146 self.guards_dirty = True
147 147
148 148 def active(self):
149 149 if self.active_guards is None:
150 150 self.active_guards = []
151 151 try:
152 152 guards = self.opener(self.guards_path).read().split()
153 153 except IOError, err:
154 154 if err.errno != errno.ENOENT: raise
155 155 guards = []
156 156 for i, guard in enumerate(guards):
157 157 bad = self.check_guard(guard)
158 158 if bad:
159 159 self.ui.warn('%s:%d: %s\n' %
160 160 (self.join(self.guards_path), i + 1, bad))
161 161 else:
162 162 self.active_guards.append(guard)
163 163 return self.active_guards
164 164
165 165 def set_guards(self, idx, guards):
166 166 for g in guards:
167 167 if len(g) < 2:
168 168 raise util.Abort(_('guard %r too short') % g)
169 169 if g[0] not in '-+':
170 170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 171 bad = self.check_guard(g[1:])
172 172 if bad:
173 173 raise util.Abort(bad)
174 174 drop = self.guard_re.sub('', self.full_series[idx])
175 175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 176 self.parse_series()
177 177 self.series_dirty = True
178 178
179 179 def pushable(self, idx):
180 180 if isinstance(idx, str):
181 181 idx = self.series.index(idx)
182 182 patchguards = self.series_guards[idx]
183 183 if not patchguards:
184 184 return True, None
185 185 default = False
186 186 guards = self.active()
187 187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 188 if exactneg:
189 189 return False, exactneg[0]
190 190 pos = [g for g in patchguards if g[0] == '+']
191 191 exactpos = [g for g in pos if g[1:] in guards]
192 192 if pos:
193 193 if exactpos:
194 194 return True, exactpos[0]
195 195 return False, pos
196 196 return True, ''
197 197
198 198 def explain_pushable(self, idx, all_patches=False):
199 199 write = all_patches and self.ui.write or self.ui.warn
200 200 if all_patches or self.ui.verbose:
201 201 if isinstance(idx, str):
202 202 idx = self.series.index(idx)
203 203 pushable, why = self.pushable(idx)
204 204 if all_patches and pushable:
205 205 if why is None:
206 206 write(_('allowing %s - no guards in effect\n') %
207 207 self.series[idx])
208 208 else:
209 209 if not why:
210 210 write(_('allowing %s - no matching negative guards\n') %
211 211 self.series[idx])
212 212 else:
213 213 write(_('allowing %s - guarded by %r\n') %
214 214 (self.series[idx], why))
215 215 if not pushable:
216 216 if why:
217 217 write(_('skipping %s - guarded by %r\n') %
218 218 (self.series[idx], why))
219 219 else:
220 220 write(_('skipping %s - no matching guards\n') %
221 221 self.series[idx])
222 222
223 223 def save_dirty(self):
224 224 def write_list(items, path):
225 225 fp = self.opener(path, 'w')
226 226 for i in items:
227 227 fp.write("%s\n" % i)
228 228 fp.close()
229 229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232 232
233 233 def readheaders(self, patch):
234 234 def eatdiff(lines):
235 235 while lines:
236 236 l = lines[-1]
237 237 if (l.startswith("diff -") or
238 238 l.startswith("Index:") or
239 239 l.startswith("===========")):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243 def eatempty(lines):
244 244 while lines:
245 245 l = lines[-1]
246 246 if re.match('\s*$', l):
247 247 del lines[-1]
248 248 else:
249 249 break
250 250
251 251 pf = self.join(patch)
252 252 message = []
253 253 comments = []
254 254 user = None
255 255 date = None
256 256 format = None
257 257 subject = None
258 258 diffstart = 0
259 259
260 260 for line in file(pf):
261 261 line = line.rstrip()
262 262 if line.startswith('diff --git'):
263 263 diffstart = 2
264 264 break
265 265 if diffstart:
266 266 if line.startswith('+++ '):
267 267 diffstart = 2
268 268 break
269 269 if line.startswith("--- "):
270 270 diffstart = 1
271 271 continue
272 272 elif format == "hgpatch":
273 273 # parse values when importing the result of an hg export
274 274 if line.startswith("# User "):
275 275 user = line[7:]
276 276 elif line.startswith("# Date "):
277 277 date = line[7:]
278 278 elif not line.startswith("# ") and line:
279 279 message.append(line)
280 280 format = None
281 281 elif line == '# HG changeset patch':
282 282 format = "hgpatch"
283 283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 284 line.startswith("subject: "))):
285 285 subject = line[9:]
286 286 format = "tag"
287 287 elif (format != "tagdone" and (line.startswith("From: ") or
288 288 line.startswith("from: "))):
289 289 user = line[6:]
290 290 format = "tag"
291 291 elif format == "tag" and line == "":
292 292 # when looking for tags (subject: from: etc) they
293 293 # end once you find a blank line in the source
294 294 format = "tagdone"
295 295 elif message or line:
296 296 message.append(line)
297 297 comments.append(line)
298 298
299 299 eatdiff(message)
300 300 eatdiff(comments)
301 301 eatempty(message)
302 302 eatempty(comments)
303 303
304 304 # make sure message isn't empty
305 305 if format and format.startswith("tag") and subject:
306 306 message.insert(0, "")
307 307 message.insert(0, subject)
308 308 return (message, comments, user, date, diffstart > 1)
309 309
310 310 def removeundo(self, repo):
311 311 undo = repo.sjoin('undo')
312 312 if not os.path.exists(undo):
313 313 return
314 314 try:
315 315 os.unlink(undo)
316 316 except OSError, inst:
317 317 self.ui.warn('error removing undo: %s\n' % str(inst))
318 318
319 319 def printdiff(self, repo, node1, node2=None, files=None,
320 320 fp=None, changes=None, opts={}):
321 321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322 322
323 323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 324 fp=fp, changes=changes, opts=self.diffopts())
325 325
326 326 def mergeone(self, repo, mergeq, head, patch, rev):
327 327 # first try just applying the patch
328 328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 329 strict=True, merge=rev)
330 330
331 331 if err == 0:
332 332 return (err, n)
333 333
334 334 if n is None:
335 335 raise util.Abort(_("apply failed for patch %s") % patch)
336 336
337 337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 338
339 339 # apply failed, strip away that rev and merge.
340 340 hg.clean(repo, head)
341 341 self.strip(repo, n, update=False, backup='strip')
342 342
343 343 ctx = repo.changectx(rev)
344 344 ret = hg.merge(repo, rev)
345 345 if ret:
346 346 raise util.Abort(_("update returned %d") % ret)
347 347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
348 348 if n == None:
349 349 raise util.Abort(_("repo commit failed"))
350 350 try:
351 351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 352 except:
353 353 raise util.Abort(_("unable to read %s") % patch)
354 354
355 355 patchf = self.opener(patch, "w")
356 356 if comments:
357 357 comments = "\n".join(comments) + '\n\n'
358 358 patchf.write(comments)
359 359 self.printdiff(repo, head, n, fp=patchf)
360 360 patchf.close()
361 361 self.removeundo(repo)
362 362 return (0, n)
363 363
364 364 def qparents(self, repo, rev=None):
365 365 if rev is None:
366 366 (p1, p2) = repo.dirstate.parents()
367 367 if p2 == revlog.nullid:
368 368 return p1
369 369 if len(self.applied) == 0:
370 370 return None
371 371 return revlog.bin(self.applied[-1].rev)
372 372 pp = repo.changelog.parents(rev)
373 373 if pp[1] != revlog.nullid:
374 374 arevs = [ x.rev for x in self.applied ]
375 375 p0 = revlog.hex(pp[0])
376 376 p1 = revlog.hex(pp[1])
377 377 if p0 in arevs:
378 378 return pp[0]
379 379 if p1 in arevs:
380 380 return pp[1]
381 381 return pp[0]
382 382
383 383 def mergepatch(self, repo, mergeq, series):
384 384 if len(self.applied) == 0:
385 385 # each of the patches merged in will have two parents. This
386 386 # can confuse the qrefresh, qdiff, and strip code because it
387 387 # needs to know which parent is actually in the patch queue.
388 388 # so, we insert a merge marker with only one parent. This way
389 389 # the first patch in the queue is never a merge patch
390 390 #
391 391 pname = ".hg.patches.merge.marker"
392 392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
393 393 self.removeundo(repo)
394 394 self.applied.append(statusentry(revlog.hex(n), pname))
395 395 self.applied_dirty = 1
396 396
397 397 head = self.qparents(repo)
398 398
399 399 for patch in series:
400 400 patch = mergeq.lookup(patch, strict=True)
401 401 if not patch:
402 402 self.ui.warn("patch %s does not exist\n" % patch)
403 403 return (1, None)
404 404 pushable, reason = self.pushable(patch)
405 405 if not pushable:
406 406 self.explain_pushable(patch, all_patches=True)
407 407 continue
408 408 info = mergeq.isapplied(patch)
409 409 if not info:
410 410 self.ui.warn("patch %s is not applied\n" % patch)
411 411 return (1, None)
412 412 rev = revlog.bin(info[1])
413 413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
414 414 if head:
415 415 self.applied.append(statusentry(revlog.hex(head), patch))
416 416 self.applied_dirty = 1
417 417 if err:
418 418 return (err, head)
419 419 self.save_dirty()
420 420 return (0, head)
421 421
422 422 def patch(self, repo, patchfile):
423 423 '''Apply patchfile to the working directory.
424 424 patchfile: file name of patch'''
425 425 files = {}
426 426 try:
427 427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
428 428 files=files)
429 429 except Exception, inst:
430 430 self.ui.note(str(inst) + '\n')
431 431 if not self.ui.verbose:
432 432 self.ui.warn("patch failed, unable to continue (try -v)\n")
433 433 return (False, files, False)
434 434
435 435 return (True, files, fuzz)
436 436
437 437 def apply(self, repo, series, list=False, update_status=True,
438 438 strict=False, patchdir=None, merge=None, all_files={}):
439 439 wlock = lock = tr = None
440 440 try:
441 441 wlock = repo.wlock()
442 442 lock = repo.lock()
443 443 tr = repo.transaction()
444 444 try:
445 445 ret = self._apply(repo, series, list, update_status,
446 446 strict, patchdir, merge, all_files=all_files)
447 447 tr.close()
448 448 self.save_dirty()
449 449 return ret
450 450 except:
451 451 try:
452 452 tr.abort()
453 453 finally:
454 454 repo.invalidate()
455 455 repo.dirstate.invalidate()
456 456 raise
457 457 finally:
458 458 del tr, lock, wlock
459 459 self.removeundo(repo)
460 460
461 461 def _apply(self, repo, series, list=False, update_status=True,
462 462 strict=False, patchdir=None, merge=None, all_files={}):
463 463 # TODO unify with commands.py
464 464 if not patchdir:
465 465 patchdir = self.path
466 466 err = 0
467 467 n = None
468 468 for patchname in series:
469 469 pushable, reason = self.pushable(patchname)
470 470 if not pushable:
471 471 self.explain_pushable(patchname, all_patches=True)
472 472 continue
473 473 self.ui.warn("applying %s\n" % patchname)
474 474 pf = os.path.join(patchdir, patchname)
475 475
476 476 try:
477 477 message, comments, user, date, patchfound = self.readheaders(patchname)
478 478 except:
479 479 self.ui.warn("Unable to read %s\n" % patchname)
480 480 err = 1
481 481 break
482 482
483 483 if not message:
484 484 message = "imported patch %s\n" % patchname
485 485 else:
486 486 if list:
487 487 message.append("\nimported patch %s" % patchname)
488 488 message = '\n'.join(message)
489 489
490 490 (patcherr, files, fuzz) = self.patch(repo, pf)
491 491 all_files.update(files)
492 492 patcherr = not patcherr
493 493
494 494 if merge and files:
495 495 # Mark as removed/merged and update dirstate parent info
496 496 removed = []
497 497 merged = []
498 498 for f in files:
499 499 if os.path.exists(repo.wjoin(f)):
500 500 merged.append(f)
501 501 else:
502 502 removed.append(f)
503 503 for f in removed:
504 504 repo.dirstate.remove(f)
505 505 for f in merged:
506 506 repo.dirstate.merge(f)
507 507 p1, p2 = repo.dirstate.parents()
508 508 repo.dirstate.setparents(p1, merge)
509 509 files = patch.updatedir(self.ui, repo, files)
510 510 n = repo.commit(files, message, user, date, force=1)
511 511
512 512 if n == None:
513 513 raise util.Abort(_("repo commit failed"))
514 514
515 515 if update_status:
516 516 self.applied.append(statusentry(revlog.hex(n), patchname))
517 517
518 518 if patcherr:
519 519 if not patchfound:
520 520 self.ui.warn("patch %s is empty\n" % patchname)
521 521 err = 0
522 522 else:
523 523 self.ui.warn("patch failed, rejects left in working dir\n")
524 524 err = 1
525 525 break
526 526
527 527 if fuzz and strict:
528 528 self.ui.warn("fuzz found when applying patch, stopping\n")
529 529 err = 1
530 530 break
531 531 return (err, n)
532 532
533 533 def delete(self, repo, patches, opts):
534 534 if not patches and not opts.get('rev'):
535 535 raise util.Abort(_('qdelete requires at least one revision or '
536 536 'patch name'))
537 537
538 538 realpatches = []
539 539 for patch in patches:
540 540 patch = self.lookup(patch, strict=True)
541 541 info = self.isapplied(patch)
542 542 if info:
543 543 raise util.Abort(_("cannot delete applied patch %s") % patch)
544 544 if patch not in self.series:
545 545 raise util.Abort(_("patch %s not in series file") % patch)
546 546 realpatches.append(patch)
547 547
548 548 appliedbase = 0
549 549 if opts.get('rev'):
550 550 if not self.applied:
551 551 raise util.Abort(_('no patches applied'))
552 552 revs = cmdutil.revrange(repo, opts['rev'])
553 553 if len(revs) > 1 and revs[0] > revs[1]:
554 554 revs.reverse()
555 555 for rev in revs:
556 556 if appliedbase >= len(self.applied):
557 557 raise util.Abort(_("revision %d is not managed") % rev)
558 558
559 559 base = revlog.bin(self.applied[appliedbase].rev)
560 560 node = repo.changelog.node(rev)
561 561 if node != base:
562 562 raise util.Abort(_("cannot delete revision %d above "
563 563 "applied patches") % rev)
564 564 realpatches.append(self.applied[appliedbase].name)
565 565 appliedbase += 1
566 566
567 567 if not opts.get('keep'):
568 568 r = self.qrepo()
569 569 if r:
570 570 r.remove(realpatches, True)
571 571 else:
572 572 for p in realpatches:
573 573 os.unlink(self.join(p))
574 574
575 575 if appliedbase:
576 576 del self.applied[:appliedbase]
577 577 self.applied_dirty = 1
578 578 indices = [self.find_series(p) for p in realpatches]
579 579 indices.sort()
580 580 for i in indices[-1::-1]:
581 581 del self.full_series[i]
582 582 self.parse_series()
583 583 self.series_dirty = 1
584 584
585 585 def check_toppatch(self, repo):
586 586 if len(self.applied) > 0:
587 587 top = revlog.bin(self.applied[-1].rev)
588 588 pp = repo.dirstate.parents()
589 589 if top not in pp:
590 590 raise util.Abort(_("working directory revision is not qtip"))
591 591 return top
592 592 return None
593 593 def check_localchanges(self, repo, force=False, refresh=True):
594 594 m, a, r, d = repo.status()[:4]
595 595 if m or a or r or d:
596 596 if not force:
597 597 if refresh:
598 598 raise util.Abort(_("local changes found, refresh first"))
599 599 else:
600 600 raise util.Abort(_("local changes found"))
601 601 return m, a, r, d
602 602
603 603 _reserved = ('series', 'status', 'guards')
604 604 def check_reserved_name(self, name):
605 605 if (name in self._reserved or name.startswith('.hg')
606 606 or name.startswith('.mq')):
607 607 raise util.Abort(_('"%s" cannot be used as the name of a patch')
608 608 % name)
609 609
    def new(self, repo, patch, *pats, **opts):
        """Create a new patch on top of the applied stack.

        Commits the current (or matched) local changes as the new patch,
        records it in the series/status files, and writes the patch file
        with optional user/date headers.
        """
        msg = opts.get('msg')
        force = opts.get('force')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            # validate and normalize to (unixtime, offset) early, so an
            # invalid date aborts before anything is committed (issue700)
            date = util.parsedate(date)
        self.check_reserved_name(patch)
        if os.path.exists(self.join(patch)):
            raise util.Abort(_('patch "%s" already exists') % patch)
        if opts.get('include') or opts.get('exclude') or pats:
            fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
            m, a, r, d = repo.status(files=fns, match=match)[:4]
        else:
            m, a, r, d = self.check_localchanges(repo, force)
            fns, match, anypats = cmdutil.matchpats(repo, m + a + r)
        commitfiles = m + a + r
        self.check_toppatch(repo)
        wlock = repo.wlock()
        try:
            insert = self.full_series_end()
            commitmsg = msg and msg or ("[mq]: %s" % patch)
            n = repo.commit(commitfiles, commitmsg, user, date, match=match, force=True)
            if n == None:
                raise util.Abort(_("repo commit failed"))
            self.full_series[insert:insert] = [patch]
            self.applied.append(statusentry(revlog.hex(n), patch))
            self.parse_series()
            self.series_dirty = 1
            self.applied_dirty = 1
            # write the patch file header; a date forces full HG header style
            p = self.opener(patch, "w")
            if date:
                p.write("# HG changeset patch\n")
                if user:
                    p.write("# User " + user + "\n")
                # date is a (unixtime, offset) tuple after parsedate
                p.write("# Date %d %d\n" % date)
                p.write("\n")
            elif user:
                p.write("From: " + user + "\n")
                p.write("\n")
            if msg:
                msg = msg + "\n"
                p.write(msg)
            p.close()
            wlock = None
            r = self.qrepo()
            if r: r.add([patch])
            if commitfiles:
                self.refresh(repo, short=True, git=opts.get('git'))
            self.removeundo(repo)
        finally:
            del wlock
660 662
    def strip(self, repo, rev, update=True, backup="all"):
        """Remove ``rev`` and its descendants from the repository.

        With ``update`` the working directory is first moved to a parent
        of ``rev`` outside the stripped set.  ``backup`` is forwarded to
        repair.strip.
        """
        wlock = lock = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()

            if update:
                self.check_localchanges(repo, refresh=False)
                urev = self.qparents(repo, rev)
                hg.clean(repo, urev)
                repo.dirstate.write()

            self.removeundo(repo)
            repair.strip(self.ui, repo, rev, backup)
            # strip may have unbundled a set of backed up revisions after
            # the actual strip
            self.removeundo(repo)
        finally:
            del lock, wlock
680 682
681 683 def isapplied(self, patch):
682 684 """returns (index, rev, patch)"""
683 685 for i in xrange(len(self.applied)):
684 686 a = self.applied[i]
685 687 if a.name == patch:
686 688 return (i, a.rev, a.name)
687 689 return None
688 690
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve a user-supplied patch name to a series entry, or abort."""
        patch = patch and str(patch)

        def partial_name(s):
            # exact name, then unique substring, then qtip/qbase aliases
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if len(self.series) > 0 and len(self.applied) > 0:
                if s == 'qtip':
                    return self.series[self.series_end(True)-1]
                if s == 'qbase':
                    return self.series[0]
            return None
        if patch == None:
            return None

        # we don't want to return a partial match until we make
        # sure the file name passed in does not exist (checked below)
        res = partial_name(patch)
        if res and res == patch:
            return res

        if not os.path.isfile(self.join(patch)):
            # bare number: positional index into the series
            try:
                sno = int(patch)
            except(ValueError, OverflowError):
                pass
            else:
                if sno < len(self.series):
                    return self.series[sno]
        if not strict:
            # return any partial match made above
            if res:
                return res
            # name-minus-offset: count backwards from the matched patch
            minus = patch.rfind('-')
            if minus >= 0:
                res = partial_name(patch[:minus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[minus+1:] or 1)
                    except(ValueError, OverflowError):
                        pass
                    else:
                        if i - off >= 0:
                            return self.series[i - off]
            # name-plus-offset: count forwards from the matched patch
            plus = patch.rfind('+')
            if plus >= 0:
                res = partial_name(patch[:plus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[plus+1:] or 1)
                    except(ValueError, OverflowError):
                        pass
                    else:
                        if i + off < len(self.series):
                            return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)
761 763
    def push(self, repo, patch=None, force=False, list=False,
             mergeq=None):
        """Apply the next unapplied patch, or all patches up to ``patch``.

        Returns the apply status (truthy on failure); cleans up the
        working directory if applying raises.
        """
        wlock = repo.wlock()
        try:
            patch = self.lookup(patch)
            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                info = self.isapplied(patch)
                if info:
                    if info[0] < len(self.applied) - 1:
                        raise util.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                    if info[0] < len(self.series) - 1:
                        self.ui.warn(
                            _('qpush: %s is already at the top\n') % patch)
                    else:
                        self.ui.warn(_('all patches are currently applied\n'))
                    return

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            if self.series_end() == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force:
                self.check_localchanges(repo)

            self.applied_dirty = 1;
            start = self.series_end()
            if start > 0:
                self.check_toppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1
            s = self.series[start:end]
            all_files = {}
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files)
            except:
                # restore the working directory to its pre-push state
                self.ui.warn(_('cleaning up working directory...'))
                node = repo.dirstate.parents()[0]
                hg.revert(repo, node, None)
                unknown = repo.status()[4]
                # only remove unknown files that we know we touched or
                # created while patching
                for f in unknown:
                    if f in all_files:
                        util.unlink(repo.wjoin(f))
                self.ui.warn(_('done\n'))
                raise
            top = self.applied[-1].name
            if ret[0]:
                self.ui.write(
                    "Errors during apply, please fix and refresh %s\n" % top)
            else:
                self.ui.write("Now at: %s\n" % top)
            return ret[0]
        finally:
            del wlock
832 834
    def pop(self, repo, patch=None, force=False, update=True, all=False):
        """Unapply the top patch, patches down to ``patch``, or with
        ``all`` every applied patch; strips the popped revisions."""
        def getfile(f, rev, flags):
            # restore f to its content at rev
            t = repo.file(f).read(rev)
            repo.wwrite(f, t, flags)

        wlock = repo.wlock()
        try:
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                    info = self.isapplied(patch)
                if not info:
                    raise util.Abort(_("patch %s is not applied") % patch)

            if len(self.applied) == 0:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            if not update:
                # if a parent of the working dir is about to be popped,
                # the dirstate must be updated anyway
                parents = repo.dirstate.parents()
                rr = [ revlog.bin(x.rev) for x in self.applied ]
                for p in parents:
                    if p in rr:
                        self.ui.warn("qpop: forcing dirstate update\n")
                        update = True

            if not force and update:
                self.check_localchanges(repo)

            self.applied_dirty = 1;
            end = len(self.applied)
            if not patch:
                if all:
                    popi = 0
                else:
                    popi = len(self.applied) - 1
            else:
                popi = info[0] + 1
                if popi >= end:
                    self.ui.warn("qpop: %s is already at the top\n" % patch)
                    return
            info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]

            start = info[0]
            rev = revlog.bin(info[1])

            if update:
                top = self.check_toppatch(repo)

            if repo.changelog.heads(rev) != [revlog.bin(self.applied[-1].rev)]:
                raise util.Abort("popping would remove a revision not "
                                 "managed by this patch queue")

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                changes = repo.changelog.read(qp)
                mmap = repo.manifest.read(changes[0])
                m, a, r, d, u = repo.status(qp, top)[:5]
                if d:
                    raise util.Abort("deletions found between repo revs")
                for f in m:
                    getfile(f, mmap[f], mmap.flags(f))
                for f in r:
                    getfile(f, mmap[f], mmap.flags(f))
                for f in m + r:
                    repo.dirstate.normal(f)
                for f in a:
                    # files added by the popped patches disappear
                    try:
                        os.unlink(repo.wjoin(f))
                    except OSError, e:
                        if e.errno != errno.ENOENT:
                            raise
                    try: os.removedirs(os.path.dirname(repo.wjoin(f)))
                    except: pass
                    repo.dirstate.forget(f)
                repo.dirstate.setparents(qp, revlog.nullid)
            del self.applied[start:end]
            self.strip(repo, rev, update=False, backup='strip')
            if len(self.applied):
                self.ui.write("Now at: %s\n" % self.applied[-1].name)
            else:
                self.ui.write("Patch queue now empty\n")
        finally:
            del wlock
923 925
924 926 def diff(self, repo, pats, opts):
925 927 top = self.check_toppatch(repo)
926 928 if not top:
927 929 self.ui.write("No patches applied\n")
928 930 return
929 931 qp = self.qparents(repo, top)
930 932 if opts.get('git'):
931 933 self.diffopts().git = True
932 934 self.printdiff(repo, qp, files=pats, opts=opts)
933 935
    def refresh(self, repo, pats=None, **opts):
        """Fold the current working-directory changes into the top patch.

        Rewrites the top patch's file and recommits it; supports updating
        the message, user and date headers.
        """
        if len(self.applied) == 0:
            self.ui.write("No patches applied\n")
            return 1
        newdate = opts.get('date')
        if newdate:
            # validate early so a bad date aborts before any rewriting
            # (issue700); keep the "unixtime offset" string form for headers
            newdate = '%d %d' % util.parsedate(newdate)
        wlock = repo.wlock()
        try:
            self.check_toppatch(repo)
            (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
            top = revlog.bin(top)
            if repo.changelog.heads(top) != [top]:
                raise util.Abort("cannot refresh a revision with children")
            cparents = repo.changelog.parents(top)
            patchparent = self.qparents(repo, top)
            message, comments, user, date, patchfound = self.readheaders(patchfn)

            patchf = self.opener(patchfn, 'r+')

            # if the patch was a git patch, refresh it as a git patch
            for line in patchf:
                if line.startswith('diff --git'):
                    self.diffopts().git = True
                    break

            msg = opts.get('msg', '').rstrip()
            if msg and comments:
                # Remove existing message, keeping the rest of the comments
                # fields.
                # If comments contains 'subject: ', message will prepend
                # the field and a blank line.
                if message:
                    subj = 'subject: ' + message[0].lower()
                    for i in xrange(len(comments)):
                        if subj == comments[i].lower():
                            del comments[i]
                            message = message[2:]
                            break
                ci = 0
                for mi in xrange(len(message)):
                    while message[mi] != comments[ci]:
                        ci += 1
                    del comments[ci]

            def setheaderfield(comments, prefixes, new):
                # Update all references to a field in the patch header.
                # If none found, add it email style.
                res = False
                for prefix in prefixes:
                    for i in xrange(len(comments)):
                        if comments[i].startswith(prefix):
                            comments[i] = prefix + new
                            res = True
                            break
                return res

            newuser = opts.get('user')
            if newuser:
                if not setheaderfield(comments, ['From: ', '# User '], newuser):
                    try:
                        patchheaderat = comments.index('# HG changeset patch')
                        comments.insert(patchheaderat + 1,'# User ' + newuser)
                    except ValueError:
                        comments = ['From: ' + newuser, ''] + comments
                user = newuser

            if newdate:
                if setheaderfield(comments, ['# Date '], newdate):
                    date = newdate

            if msg:
                comments.append(msg)

            patchf.seek(0)
            patchf.truncate()

            if comments:
                comments = "\n".join(comments) + '\n\n'
                patchf.write(comments)

            if opts.get('git'):
                self.diffopts().git = True
            fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
            tip = repo.changelog.tip()
            if top == tip:
                # if the top of our patch queue is also the tip, there is an
                # optimization here. We update the dirstate in place and strip
                # off the tip commit. Then just commit the current directory
                # tree. We can also send repo.commit the list of files
                # changed to speed up the diff
                #
                # in short mode, we only diff the files included in the
                # patch already
                #
                # this should really read:
                #   mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
                # but we do it backwards to take advantage of manifest/chlog
                # caching against the next repo.status call
                #
                mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
                changes = repo.changelog.read(tip)
                man = repo.manifest.read(changes[0])
                aaa = aa[:]
                if opts.get('short'):
                    filelist = mm + aa + dd
                    match = dict.fromkeys(filelist).__contains__
                else:
                    filelist = None
                    match = util.always
                m, a, r, d, u = repo.status(files=filelist, match=match)[:5]

                # we might end up with files that were added between
                # tip and the dirstate parent, but then changed in the
                # local dirstate. in this case, we want them to only
                # show up in the added section
                for x in m:
                    if x not in aa:
                        mm.append(x)
                # we might end up with files added by the local dirstate that
                # were deleted by the patch. In this case, they should only
                # show up in the changed section.
                for x in a:
                    if x in dd:
                        del dd[dd.index(x)]
                        mm.append(x)
                    else:
                        aa.append(x)
                # make sure any files deleted in the local dirstate
                # are not in the add or change column of the patch
                forget = []
                for x in d + r:
                    if x in aa:
                        del aa[aa.index(x)]
                        forget.append(x)
                        continue
                    elif x in mm:
                        del mm[mm.index(x)]
                    dd.append(x)

                m = util.unique(mm)
                r = util.unique(dd)
                a = util.unique(aa)
                c = [filter(matchfn, l) for l in (m, a, r, [], u)]
                filelist = util.unique(c[0] + c[1] + c[2])
                patch.diff(repo, patchparent, files=filelist, match=matchfn,
                           fp=patchf, changes=c, opts=self.diffopts())
                patchf.close()

                repo.dirstate.setparents(*cparents)
                copies = {}
                for dst in a:
                    src = repo.dirstate.copied(dst)
                    if src is not None:
                        copies.setdefault(src, []).append(dst)
                    repo.dirstate.add(dst)
                # remember the copies between patchparent and tip
                # this may be slow, so don't do it if we're not tracking copies
                if self.diffopts().git:
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies[src[0]] = copies.get(dst, [])
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                for src, dsts in copies.iteritems():
                    for dst in dsts:
                        repo.dirstate.copy(src, dst)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in xrange(len(m)-1, -1, -1):
                    if not matchfn(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.forget(f)

                if not msg:
                    if not message:
                        message = "[mq]: %s\n" % patchfn
                    else:
                        message = "\n".join(message)
                else:
                    message = msg

                if not user:
                    user = changes[1]

                # recommit the top patch with the refreshed contents
                self.applied.pop()
                self.applied_dirty = 1
                self.strip(repo, top, update=False,
                           backup='strip')
                n = repo.commit(filelist, message, user, date, match=matchfn,
                                force=1)
                self.applied.append(statusentry(revlog.hex(n), patchfn))
                self.removeundo(repo)
            else:
                # slow path: regenerate the diff, then pop and re-push
                self.printdiff(repo, patchparent, fp=patchf)
                patchf.close()
                added = repo.status()[1]
                for a in added:
                    f = repo.wjoin(a)
                    try:
                        os.unlink(f)
                    except OSError, e:
                        if e.errno != errno.ENOENT:
                            raise
                    try: os.removedirs(os.path.dirname(f))
                    except: pass
                    # forget the file copies in the dirstate
                    # push should readd the files later on
                    repo.dirstate.forget(a)
                self.pop(repo, force=True)
                self.push(repo, force=True)
        finally:
            del wlock
1159 1163
    def init(self, repo, create=False):
        """Create the patch queue directory; with ``create`` also make it
        a versioned repository and return it."""
        if not create and os.path.isdir(self.path):
            raise util.Abort(_("patch queue directory already exists"))
        try:
            os.mkdir(self.path)
        except OSError, inst:
            # a pre-existing directory is only acceptable when we were
            # explicitly asked to create the queue repository
            if inst.errno != errno.EEXIST or not create:
                raise
        if create:
            return self.qrepo(create=True)
1170 1174
1171 1175 def unapplied(self, repo, patch=None):
1172 1176 if patch and patch not in self.series:
1173 1177 raise util.Abort(_("patch %s is not in series file") % patch)
1174 1178 if not patch:
1175 1179 start = self.series_end()
1176 1180 else:
1177 1181 start = self.series.index(patch) + 1
1178 1182 unapplied = []
1179 1183 for i in xrange(start, len(self.series)):
1180 1184 pushable, reason = self.pushable(i)
1181 1185 if pushable:
1182 1186 unapplied.append((i, self.series[i]))
1183 1187 self.explain_pushable(i)
1184 1188 return unapplied
1185 1189
    def qseries(self, repo, missing=None, start=0, length=None, status=None,
                summary=False):
        """Print the series list; with ``missing``, print patch-directory
        files that are not in the series instead."""
        def displayname(patchname):
            # optionally append the first line of the patch message
            if summary:
                msg = self.readheaders(patchname)[0]
                msg = msg and ': ' + msg[0] or ': '
            else:
                msg = ''
            return '%s%s' % (patchname, msg)

        applied = dict.fromkeys([p.name for p in self.applied])
        if length is None:
            length = len(self.series) - start
        if not missing:
            for i in xrange(start, start+length):
                patch = self.series[i]
                # A=applied, U=unapplied/pushable, G=guarded
                if patch in applied:
                    stat = 'A'
                elif self.pushable(i)[0]:
                    stat = 'U'
                else:
                    stat = 'G'
                pfx = ''
                if self.ui.verbose:
                    pfx = '%d %s ' % (i, stat)
                elif status and status != stat:
                    continue
                self.ui.write('%s%s\n' % (pfx, displayname(patch)))
        else:
            # list patch-directory files unknown to the series
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.status_path, self.series_path,
                                   self.guards_path)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            msng_list.sort()
            for x in msng_list:
                pfx = self.ui.verbose and ('D ') or ''
                self.ui.write("%s%s\n" % (pfx, displayname(x)))
1229 1233
1230 1234 def issaveline(self, l):
1231 1235 if l.name == '.hg.patches.save.line':
1232 1236 return True
1233 1237
1234 1238 def qrepo(self, create=False):
1235 1239 if create or os.path.isdir(self.join(".hg")):
1236 1240 return hg.repository(self.ui, path=self.path, create=create)
1237 1241
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Rebuild the queue state from a save-entry changeset ``rev``.

        Optionally strips the save entry afterwards (``delete``) and
        updates the queue repository to its saved parent (``qupdate``).
        """
        c = repo.changelog.read(rev)
        desc = c[4].strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        # parse the saved description: everything after 'Patch Data:' is
        # series/status lines; a 'Dirstate:' line records queue repo parents
        for i in xrange(0, len(lines)):
            if lines[i] == 'Patch Data:':
                datastart = i + 1
            elif lines[i].startswith('Dirstate:'):
                l = lines[i].rstrip()
                l = l[10:].split(' ')
                qpp = [ hg.bin(x) for x in l ]
            elif datastart != None:
                l = lines[i].rstrip()
                se = statusentry(l)
                file_ = se.name
                if se.rev:
                    applied.append(se)
                else:
                    series.append(file_)
        if datastart == None:
            self.ui.warn("No saved patch data found\n")
            return 1
        self.ui.warn("restoring status: %s\n" % lines[0])
        self.full_series = series
        self.applied = applied
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                self.ui.warn("save entry has children, leaving it alone\n")
            else:
                self.ui.warn("removing save entry %s\n" % hg.short(rev))
                pp = repo.dirstate.parents()
                if rev in pp:
                    update = True
                else:
                    update = False
                self.strip(repo, rev, update=update, backup='strip')
        if qpp:
            self.ui.warn("saved queue repository parents: %s %s\n" %
                         (hg.short(qpp[0]), hg.short(qpp[1])))
            if qupdate:
                self.ui.status(_("queue directory updating\n"))
                r = self.qrepo()
                if not r:
                    self.ui.warn("Unable to load queue repository\n")
                    return 1
                hg.clean(r, qpp[0])
1293 1297
    def save(self, repo, msg=None):
        """Commit the current queue state as a save-entry changeset that
        restore() can later read back."""
        if len(self.applied) == 0:
            self.ui.warn("save: no patches applied, exiting\n")
            return 1
        if self.issaveline(self.applied[-1]):
            self.ui.warn("status is already saved\n")
            return 1

        # unapplied series entries are prefixed with ':' in the save text
        ar = [ ':' + x for x in self.full_series ]
        if not msg:
            msg = "hg patches saved state"
        else:
            msg = "hg patches: " + msg.rstrip('\r\n')
        r = self.qrepo()
        if r:
            pp = r.dirstate.parents()
            msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
        msg += "\n\nPatch Data:\n"
        text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
                   "\n".join(ar) + '\n' or "")
        n = repo.commit(None, text, user=None, force=1)
        if not n:
            self.ui.warn("repo commit failed\n")
            return 1
        # the save entry itself becomes the new (marker) top of the stack
        self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
        self.applied_dirty = 1
        self.removeundo(repo)
1321 1325
1322 1326 def full_series_end(self):
1323 1327 if len(self.applied) > 0:
1324 1328 p = self.applied[-1].name
1325 1329 end = self.find_series(p)
1326 1330 if end == None:
1327 1331 return len(self.full_series)
1328 1332 return end + 1
1329 1333 return 0
1330 1334
    def series_end(self, all_patches=False):
        """If all_patches is False, return the index of the next pushable patch
        in the series, or the series length. If all_patches is True, return the
        index of the first patch past the last applied one.
        """
        end = 0
        def next(start):
            # skip over guarded (unpushable) patches unless all_patches
            if all_patches:
                return start
            i = start
            while i < len(self.series):
                p, reason = self.pushable(i)
                if p:
                    break
                self.explain_pushable(i)
                i += 1
            return i
        if len(self.applied) > 0:
            p = self.applied[-1].name
            try:
                end = self.series.index(p)
            except ValueError:
                return 0
            return next(end + 1)
        return next(end)
1356 1360
1357 1361 def appliedname(self, index):
1358 1362 pname = self.applied[index].name
1359 1363 if not self.ui.verbose:
1360 1364 p = pname
1361 1365 else:
1362 1366 p = str(self.series.index(pname)) + " " + pname
1363 1367 return p
1364 1368
    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        """Import patch files into the queue, or with ``rev`` place
        existing repository revisions under mq control."""
        def checkseries(patchname):
            if patchname in self.series:
                raise util.Abort(_('patch %s is already in the series file')
                                 % patchname)
        def checkfile(patchname):
            if not force and os.path.exists(self.join(patchname)):
                raise util.Abort(_('patch "%s" already exists')
                                 % patchname)

        if rev:
            if files:
                raise util.Abort(_('option "-r" not valid when importing '
                                   'files'))
            rev = cmdutil.revrange(repo, rev)
            # process newest revisions first
            rev.sort(lambda x, y: cmp(y, x))
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise util.Abort(_('option "-n" not valid when importing multiple '
                               'patches'))
        i = 0
        added = []
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
            if len(heads) > 1:
                raise util.Abort(_('revision %d is the root of more than one '
                                   'branch') % rev[-1])
            if self.applied:
                base = revlog.hex(repo.changelog.node(rev[0]))
                if base in [n.rev for n in self.applied]:
                    raise util.Abort(_('revision %d is already managed')
                                     % rev[0])
                if heads != [revlog.bin(self.applied[-1].rev)]:
                    raise util.Abort(_('revision %d is not the parent of '
                                       'the queue') % rev[0])
                base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev[0])]:
                    raise util.Abort(_('revision %d has unmanaged children')
                                     % rev[0])
                lastparent = None

            if git:
                self.diffopts().git = True

            for r in rev:
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != revlog.nullrev:
                    raise util.Abort(_('cannot import merge revision %d') % r)
                if lastparent and lastparent != r:
                    raise util.Abort(_('revision %d is not the parent of %d')
                                     % (r, lastparent))
                lastparent = p1

                if not patchname:
                    patchname = normname('%d.diff' % r)
                self.check_reserved_name(patchname)
                checkseries(patchname)
                checkfile(patchname)
                # newest-first ordering: each revision goes to the front
                self.full_series.insert(0, patchname)

                patchf = self.opener(patchname, "w")
                patch.export(repo, [n], fp=patchf, opts=self.diffopts())
                patchf.close()

                se = statusentry(revlog.hex(n), patchname)
                self.applied.insert(0, se)

                added.append(patchname)
                patchname = None
            self.parse_series()
            self.applied_dirty = 1

        for filename in files:
            if existing:
                # register a file already present in the patch directory
                if filename == '-':
                    raise util.Abort(_('-e is incompatible with import from -'))
                if not patchname:
                    patchname = normname(filename)
                self.check_reserved_name(patchname)
                if not os.path.isfile(self.join(patchname)):
                    raise util.Abort(_("patch %s does not exist") % patchname)
            else:
                try:
                    if filename == '-':
                        if not patchname:
                            raise util.Abort(_('need --name to import a patch from -'))
                        text = sys.stdin.read()
                    else:
                        text = file(filename, 'rb').read()
                except IOError:
                    raise util.Abort(_("unable to read %s") % patchname)
                if not patchname:
                    patchname = normname(os.path.basename(filename))
                self.check_reserved_name(patchname)
                checkfile(patchname)
                patchf = self.opener(patchname, "w")
                patchf.write(text)
            checkseries(patchname)
            # file imports are appended after the applied stack
            index = self.full_series_end() + i
            self.full_series[index:index] = [patchname]
            self.parse_series()
            self.ui.warn("adding %s to series file\n" % patchname)
            i += 1
            added.append(patchname)
            patchname = None
        self.series_dirty = 1
        qrepo = self.qrepo()
        if qrepo:
            qrepo.add(added)
1480 1484
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, unless they are arguments to
    the --rev parameter. At least one patch or revision is required.

    With --rev, mq will stop managing the named revisions (converting
    them to regular mercurial changesets). The patches must be applied
    and at the base of the stack. This option is useful when the patches
    have been applied upstream.

    With --keep, the patch files are preserved in the patch directory."""
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.save_dirty()
    return 0
1497 1501
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    mq = repo.mq
    if not patch:
        # no patch named: show everything up to the last applied patch
        end = mq.series_end(True)
    else:
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = mq.series.index(patch) + 1
    return mq.qseries(repo, length=end, status='A',
                      summary=opts.get('summary'))
1508 1512
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    q = repo.mq
    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    else:
        start = q.series_end(True)
    # Propagate qseries' result like the sibling 'applied' command does;
    # previously the value was silently dropped.
    return q.qseries(repo, start=start, status='U',
                     summary=opts.get('summary'))
1519 1523
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch will have the same name as its source file unless you
    give it a new one with --name.

    You can register an existing patch inside the patch directory
    with the --existing flag.

    With --force, an existing patch of the same name will be overwritten.

    An existing changeset may be placed under mq control with --rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With --git, patches imported with --rev will use the git diff
    format.
    """
    mq = repo.mq
    mq.qimport(repo, filename, patchname=opts['name'],
               existing=opts['existing'], force=opts['force'],
               rev=opts['rev'], git=opts['git'])
    mq.save_dirty()
    return 0
1542 1546
def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If -c is
    specified, qinit will create a separate nested repository
    for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one).
    You can use qcommit to commit changes to this queue repository."""
    mq = repo.mq
    r = mq.init(repo, create=opts['create_repo'])
    mq.save_dirty()
    if not r:
        return 0
    # a versioned patch repo was created: seed it with an ignore file
    # and an empty series file, and schedule both for commit
    if not os.path.exists(r.wjoin('.hgignore')):
        fp = r.wopener('.hgignore', 'w')
        fp.write('^\\.hg\n'
                 '^\\.mq\n'
                 'syntax: glob\n'
                 'status\n'
                 'guards\n')
        fp.close()
    if not os.path.exists(r.wjoin('series')):
        r.wopener('series', 'w').close()
    r.add(['.hgignore', 'series'])
    commands.add(ui, r)
    return 0
1568 1572
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested mercurial repository, as
    would be created by qinit -c.
    '''
    def patchdir(repo):
        # default location of the patch repository inside a main repo
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'
    cmdutil.setremoteconfig(ui, opts)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(ui, ui.expandpath(source))
    patchespath = opts['patches'] or patchdir(sr)
    try:
        pr = hg.repository(ui, patchespath)
    except hg.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see qinit -c)'))
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            # node of the first applied patch: everything from here up
            # is mq-managed and must not survive in the clone
            qbase = revlog.bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                # a remote destination cannot be stripped afterwards, so
                # restrict the clone to the revisions below the patches
                heads = dict.fromkeys(sr.heads())
                for h in sr.heads(qbase):
                    del heads[h]
                destrev = heads.keys()
                destrev.append(sr.changelog.parents(qbase)[0])
    ui.note(_('cloning main repo\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repo\n'))
    spr, dpr = hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
                        pull=opts['pull'], update=not opts['noupdate'],
                        stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            # a local destination received all revisions; strip the
            # applied patches so the clone starts with a clean stack
            ui.note(_('stripping applied patches from destination repo\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repo\n'))
            hg.update(dr, dr.changelog.tip())
1626 1630
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    q = repo.mq
    r = q.qrepo()
    if not r:
        # wrap the message in _() like every other abort in this file,
        # so it is translatable
        raise util.Abort(_('no queue repository'))
    commands.commit(r.ui, r, *pats, **opts)
1633 1637
def series(ui, repo, **opts):
    """print the entire series file"""
    mq = repo.mq
    mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0
1638 1642
def top(ui, repo, **opts):
    """print the name of the current patch"""
    mq = repo.mq
    # index just past the last applied patch, or 0 when nothing applies
    t = 0
    if mq.applied:
        t = mq.series_end(True)
    if not t:
        ui.write("No patches applied\n")
        return 1
    return mq.qseries(repo, start=t-1, length=1, status='A',
                      summary=opts.get('summary'))
1649 1653
def next(ui, repo, **opts):
    """print the name of the next patch"""
    mq = repo.mq
    pos = mq.series_end()
    if pos != len(mq.series):
        return mq.qseries(repo, start=pos, length=1,
                          summary=opts.get('summary'))
    ui.write("All patches applied\n")
    return 1
1658 1662
def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    mq = repo.mq
    applied = len(mq.applied)
    if not applied:
        ui.write("No patches applied\n")
        return 1
    if applied == 1:
        ui.write("Only one patch applied\n")
        return 1
    return mq.qseries(repo, start=applied-2, length=1, status='A',
                      summary=opts.get('summary'))
1671 1675
def setupheaderopts(ui, opts):
    # Fill the 'user'/'date' header options from their --currentuser /
    # --currentdate counterparts when no explicit value was given.
    # Note: defaults are computed eagerly, matching the original behavior.
    defaults = [('user', ui.username()),
                ('date', "%d %d" % util.makedate())]
    for key, val in defaults:
        if not opts[key] and opts['current' + key]:
            opts[key] = val
1678 1682
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them. You may also use -I, -X, and/or a list of
    files after the patch name to add only changes to matching files
    to the new patch, leaving the rest as uncommitted modifications.

    -e, -m or -l set the patch header as well as the commit message.
    If none is specified, the patch header is empty and the
    commit message is '[mq]: PATCH'"""
    mq = repo.mq
    msg = cmdutil.logmessage(opts)
    if opts['edit']:
        msg = ui.edit(msg, ui.username())
    opts['msg'] = msg
    setupheaderopts(ui, opts)
    mq.new(repo, patch, *args, **opts)
    mq.save_dirty()
    return 0
1701 1705
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will contain only
    the modifications that match those patterns; the remaining modifications
    will remain in the working directory.

    hg add/remove/copy/rename work as usual, though you might want to use
    git-style patches (--git or [diff] git=1) to track copies and renames.
    """
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if not q.applied:
            ui.write(_("No patches applied\n"))
            return 1
        if message:
            # -e spawns an editor; a message given via -m/-l would conflict
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        patch = q.applied[-1].name
        # seed the editor with the current header of the top patch
        (message, comment, user, date, hasdiff) = q.readheaders(patch)
        message = ui.edit('\n'.join(message), user or ui.username())
    setupheaderopts(ui, opts)
    ret = q.refresh(repo, pats, msg=message, **opts)
    q.save_dirty()
    return ret
1727 1731
def diff(ui, repo, *pats, **opts):
    """diff of the current patch"""
    mq = repo.mq
    mq.diff(repo, pats, opts)
    return 0
1732 1736
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -k/--keep, the folded patch files will not
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            # fix: actually skip the duplicate (previously it was appended
            # again and folded twice despite the message) and terminate the
            # warning with a newline like other warnings in this file
            ui.warn(_('Skipping already folded patch %s') % p + '\n')
            continue
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            # collect each folded patch's header for the combined message
            messages.append(q.readheaders(p)[0])
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        # concatenate all headers, separated by '* * *' lines
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()
1791 1795
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    mq = repo.mq
    target = mq.lookup(patch)
    force = opts['force']
    # pop if the target is already applied (it is below the top),
    # otherwise push up to it
    if mq.isapplied(target):
        ret = mq.pop(repo, target, force=force)
    else:
        ret = mq.push(repo, target, force=force)
    mq.save_dirty()
    return ret
1802 1806
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    To set a negative guard "-foo" on topmost patch ("--" is needed so
    hg will not interpret "-foo" as an option):
        hg qguard -- -foo

    To set guards on another patch:
        hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        # print "<patch>: <guards>" for the series entry at idx
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # no patch name given, or the first argument is a guard ("+x"/"-x"):
    # default to the topmost applied patch
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        # remaining arguments are the guards to set (--none clears them)
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        status(q.series.index(q.lookup(patch)))
1850 1854
def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    mq = repo.mq

    if not patch:
        if not mq.applied:
            ui.write('No patches applied\n')
            return 1
        patch = mq.lookup('qtip')
    else:
        patch = mq.lookup(patch)
    lines = mq.readheaders(patch)[0]

    ui.write('\n'.join(lines) + '\n')
1865 1869
def lastsavename(path):
    # Return (full path, index) of the highest-numbered "<path>.N" save
    # file in path's directory, or (None, None) when none exists.
    directory, base = os.path.split(path)
    pattern = re.compile("%s.([0-9]+)" % base)
    bestname = None
    bestindex = None
    for entry in os.listdir(directory):
        m = pattern.match(entry)
        if not m:
            continue
        index = int(m.group(1))
        if bestindex is None or index > bestindex:
            bestindex = index
            bestname = entry
    if bestname:
        return (os.path.join(directory, bestname), bestindex)
    return (None, None)
1882 1886
def savename(path):
    # Next unused save name: "<path>.N+1" after the newest existing save.
    prev, index = lastsavename(path)
    if prev is None:
        index = 0
    return "%s.%d" % (path, index + 1)
1889 1893
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack"""
    q = repo.mq
    mergeq = None

    if opts['all']:
        if not q.series:
            ui.warn(_('no patches in series\n'))
            return 0
        # push everything by targeting the last patch in the series
        patch = q.series[-1]
    if opts['merge']:
        if opts['name']:
            newpath = opts['name']
        else:
            # default to the most recently saved queue (see qsave)
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn("no saved queues found, please use -n\n")
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)
    ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
                 mergeq=mergeq)
    return ret
1913 1917
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack"""
    if opts['name']:
        # popping from a named (saved) queue never updates the working dir
        mq = queue(ui, repo.join(""), repo.join(opts['name']))
        ui.warn('using patch queue: %s\n' % mq.path)
        localupdate = False
    else:
        mq = repo.mq
        localupdate = True
    ret = mq.pop(repo, patch, force=opts['force'], update=localupdate,
                 all=opts['all'])
    mq.save_dirty()
    return ret
1927 1931
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    # single-argument form: rename the top patch to <patch>
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # destination is a directory: keep the original basename inside it
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    i = q.find_series(patch)
    # preserve any guards attached to the series entry
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    # if the patch is currently applied, update the status file entry too
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        # record the rename in the versioned patch repository
        wlock = r.wlock()
        try:
            if r.dirstate[name] == 'r':
                r.undelete([name])
            r.copy(patch, name)
            r.remove([patch], False)
        finally:
            del wlock

    q.save_dirty()
1983 1987
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    node = repo.lookup(rev)
    mq = repo.mq
    mq.restore(repo, node, delete=opts['delete'],
               qupdate=opts['update'])
    mq.save_dirty()
    return 0
1992 1996
def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        except OSError:
            # best-effort removal; a missing status file is fine.
            # (narrowed from a bare except, which also swallowed
            # KeyboardInterrupt/SystemExit)
            pass
    return 0
2022 2026
def strip(ui, repo, rev, **opts):
    """strip a revision and all later revs on the same branch"""
    node = repo.lookup(rev)
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'
    else:
        backup = 'all'
    # only update the working dir when it is not already at the null rev
    update = repo.dirstate.parents()[0] != revlog.nullid
    repo.mq.strip(repo, node, backup=backup, update=update)
    return 0
2034 2038
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if it
    has no guards or any positive guards match the currently selected guard,
    but will not be pushed if any negative guards match the current guard.
    For example:

        qguard foo.patch -stable    (negative guard)
        qguard bar.patch +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it
    has a positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are skipped
    and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last applied
    patch that is not guarded. Use --reapply (which implies --pop) to push
    back to the current patch afterwards, but skip guarded patches.

    Use -s/--series to print a list of all guards in the series file (no
    other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        # changing the active guard set: record how many patches were
        # blocked before so the difference can be reported afterwards
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        # summarize every guard used in the series file, with usage counts
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the current top patch so --reapply can push back to it
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        # pop down to just before the first guarded applied patch
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # persist the queue state even if the push fails
            q.save_dirty()
2136 2140
def reposetup(ui, repo):
    # Wrap the repository class so core commands become mq-aware.
    class mqrepo(repo.__class__):
        def abort_if_wdir_patched(self, errmsg, force=False):
            # refuse to proceed when the working dir parent is an applied
            # mq patch, unless force is set
            if self.mq.applied and not force:
                parent = revlog.hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, *args, **opts):
            # 'force' may arrive positionally (6th argument) or as a keyword
            if len(args) >= 6:
                force = args[5]
            else:
                force = opts.get('force')
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(*args, **opts)

        def push(self, remote, force=False, revs=None):
            # pushing applied patches would publish them; refuse unless
            # forced or specific revisions were requested
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]

            if mqtags[-1][0] not in self.changelog.nodemap:
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(mqtags[-1][0]))
                return tagscache

            # add the qtip/qbase/qparent pseudo tags; real tags win on clash
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

        def _branchtags(self, partial, lrev):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags(partial, lrev)

            cl = self.changelog
            qbasenode = revlog.bin(q.applied[0].rev)
            if qbasenode not in cl.nodemap:
                self.ui.warn('mq status file refers to unknown node %s\n'
                             % revlog.short(qbasenode))
                return super(mqrepo, self)._branchtags(partial, lrev)

            qbase = cl.rev(qbasenode)
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, cl.count())

            return partial

    if repo.local():
        repo.__class__ = mqrepo
        repo.mq = queue(ui, repo.join(""))
2220 2224
# option tables shared between several mq commands
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

# options controlling the "From:"/"Date:" patch header lines (qnew/qrefresh)
headeropts = [
    ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
    ('u', 'user', '', _('add "From: <given user>" to patch')),
    ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
    ('d', 'date', '', _('add "Date: <given date>" to patch'))]
2228 2232
# command table: name -> (function, option list, synopsis).
# fix: the qimport and qinit option help strings were the only ones not
# wrapped in _() and therefore untranslatable.
cmdtable = {
    "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
    "qclone":
        (clone,
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None, _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '', _('location of source patch repo')),
          ] + commands.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]')),
    "qcommit|qci":
        (commit,
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...')),
    "^qdiff":
        (diff,
         [('g', 'git', None, _('use git extended diff format')),
          ('U', 'unified', 3, _('number of lines of context to show')),
          ] + commands.walkopts,
         _('hg qdiff [-I] [-X] [-U NUM] [-g] [FILE]...')),
    "qdelete|qremove|qrm":
        (delete,
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [], _('stop managing a revision'))],
         _('hg qdelete [-k] [-r REV]... [PATCH]...')),
    'qfold':
        (fold,
         [('e', 'edit', None, _('edit patch header')),
          ('k', 'keep', None, _('keep folded patch files')),
          ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
    'qgoto':
        (goto,
         [('f', 'force', None, _('overwrite any local changes'))],
         _('hg qgoto [OPTION]... PATCH')),
    'qguard':
        (guard,
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
    'qheader': (header, [], _('hg qheader [PATCH]')),
    "^qimport":
        (qimport,
         [('e', 'existing', None, _('import file in patch dir')),
          ('n', 'name', '', _('patch file name')),
          ('f', 'force', None, _('overwrite existing files')),
          ('r', 'rev', [], _('place existing revisions under mq control')),
          ('g', 'git', None, _('use git extended diff format'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
    "^qinit":
        (init,
         [('c', 'create-repo', None, _('create queue repository'))],
         _('hg qinit [-c]')),
    "qnew":
        (new,
         [('e', 'edit', None, _('edit commit message')),
          ('f', 'force', None, _('import uncommitted changes into patch')),
          ('g', 'git', None, _('use git extended diff format')),
          ] + commands.walkopts + commands.commitopts + headeropts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
    "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
    "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
    "^qpop":
        (pop,
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '', _('queue name to pop')),
          ('f', 'force', None, _('forget any local changes'))],
         _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
    "^qpush":
        (push,
         [('f', 'force', None, _('apply if the patch has rejects')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue')),
          ('n', 'name', '', _('merge queue name'))],
         _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
    "^qrefresh":
        (refresh,
         [('e', 'edit', None, _('edit commit message')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None, _('refresh only files already in the patch')),
          ] + commands.walkopts + commands.commitopts + headeropts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
    'qrename|qmv':
        (rename, [], _('hg qrename PATCH1 [PATCH2]')),
    "qrestore":
        (restore,
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working dir'))],
         _('hg qrestore [-d] [-u] REV')),
    "qsave":
        (save,
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '', _('copy directory name')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
    "qselect":
        (select,
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...')),
    "qseries":
        (series,
         [('m', 'missing', None, _('print patches not in series')),
          ] + seriesopts,
         _('hg qseries [-ms]')),
    "^strip":
        (strip,
         [('f', 'force', None, _('force multi-head removal')),
          ('b', 'backup', None, _('bundle unrelated changesets')),
          ('n', 'nobackup', None, _('no backups'))],
         _('hg strip [-f] [-b] [-n] REV')),
    "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
    "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
}
@@ -1,1160 +1,1163 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, sys, bisect, stat
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 12
13 13 revrangesep = ':'
14 14
class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
    # args: (cmd,) -- the command string that could not be resolved
    # (raised by findcmd below)

class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
    # args: (cmd, matches) -- the abbreviated command string and the sorted
    # list of command names it matched (raised by findcmd below)
19 19
def findpossible(ui, cmd, table):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}
    for key in table.keys():
        aliases = key.lstrip("^").split("|")
        if cmd in aliases:
            # an exact alias match
            found = cmd
        else:
            found = None
            # unless strict matching is configured, accept a unique prefix
            if not ui.config("ui", "strict"):
                for alias in aliases:
                    if alias.startswith(cmd):
                        found = alias
                        break
        if found is None:
            continue
        entry = (aliases, table[key])
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = entry
        else:
            choice[found] = entry

    # fall back to debug commands only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice
48 48
def findcmd(ui, cmd, table):
    """Return (aliases, command table entry) for command string."""
    matches = findpossible(ui, cmd, table)

    # an exact match always wins over prefix matches
    if cmd in matches:
        return matches[cmd]

    # several distinct prefix matches: the abbreviation is ambiguous
    if len(matches) > 1:
        names = matches.keys()
        names.sort()
        raise AmbiguousCommand(cmd, names)

    # exactly one match left
    if matches:
        return matches.values()[0]

    raise UnknownCommand(cmd)
65 65
def bail_if_changed(repo):
    """Abort if the working directory has an uncommitted merge or changes."""
    # a second dirstate parent means an uncommitted merge is in progress
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    # first four status lists: modified, added, removed, deleted
    for changed in repo.status()[:4]:
        if changed:
            raise util.Abort(_("outstanding uncommitted changes"))
72 72
def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    # the two sources of a commit message cannot be combined
    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))

    if not message and logfile:
        try:
            if logfile == '-':
                # '-' means read the message from standard input
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError:
            inst = sys.exc_info()[1]
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
91 91
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    # only non-empty option values override the configuration
    for key in ('ssh', 'remotecmd'):
        value = opts.get(key)
        if value:
            ui.setconfig("ui", key, value)
98 98
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        # substitute the default for an empty spec (but keep a literal 0),
        # then resolve the spec to a changelog node
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        # no revisions given: first working-dir parent, plus the working dir
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        if revrangesep in revs[0]:
            # a single "start:end" range; empty ends default to 0 and tip
            start, end = revs[0].split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, revs[0], None)
    elif len(revs) == 2:
        # two explicit revisions; range syntax is not allowed here
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end
126 126
def revrange(repo, revs):
    """Return a list of revision numbers for a list of revision specs."""

    def revfix(repo, val, defval):
        # an empty spec (but not a literal 0) falls back to the default
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen = {}
    result = []

    def add(rev):
        # keep first occurrence only, preserving order
        if rev not in seen:
            seen[rev] = 1
            result.append(rev)

    for spec in revs:
        if revrangesep in spec:
            # "start:end" range; empty ends default to rev 0 and tip,
            # descending ranges walk backwards
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end + step, step):
                add(rev)
        else:
            add(revfix(repo, spec, None))

    return result
155 155
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    '''expand "%"-escapes in an output filename pattern.'''
    # escapes that only make sense when a changeset node is given
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            # revision number zero-padded to revwidth digits
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # with both, pad the sequence number to the width of the total
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # the character following '%' selects an expander above
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name") %
                         inst.args[0])
200 200
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return an open file (or file-like object) for the pattern pat.

    An empty pat or '-' maps to stdout/stdin; an object that can already
    write (for 'w' modes) or read (for 'r' modes) is returned unchanged;
    anything else is treated as a filename pattern for make_filename().
    """
    if not pat or pat == '-':
        if 'w' in mode:
            return sys.stdout
        return sys.stdin
    if 'w' in mode and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    fname = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(fname, mode)
212 212
def matchpats(repo, pats=None, opts=None, globbed=False, default=None):
    """Build a cmdmatcher for the given patterns, relative to the repo cwd.

    pats/opts default to an empty list/dict.  None sentinels replace the
    previous mutable defaults ([] and {}), which are shared between calls
    and a well-known Python pitfall; behavior for callers is unchanged.
    """
    if opts is None:
        opts = {}
    cwd = repo.getcwd()
    return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
                           opts.get('exclude'), globbed=globbed,
                           default=default)
218 218
def walk(repo, pats=None, opts=None, node=None, badmatch=None, globbed=False,
         default=None):
    """Walk the working/repository files matching pats and opts.

    Yields (src, filename, path-relative-to-cwd, exact-match?) tuples.
    None sentinels replace the previous mutable defaults ([] and {}),
    which are shared between calls; they are normalized here so the
    values passed on to matchpats are the same as before.
    """
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
                                        default=default)
    exact = dict.fromkeys(files)
    cwd = repo.getcwd()
    for src, fn in repo.walk(node=node, files=files, match=matchfn,
                             badmatch=badmatch):
        yield src, fn, repo.pathto(fn, cwd), fn in exact
228 228
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples'''
    if added is None or removed is None:
        # default to the added/removed lists of the working directory
        added, removed = repo.status()[1:3]
    ctx = repo.changectx()
    for a in added:
        aa = repo.wread(a)
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            alines = mdiff.splitnewlines(aa)
            matches = bdiff.blocks(aa, rr)
            for x1,x2,y1,y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            lengths = len(aa) + len(rr)
            if lengths:
                # score is the fraction of bytes the two files share;
                # only candidates at or above the current best are kept
                myscore = equal*2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
256 256
def addremove(repo, pats=None, opts=None, dry_run=None, similarity=None):
    """Add new files and remove missing ones, optionally recording renames.

    Files matching pats that are unknown to the dirstate are scheduled for
    addition; tracked files missing from the working dir are scheduled for
    removal.  With similarity > 0, matching add/remove pairs are recorded
    as copies.  None sentinels replace the previous mutable defaults
    ([] and {}), which are shared between calls; they are normalized here
    before use, so behavior is unchanged for callers.
    """
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    mapping = {}
    for src, abs, rel, exact in walk(repo, pats, opts):
        target = repo.wjoin(abs)
        if src == 'f' and abs not in repo.dirstate:
            add.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        # a tracked file that is gone (or replaced by a real directory)
        # should be scheduled for removal
        if repo.dirstate[abs] != 'r' and (not util.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.remove(remove)
        repo.add(add)
    if similarity > 0:
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)
290 290
def copy(ui, repo, pats, opts, rename=False):
    '''copy (or, with rename=True, move) files given on the command line;
    returns the number of files that could not be copied.'''
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}   # abstarget -> abssrc, used to detect target collisions
    after = opts.get("after")
    dryrun = opts.get("dry_run")

    # expand one source pattern into the managed files it matches
    def walkpat(pat):
        srcs = []
        for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
            state = repo.dirstate[abs]
            if state in '?r':
                # only warn about unmanaged/removed files named explicitly
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # copy one file and record the copy in the dirstate
    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.exists(target)
        if (not after and exists or after and state in 'mn'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            # --after only records copies of targets already present
            if not exists:
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            action = rename and "moving" or "copying"
            ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        origsrc = repo.dirstate.copied(abssrc) or abssrc
        if abstarget == origsrc: # copying back a copy?
            if state not in 'mn' and not dryrun:
                repo.dirstate.normallookup(abstarget)
        else:
            if repo.dirstate[origsrc] == 'a':
                # the source was never committed, so no copy data can
                # be recorded; just add the target
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if abstarget not in repo.dirstate and not dryrun:
                    repo.add([abstarget])
            elif not dryrun:
                repo.copy(origsrc, abstarget)

        if rename and not dryrun:
            repo.remove([abssrc], True)

    # build a target-path function for copies without --after
    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # build a target-path function for copies with --after
    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources exist under dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    # the last pattern is the destination; everything else is a source
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if not destdirexists:
        if len(pats) > 1 or util.patkind(pats[0], None)[0]:
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors
490 490
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.'''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side of --daemon: respawn ourselves detached, passing a
        # pipe the child uses to signal that it finished initializing
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        # Don't pass --cwd to the child process, because we've already
        # changed directory.
        for i in xrange(1,len(args)):
            if args[i].startswith('--cwd='):
                del args[i]
                break
            elif args[i].startswith('--cwd'):
                # option and value are separate arguments
                del args[i:i+2]
                break
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        # block until the child writes a byte to the pipe (i.e. is ready)
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child side of --daemon: detach from the session and notify the
        # waiting parent through the inherited pipe
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            # setsid is not available on this platform; carry on anyway
            pass
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        # point stdin/stdout/stderr at the null device
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()
543 543
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.patch = patch
        self.header = {}   # rev -> buffered header text
        self.hunk = {}     # rev -> buffered changeset text
        self.lastheader = None

    def flush(self, rev):
        '''write out buffered output for rev; return 1 if a hunk was
        flushed, 0 otherwise'''
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                # only emit a header when it differs from the previous one
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        '''show one changeset, buffering the output when requested'''
        if self.buffered:
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # derive whichever of rev/changenode was not supplied
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            # in debug mode show the full modified/added/removed lists
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            extraitems = extra.items()
            extraitems.sort()
            for key, value in extraitems:
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                # non-verbose: only the first line of the description
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        '''write the diff of node against its first parent, if requested'''
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
669 669
670 670
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        filters = templatefilters.filters.copy()
        # in debug mode show full nodes, otherwise the short 12-char form
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # derive whichever of rev/changenode was not supplied
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                # merge one value (dict, pair list, or scalar) into vargs
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            # the default branch name is not shown
            branch = changes[5].get("branch")
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            extras = changes[5].items()
            extras.sort()
            for key, value in extras:
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        files = []
        def getfiles():
            # lazily compute (modified, added, removed) only when needed
            if not files:
                files[:] = self.repo.status(
                    log.parents(changenode)[0], changenode)[:3]
            return files
        def showfiles(**args):
            return showlist('file', changes[3], **args)
        def showmods(**args):
            return showlist('file_mod', getfiles()[0], **args)
        def showadds(**args):
            return showlist('file_add', getfiles()[1], **args)
        def showdels(**args):
            return showlist('file_del', getfiles()[2], **args)
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                             node=hex(changes[0])))
            return self.t('manifest', **args)

        # default template keywords; caller-supplied props take no
        # precedence over these (defprops is applied last)
        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'file_mods': showmods,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        props = props.copy()
        props.update(defprops)

        try:
            # pick the most specific header template available
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            # pick the most specific changeset template available
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
858 858
def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    patch = False
    if opts.get('patch'):
        patch = matchfn or util.always

    tmpl = opts.get('template')
    mapfile = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        mapfile = opts.get('style')
        # ui settings
        if not mapfile:
            tmpl = ui.config('ui', 'logtemplate')
            if tmpl:
                tmpl = templater.parsestring(tmpl)
            else:
                mapfile = ui.config('ui', 'style')

    # nothing requested templating: fall back to the plain printer
    if not tmpl and not mapfile:
        return changeset_printer(ui, repo, patch, buffered)

    if mapfile and not os.path.split(mapfile)[0]:
        # a bare style name: look it up on the template search path
        mapname = (templater.templatepath('map-cmdline.' + mapfile)
                   or templater.templatepath(mapfile))
        if mapname:
            mapfile = mapname
    try:
        displayer = changeset_templater(ui, repo, patch, mapfile, buffered)
    except SyntaxError:
        raise util.Abort(sys.exc_info()[1].args[0])
    if tmpl:
        displayer.use_template(tmpl)
    return displayer
902 902
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    # NOTE(review): util.matchdate is expected to reject an invalid date
    # spec here (issue700) -- confirm against util
    df = util.matchdate(date)
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev': None})
    results = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            # changeset()[2] is the (unixtime, offset) date field
            d = get(rev)[2]
            if df(d[0]):
                results[rev] = d
        elif st == 'iter':
            if rev in results:
                # wrapped in _() for translation, consistent with every
                # other user-facing message in this module
                ui.status(_("Found revision %s from %s\n") %
                          (rev, util.datestr(results[rev])))
                return str(rev)

    raise util.Abort(_("revision matching date not found"))
921 921
def walkchangerevs(ui, repo, pats, change, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in.  Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, matchfn) tuple. The iterator
    yields 3-tuples. They will be of one of the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    # yield (window-start, window-size) pairs walking from start toward
    # end; window size doubles each step up to sizelimit so long
    # histories are covered in O(log n) windows
    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    files, matchfn, anypats = matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    # empty repository: nothing to walk
    if repo.changelog.count() == 0:
        return [], matchfn

    if follow:
        # --follow starts from the working directory's parent
        defrange = '%s:0' % repo.changectx().rev()
    else:
        defrange = 'tip:0'
    revs = revrange(repo, opts['rev'] or [defrange])
    # wanted: set (as dict) of revs to report; fncache: rev -> file names
    wanted = {}
    slowpath = anypats or opts.get('removed')
    fncache = {}

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict.fromkeys(revs)
    copies = []
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, nullrev):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            # original file arguments first, then any rename/copy
            # sources discovered while following
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1,
                                                nullrev):
                for j in xrange(i - window, i + 1):
                    yield j, change(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    # incremental ancestry/descendancy filter used by --follow and
    # --prune; match() must be called with monotonically in/decreasing
    # revs after the first call (it maintains a rolling 'roots' set)
    class followfilter:
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x) and x in wanted:
                del wanted[x]

    def iterate():
        if follow and not files:
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            # signal a new window; second element tells the caller the
            # walk direction (True when walking forwards through revs)
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev)
                if not fns:
                    # slow/display-all case: compute matching files lazily
                    def fns_generator():
                        for f in change(rev)[3]:
                            if matchfn(f):
                                yield f
                    fns = fns_generator()
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), matchfn
1115 1115
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    # parse --date up front so an invalid date spec aborts before any
    # work is done (issue700); opts['date'] becomes a parsed date tuple
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    fns, match, anypats = matchpats(repo, pats, opts)
    if pats:
        # explicit file arguments: validate each one before committing
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None  # lazily built sorted file list for directory checks
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                rel = repo.pathto(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rel)
                if stat.S_ISDIR(mode):
                    # a directory argument is OK only if some changed
                    # file lives under it
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rel)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rel)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rel)
    else:
        # no patterns: commit all outstanding changes
        files = []
    try:
        return commitfunc(ui, repo, files, message, match, opts)
    except ValueError, inst:
        # e.g. bad username from the repository layer
        raise util.Abort(str(inst))
@@ -1,3128 +1,3136 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, re, sys, urllib
11 11 import hg, util, revlog, bundlerepo, extensions
12 12 import difflib, patch, time, help, mdiff, tempfile
13 13 import errno, version, socket
14 14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 15
16 16 # Commands start here, listed alphabetically
17 17
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    exacts = {}
    names = []
    walk = cmdutil.walk(repo, pats, opts, badmatch=util.always)
    for src, abs, rel, exact in walk:
        if exact:
            # explicitly named: always add, report only when verbose
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
            exacts[abs] = 1
        elif abs not in repo.dirstate:
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if opts.get('dry_run'):
        return 0
    rejected = repo.add(names)
    # only explicitly-named rejects count as failure
    rejected = [name for name in rejected if name in exacts]
    return rejected and 1 or 0
46 46
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if not (0 <= sim <= 100):
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil.addremove expects the ratio in [0, 1]
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
68 68
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, formatter) pairs, in output column order
    columns = [('user', lambda x: ui.shortuser(x[0].user())),
               ('number', lambda x: str(x[0].rev())),
               ('changeset', lambda x: short(x[0].node())),
               ('date', getdate),
               ('follow', lambda x: x[0].path())]

    # default to revision numbers when no column was requested
    if not (opts['user'] or opts['changeset'] or opts['date']
            or opts['follow']):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if linenumber and not opts['changeset'] and not opts['number']:
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [fmt for name, fmt in columns if opts.get(name)]
    if linenumber:
        # append ":lineno" to the last requested column
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # render each column right-aligned to its widest entry
        for fmt in funcmap:
            col = [fmt(n) for n, dummy in lines]
            if col:
                width = max(map(len, col))
                pieces.append(["%*s" % (width, entry) for entry in col])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo.changectx(opts['rev'])
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    archivetype = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # writing to stdout only makes sense for single-file formats
        if archivetype == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            # default member prefix for streamed archives
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, archivetype,
                     not opts['no_decode'], matchfn, prefix)
172 172
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset.  The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created.  This head is the parent of the working directory.  If
    you back out an old changeset, your working directory will appear
    old after the backout.  You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards.  This saves you from doing the merge by
    hand.  The result of this merge is not committed, as for a normal
    merge.'''
    # rev may arrive positionally (node) or via --rev; exactly one
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    # validate --date before doing any work so a bad date spec aborts
    # cleanly (issue700); parsedate raises Abort on invalid input
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    # the target must be an ancestor of the working directory's parent
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot back out change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge: caller must pick which parent to keep
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # update to the target, revert everything to its chosen parent,
    # then commit the result as the backout changeset
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None  # --date applies to the commit, not revert
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
    commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # the backout created a new head; merge it back if requested
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
def bisect(ui, repo, rev=None, extra=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems.
    To use, mark the earliest changeset you know exhibits the problem
    as bad, then mark the latest changeset which is free from the
    problem as good. Bisect will update your working directory to a
    revision for testing. Once you have performed tests, mark the
    working directory as bad or good and bisect will either update to
    another candidate changeset or announce that it has found the bad
    revision.
    """
    # backward compatibility
    if rev in "good bad reset init".split():
        # old CLI form "hg bisect good REV": first positional arg is
        # the subcommand, second (extra) is the revision
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset > 1:
        raise util.Abort("Incompatible arguments")

    if reset:
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    # load state: bisect.state holds "<kind> <node-hex>" lines
    state = {'good': [], 'bad': [], 'skip': []}
    if os.path.exists(repo.join("bisect.state")):
        for l in repo.opener("bisect.state"):
            kind, node = l[:-1].split()
            node = repo.lookup(node)
            if kind not in state:
                raise util.Abort(_("unknown bisect kind %s") % kind)
            state[kind].append(node)

    # update state with the new mark (defaults to the working dir parent)
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    # save state atomically under the working-directory lock
    f = repo.opener("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in state:
            for node in state[kind]:
                f.write("%s %s\n" % (kind, hg.hex(node)))
        f.rename()
    finally:
        del wlock

    # bisection needs at least one good and one bad revision
    if not state['good'] or not state['bad']:
        return

    # actually bisect
    node, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
        displayer = cmdutil.show_changeset(ui, repo, {})
        displayer.show(changenode=node)
    elif node is not None:
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %s:%s "
                   "(%s changesets remaining, ~%s tests)\n")
                 % (rev, hg.short(node), changesets, tests))
        if not noupdate:
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use the command 'hg update' to switch to an existing branch.
    """

    if not label:
        # query mode: just print the current branch
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return

    if not opts.get('force') and label in repo.branchtags():
        parentbranches = [p.branch() for p in repo.workingctx().parents()]
        if label not in parentbranches:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
357 361
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains unmerged heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    headnodes = dict.fromkeys(repo.heads(), 1)
    entries = []
    for tag, node in repo.branchtags().items():
        entries.append((node in headnodes, repo.changelog.rev(node),
                        node, tag))
    entries.sort()
    entries.reverse()
    hexfunc = ui.debugflag and hex or short
    for ishead, rev, node, tag in entries:
        if active and not ishead:
            # sorted head-first: the first inactive entry means no
            # active branches remain
            break
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            pad = " " * (30 - util.locallen(tag))
            note = ((not ishead) and " (inactive)") or ''
            ui.write("%s%s %s:%s%s\n" % (tag, pad, rev, hexfunc(node), note))
388 392
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.
    To create a bundle containing all changesets, use --base null.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # walk backwards from the requested heads collecting, in
        # topological order, every node not reachable from a base node
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                # all parents are in the base: n is a root of the bundle
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: compare against the (possibly default) remote
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
449 453
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    found = False
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             node):
        out = cmdutil.make_file(repo, opts['output'], node, pathname=abs)
        contents = ctx.filectx(abs).data()
        if opts.get('decode'):
            contents = repo.wwritedata(abs, contents)
        out.write(contents)
        found = True
    # exit status 1 when no file matched
    return not found and 1 or 0
476 480
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    clonekwargs = dict(pull=opts['pull'],
                       stream=opts['uncompressed'],
                       rev=opts['rev'],
                       update=not opts['noupdate'])
    hg.clone(ui, source, dest, **clonekwargs)
522 526
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the configured editor is started to
    enter a message.
    """
    def docommit(ui, repo, files, message, match, opts):
        # delegate to the repository layer, honouring --user/--date and
        # the force-editor flag set by commands such as backout
        return repo.commit(files, message, opts['user'], opts['date'],
                           match, force_editor=opts.get('force_editor'))
    cmdutil.commit(ui, repo, docommit, pats, opts)
538 542
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # hold the working-directory lock for the whole operation; dropping
    # the reference releases it
    lock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        del lock
558 562
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    log = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
    anc = log.ancestor(log.lookup(rev1), log.lookup(rev2))
    ui.write("%d:%s\n" % (log.rev(anc), hex(anc)))
564 568
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # complete option names instead of command names
        flags = []
        opttables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(ui, cmd, table)
            opttables.append(entry[1])
        for opttable in opttables:
            for opt in opttable:
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    matches = cmdutil.findpossible(ui, cmd, table).keys()
    matches.sort()
    ui.write("%s\n" % "\n".join(matches))
585 589
def debugfsinfo(ui, path = "."):
    """show low-level filesystem information (debug helper)

    Probes the filesystem at path for exec-bit, symlink and
    case-folding support by creating a scratch file '.debugfsinfo'
    in the current directory, then removes it.
    """
    # close the probe file explicitly instead of relying on refcounting
    # (file(...).write(...) leaked the handle; an open handle can make
    # the os.unlink below fail on Windows)
    fp = open('.debugfsinfo', 'w')
    try:
        fp.write('')
    finally:
        fp.close()
    ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
    ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
    ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
                                       and 'yes' or 'no'))
    os.unlink('.debugfsinfo')
593 597
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    if rev == "":
        # default to the tip when no revision is given
        rev = repo.changelog.tip()
    manifest = repo.changectx(rev).manifest()
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(rev, manifest)
    finally:
        del wlock
605 609
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    mf1 = repo.changectx(parent1).manifest()
    mf2 = repo.changectx(parent2).manifest()
    problems = [0]

    def complain(msg):
        # warn and count; the final tally decides whether to abort
        ui.warn(msg)
        problems[0] += 1

    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in mf1:
            complain(_("%s in state %s, but not in manifest1\n") % (f, state))
        if state in "a" and f in mf1:
            complain(_("%s in state %s, but also in manifest1\n") % (f, state))
        if state in "m" and f not in mf1 and f not in mf2:
            complain(_("%s in state %s, but not in either manifest\n") %
                     (f, state))
    for f in mf1:
        state = repo.dirstate[f]
        if state not in "nrm":
            complain(_("%s in manifest1, but listed as state %s") % (f, state))
    if problems[0]:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
632 636
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified section.name argument is allowed
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # section match: print name=value
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    # exact item match: print the bare value
                    ui.write(value, '\n')
        else:
            ui.write('%s=%s\n' % (sectname, value))
658 662
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    if not rev2:
        # a missing second parent means the null revision
        rev2 = hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        del wlock
674 678
675 679 def debugstate(ui, repo):
676 680 """show the contents of the current dirstate"""
677 681 k = repo.dirstate._map.items()
678 682 k.sort()
679 683 for file_, ent in k:
680 684 if ent[3] == -1:
681 685 # Pad or slice to locale representation
682 686 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
683 687 timestr = 'unset'
684 688 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
685 689 else:
686 690 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
687 691 if ent[1] & 020000:
688 692 mode = 'lnk'
689 693 else:
690 694 mode = '%3o' % (ent[1] & 0777)
691 695 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
692 696 for f in repo.dirstate.copies():
693 697 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
694 698
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # open the revlog through its index file (replace trailing ".d")
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i")
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
702 706
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # NOTE: 'range' shadows the builtin but is part of the command's
    # calling convention, so it is kept.
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    # parsed is a (timestamp, tz-offset) pair
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(parsed[0]))
714 718
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        # NOTE(review): bare except is a deliberate best-effort fallback
        # to null parents for unreadable entries; kept as-is.
        try:
            pp = r.parents(node)
        except:
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))
729 733
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        # emit an edge from each parent revision to this one
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
741 745
742 746 def debuginstall(ui):
743 747 '''test Mercurial installation'''
744 748
745 749 def writetemp(contents):
746 750 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
747 751 f = os.fdopen(fd, "wb")
748 752 f.write(contents)
749 753 f.close()
750 754 return name
751 755
752 756 problems = 0
753 757
754 758 # encoding
755 759 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
756 760 try:
757 761 util.fromlocal("test")
758 762 except util.Abort, inst:
759 763 ui.write(" %s\n" % inst)
760 764 ui.write(_(" (check that your locale is properly set)\n"))
761 765 problems += 1
762 766
763 767 # compiled modules
764 768 ui.status(_("Checking extensions...\n"))
765 769 try:
766 770 import bdiff, mpatch, base85
767 771 except Exception, inst:
768 772 ui.write(" %s\n" % inst)
769 773 ui.write(_(" One or more extensions could not be found"))
770 774 ui.write(_(" (check that you compiled the extensions)\n"))
771 775 problems += 1
772 776
773 777 # templates
774 778 ui.status(_("Checking templates...\n"))
775 779 try:
776 780 import templater
777 781 t = templater.templater(templater.templatepath("map-cmdline.default"))
778 782 except Exception, inst:
779 783 ui.write(" %s\n" % inst)
780 784 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
781 785 problems += 1
782 786
783 787 # patch
784 788 ui.status(_("Checking patch...\n"))
785 789 patchproblems = 0
786 790 a = "1\n2\n3\n4\n"
787 791 b = "1\n2\n3\ninsert\n4\n"
788 792 fa = writetemp(a)
789 793 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
790 794 os.path.basename(fa))
791 795 fd = writetemp(d)
792 796
793 797 files = {}
794 798 try:
795 799 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
796 800 except util.Abort, e:
797 801 ui.write(_(" patch call failed:\n"))
798 802 ui.write(" " + str(e) + "\n")
799 803 patchproblems += 1
800 804 else:
801 805 if list(files) != [os.path.basename(fa)]:
802 806 ui.write(_(" unexpected patch output!\n"))
803 807 patchproblems += 1
804 808 a = file(fa).read()
805 809 if a != b:
806 810 ui.write(_(" patch test failed!\n"))
807 811 patchproblems += 1
808 812
809 813 if patchproblems:
810 814 if ui.config('ui', 'patch'):
811 815 ui.write(_(" (Current patch tool may be incompatible with patch,"
812 816 " or misconfigured. Please check your .hgrc file)\n"))
813 817 else:
814 818 ui.write(_(" Internal patcher failure, please report this error"
815 819 " to http://www.selenic.com/mercurial/bts\n"))
816 820 problems += patchproblems
817 821
818 822 os.unlink(fa)
819 823 os.unlink(fd)
820 824
821 825 # editor
822 826 ui.status(_("Checking commit editor...\n"))
823 827 editor = ui.geteditor()
824 828 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
825 829 if not cmdpath:
826 830 if editor == 'vi':
827 831 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
828 832 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
829 833 else:
830 834 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
831 835 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
832 836 problems += 1
833 837
834 838 # check username
835 839 ui.status(_("Checking username...\n"))
836 840 user = os.environ.get("HGUSER")
837 841 if user is None:
838 842 user = ui.config("ui", "username")
839 843 if user is None:
840 844 user = os.environ.get("EMAIL")
841 845 if not user:
842 846 ui.warn(" ")
843 847 ui.username()
844 848 ui.write(_(" (specify a username in your .hgrc file)\n"))
845 849
846 850 if not problems:
847 851 ui.status(_("No problems detected\n"))
848 852 else:
849 853 ui.write(_("%s problems detected,"
850 854 " please check your install!\n") % problems)
851 855
852 856 return problems
853 857
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        fctx = ctx.filectx(abs)
        # renamed() yields (source-path, source-node) or a false value
        m = fctx.filelog().renamed(fctx.filenode())
        if m:
            ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
866 870
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # size the columns to the widest abs/rel path among the matches
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
878 882
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve the -r arguments into a pair of changeset nodes
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    patch.diff(repo, node1, node2, fns, match=matchfn,
               opts=patch.diffopts(ui, opts))
906 910
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
947 951
948 952 def grep(ui, repo, pattern, *pats, **opts):
949 953 """search for a pattern in specified files and revisions
950 954
951 955 Search revisions of files for a regular expression.
952 956
953 957 This command behaves differently than Unix grep. It only accepts
954 958 Python/Perl regexps. It searches repository history, not the
955 959 working directory. It always prints the revision number in which
956 960 a match appears.
957 961
958 962 By default, grep only prints output for the first revision of a
959 963 file in which it finds a match. To get it to print every revision
960 964 that contains a change in match status ("-" for a match that
961 965 becomes a non-match, or "+" for a non-match that becomes a match),
962 966 use the --all flag.
963 967 """
964 968 reflags = 0
965 969 if opts['ignore_case']:
966 970 reflags |= re.I
967 971 try:
968 972 regexp = re.compile(pattern, reflags)
969 973 except Exception, inst:
970 974 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
971 975 return None
972 976 sep, eol = ':', '\n'
973 977 if opts['print0']:
974 978 sep = eol = '\0'
975 979
976 980 fcache = {}
977 981 def getfile(fn):
978 982 if fn not in fcache:
979 983 fcache[fn] = repo.file(fn)
980 984 return fcache[fn]
981 985
982 986 def matchlines(body):
983 987 begin = 0
984 988 linenum = 0
985 989 while True:
986 990 match = regexp.search(body, begin)
987 991 if not match:
988 992 break
989 993 mstart, mend = match.span()
990 994 linenum += body.count('\n', begin, mstart) + 1
991 995 lstart = body.rfind('\n', begin, mstart) + 1 or begin
992 996 lend = body.find('\n', mend)
993 997 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
994 998 begin = lend + 1
995 999
996 1000 class linestate(object):
997 1001 def __init__(self, line, linenum, colstart, colend):
998 1002 self.line = line
999 1003 self.linenum = linenum
1000 1004 self.colstart = colstart
1001 1005 self.colend = colend
1002 1006
1003 1007 def __eq__(self, other):
1004 1008 return self.line == other.line
1005 1009
1006 1010 matches = {}
1007 1011 copies = {}
1008 1012 def grepbody(fn, rev, body):
1009 1013 matches[rev].setdefault(fn, [])
1010 1014 m = matches[rev][fn]
1011 1015 for lnum, cstart, cend, line in matchlines(body):
1012 1016 s = linestate(line, lnum, cstart, cend)
1013 1017 m.append(s)
1014 1018
1015 1019 def difflinestates(a, b):
1016 1020 sm = difflib.SequenceMatcher(None, a, b)
1017 1021 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1018 1022 if tag == 'insert':
1019 1023 for i in xrange(blo, bhi):
1020 1024 yield ('+', b[i])
1021 1025 elif tag == 'delete':
1022 1026 for i in xrange(alo, ahi):
1023 1027 yield ('-', a[i])
1024 1028 elif tag == 'replace':
1025 1029 for i in xrange(alo, ahi):
1026 1030 yield ('-', a[i])
1027 1031 for i in xrange(blo, bhi):
1028 1032 yield ('+', b[i])
1029 1033
1030 1034 prev = {}
1031 1035 def display(fn, rev, states, prevstates):
1032 1036 datefunc = ui.quiet and util.shortdate or util.datestr
1033 1037 found = False
1034 1038 filerevmatches = {}
1035 1039 r = prev.get(fn, -1)
1036 1040 if opts['all']:
1037 1041 iter = difflinestates(states, prevstates)
1038 1042 else:
1039 1043 iter = [('', l) for l in prevstates]
1040 1044 for change, l in iter:
1041 1045 cols = [fn, str(r)]
1042 1046 if opts['line_number']:
1043 1047 cols.append(str(l.linenum))
1044 1048 if opts['all']:
1045 1049 cols.append(change)
1046 1050 if opts['user']:
1047 1051 cols.append(ui.shortuser(get(r)[1]))
1048 1052 if opts.get('date'):
1049 1053 cols.append(datefunc(get(r)[2]))
1050 1054 if opts['files_with_matches']:
1051 1055 c = (fn, r)
1052 1056 if c in filerevmatches:
1053 1057 continue
1054 1058 filerevmatches[c] = 1
1055 1059 else:
1056 1060 cols.append(l.line)
1057 1061 ui.write(sep.join(cols), eol)
1058 1062 found = True
1059 1063 return found
1060 1064
1061 1065 fstate = {}
1062 1066 skip = {}
1063 1067 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1064 1068 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1065 1069 found = False
1066 1070 follow = opts.get('follow')
1067 1071 for st, rev, fns in changeiter:
1068 1072 if st == 'window':
1069 1073 matches.clear()
1070 1074 elif st == 'add':
1071 1075 mf = repo.changectx(rev).manifest()
1072 1076 matches[rev] = {}
1073 1077 for fn in fns:
1074 1078 if fn in skip:
1075 1079 continue
1076 1080 try:
1077 1081 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1078 1082 fstate.setdefault(fn, [])
1079 1083 if follow:
1080 1084 copied = getfile(fn).renamed(mf[fn])
1081 1085 if copied:
1082 1086 copies.setdefault(rev, {})[fn] = copied[0]
1083 1087 except KeyError:
1084 1088 pass
1085 1089 elif st == 'iter':
1086 1090 states = matches[rev].items()
1087 1091 states.sort()
1088 1092 for fn, m in states:
1089 1093 copy = copies.get(rev, {}).get(fn)
1090 1094 if fn in skip:
1091 1095 if copy:
1092 1096 skip[copy] = True
1093 1097 continue
1094 1098 if fn in prev or fstate[fn]:
1095 1099 r = display(fn, rev, m, fstate[fn])
1096 1100 found = found or r
1097 1101 if r and not opts['all']:
1098 1102 skip[fn] = True
1099 1103 if copy:
1100 1104 skip[copy] = True
1101 1105 fstate[fn] = m
1102 1106 if copy:
1103 1107 fstate[copy] = m
1104 1108 prev[fn] = rev
1105 1109
1106 1110 fstate = fstate.items()
1107 1111 fstate.sort()
1108 1112 for fn, state in fstate:
1109 1113 if fn in skip:
1110 1114 continue
1111 1115 if fn not in copies.get(prev[fn], {}):
1112 1116 found = display(fn, rev, {}, state) or found
1113 1117 return (not found and 1) or 0
1114 1118
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in visitedset:
                # each branch is reported at most once
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1162 1166
1163 1167 def help_(ui, name=None, with_version=False):
1164 1168 """show help for a command, extension, or list of commands
1165 1169
1166 1170 With no arguments, print a list of commands and short help.
1167 1171
1168 1172 Given a command name, print help for that command.
1169 1173
1170 1174 Given an extension name, print help for that extension, and the
1171 1175 commands it provides."""
1172 1176 option_lists = []
1173 1177
1174 1178 def addglobalopts(aliases):
1175 1179 if ui.verbose:
1176 1180 option_lists.append((_("global options:"), globalopts))
1177 1181 if name == 'shortlist':
1178 1182 option_lists.append((_('use "hg help" for the full list '
1179 1183 'of commands'), ()))
1180 1184 else:
1181 1185 if name == 'shortlist':
1182 1186 msg = _('use "hg help" for the full list of commands '
1183 1187 'or "hg -v" for details')
1184 1188 elif aliases:
1185 1189 msg = _('use "hg -v help%s" to show aliases and '
1186 1190 'global options') % (name and " " + name or "")
1187 1191 else:
1188 1192 msg = _('use "hg -v help %s" to show global options') % name
1189 1193 option_lists.append((msg, ()))
1190 1194
1191 1195 def helpcmd(name):
1192 1196 if with_version:
1193 1197 version_(ui)
1194 1198 ui.write('\n')
1195 1199 aliases, i = cmdutil.findcmd(ui, name, table)
1196 1200 # synopsis
1197 1201 ui.write("%s\n" % i[2])
1198 1202
1199 1203 # aliases
1200 1204 if not ui.quiet and len(aliases) > 1:
1201 1205 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1202 1206
1203 1207 # description
1204 1208 doc = i[0].__doc__
1205 1209 if not doc:
1206 1210 doc = _("(No help text available)")
1207 1211 if ui.quiet:
1208 1212 doc = doc.splitlines(0)[0]
1209 1213 ui.write("\n%s\n" % doc.rstrip())
1210 1214
1211 1215 if not ui.quiet:
1212 1216 # options
1213 1217 if i[1]:
1214 1218 option_lists.append((_("options:\n"), i[1]))
1215 1219
1216 1220 addglobalopts(False)
1217 1221
1218 1222 def helplist(header, select=None):
1219 1223 h = {}
1220 1224 cmds = {}
1221 1225 for c, e in table.items():
1222 1226 f = c.split("|", 1)[0]
1223 1227 if select and not select(f):
1224 1228 continue
1225 1229 if name == "shortlist" and not f.startswith("^"):
1226 1230 continue
1227 1231 f = f.lstrip("^")
1228 1232 if not ui.debugflag and f.startswith("debug"):
1229 1233 continue
1230 1234 doc = e[0].__doc__
1231 1235 if not doc:
1232 1236 doc = _("(No help text available)")
1233 1237 h[f] = doc.splitlines(0)[0].rstrip()
1234 1238 cmds[f] = c.lstrip("^")
1235 1239
1236 1240 if not h:
1237 1241 ui.status(_('no commands defined\n'))
1238 1242 return
1239 1243
1240 1244 ui.status(header)
1241 1245 fns = h.keys()
1242 1246 fns.sort()
1243 1247 m = max(map(len, fns))
1244 1248 for f in fns:
1245 1249 if ui.verbose:
1246 1250 commands = cmds[f].replace("|",", ")
1247 1251 ui.write(" %s:\n %s\n"%(commands, h[f]))
1248 1252 else:
1249 1253 ui.write(' %-*s %s\n' % (m, f, h[f]))
1250 1254
1251 1255 if not ui.quiet:
1252 1256 addglobalopts(True)
1253 1257
1254 1258 def helptopic(name):
1255 1259 v = None
1256 1260 for i in help.helptable:
1257 1261 l = i.split('|')
1258 1262 if name in l:
1259 1263 v = i
1260 1264 header = l[-1]
1261 1265 if not v:
1262 1266 raise cmdutil.UnknownCommand(name)
1263 1267
1264 1268 # description
1265 1269 doc = help.helptable[v]
1266 1270 if not doc:
1267 1271 doc = _("(No help text available)")
1268 1272 if callable(doc):
1269 1273 doc = doc()
1270 1274
1271 1275 ui.write("%s\n" % header)
1272 1276 ui.write("%s\n" % doc.rstrip())
1273 1277
1274 1278 def helpext(name):
1275 1279 try:
1276 1280 mod = extensions.find(name)
1277 1281 except KeyError:
1278 1282 raise cmdutil.UnknownCommand(name)
1279 1283
1280 1284 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1281 1285 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1282 1286 for d in doc[1:]:
1283 1287 ui.write(d, '\n')
1284 1288
1285 1289 ui.status('\n')
1286 1290
1287 1291 try:
1288 1292 ct = mod.cmdtable
1289 1293 except AttributeError:
1290 1294 ct = {}
1291 1295
1292 1296 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1293 1297 helplist(_('list of commands:\n\n'), modcmds.has_key)
1294 1298
1295 1299 if name and name != 'shortlist':
1296 1300 i = None
1297 1301 for f in (helpcmd, helptopic, helpext):
1298 1302 try:
1299 1303 f(name)
1300 1304 i = None
1301 1305 break
1302 1306 except cmdutil.UnknownCommand, inst:
1303 1307 i = inst
1304 1308 if i:
1305 1309 raise i
1306 1310
1307 1311 else:
1308 1312 # program name
1309 1313 if ui.verbose or with_version:
1310 1314 version_(ui)
1311 1315 else:
1312 1316 ui.status(_("Mercurial Distributed SCM\n"))
1313 1317 ui.status('\n')
1314 1318
1315 1319 # list of commands
1316 1320 if name == "shortlist":
1317 1321 header = _('basic commands:\n\n')
1318 1322 else:
1319 1323 header = _('list of commands:\n\n')
1320 1324
1321 1325 helplist(header)
1322 1326
1323 1327 # list all option lists
1324 1328 opt_output = []
1325 1329 for title, options in option_lists:
1326 1330 opt_output.append(("\n%s" % title, None))
1327 1331 for shortopt, longopt, default, desc in options:
1328 1332 if "DEPRECATED" in desc and not ui.verbose: continue
1329 1333 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1330 1334 longopt and " --%s" % longopt),
1331 1335 "%s%s" % (desc,
1332 1336 default
1333 1337 and _(" (default: %s)") % default
1334 1338 or "")))
1335 1339
1336 1340 if opt_output:
1337 1341 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1338 1342 for first, second in opt_output:
1339 1343 if second:
1340 1344 ui.write(" %-*s %s\n" % (opts_len, first, second))
1341 1345 else:
1342 1346 ui.write("%s\n" % first)
1343 1347
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags)
    output = []

    if source:
        # identify a revision in a remote repository
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # summarize the working directory state
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        # identify a specific local revision
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1413 1417
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    # parse --date up front so an invalid date aborts before any
    # patch is applied (issue700)
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                if os.path.exists(pf):
                    data = patch.extract(ui, file(pf, 'rb'))
                else:
                    data = patch.extract(ui, urllib.urlopen(pf))
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        # parents unknown locally: apply against the
                        # current working directory parent
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip,
                                       cwd=repo.root, files=files)
                finally:
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-do-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1524 1532
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    # Expand path aliases and split off any #branch/#tag fragment;
    # command-line --rev entries come back in 'revs'.
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        # Translate revision names to nodes on the remote repository.
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # Nothing new: remove a stale --bundle file if one was named.
        # The bare except makes this cleanup best-effort (missing file,
        # empty option value, etc. are all deliberately ignored).
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    # Bundle file to delete on exit -- only set when we wrote a temporary one.
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            # Skip merge changesets (two real parents) under --no-merges.
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1585 1593
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # Apply --ssh/--remotecmd options before touching the destination,
    # since it may be a remote (ssh://) URL.
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1600 1608
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # With --print0, terminate each name with NUL for safe xargs use.
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    # Without --rev, walk the dirstate (node=None) instead of a manifest.
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        node = None

    ret = 1
    walker = cmdutil.walk(repo, pats, opts, node=node,
                          badmatch=util.always, default='relglob')
    for src, abs, rel, exact in walker:
        if src == 'b':
            continue
        # In dirstate mode, only report files Mercurial actually tracks.
        if not node and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            if pats and rel:
                ui.write(rel, end)
            else:
                ui.write(abs, end)
        ret = 0

    return ret
1641 1649
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # Memoized accessor for a revision's changeset data; the same rev is
    # inspected by several filters below, so cache the lookup.
    # The returned tuple is indexed as [1]=user, [2]=(time, tz), [3]=files,
    # [4]=description, [5]=extra -- presumed from the uses below; confirm
    # against changelog.read if touching this.
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        # No limit: effectively unbounded.
        limit = sys.maxint
    count = 0

    # Highest revision the rename cache must cover; with --copies and an
    # explicit --rev range we can stop scanning filelogs early.
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}  # filename -> {linkrev: rename info}
    ncache = {}  # filename -> {filenode: rename info}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # First request for this file: scan its filelog once and
            # record rename info for every revision up to endrev.
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo.changectx(rev).filectx(fn).renamed()
        except revlog.LookupError:
            pass
        return None

    # Optional date filter: a predicate over a unix timestamp.
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts['only_branch']

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            # Real (non-null) parents decide merge status below.
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if only_branches:
                # Branch name lives in the changeset's extra dict.
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                # changes[2][0] is the commit timestamp.
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # Case-insensitive match against user, description and
                # the changed-file list; all keywords must match.
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            # 'iter' marks a revision actually emitted; enforce --limit here.
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1769 1777
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # Either argument may name the revision; prefer the positional one.
    if not node:
        node = rev

    m = repo.changectx(node).manifest()
    names = m.keys()
    names.sort()

    for fname in names:
        if ui.debugflag:
            # 40-char file revision hash column.
            ui.write("%40s " % hex(m[fname]))
        if ui.verbose:
            # Mode column (755 for executables) plus a marker:
            # '*' executable, '@' symlink, ' ' plain file.
            if m.execf(fname):
                marker = "*"
            elif m.linkf(fname):
                marker = "@"
            else:
                marker = " "
            if m.execf(fname):
                perm = "755"
            else:
                perm = "644"
            ui.write("%3s %1s " % (perm, marker))
        ui.write("%s\n" % fname)
1802 1810
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # No revision given: pick the single other head, if unambiguous.
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        parent = repo.dirstate.parents()[0]
        if len(heads) == 1:
            # Only one head at all: there is nothing to merge with.
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo.workingctx().branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # Merge with whichever of the two heads is not the working parent.
        if parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1840 1848
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    # Resolve the destination ('default-push' falls back to 'default') and
    # split off any #branch/#tag fragment into 'revs'.
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    candidates = repo.findoutgoing(other, force=opts['force'])
    if not candidates:
        ui.status(_("no changes found\n"))
        return 1

    nodes = repo.changelog.nodesbetween(candidates, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    skipmerges = opts['no_merges']
    for n in nodes:
        if skipmerges:
            # A merge has two non-null parents.
            realparents = [p for p in repo.changelog.parents(n)
                           if p != nullid]
            if len(realparents) == 2:
                continue
        displayer.show(changenode=n)
1871 1879
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo.changectx(rev)
    else:
        ctx = repo.workingctx()

    if file_:
        # Resolve the single file argument; patterns are rejected.
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = files[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                # File does not exist in this parent; skip it.
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # Map each file node back to the changeset that introduced it
        # via the filelog's linkrev.
        fl = repo.file(file_)
        p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # The null revision is not a real parent; do not display it.
        if n != nullid:
            displayer.show(changenode=n)
1911 1919
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    entries = ui.configitems("paths")
    if not search:
        # No name given: list every configured alias with its target.
        for alias, location in entries:
            ui.write("%s = %s\n" % (alias, location))
        return
    # Look up a single alias; print just its target on success.
    for alias, location in entries:
        if alias == search:
            ui.write("%s\n" % location)
            return
    ui.warn(_("not found!\n"))
    return 1
1931 1939
def postincoming(ui, repo, modheads, optupdate, checkout):
    """After changesets arrive (pull/unbundle): optionally update the
    working directory, otherwise tell the user what to do next."""
    if modheads == 0:
        # Nothing came in; nothing to do or report.
        return
    if optupdate:
        if modheads <= 1 or checkout:
            return hg.update(repo, checkout)
        ui.status(_("not updating, since new heads added\n"))
    # Not updated: advise the appropriate follow-up command.
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
1944 1952
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    # Expand path aliases and split off any #branch/#tag fragment;
    # --rev entries come back in 'revs', fragment checkout in 'checkout'.
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        try:
            # Translate revision names to nodes on the remote.
            revs = [other.lookup(rev) for rev in revs]
        except repo.NoCapability:
            # NOTE(review): 'repo' here is the function's local-repository
            # parameter, which shadows any same-named module import --
            # verify that NoCapability is actually reachable through this
            # object, otherwise this except clause can itself raise.
            error = _("Other repository doesn't support revision lookup, "
                      "so a rev cannot be specified.")
            raise util.Abort(error)

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    # Hand off to postincoming for the optional working-dir update / advice.
    return postincoming(ui, repo, modheads, opts['update'], checkout)
2001 2009
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # Resolve the destination ('default-push' falls back to 'default') and
    # split off any #branch/#tag fragment into 'revs'.
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    # Wrapped in _() for translation, consistent with the status messages
    # printed by pull/incoming/outgoing.
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # Convert repo.push's return value into the command's result code.
    return r == 0
2042 2050
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    # Commit message from -m/-l style options.
    message = cmdutil.logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        # --files FILE: read additional file names, one per line.
        files += open(opts['files']).read().splitlines()

    # Parents explicitly given with --parent, resolved to nodes.
    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        # rawcommit raises ValueError for bad input such as an invalid
        # date (issue700); surface it as a clean abort, not a traceback.
        raise util.Abort(str(inst))
2070 2078
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # If nothing could be recovered, report failure without verifying.
    if not repo.recover():
        return 1
    return hg.verify(repo)
2082 2090
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    # First five status categories, each turned into a dict for O(1)
    # membership tests below.
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    # NOTE: 'remove' and 'exact' are intentionally reused (shadowed) below:
    # 'remove' becomes the list of files to drop, and the loop rebinds
    # 'exact' per walked file.
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        # 'reason' non-None means we refuse to remove this file.
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                # --force on an added file: just un-add it.
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
            exact = 1 # force the message
        elif abs not in repo.dirstate:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            # --after without an exact name: only pick up files already
            # missing from the working directory.
            continue
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    # Unlink from disk unless --after (and not --force) was given.
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2133 2141
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # Hold the working-dir lock for the whole copy+remove operation;
    # the same copy helper implements both hg copy and hg rename.
    lock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        del lock
2153 2161
def revert(ui, repo, *pats, **opts):
    """restore individual files or dirs to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working dir parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of an earlier change.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    if opts["date"]:
        # --date is translated into a revision; it conflicts with --rev.
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    # pmf: manifest of the working dir's parent, loaded lazily further down
    # unless the target revision already is that parent.
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}  # abs path -> (rel path, exact-match flag)

    wlock = repo.wlock()
    try:
        # walk dirstate.
        files = []
        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                                 badmatch=mf.has_key):
            names[abs] = (rel, exact)
            if src != 'b':
                files.append(abs)

        # walk target manifest.

        def badmatch(path):
            # True for paths already collected or directories containing them,
            # so the manifest walk stays quiet about them.
            if path in names:
                return True
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return True
            return False

        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                                 badmatch=badmatch):
            if abs in names or src == 'b':
                continue
            names[abs] = (rel, exact)

        # Status of the collected files, as dicts for fast membership tests.
        changes = repo.status(files=files, match=names.has_key)[:4]
        modified, added, removed, deleted = map(dict.fromkeys, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed[src] = None
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            # Choose the message for the 'remove' action: an added file is
            # merely forgotten, anything else is removed.
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # Each action is (list of files to process, status message or a
        # callable returning one); handle() appends to the list.
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        entries = names.items()
        entries.sort()

        for abs, (rel, exact) in entries:
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # Record the file under the chosen action, back up the
                # working copy if asked, and print the action's message.
                xlist[0].append(abs)
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for/else: no status table claimed the file.
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo.changectx(parent).manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            # Apply the collected actions to the working directory.
            def checkout(f):
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.fileflags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    # Added file: just forget it, nothing on disk to touch.
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    # Already gone from the working directory; fine.
                    pass
                repo.dirstate.remove(f)

            for f in revert[0]:
                checkout(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            # When reverting to the working dir's parent (non-merge), the
            # restored file is clean; otherwise mark for later lookup.
            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        del wlock
2361 2369
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All real work happens in localrepository.rollback(); this wrapper
    # exists to carry the command's signature and help text.
    repo.rollback()
2389 2397
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # repo.root is set at repository-open time; just echo it.
    ui.write(repo.root + "\n")
2396 2404
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio serves this single repository over stdin/stdout (the ssh
    # protocol path); it bypasses the HTTP machinery entirely and never
    # returns from serve_forever().
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # Copy the relevant command-line options into the [web] config
    # section so the hgweb server picks them up; mirror them into the
    # repo's own ui when it is a distinct object.
    parentui = ui.parentui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    # Without a repository we can only serve multiple repos via a
    # webdir_conf file; otherwise there is nothing to serve.
    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    class service:
        def init(self):
            # Runs before daemonization (see cmdutil.service below).
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            # Omit the port from the advertised URL when serving on the
            # default HTTP port 80.
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/%s\n') %
                          (self.httpd.addr, self.httpd.port, prefix))
            else:
                ui.status(_('listening at http://%s/%s\n') %
                          (self.httpd.addr, prefix))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles --daemon forking and pid-file writing,
    # then calls initfn/runfn.
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2454 2462
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    show_all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    # repo.status yields exactly seven file lists, in this order.
    st = repo.status(node1=node1, node2=node2, files=files,
                     match=matchfn,
                     list_ignored=show_all or opts['ignored'],
                     list_clean=show_all or opts['clean'])
    modified, added, removed, deleted, unknown, ignored, clean = st

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # NUL-terminate entries for --print0, newline otherwise.
    end = opts['print0'] and '\0' or '\n'

    # Categories explicitly requested (or all with -A); fall back to
    # the default -mardu set when none were named.
    selected = ([ct for ct in explicit_changetypes
                 if show_all or opts[ct[0]]]
                or changetypes)

    for name, char, changes in selected:
        if opts['no_status']:
            fmt = "%%s%s" % end
        else:
            fmt = "%s %%s%s" % (char, end)

        # Copy sources are only shown alongside status letters.
        want_copies = ((show_all or opts.get('copies'))
                       and not opts.get('no_status'))
        for f in changes:
            ui.write(fmt % repo.pathto(f, cwd))
            if want_copies:
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2519 2527
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # Reserved names would collide with built-in revision identifiers.
    if name in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % name)

    # Positional REV is the deprecated spelling; -r is the current one.
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']

    message = opts['message']

    if opts['remove']:
        # A removal is recorded by re-tagging the null revision; the
        # scope (-l or not) must match the existing tag's scope.
        existing = repo.tagtype(name)
        if not existing:
            raise util.Abort(_('tag %s does not exist') % name)
        if opts['local'] and existing == 'global':
            raise util.Abort(_('%s tag is global') % name)
        if not opts['local'] and existing == 'local':
            raise util.Abort(_('%s tag is local') % name)
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)

    # With no explicit revision, refuse to guess during a merge.
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))

    rnode = repo.changectx(rev_).node()
    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(rnode))

    repo.tag(name, rnode, message, opts['local'], opts['user'], opts['date'])
2575 2583
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # Node missing from the changelog (e.g. a local tag for a
            # stripped changeset): fall back to '?' for the revision
            # number.  Previously this fallback was computed but the
            # ui.write below sat in the try's else: clause, so such
            # tags were silently dropped from the listing.
            r = " ?:%s" % hn
        spaces = " " * (30 - util.locallen(t))
        if ui.verbose:
            if repo.tagtype(t) == 'local':
                tagtype = " local"
            else:
                tagtype = ""
        ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2608 2616
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # The tip's revision number is count() - 1, spelled here as
    # nullrev + count() since nullrev is -1.
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2615 2623
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    for fname in (fname1,) + fnames:
        # A local file is opened directly; anything else is fetched as
        # a URL.  The handle stays open because readbundle streams
        # from it while addchangegroup runs.
        if os.path.exists(fname):
            f = open(fname, "rb")
        else:
            f = urllib.urlopen(fname)
        gen = changegroup.readbundle(f, fname)
        modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)

    # Only the head count from the last bundle decides the update.
    return postincoming(ui, repo, modheads, opts['update'], None)
2632 2640
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # REV may arrive positionally (node) or via -r (rev), not both.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # Resolve the date spec to the tipmost matching revision.
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2664 2672
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # All checking is delegated to hg.verify; its result becomes the
    # command's return value.
    return hg.verify(repo)
2676 2684
def version_(ui):
    """output version and copyright information"""
    # Trailing underscore avoids clashing with the 'version' module
    # used below for get_version().
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # Copyright notice goes through ui.status so -q suppresses it.
    ui.status(_(
        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2687 2695
# Command options and aliases are listed here, alphabetically

# Each option is a 4-tuple: (short flag, long name, default value,
# help text).  A list/[] default makes the option repeatable.

# Options accepted by every command.
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# Shared option groups, concatenated into individual command tables below.
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]
2733 2741
# Command table: maps "name|alias|..." to (function, options, synopsis).
# NOTE(review): a leading '^' on the name appears to mark commands shown
# in the short help listing — confirm against the help machinery.
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('hg backout [OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("hg bisect [-gbsr] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch'))],
         _('hg branch [-f] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('hg branches [-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
         ] + remoteopts,
         _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('', 'decode', None, _('apply any matching decode filter')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
         ] + walkopts + commitopts + commitopts2,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('hg debugancestor INDEX REV1 REV2')),
    "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('hg debugcomplete [-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('hg debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
    "debugindex": (debugindex, [], _('hg debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
    "debuginstall": (debuginstall, [], _('hg debuginstall')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('F', 'files', '', _('file list'))
         ] + commitopts + commitopts2,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('hg debugrebuildstate [-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('hg debugrename [-r REV] FILE')),
    "debugsetparents":
        (debugsetparents,
         [],
         _('hg debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('hg debugstate')),
    "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
          ('U', 'unified', 3,
           _('number of lines of context to show'))
         ] + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-r REV] [REV]...')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified rev')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('hg identify [-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('Use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2,
         _('hg import [OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('b', 'only-branch', [],
           _('show only changesets within the given named branch')),
          ('p', 'patch', None, _('show patch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
          ('', 'template', '', _('display with template')),
         ] + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('hg manifest [-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('hg merge [-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
         ] + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record remove without deleting')),
          ('f', 'force', None, _('remove file even if modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
          ('', 'certificate', '', _('SSL certificate file'))],
         _('hg serve [OPTION]...')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('hg showconfig [-u] [NAME]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('f', 'force', None, _('replace existing tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('m', 'message', '', _('use <text> as commit message')),
         ] + commitopts2,
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('', 'style', '', _('display using template map file')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template'))],
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE...')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision'))],
         _('hg update [-C] [-d DATE] [[-r] REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}
3125 3133
# Space-separated command names that run without a repository, and
# those for which a repository is optional (per their entries above,
# e.g. serve with webdir_conf).
norepo = ("clone init version help debugancestor debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall debugfsinfo")
optionalrepo = ("identify paths serve showconfig")
@@ -1,2117 +1,2118 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import re, lock, transaction, tempfile, stat, errno, ui
13 13 import os, revlog, time, util, extensions, hook, inspect
14 14
15 15 class localrepository(repo.repository):
    # wire-protocol capabilities advertised by this repository class
    capabilities = util.set(('lookup', 'changegroupsubset'))
    # on-disk requirements this class knows how to open
    supported = ('revlogv1', 'store')
18 18
    def __init__(self, parentui, path=None, create=0):
        """Open the repository at path, or initialize it when create is true.

        parentui: the ui object the repository-level ui is derived from.
        Raises repo.RepoError when the repository does not exist, already
        exists on create, or declares an unsupported requirement.
        """
        repo.repository.__init__(self)
        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)       # opens files under .hg/
        self.wopener = util.opener(self.root)      # opens working-dir files

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                # a missing requires file means an old-style repo; other
                # I/O errors are real failures
                if inst.errno != errno.ENOENT:
                    raise
                requirements = []
            # check them
            for r in requirements:
                if r not in self.supported:
                    raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store: with the "store" requirement, filenames are encoded
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path

        try:
            # files in .hg/ will be created using this mode
            mode = os.stat(self.spath).st_mode
            # avoid some useless chmods
            if (0777 & ~util._umask) == (0777 & mode):
                mode = None
        except OSError:
            mode = None

        self._createmode = mode
        self.opener.createmode = mode
        sopener = util.opener(self.spath)
        sopener.createmode = mode
        self.sopener = util.encodedopener(sopener, self.encodefn)

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # no per-repo hgrc is fine
            pass

        # in-memory caches; all invalidated by invalidate()
        self.tagscache = None
        self._tagstypecache = None
        self.branchcache = None
        self._ubranchcache = None # UTF-8 version of branchcache
        self._branchcachetip = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None
103 103
    def __getattr__(self, name):
        """Lazily create the changelog, manifest and dirstate on first use.

        Each branch assigns the attribute, so __getattr__ only runs once
        per attribute per instance."""
        if name == 'changelog':
            self.changelog = changelog.changelog(self.sopener)
            self.sopener.defversion = self.changelog.version
            return self.changelog
        if name == 'manifest':
            # touch the changelog first so sopener.defversion is set
            self.changelog
            self.manifest = manifest.manifest(self.sopener)
            return self.manifest
        if name == 'dirstate':
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
            return self.dirstate
        else:
            raise AttributeError, name
118 118
119 119 def url(self):
120 120 return 'file:' + self.root
121 121
122 122 def hook(self, name, throw=False, **args):
123 123 return hook.hook(self.ui, self, name, throw, **args)
124 124
125 125 tag_disallowed = ':\r\n'
126 126
    def _tag(self, name, node, message, local, user, date, parent=None,
             extra={}):
        """Low-level tagging helper shared by tag() and extensions.

        local: write to .hg/localtags instead of committing to .hgtags.
        parent: when given, base the .hgtags content and the commit on that
        revision instead of the dirstate (parent is None means dirstate).
        Returns the node of the tag commit, or None for local tags.
        NOTE: extra={} is a shared mutable default; it is only passed
        through to commit() here, not mutated.
        """
        use_dirstate = parent is None

        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        def writetag(fp, name, munge, prevtags):
            # append "<hex node> <name>\n", making sure the previous
            # content ends with a newline
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError, err:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetag(fp, name, None, prevtags)
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        if use_dirstate:
            try:
                fp = self.wfile('.hgtags', 'rb+')
            except IOError, err:
                fp = self.wfile('.hgtags', 'ab')
            else:
                prevtags = fp.read()
        else:
            try:
                prevtags = self.filectx('.hgtags', parent).data()
            except revlog.LookupError:
                # no .hgtags at that revision yet
                pass
            fp = self.wfile('.hgtags', 'wb')
            if prevtags:
                fp.write(prevtags)

        # committed tags are stored in UTF-8
        writetag(fp, name, util.fromlocal, prevtags)

        if use_dirstate and '.hgtags' not in self.dirstate:
            self.add(['.hgtags'])

        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
                              extra=extra)

        self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
186 186
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # validate/normalize the date before touching .hgtags, so an
        # invalid date aborts cleanly (issue700)
        date = util.parsedate(date)
        # refuse to tag when .hgtags itself has uncommitted changes
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))


        # NOTE(review): _tag's return value (the tag commit node) is
        # discarded here — confirm callers do not need it
        self._tag(name, node, message, local, user, date)
212 213
    def tags(self):
        '''return a mapping of tag to node'''
        # cached after the first computation; cleared by invalidate()
        if self.tagscache:
            return self.tagscache

        globaltags = {}
        tagtypes = {}

        def readtags(lines, fn, tagtype):
            # parse one .hgtags-format source and merge it into globaltags;
            # fn is only used for warning messages
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # h accumulates the nodes this entry supersedes (later
                # lines for the same tag override earlier ones)
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            # no localtags file
            pass

        # materialize the caches; drop tags deleted by pointing at nullid
        self.tagscache = {}
        self._tagstypecache = {}
        for k,nh in globaltags.items():
            n = nh[0]
            if n != nullid:
                self.tagscache[k] = n
            self._tagstypecache[k] = tagtypes[k]
        self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache
297 298
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        # calling tags() populates _tagstypecache as a side effect
        self.tags()

        return self._tagstypecache.get(tagname)
310 311
    def _hgtagsnodes(self):
        """Return (rev, node, fnode) for each head carrying a .hgtags file.

        Heads are visited in reverse order (presumably oldest first —
        yielding the tip last; confirm against self.heads()), and heads
        whose .hgtags file node was already seen are dropped so each
        distinct .hgtags version is read once."""
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # duplicate .hgtags version: keep only the latest entry
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
328 329
329 330 def tagslist(self):
330 331 '''return a list of tags ordered by revision'''
331 332 l = []
332 333 for t, n in self.tags().items():
333 334 try:
334 335 r = self.changelog.rev(n)
335 336 except:
336 337 r = -2 # sort to the beginning of the list if unknown
337 338 l.append((r, t, n))
338 339 l.sort()
339 340 return [(t, n) for r, t, n in l]
340 341
341 342 def nodetags(self, node):
342 343 '''return the tags associated with a node'''
343 344 if not self.nodetagscache:
344 345 self.nodetagscache = {}
345 346 for t, n in self.tags().items():
346 347 self.nodetagscache.setdefault(n, []).append(t)
347 348 return self.nodetagscache.get(node, [])
348 349
    def _branchtags(self, partial, lrev):
        """Bring the branch cache `partial` up to the current tip.

        lrev is the last revision already covered by partial; newer
        revisions are scanned and the refreshed cache written to disk."""
        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
356 357
    def branchtags(self):
        """Return a mapping of branch name (local charset) to tip node.

        Uses the in-memory cache when the tip is unchanged; otherwise
        refreshes from the on-disk branch.cache."""
        tip = self.changelog.tip()
        if self.branchcache is not None and self._branchcachetip == tip:
            return self.branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if self.branchcache is None:
            self.branchcache = {} # avoid recursion in changectx
        else:
            self.branchcache.clear() # keep using the same dict
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # cache unusable (e.g. after a strip): reload from disk
            partial, last, lrev = self._readbranchcache()
        else:
            lrev = self.changelog.rev(oldtip)
            partial = self._ubranchcache

        self._branchtags(partial, lrev)

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        self._ubranchcache = partial
        return self.branchcache
382 383
    def _readbranchcache(self):
        """Read .hg/branch.cache from disk.

        Returns (partial, last, lrev): the branch->node map, the tip node
        it was valid for, and that tip's revision. On any problem the
        cache is treated as empty ((,{}, nullid, nullrev))."""
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            return {}, nullid, nullrev

        try:
            # first line: "<tip hex> <tip rev>"
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            # remaining lines: "<node hex> <branch name>"
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # corrupt or stale cache: start from scratch
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
410 411
    def _writebranchcache(self, branches, tip, tiprev):
        """Write the branch cache atomically; failures are best-effort."""
        try:
            f = self.opener("branch.cache", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            # the cache is only an optimization; ignore write failures
            pass
420 421
421 422 def _updatebranchcache(self, partial, start, end):
422 423 for r in xrange(start, end):
423 424 c = self.changectx(r)
424 425 b = c.branch()
425 426 partial[b] = c.node()
426 427
    def lookup(self, key):
        """Resolve key to a changelog node.

        Resolution order: '.' (first dirstate parent), 'null', exact
        changelog match, tag, branch, then partial node match.
        Raises repo.RepoError when nothing matches."""
        if key == '.':
            key, second = self.dirstate.parents()
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
            if second != nullid:
                self.ui.warn(_("warning: working directory has two parents, "
                               "tag '.' uses the first\n"))
        elif key == 'null':
            return nullid
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n
        try:
            # a 20-byte key is probably a binary node: show it as hex
            if len(key) == 20:
                key = hex(key)
        except:
            # key may not support len(); report it as-is
            pass
        raise repo.RepoError(_("unknown revision '%s'") % key)
453 454
454 455 def dev(self):
455 456 return os.lstat(self.path).st_dev
456 457
457 458 def local(self):
458 459 return True
459 460
460 461 def join(self, f):
461 462 return os.path.join(self.path, f)
462 463
463 464 def sjoin(self, f):
464 465 f = self.encodefn(f)
465 466 return os.path.join(self.spath, f)
466 467
467 468 def wjoin(self, f):
468 469 return os.path.join(self.root, f)
469 470
470 471 def file(self, f):
471 472 if f[0] == '/':
472 473 f = f[1:]
473 474 return filelog.filelog(self.sopener, f)
474 475
475 476 def changectx(self, changeid=None):
476 477 return context.changectx(self, changeid)
477 478
478 479 def workingctx(self):
479 480 return context.workingctx(self)
480 481
    def parents(self, changeid=None):
        '''
        get list of changectxs for parents of changeid or working directory
        '''
        # None selects the working directory (dirstate) parents
        if changeid is None:
            pl = self.dirstate.parents()
        else:
            n = self.changelog.lookup(changeid)
            pl = self.changelog.parents(n)
        # drop a null second parent so callers see only real parents
        if pl[1] == nullid:
            return [self.changectx(pl[0])]
        return [self.changectx(pl[0]), self.changectx(pl[1])]
493 494
494 495 def filectx(self, path, changeid=None, fileid=None):
495 496 """changeid can be a changeset revision, node, or tag.
496 497 fileid can be a file revision or node."""
497 498 return context.filectx(self, path, changeid, fileid)
498 499
499 500 def getcwd(self):
500 501 return self.dirstate.getcwd()
501 502
502 503 def pathto(self, f, cwd=None):
503 504 return self.dirstate.pathto(f, cwd)
504 505
505 506 def wfile(self, f, mode='r'):
506 507 return self.wopener(f, mode)
507 508
508 509 def _link(self, f):
509 510 return os.path.islink(self.wjoin(f))
510 511
    def _filter(self, filter, filename, data):
        """Pass data through the filters configured in config section
        `filter` (e.g. 'encode'/'decode') whose pattern matches filename.

        The compiled (matcher, fn, params) list is cached per section in
        self.filterpats. Only the first matching filter is applied."""
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                fn = None
                params = cmd
                # a command starting with a registered data-filter name
                # dispatches to that Python filter instead of a shell pipe
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l

        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data
539 540
540 541 def adddatafilter(self, name, filter):
541 542 self._datafilters[name] = filter
542 543
    def wread(self, filename):
        """Read filename from the working dir, applying 'encode' filters.

        A symlink is read as its target path rather than followed."""
        if self._link(filename):
            data = os.readlink(self.wjoin(filename))
        else:
            data = self.wopener(filename, 'r').read()
        return self._filter("encode", filename, data)
549 550
    def wwrite(self, filename, data, flags):
        """Write data to working-dir filename after 'decode' filtering.

        The file is unlinked first (so an existing symlink is replaced,
        not written through), then flags (exec/link) are applied."""
        data = self._filter("decode", filename, data)
        try:
            os.unlink(self.wjoin(filename))
        except OSError:
            # file did not exist (or could not be removed); proceed
            pass
        self.wopener(filename, 'w').write(data)
        util.set_flags(self.wjoin(filename), flags)
558 559
559 560 def wwritedata(self, filename, data):
560 561 return self._filter("decode", filename, data)
561 562
    def transaction(self):
        """Start a store transaction, or nest into a running one.

        Saves the dirstate and branch for rollback, then creates the
        journal. Raises repo.RepoError if a journal already exists."""
        # reuse a still-live transaction as a nested transaction
        if self._transref and self._transref():
            return self._transref().nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise repo.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        # on close, journal files are renamed to undo files for rollback
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self._createmode)
        # weakref: the transaction dies when the last caller drops it
        self._transref = weakref.ref(tr)
        return tr
587 588
    def recover(self):
        """Roll back an interrupted transaction.

        Returns True when a journal was found and rolled back, False
        otherwise."""
        l = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"))
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            del l
601 602
    def rollback(self):
        """Undo the last transaction, restoring dirstate and branch."""
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    # undo.branch missing: the branch cannot be restored
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % util.tolocal(self.dirstate.branch()))
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            # dropping the references releases the locks
            del lock, wlock
624 625
    def invalidate(self):
        """Drop all in-memory caches so data is reloaded from disk."""
        # changelog/manifest are lazily recreated by __getattr__
        for a in "changelog manifest".split():
            if hasattr(self, a):
                self.__delattr__(a)
        self.tagscache = None
        self._tagstypecache = None
        self.nodetagscache = None
        self.branchcache = None
        self._ubranchcache = None
        self._branchcachetip = None
635 636
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock file lockname.

        When held by someone else: raise immediately unless wait is true,
        in which case warn and retry with the ui.timeout (default 600s).
        acquirefn, if given, runs after the lock is obtained."""
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
650 651
    def lock(self, wait=True):
        """Return the store lock, reusing a still-live one when possible."""
        if self._lockref and self._lockref():
            return self._lockref()

        # invalidate caches on acquire so we see other writers' changes
        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                       _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
659 660
    def wlock(self, wait=True):
        """Return the working-directory lock, reusing a live one if possible.

        The dirstate is written on release and invalidated on acquire."""
        if self._wlockref and self._wlockref():
            return self._wlockref()

        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
669 670
    def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the new filelog node for fn. If the file is unchanged
        from its parent, the existing node is returned and fn is NOT
        appended to changelist; otherwise it is.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            elif cp in manifest2: # directory rename on local side
                meta["copyrev"] = hex(manifest2[cp])
            else: # directory rename on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, tr, linkrev, fp1, fp2)
731 732
    def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
        """Commit files with explicit parents, bypassing dirstate status.

        When p1 is not given, the dirstate parents are used. Delegates to
        commit() with empty_ok=True. NOTE: extra={} is a shared mutable
        default; it is only passed through, not mutated here."""
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, extra=extra, empty_ok=True)
737 738
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, force_editor=False,
               p1=None, p2=None, extra={}, empty_ok=False):
        """Create a new changeset and return its node, or None if there
        was nothing to commit.

        With p1 None (the normal case) the dirstate supplies the parents
        and the changed-file list; otherwise this is a raw commit against
        the given parents. An editor is launched when no message is given
        (unless empty_ok) or force_editor is set.
        NOTE: extra={} is a shared mutable default, but it is copied
        before being modified below, so the default stays clean."""
        wlock = lock = tr = None
        valid = 0 # don't save the dirstate if this isn't set
        if files:
            files = util.unique(files)
        try:
            commit = []
            remove = []
            changed = []
            use_dirstate = (p1 is None) # not rawcommit
            extra = extra.copy()

            if use_dirstate:
                if files:
                    # explicit file list: classify by dirstate status
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            commit.append(f)
                        elif s == 'r':
                            remove.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    changes = self.status(match=match)[:5]
                    modified, added, removed, deleted, unknown = changes
                    commit = modified + added
                    remove = removed
            else:
                commit = files

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True
            else:
                p1, p2 = p1, p2 or nullid
                update_dirstate = (self.dirstate.parents()[0] == p1)

            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                branchname = self.workingctx().branch()
                try:
                    branchname = branchname.decode('UTF-8').encode('UTF-8')
                except UnicodeDecodeError:
                    raise util.Abort(_('branch name not in UTF-8!'))
            else:
                branchname = ""

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                # nothing to commit: no files and no branch change
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            wlock = self.wlock()
            lock = self.lock()
            tr = self.transaction()
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            linkrev = self.changelog.count()
            commit.sort()
            is_exec = util.execfunc(self.root, m1.execf)
            is_link = util.linkfunc(self.root, m1.linkf)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
                    new_exec = is_exec(f)
                    new_link = is_link(f)
                    if ((not changed or changed[-1] != f) and
                        m2.get(f) != new[f]):
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        old_exec = m1.execf(f)
                        old_link = m1.linkf(f)
                        if old_exec != new_exec or old_link != new_link:
                            changed.append(f)
                    m1.set(f, new_exec, new_link)
                    if use_dirstate:
                        self.dirstate.normal(f)

                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # raw commit: a missing file becomes a removal
                        remove.append(f)

            # update manifest
            m1.update(new)
            remove.sort()
            removed = []

            for f in remove:
                if f in m1:
                    del m1[f]
                    removed.append(f)
                elif f in m2:
                    removed.append(f)
            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
                                   (new, removed))

            # add changeset
            new = new.keys()
            new.sort()

            user = user or self.ui.username()
            if (not empty_ok and not text) or force_editor:
                edittext = []
                if text:
                    edittext.append(text)
                    edittext.append("")
                edittext.append(_("HG: Enter commit message."
                                  " Lines beginning with 'HG:' are removed."))
                edittext.append("HG: --")
                edittext.append("HG: user: %s" % user)
                if p2 != nullid:
                    edittext.append("HG: branch merge")
                if branchname:
                    edittext.append("HG: branch '%s'" % util.tolocal(branchname))
                edittext.extend(["HG: changed %s" % f for f in changed])
                edittext.extend(["HG: removed %s" % f for f in removed])
                if not changed and not remove:
                    edittext.append("HG: no files changed")
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

            if branchname:
                extra["branch"] = branchname

            if use_dirstate:
                # strip trailing whitespace and leading blank lines;
                # refuse an effectively empty message
                lines = [line.rstrip() for line in text.rstrip().splitlines()]
                while lines and not lines[0]:
                    del lines[0]
                if not lines:
                    raise util.Abort(_("empty commit message"))
                text = '\n'.join(lines)

            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                   user, date, extra)
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2)
            tr.close()

            if self.branchcache:
                self.branchtags()

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
                if use_dirstate:
                    for f in removed:
                        self.dirstate.forget(f)
            valid = 1 # our dirstate updates are complete

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            if not valid: # don't save our updated dirstate
                self.dirstate.invalidate()
            del tr, lock, wlock
917 918
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch

        NOTE: files=[] is a shared mutable default; it is only read here.
        '''

        if node:
            # walk the manifest of the given changeset
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # anything left in fdict was requested but not found
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n')
                                 % (self.pathto(fn), short(node)))
        else:
            # walk the working directory via the dirstate
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
959 960
    def status(self, node1=None, node2=None, files=[], match=util.always,
               list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a tuple of file-name lists:
        (modified, added, removed, deleted, unknown, ignored, clean);
        'ignored' and 'clean' are only populated when list_ignored /
        list_clean are set.
        """

        def fcmp(fn, getnode):
            # full content comparison of working copy vs filelog revision
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        def mfmatches(node):
            # manifest of 'node', restricted to the files we match
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        # comparing the working dir against its own parent?
        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    fixup = []
                    # do a full compare of any files that might have changed
                    ctx = self.changectx()
                    for f in lookup:
                        if f not in ctx or ctx[f].cmp(self.wread(f)):
                            modified.append(f)
                        else:
                            fixup.append(f)
                            if list_clean:
                                clean.append(f)

                    # update dirstate for files that are actually clean
                    if fixup:
                        # the lock is opportunistic: if we cannot take it,
                        # the dirstate is simply left un-refreshed
                        wlock = None
                        try:
                            try:
                                wlock = self.wlock(False)
                            except lock.LockException:
                                pass
                            if wlock:
                                for f in fixup:
                                    self.dirstate.normal(f)
                        finally:
                            del wlock
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if fn in mf1:
                    # in both: modified unless flags and hashes agree; an
                    # empty mf2 entry marks a working-dir pseudo-manifest
                    # file, which needs a content compare
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] != "" or fcmp(fn, getnode)))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # whatever was not consumed from mf1 is gone in node2
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
1071 1072
1072 1073 def add(self, list):
1073 1074 wlock = self.wlock()
1074 1075 try:
1075 1076 rejected = []
1076 1077 for f in list:
1077 1078 p = self.wjoin(f)
1078 1079 try:
1079 1080 st = os.lstat(p)
1080 1081 except:
1081 1082 self.ui.warn(_("%s does not exist!\n") % f)
1082 1083 rejected.append(f)
1083 1084 continue
1084 1085 if st.st_size > 10000000:
1085 1086 self.ui.warn(_("%s: files over 10MB may cause memory and"
1086 1087 " performance problems\n"
1087 1088 "(use 'hg revert %s' to unadd the file)\n")
1088 1089 % (f, f))
1089 1090 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1090 1091 self.ui.warn(_("%s not added: only files and symlinks "
1091 1092 "supported currently\n") % f)
1092 1093 rejected.append(p)
1093 1094 elif self.dirstate[f] in 'amn':
1094 1095 self.ui.warn(_("%s already tracked!\n") % f)
1095 1096 elif self.dirstate[f] == 'r':
1096 1097 self.dirstate.normallookup(f)
1097 1098 else:
1098 1099 self.dirstate.add(f)
1099 1100 return rejected
1100 1101 finally:
1101 1102 del wlock
1102 1103
1103 1104 def forget(self, list):
1104 1105 wlock = self.wlock()
1105 1106 try:
1106 1107 for f in list:
1107 1108 if self.dirstate[f] != 'a':
1108 1109 self.ui.warn(_("%s not added!\n") % f)
1109 1110 else:
1110 1111 self.dirstate.forget(f)
1111 1112 finally:
1112 1113 del wlock
1113 1114
1114 1115 def remove(self, list, unlink=False):
1115 1116 wlock = None
1116 1117 try:
1117 1118 if unlink:
1118 1119 for f in list:
1119 1120 try:
1120 1121 util.unlink(self.wjoin(f))
1121 1122 except OSError, inst:
1122 1123 if inst.errno != errno.ENOENT:
1123 1124 raise
1124 1125 wlock = self.wlock()
1125 1126 for f in list:
1126 1127 if unlink and os.path.exists(self.wjoin(f)):
1127 1128 self.ui.warn(_("%s still exists!\n") % f)
1128 1129 elif self.dirstate[f] == 'a':
1129 1130 self.dirstate.forget(f)
1130 1131 elif f not in self.dirstate:
1131 1132 self.ui.warn(_("%s not tracked!\n") % f)
1132 1133 else:
1133 1134 self.dirstate.remove(f)
1134 1135 finally:
1135 1136 del wlock
1136 1137
1137 1138 def undelete(self, list):
1138 1139 wlock = None
1139 1140 try:
1140 1141 manifests = [self.manifest.read(self.changelog.read(p)[0])
1141 1142 for p in self.dirstate.parents() if p != nullid]
1142 1143 wlock = self.wlock()
1143 1144 for f in list:
1144 1145 if self.dirstate[f] != 'r':
1145 1146 self.ui.warn("%s not removed!\n" % f)
1146 1147 else:
1147 1148 m = f in manifests[0] and manifests[0] or manifests[1]
1148 1149 t = self.file(f).read(m[f])
1149 1150 self.wwrite(f, t, m.flags(f))
1150 1151 self.dirstate.normal(f)
1151 1152 finally:
1152 1153 del wlock
1153 1154
1154 1155 def copy(self, source, dest):
1155 1156 wlock = None
1156 1157 try:
1157 1158 p = self.wjoin(dest)
1158 1159 if not (os.path.exists(p) or os.path.islink(p)):
1159 1160 self.ui.warn(_("%s does not exist!\n") % dest)
1160 1161 elif not (os.path.isfile(p) or os.path.islink(p)):
1161 1162 self.ui.warn(_("copy failed: %s is not a file or a "
1162 1163 "symbolic link\n") % dest)
1163 1164 else:
1164 1165 wlock = self.wlock()
1165 1166 if dest not in self.dirstate:
1166 1167 self.dirstate.add(dest)
1167 1168 self.dirstate.copy(source, dest)
1168 1169 finally:
1169 1170 del wlock
1170 1171
1171 1172 def heads(self, start=None):
1172 1173 heads = self.changelog.heads(start)
1173 1174 # sort the output in rev descending order
1174 1175 heads = [(-self.changelog.rev(h), h) for h in heads]
1175 1176 heads.sort()
1176 1177 return [n for (r, n) in heads]
1177 1178
    def branchheads(self, branch, start=None):
        """Return the head nodes of the given named branch, optionally
        restricted to those reachable from start."""
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head. So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev. There may be a more
        # efficient way to do this as part of the previous algorithm.

        # presumably a compatibility alias for the builtin set type --
        # confirm in util
        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self.changectx(rev).branch() == branch:
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads
1223 1224
1224 1225 def branches(self, nodes):
1225 1226 if not nodes:
1226 1227 nodes = [self.changelog.tip()]
1227 1228 b = []
1228 1229 for n in nodes:
1229 1230 t = n
1230 1231 while 1:
1231 1232 p = self.changelog.parents(n)
1232 1233 if p[1] != nullid or p[0] == nullid:
1233 1234 b.append((t, n, p[0], p[1]))
1234 1235 break
1235 1236 n = p[0]
1236 1237 return b
1237 1238
1238 1239 def between(self, pairs):
1239 1240 r = []
1240 1241
1241 1242 for top, bottom in pairs:
1242 1243 n, l, i = top, [], 0
1243 1244 f = 1
1244 1245
1245 1246 while n != bottom:
1246 1247 p = self.changelog.parents(n)[0]
1247 1248 if i == f:
1248 1249 l.append(n)
1249 1250 f = f * 2
1250 1251 n = p
1251 1252 i += 1
1252 1253
1253 1254 r.append(l)
1254 1255
1255 1256 return r
1256 1257
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []       # (head, root) branch ranges still to bisect
        fetch = {}        # earliest unknown nodes found so far (the result)
        seen = {}         # branch heads already examined
        seenbranch = {}   # branch tuples already scheduled for bisection
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # local repo is empty: everything the remote has is missing
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)   # nodes already asked about
        reqcnt = 0                     # number of round trips, for --debug

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            # both parents known: the root itself is the
                            # earliest unknown node of this segment
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    # queue any still-unknown parents for the next request
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                # batch the queued parents into requests of 10
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # narrowed down to a single step: p is the
                        # earliest unknown node on this branch
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
1397 1398
1398 1399 def findoutgoing(self, remote, base=None, heads=None, force=False):
1399 1400 """Return list of nodes that are roots of subsets not in remote
1400 1401
1401 1402 If base dict is specified, assume that these nodes and their parents
1402 1403 exist on the remote side.
1403 1404 If a list of heads is specified, return only nodes which are heads
1404 1405 or ancestors of these heads, and return a second element which
1405 1406 contains all remote heads which get new children.
1406 1407 """
1407 1408 if base == None:
1408 1409 base = {}
1409 1410 self.findincoming(remote, base, heads, force=force)
1410 1411
1411 1412 self.ui.debug(_("common changesets up to ")
1412 1413 + " ".join(map(short, base.keys())) + "\n")
1413 1414
1414 1415 remain = dict.fromkeys(self.changelog.nodemap)
1415 1416
1416 1417 # prune everything remote has from the tree
1417 1418 del remain[nullid]
1418 1419 remove = base.keys()
1419 1420 while remove:
1420 1421 n = remove.pop(0)
1421 1422 if n in remain:
1422 1423 del remain[n]
1423 1424 for p in self.changelog.parents(n):
1424 1425 remove.append(p)
1425 1426
1426 1427 # find every node whose parents have been pruned
1427 1428 subset = []
1428 1429 # find every remote head that will get new children
1429 1430 updated_heads = {}
1430 1431 for n in remain:
1431 1432 p1, p2 = self.changelog.parents(n)
1432 1433 if p1 not in remain and p2 not in remain:
1433 1434 subset.append(n)
1434 1435 if heads:
1435 1436 if p1 in heads:
1436 1437 updated_heads[p1] = True
1437 1438 if p2 in heads:
1438 1439 updated_heads[p2] = True
1439 1440
1440 1441 # this is the set of all roots we have to push
1441 1442 if heads:
1442 1443 return subset, updated_heads.keys()
1443 1444 else:
1444 1445 return subset
1445 1446
1446 1447 def pull(self, remote, heads=None, force=False):
1447 1448 lock = self.lock()
1448 1449 try:
1449 1450 fetch = self.findincoming(remote, heads=heads, force=force)
1450 1451 if fetch == [nullid]:
1451 1452 self.ui.status(_("requesting all changes\n"))
1452 1453
1453 1454 if not fetch:
1454 1455 self.ui.status(_("no changes found\n"))
1455 1456 return 0
1456 1457
1457 1458 if heads is None:
1458 1459 cg = remote.changegroup(fetch, 'pull')
1459 1460 else:
1460 1461 if 'changegroupsubset' not in remote.capabilities:
1461 1462 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1462 1463 cg = remote.changegroupsubset(fetch, heads, 'pull')
1463 1464 return self.addchangegroup(cg, 'pull', remote.url())
1464 1465 finally:
1465 1466 del lock
1466 1467
1467 1468 def push(self, remote, force=False, revs=None):
1468 1469 # there are two ways to push to remote repo:
1469 1470 #
1470 1471 # addchangegroup assumes local user can lock remote
1471 1472 # repo (local filesystem, old ssh servers).
1472 1473 #
1473 1474 # unbundle assumes local user cannot lock remote repo (new ssh
1474 1475 # servers, http servers).
1475 1476
1476 1477 if remote.capable('unbundle'):
1477 1478 return self.push_unbundle(remote, force, revs)
1478 1479 return self.push_addchangegroup(remote, force, revs)
1479 1480
    def prepush(self, remote, force, revs):
        """Analyze what a push to remote would do.

        Returns a pair (changegroup, remote_heads).  changegroup is None
        when nothing will be pushed; in that case the second element is
        1 for "no changes found" and 0 for "refused: would create new
        remote heads".
        """
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # remote is empty, nothing to check
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                # simulate the post-push head set: local heads plus every
                # remote head that no outgoing head descends from
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 0
        elif inc:
            # forced push with unseen incoming changes: warn but proceed
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1535 1536
1536 1537 def push_addchangegroup(self, remote, force, revs):
1537 1538 lock = remote.lock()
1538 1539 try:
1539 1540 ret = self.prepush(remote, force, revs)
1540 1541 if ret[0] is not None:
1541 1542 cg, remote_heads = ret
1542 1543 return remote.addchangegroup(cg, 'push', self.url())
1543 1544 return ret[1]
1544 1545 finally:
1545 1546 del lock
1546 1547
1547 1548 def push_unbundle(self, remote, force, revs):
1548 1549 # local repo finds heads on server, finds out what revs it
1549 1550 # must push. once revs transferred, if server finds it has
1550 1551 # different heads (someone else won commit/push race), server
1551 1552 # aborts.
1552 1553
1553 1554 ret = self.prepush(remote, force, revs)
1554 1555 if ret[0] is not None:
1555 1556 cg, remote_heads = ret
1556 1557 if force: remote_heads = ['force']
1557 1558 return remote.unbundle(cg, remote_heads, 'push')
1558 1559 return ret[1]
1559 1560
1560 1561 def changegroupinfo(self, nodes, source):
1561 1562 if self.ui.verbose or source == 'bundle':
1562 1563 self.ui.status(_("%d changesets found\n") % len(nodes))
1563 1564 if self.ui.debugflag:
1564 1565 self.ui.debug(_("List of changesets:\n"))
1565 1566 for node in nodes:
1566 1567 self.ui.debug("%s\n" % hex(node))
1567 1568
1568 1569 def changegroupsubset(self, bases, heads, source, extranodes=None):
1569 1570 """This function generates a changegroup consisting of all the nodes
1570 1571 that are descendents of any of the bases, and ancestors of any of
1571 1572 the heads.
1572 1573
1573 1574 It is fairly complex as determining which filenodes and which
1574 1575 manifest nodes need to be included for the changeset to be complete
1575 1576 is non-trivial.
1576 1577
1577 1578 Another wrinkle is doing the reverse, figuring out which changeset in
1578 1579 the changegroup a particular filenode or manifestnode belongs to.
1579 1580
1580 1581 The caller can specify some nodes that must be included in the
1581 1582 changegroup using the extranodes argument. It should be a dict
1582 1583 where the keys are the filenames (or 1 for the manifest), and the
1583 1584 values are lists of (node, linknode) tuples, where node is a wanted
1584 1585 node and linknode is the changelog node that should be transmitted as
1585 1586 the linkrev.
1586 1587 """
1587 1588
1588 1589 self.hook('preoutgoing', throw=True, source=source)
1589 1590
1590 1591 # Set up some initial variables
1591 1592 # Make it easy to refer to self.changelog
1592 1593 cl = self.changelog
1593 1594 # msng is short for missing - compute the list of changesets in this
1594 1595 # changegroup.
1595 1596 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1596 1597 self.changegroupinfo(msng_cl_lst, source)
1597 1598 # Some bases may turn out to be superfluous, and some heads may be
1598 1599 # too. nodesbetween will return the minimal set of bases and heads
1599 1600 # necessary to re-create the changegroup.
1600 1601
1601 1602 # Known heads are the list of heads that it is assumed the recipient
1602 1603 # of this changegroup will know about.
1603 1604 knownheads = {}
1604 1605 # We assume that all parents of bases are known heads.
1605 1606 for n in bases:
1606 1607 for p in cl.parents(n):
1607 1608 if p != nullid:
1608 1609 knownheads[p] = 1
1609 1610 knownheads = knownheads.keys()
1610 1611 if knownheads:
1611 1612 # Now that we know what heads are known, we can compute which
1612 1613 # changesets are known. The recipient must know about all
1613 1614 # changesets required to reach the known heads from the null
1614 1615 # changeset.
1615 1616 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1616 1617 junk = None
1617 1618 # Transform the list into an ersatz set.
1618 1619 has_cl_set = dict.fromkeys(has_cl_set)
1619 1620 else:
1620 1621 # If there were no known heads, the recipient cannot be assumed to
1621 1622 # know about any changesets.
1622 1623 has_cl_set = {}
1623 1624
1624 1625 # Make it easy to refer to self.manifest
1625 1626 mnfst = self.manifest
1626 1627 # We don't know which manifests are missing yet
1627 1628 msng_mnfst_set = {}
1628 1629 # Nor do we know which filenodes are missing.
1629 1630 msng_filenode_set = {}
1630 1631
1631 1632 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1632 1633 junk = None
1633 1634
1634 1635 # A changeset always belongs to itself, so the changenode lookup
1635 1636 # function for a changenode is identity.
1636 1637 def identity(x):
1637 1638 return x
1638 1639
1639 1640 # A function generating function. Sets up an environment for the
1640 1641 # inner function.
1641 1642 def cmp_by_rev_func(revlog):
1642 1643 # Compare two nodes by their revision number in the environment's
1643 1644 # revision history. Since the revision number both represents the
1644 1645 # most efficient order to read the nodes in, and represents a
1645 1646 # topological sorting of the nodes, this function is often useful.
1646 1647 def cmp_by_rev(a, b):
1647 1648 return cmp(revlog.rev(a), revlog.rev(b))
1648 1649 return cmp_by_rev
1649 1650
1650 1651 # If we determine that a particular file or manifest node must be a
1651 1652 # node that the recipient of the changegroup will already have, we can
1652 1653 # also assume the recipient will have all the parents. This function
1653 1654 # prunes them from the set of missing nodes.
1654 1655 def prune_parents(revlog, hasset, msngset):
1655 1656 haslst = hasset.keys()
1656 1657 haslst.sort(cmp_by_rev_func(revlog))
1657 1658 for node in haslst:
1658 1659 parentlst = [p for p in revlog.parents(node) if p != nullid]
1659 1660 while parentlst:
1660 1661 n = parentlst.pop()
1661 1662 if n not in hasset:
1662 1663 hasset[n] = 1
1663 1664 p = [p for p in revlog.parents(n) if p != nullid]
1664 1665 parentlst.extend(p)
1665 1666 for n in hasset:
1666 1667 msngset.pop(n, None)
1667 1668
1668 1669 # This is a function generating function used to set up an environment
1669 1670 # for the inner function to execute in.
1670 1671 def manifest_and_file_collector(changedfileset):
1671 1672 # This is an information gathering function that gathers
1672 1673 # information from each changeset node that goes out as part of
1673 1674 # the changegroup. The information gathered is a list of which
1674 1675 # manifest nodes are potentially required (the recipient may
1675 1676 # already have them) and total list of all files which were
1676 1677 # changed in any changeset in the changegroup.
1677 1678 #
1678 1679 # We also remember the first changenode we saw any manifest
1679 1680 # referenced by so we can later determine which changenode 'owns'
1680 1681 # the manifest.
1681 1682 def collect_manifests_and_files(clnode):
1682 1683 c = cl.read(clnode)
1683 1684 for f in c[3]:
1684 1685 # This is to make sure we only have one instance of each
1685 1686 # filename string for each filename.
1686 1687 changedfileset.setdefault(f, f)
1687 1688 msng_mnfst_set.setdefault(c[0], clnode)
1688 1689 return collect_manifests_and_files
1689 1690
1690 1691 # Figure out which manifest nodes (of the ones we think might be part
1691 1692 # of the changegroup) the recipient must know about and remove them
1692 1693 # from the changegroup.
1693 1694 def prune_manifests():
1694 1695 has_mnfst_set = {}
1695 1696 for n in msng_mnfst_set:
1696 1697 # If a 'missing' manifest thinks it belongs to a changenode
1697 1698 # the recipient is assumed to have, obviously the recipient
1698 1699 # must have that manifest.
1699 1700 linknode = cl.node(mnfst.linkrev(n))
1700 1701 if linknode in has_cl_set:
1701 1702 has_mnfst_set[n] = 1
1702 1703 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1703 1704
1704 1705 # Use the information collected in collect_manifests_and_files to say
1705 1706 # which changenode any manifestnode belongs to.
1706 1707 def lookup_manifest_link(mnfstnode):
1707 1708 return msng_mnfst_set[mnfstnode]
1708 1709
1709 1710 # A function generating function that sets up the initial environment
1710 1711 # the inner function.
1711 1712 def filenode_collector(changedfiles):
1712 1713 next_rev = [0]
1713 1714 # This gathers information from each manifestnode included in the
1714 1715 # changegroup about which filenodes the manifest node references
1715 1716 # so we can include those in the changegroup too.
1716 1717 #
1717 1718 # It also remembers which changenode each filenode belongs to. It
1718 1719 # does this by assuming the a filenode belongs to the changenode
1719 1720 # the first manifest that references it belongs to.
1720 1721 def collect_msng_filenodes(mnfstnode):
1721 1722 r = mnfst.rev(mnfstnode)
1722 1723 if r == next_rev[0]:
1723 1724 # If the last rev we looked at was the one just previous,
1724 1725 # we only need to see a diff.
1725 1726 deltamf = mnfst.readdelta(mnfstnode)
1726 1727 # For each line in the delta
1727 1728 for f, fnode in deltamf.items():
1728 1729 f = changedfiles.get(f, None)
1729 1730 # And if the file is in the list of files we care
1730 1731 # about.
1731 1732 if f is not None:
1732 1733 # Get the changenode this manifest belongs to
1733 1734 clnode = msng_mnfst_set[mnfstnode]
1734 1735 # Create the set of filenodes for the file if
1735 1736 # there isn't one already.
1736 1737 ndset = msng_filenode_set.setdefault(f, {})
1737 1738 # And set the filenode's changelog node to the
1738 1739 # manifest's if it hasn't been set already.
1739 1740 ndset.setdefault(fnode, clnode)
1740 1741 else:
1741 1742 # Otherwise we need a full manifest.
1742 1743 m = mnfst.read(mnfstnode)
1743 1744 # For every file in we care about.
1744 1745 for f in changedfiles:
1745 1746 fnode = m.get(f, None)
1746 1747 # If it's in the manifest
1747 1748 if fnode is not None:
1748 1749 # See comments above.
1749 1750 clnode = msng_mnfst_set[mnfstnode]
1750 1751 ndset = msng_filenode_set.setdefault(f, {})
1751 1752 ndset.setdefault(fnode, clnode)
1752 1753 # Remember the revision we hope to see next.
1753 1754 next_rev[0] = r + 1
1754 1755 return collect_msng_filenodes
1755 1756
1756 1757 # We have a list of filenodes we think we need for a file, lets remove
1757 1758 # all those we now the recipient must have.
1758 1759 def prune_filenodes(f, filerevlog):
1759 1760 msngset = msng_filenode_set[f]
1760 1761 hasset = {}
1761 1762 # If a 'missing' filenode thinks it belongs to a changenode we
1762 1763 # assume the recipient must have, then the recipient must have
1763 1764 # that filenode.
1764 1765 for n in msngset:
1765 1766 clnode = cl.node(filerevlog.linkrev(n))
1766 1767 if clnode in has_cl_set:
1767 1768 hasset[n] = 1
1768 1769 prune_parents(filerevlog, hasset, msngset)
1769 1770
1770 1771 # A function generator function that sets up the a context for the
1771 1772 # inner function.
1772 1773 def lookup_filenode_link_func(fname):
1773 1774 msngset = msng_filenode_set[fname]
1774 1775 # Lookup the changenode the filenode belongs to.
1775 1776 def lookup_filenode_link(fnode):
1776 1777 return msngset[fnode]
1777 1778 return lookup_filenode_link
1778 1779
1779 1780 # Add the nodes that were explicitly requested.
1780 1781 def add_extra_nodes(name, nodes):
1781 1782 if not extranodes or name not in extranodes:
1782 1783 return
1783 1784
1784 1785 for node, linknode in extranodes[name]:
1785 1786 if node not in nodes:
1786 1787 nodes[node] = linknode
1787 1788
1788 1789 # Now that we have all theses utility functions to help out and
1789 1790 # logically divide up the task, generate the group.
1790 1791 def gengroup():
1791 1792 # The set of changed files starts empty.
1792 1793 changedfiles = {}
1793 1794 # Create a changenode group generator that will call our functions
1794 1795 # back to lookup the owning changenode and collect information.
1795 1796 group = cl.group(msng_cl_lst, identity,
1796 1797 manifest_and_file_collector(changedfiles))
1797 1798 for chnk in group:
1798 1799 yield chnk
1799 1800
1800 1801 # The list of manifests has been collected by the generator
1801 1802 # calling our functions back.
1802 1803 prune_manifests()
1803 1804 add_extra_nodes(1, msng_mnfst_set)
1804 1805 msng_mnfst_lst = msng_mnfst_set.keys()
1805 1806 # Sort the manifestnodes by revision number.
1806 1807 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1807 1808 # Create a generator for the manifestnodes that calls our lookup
1808 1809 # and data collection functions back.
1809 1810 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1810 1811 filenode_collector(changedfiles))
1811 1812 for chnk in group:
1812 1813 yield chnk
1813 1814
1814 1815 # These are no longer needed, dereference and toss the memory for
1815 1816 # them.
1816 1817 msng_mnfst_lst = None
1817 1818 msng_mnfst_set.clear()
1818 1819
1819 1820 if extranodes:
1820 1821 for fname in extranodes:
1821 1822 if isinstance(fname, int):
1822 1823 continue
1823 1824 add_extra_nodes(fname,
1824 1825 msng_filenode_set.setdefault(fname, {}))
1825 1826 changedfiles[fname] = 1
1826 1827 changedfiles = changedfiles.keys()
1827 1828 changedfiles.sort()
1828 1829 # Go through all our files in order sorted by name.
1829 1830 for fname in changedfiles:
1830 1831 filerevlog = self.file(fname)
1831 1832 if filerevlog.count() == 0:
1832 1833 raise util.Abort(_("empty or missing revlog for %s") % fname)
1833 1834 # Toss out the filenodes that the recipient isn't really
1834 1835 # missing.
1835 1836 if fname in msng_filenode_set:
1836 1837 prune_filenodes(fname, filerevlog)
1837 1838 msng_filenode_lst = msng_filenode_set[fname].keys()
1838 1839 else:
1839 1840 msng_filenode_lst = []
1840 1841 # If any filenodes are left, generate the group for them,
1841 1842 # otherwise don't bother.
1842 1843 if len(msng_filenode_lst) > 0:
1843 1844 yield changegroup.chunkheader(len(fname))
1844 1845 yield fname
1845 1846 # Sort the filenodes by their revision #
1846 1847 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1847 1848 # Create a group generator and only pass in a changenode
1848 1849 # lookup function as we need to collect no information
1849 1850 # from filenodes.
1850 1851 group = filerevlog.group(msng_filenode_lst,
1851 1852 lookup_filenode_link_func(fname))
1852 1853 for chnk in group:
1853 1854 yield chnk
1854 1855 if fname in msng_filenode_set:
1855 1856 # Don't need this anymore, toss it to free memory.
1856 1857 del msng_filenode_set[fname]
1857 1858 # Signal that no more groups are left.
1858 1859 yield changegroup.closechunk()
1859 1860
1860 1861 if msng_cl_lst:
1861 1862 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1862 1863
1863 1864 return util.chunkbuffer(gengroup())
1864 1865
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        basenodes: nodes the recipient already has (bases of the range).
        source: opaque tag passed through to the pre/post-outgoing hooks.
        Returns a util.chunkbuffer wrapping the chunk generator.
        """

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        # every changeset descending from basenodes is sent
        nodes = cl.nodesbetween(basenodes, None)[0]
        # revset: changelog revision numbers being sent, used to filter
        # manifest/file revisions by their linkrev below
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes, source)

        def identity(x):
            # the changelog's linknode is the changenode itself
            return x

        def gennodelst(revlog):
            # yield the nodes of revlog whose linkrev is in the outgoing set,
            # in revision (i.e. topological) order
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            # side-effect callback: record every file touched by each
            # outgoing changeset while the changelog group is generated
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            # map a revlog node to the changenode it was introduced by
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            # 1) changelog chunks (filling changedfiles as a side effect)
            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            # 2) manifest chunks
            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            # 3) one group per changed file: filename header, then chunks
            for fname in changedfiles:
                filerevlog = self.file(fname)
                if filerevlog.count() == 0:
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1934 1935
    def addchangegroup(self, source, srctype, url, emptyok=False):
        """add changegroup to repo.

        source: stream of changegroup chunks.
        srctype/url: describe where the group came from (passed to hooks).
        emptyok: tolerate an empty changelog group instead of aborting.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            # linkrev lookup for incoming changesets: the next changeset will
            # occupy revision number cl.count()
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            # manifest/file linknodes arrive as changenodes; map to revs
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        tr = self.transaction()
        try:
            trp = weakref.proxy(tr)
            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            # cor/cnr: highest changelog revision before/after the group
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, trp)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                # the file section is a sequence of (filename, group) pairs,
                # terminated by an empty chunk
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, trp) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            # make changelog see real files again
            cl.finalize(trp)

            newheads = len(self.changelog.heads())
            heads = ""
            if oldheads and newheads != oldheads:
                heads = _(" (%+d heads)") % (newheads - oldheads)

            self.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                             % (changesets, revisions, files, heads))

            if changesets > 0:
                # pretxnchangegroup may still veto the whole transaction
                self.hook('pretxnchangegroup', throw=True,
                          node=hex(self.changelog.node(cor+1)), source=srctype,
                          url=url)

            tr.close()
        finally:
            # break the reference cycle so the transaction destructor runs
            del tr

        if changesets > 0:
            # forcefully update the on-disk branch cache
            self.ui.debug(_("updating the branch cache\n"))
            self.branchtags()
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
2037 2038
2038 2039
2039 2040 def stream_in(self, remote):
2040 2041 fp = remote.stream_out()
2041 2042 l = fp.readline()
2042 2043 try:
2043 2044 resp = int(l)
2044 2045 except ValueError:
2045 2046 raise util.UnexpectedOutput(
2046 2047 _('Unexpected response from remote server:'), l)
2047 2048 if resp == 1:
2048 2049 raise util.Abort(_('operation forbidden by server'))
2049 2050 elif resp == 2:
2050 2051 raise util.Abort(_('locking the remote repository failed'))
2051 2052 elif resp != 0:
2052 2053 raise util.Abort(_('the server sent an unknown error code'))
2053 2054 self.ui.status(_('streaming all changes\n'))
2054 2055 l = fp.readline()
2055 2056 try:
2056 2057 total_files, total_bytes = map(int, l.split(' ', 1))
2057 2058 except ValueError, TypeError:
2058 2059 raise util.UnexpectedOutput(
2059 2060 _('Unexpected response from remote server:'), l)
2060 2061 self.ui.status(_('%d files to transfer, %s of data\n') %
2061 2062 (total_files, util.bytecount(total_bytes)))
2062 2063 start = time.time()
2063 2064 for i in xrange(total_files):
2064 2065 # XXX doesn't support '\n' or '\r' in filenames
2065 2066 l = fp.readline()
2066 2067 try:
2067 2068 name, size = l.split('\0', 1)
2068 2069 size = int(size)
2069 2070 except ValueError, TypeError:
2070 2071 raise util.UnexpectedOutput(
2071 2072 _('Unexpected response from remote server:'), l)
2072 2073 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2073 2074 ofp = self.sopener(name, 'w')
2074 2075 for chunk in util.filechunkiter(fp, limit=size):
2075 2076 ofp.write(chunk)
2076 2077 ofp.close()
2077 2078 elapsed = time.time() - start
2078 2079 if elapsed <= 0:
2079 2080 elapsed = 0.001
2080 2081 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2081 2082 (util.bytecount(total_bytes), elapsed,
2082 2083 util.bytecount(total_bytes / elapsed)))
2083 2084 self.invalidate()
2084 2085 return len(self.heads()) + 1
2085 2086
2086 2087 def clone(self, remote, heads=[], stream=False):
2087 2088 '''clone remote repository.
2088 2089
2089 2090 keyword arguments:
2090 2091 heads: list of revs to clone (forces use of pull)
2091 2092 stream: use streaming clone if possible'''
2092 2093
2093 2094 # now, all clients that can request uncompressed clones can
2094 2095 # read repo formats supported by all servers that can serve
2095 2096 # them.
2096 2097
2097 2098 # if revlog format changes, client will have to check version
2098 2099 # and format flags on "stream" capability, and use
2099 2100 # uncompressed only if compatible.
2100 2101
2101 2102 if stream and not heads and remote.capable('stream'):
2102 2103 return self.stream_in(remote)
2103 2104 return self.pull(remote, heads)
2104 2105
2105 2106 # used to avoid circular references so destructors work
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback performing the (src, dest) renames in *files*.

    The pairs are copied into plain tuples up front so the closure holds
    no reference back to the caller's structures.
    """
    renamefiles = [tuple(t) for t in files]
    def a():
        for src, dest in renamefiles:
            util.rename(src, dest)
    return a
2112 2113
def instance(ui, path, create):
    """Open (or create) the local repository at *path*, dropping a
    leading 'file:' scheme if present."""
    return localrepository(ui, util.drop_scheme('file', path), create)
2115 2116
def islocal(path):
    """Repositories handled by this module are always local."""
    return True
@@ -1,1783 +1,1787 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specfic implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
17 17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 18 import urlparse
19 19
# Python 2.3 compatibility: fall back to the 'sets' module when the
# builtin set types are unavailable.
try:
    set = set
    frozenset = frozenset
except NameError:
    from sets import Set as set, ImmutableSet as frozenset

# Determine the local character encoding, preferring $HGENCODING.
try:
    _encoding = os.environ.get("HGENCODING")
    if sys.platform == 'darwin' and not _encoding:
        # On darwin, getpreferredencoding ignores the locale environment and
        # always returns mac-roman. We override this if the environment is
        # not C (has been customized by the user).
        locale.setlocale(locale.LC_CTYPE, '')
        _encoding = locale.getlocale()[1]
    if not _encoding:
        _encoding = locale.getpreferredencoding() or 'ascii'
except locale.Error:
    _encoding = 'ascii'
# error-handler name used when decoding local strings: strict/replace/ignore
_encodingmode = os.environ.get("HGENCODINGMODE", "strict")
# encoding tried for repository data written before locale support existed
_fallbackencoding = 'ISO-8859-1'
40 40
41 41 def tolocal(s):
42 42 """
43 43 Convert a string from internal UTF-8 to local encoding
44 44
45 45 All internal strings should be UTF-8 but some repos before the
46 46 implementation of locale support may contain latin1 or possibly
47 47 other character sets. We attempt to decode everything strictly
48 48 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
49 49 replace unknown characters.
50 50 """
51 51 for e in ('UTF-8', _fallbackencoding):
52 52 try:
53 53 u = s.decode(e) # attempt strict decoding
54 54 return u.encode(_encoding, "replace")
55 55 except LookupError, k:
56 56 raise Abort(_("%s, please check your locale settings") % k)
57 57 except UnicodeDecodeError:
58 58 pass
59 59 u = s.decode("utf-8", "replace") # last ditch
60 60 return u.encode(_encoding, "replace")
61 61
62 62 def fromlocal(s):
63 63 """
64 64 Convert a string from the local character encoding to UTF-8
65 65
66 66 We attempt to decode strings using the encoding mode set by
67 67 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
68 68 characters will cause an error message. Other modes include
69 69 'replace', which replaces unknown characters with a special
70 70 Unicode character, and 'ignore', which drops the character.
71 71 """
72 72 try:
73 73 return s.decode(_encoding, _encodingmode).encode("utf-8")
74 74 except UnicodeDecodeError, inst:
75 75 sub = s[max(0, inst.start-10):inst.start+10]
76 76 raise Abort("decoding near '%s': %s!" % (sub, inst))
77 77 except LookupError, k:
78 78 raise Abort(_("%s, please check your locale settings") % k)
79 79
def locallen(s):
    """Return the number of characters in local string *s*."""
    decoded = s.decode(_encoding, "replace")
    return len(decoded)
83 83
# used by parsedate
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    # bug fix: was '%I:%M:%SP' -- the AM/PM directive '%p' was mistyped
    # as a literal 'P', so 12-hour times like "11:30:45PM" never parsed
    # (every other %I entry in this table carries a matching %p)
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )
111 111
# broader, under-specified formats (bare year, month) accepted in
# addition to defaultdateformats -- presumably for date ranges, where
# "2006" alone is meaningful; TODO confirm against the parsedate callers
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
118 118
class SignalInterrupt(Exception):
    """Exception raised on SIGTERM and SIGHUP."""
    # raised from signal handlers installed elsewhere -- TODO confirm site
121 121
# differences from SafeConfigParser:
# - case-sensitive keys
# - allows values that are not strings (this means that you may not
#   be able to save the configuration to a file)
class configparser(ConfigParser.SafeConfigParser):
    def optionxform(self, optionstr):
        # identity transform: keep option names case-sensitive
        # (the base class lowercases them)
        return optionstr

    def set(self, section, option, value):
        # skip SafeConfigParser.set, which rejects non-string values
        return ConfigParser.ConfigParser.set(self, section, option, value)

    def _interpolate(self, section, option, rawval, vars):
        # non-string values cannot contain %(...)s references; pass through
        if not isinstance(rawval, basestring):
            return rawval
        return ConfigParser.SafeConfigParser._interpolate(self, section,
                                                          option, rawval, vars)
138 138
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    memo = {}
    if func.func_code.co_argcount == 1:
        # single-argument fast path: skips packing/unpacking an args tuple
        def f(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def f(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return f
157 157
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    (pin, pout) = os.popen2(cmd, 'b')
    def writer():
        try:
            pin.write(s)
            pin.close()
        except IOError, inst:
            # EPIPE just means the child stopped reading early; any other
            # write error is a real failure
            if inst.errno != errno.EPIPE:
                raise

    # we should use select instead on UNIX, but this will work on most
    # systems, including Windows
    # (writing from a second thread avoids deadlock when both pipe
    # buffers fill up)
    w = threading.Thread(target=writer)
    w.start()
    f = pout.read()
    pout.close()
    w.join()
    return f
177 177
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            # on OpenVMS, an odd status means success
            code = 0
        if code: raise Abort(_("command '%s' failed: %s") %
                             (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        # best-effort cleanup: the temp files may never have been created
        try:
            if inname: os.unlink(inname)
        except: pass
        try:
            if outname: os.unlink(outname)
        except: pass
206 206
# maps a command-spec prefix to the filter strategy implementing it;
# specs without a known prefix fall through to pipefilter (see filter())
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
211 211
def filter(s, cmd):
    """Filter string *s* through *cmd*, transforming input to output.

    A recognized prefix in *cmd* (see filtertable) selects the strategy;
    anything else is run as a plain pipe command.
    """
    for prefix, fn in filtertable.iteritems():
        if cmd.startswith(prefix):
            return fn(s, cmd[len(prefix):].lstrip())
    return pipefilter(s, cmd)
218 218
def binary(s):
    """return true if a string is binary data using diff's heuristic"""
    # non-empty and containing a NUL byte in the first 4k
    return bool(s) and '\0' in s[:4096]
224 224
def unique(g):
    """return the uniq elements of iterable g"""
    seen = dict.fromkeys(g)
    return seen.keys()
228 228
class Abort(Exception):
    """Raised if a command needs to print an error and exit."""
    # base class for user-visible failures in this module (see tolocal,
    # fromlocal, tempfilter, canonpath, ...)
231 231
class UnexpectedOutput(Abort):
    """Raised to print an error with part of output and exit."""
    # subclass of Abort: callers catching Abort also catch this
234 234
def always(fn):
    """Constant-true predicate: match every name."""
    return True
def never(fn):
    """Constant-false predicate: match no name."""
    return False
237 237
def expand_glob(pats):
    '''On Windows, expand the implicit globs in a list of patterns'''
    # POSIX shells have already expanded globs; return a fresh copy
    if os.name != 'nt':
        return list(pats)
    expanded = []
    for pat in pats:
        kind, name = patkind(pat, None)
        if kind is None:
            matches = glob.glob(name)
            if matches:
                expanded.extend(matches)
                continue
        # if we couldn't expand the glob, just keep it around
        expanded.append(pat)
    return expanded
253 253
def patkind(name, dflt_pat='glob'):
    """Split a string into an optional pattern kind prefix and the
    actual pattern.

    Returns a list [kind, pattern] for an explicit 'kind:pattern' spec,
    or the tuple (dflt_pat, name) when no known prefix is present.
    """
    for prefix in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'):
        if name.startswith(prefix + ':'):
            return name.split(':', 1)
    return dflt_pat, name
260 260
def globre(pat, head='^', tail='$'):
    """convert a glob pattern into a regexp

    head/tail are prepended/appended to the generated expression
    ('^'/'$' by default, i.e. a full-string match).
    Supports *, **, ?, [...] character classes, and {a,b} alternation.
    """
    i, n = 0, len(pat)
    res = ''
    group = 0  # depth of open {...} alternation groups
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                # '**' crosses directory boundaries; '*' does not
                i += 1
                res += '.*'
            else:
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # scan ahead for the closing ']'; a leading '!' or ']' is
            # part of the class body, not a terminator
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat the '[' literally
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # glob negation becomes regex class negation
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    # a literal leading '^' must be escaped
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            group += 1
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group -= 1
        elif c == ',' and group:
            # ',' separates alternatives only inside {...}
            res += '|'
        elif c == '\\':
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            res += re.escape(c)
    return head + res + tail
312 312
# characters whose presence marks a path component as a glob pattern
_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
314 314
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1: return localpath(n2)
    if os.path.isabs(n1):
        # on a different drive than root, n2 cannot be expressed
        # relative to n1; return it absolute instead
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common prefix of the two paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return os.sep.join((['..'] * len(a)) + b) or '.'
339 339
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root

    The result is relative to *root* and uses '/' separators; raises
    Abort when *myname* lies outside the root.
    """
    if root == os.sep:
        rootsep = os.sep
    elif endswithsep(root):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    audit_path = path_auditor(root)
    if name != rootsep and name.startswith(rootsep):
        # fast path: textual prefix match against the root
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                # reached the filesystem top without meeting root
                break
            name = dirname

        raise Abort('%s not under root' % myname)
388 388
def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
    """Build a (roots, matchfn, anypats) triple with 'glob' as the
    default pattern kind (see _matcher)."""
    return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
391 391
def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
               globbed=False, default=None):
    """Build a matcher for command-line patterns.

    Defaults the pattern kind to 'relpath' and, unless *globbed* says the
    shell already expanded them, expands implicit globs (Windows only).
    """
    default = default or 'relpath'
    if default == 'relpath' and not globbed:
        names = expand_glob(names)
    return _matcher(canonroot, cwd, names, inc, exc, default, src)
398 398
def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    dflt_pat - if a pattern in names has no explicit type, assume this one
    src - where these patterns came from (e.g. .hgignore)

    a pattern is one of:
    'glob:<glob>' - a glob relative to cwd
    're:<regexp>' - a regular expression
    'path:<path>' - a path relative to canonroot
    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
    'relpath:<path>' - a path relative to cwd
    'relre:<regexp>' - a regexp that doesn't have to match the start of a name
    '<something>' - one of the cases above, selected by the dflt_pat argument

    returns:
    a 3-tuple containing
    - list of roots (places where one should start a recursive walk of the fs);
      this often matches the explicit non-pattern names passed in, but also
      includes the initial part of glob: patterns that has no glob characters
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in
    """

    # a common case: no patterns at all
    if not names and not inc and not exc:
        return [], always, False

    def contains_glob(name):
        # does this string contain any glob metacharacter?
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if not name:
            return ''
        if kind == 're':
            return name
        elif kind == 'path':
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            return globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return re.escape(name) + '(?:/|$)'
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        try:
            # one big alternation is fastest when the engine can take it
            pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
            if len(pat) > 20000:
                raise OverflowError()
            return re.compile(pat).match
        except OverflowError:
            # We're using a Python with a tiny regex engine and we
            # made it explode, so we'll divide the pattern list in two
            # until it works
            l = len(pats)
            if l < 2:
                raise
            a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
            return lambda s: a(s) or b(s)
        except re.error:
            # recompile one pattern at a time to report the culprit
            for k, p in pats:
                try:
                    re.compile('(?:%s)' % regex(k, p, tail))
                except re.error:
                    if src:
                        raise Abort("%s: invalid pattern (%s): %s" %
                                    (src, k, p))
                    else:
                        raise Abort("invalid pattern (%s): %s" % (k, p))
            raise Abort("invalid pattern")

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split('/'):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root) or '.'

    def normalizepats(names, default):
        # turn raw patterns into ((kind, name) pairs, walk roots, anypats)
        pats = []
        roots = []
        anypats = False
        for kind, name in [patkind(p, default) for p in names]:
            if kind in ('glob', 'relpath'):
                name = canonpath(canonroot, cwd, name)
            elif kind in ('relglob', 'path'):
                name = normpath(name)

            pats.append((kind, name))

            if kind in ('glob', 're', 'relglob', 'relre'):
                anypats = True

            if kind == 'glob':
                root = globprefix(name)
                roots.append(root)
            elif kind in ('relpath', 'path'):
                roots.append(name or '.')
            elif kind == 'relglob':
                roots.append('.')
        return roots, pats, anypats

    roots, pats, anypats = normalizepats(names, dflt_pat)

    patmatch = matchfn(pats, '$') or always
    incmatch = always
    if inc:
        dummy, inckinds, dummy = normalizepats(inc, 'glob')
        # include/exclude patterns match directories as prefixes too
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        dummy, exckinds, dummy = normalizepats(exc, 'glob')
        excmatch = matchfn(exckinds, '(?:/|$)')

    if not names and inc and not exc:
        # common case: hgignore patterns
        match = incmatch
    else:
        match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)

    return (roots, match, (inc or exc or anypats) and True)
537 537
# cached location of the 'hg' executable; resolved lazily by hgexecutable()
_hgexecutable = None

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        # resolve once and cache via the setter
        set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
    return _hgexecutable
548 548
def set_hgexecutable(path):
    """Record *path* as the location of the 'hg' executable."""
    global _hgexecutable
    _hgexecutable = path
553 553
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    # NOTE: the mutable default `environ={}` is safe here because it is
    # only read, never mutated
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    # remember prior values of every variable we are about to override
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        # extra quoting needed by cmd.exe's command parsing
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        os.environ['HG'] = hgexecutable()
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if sys.platform == 'OpenVMS' and rc & 1:
            # on OpenVMS, an odd status means success
            rc = 0
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                # onerr may be a ui object (warn) or an exception class
                onerr.warn(errmsg + '\n')
            except AttributeError:
                raise onerr(errmsg)
        return rc
    finally:
        # restore environment and working directory even on failure
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
603 603
604 604 # os.path.lexists is not available on python2.3
def lexists(filename):
    """Return True if *filename* exists; symbolic links are not followed.

    Stand-in for os.path.lexists, which is missing on python 2.3."""
    try:
        os.lstat(filename)
        return True
    except:
        return False
612 612
def rename(src, dst):
    """forcibly rename a file, replacing dst if it already exists"""
    try:
        os.rename(src, dst)
    except OSError, err: # FIXME: check err (EEXIST ?)
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we create
        # temporary file, delete it, rename destination to that name,
        # then delete that. then rename is safe to do.
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
        os.close(fd)
        os.unlink(temp)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)
630 630
def unlink(f):
    """Remove file *f*, then prune any directories left empty above it."""
    os.unlink(f)
    parent = os.path.dirname(f)
    try:
        # removedirs walks upward, deleting every directory that is
        # now empty, and stops at the first non-empty one
        os.removedirs(parent)
    except OSError:
        pass
639 639
def copyfile(src, dest):
    "copy a file, preserving mode; symlinks are recreated, not dereferenced"
    if os.path.islink(src):
        # recreate the link itself; remove any stale destination first
        try:
            os.unlink(dest)
        except:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copymode(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))
654 654
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""
    if hardlink is None:
        # default: hardlink only when source and destination live on
        # the same device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            copyfiles(os.path.join(src, name),
                      os.path.join(dst, name), hardlink)
    elif not hardlink:
        shutil.copy(src, dst)
    else:
        try:
            os_link(src, dst)
        except (IOError, OSError):
            # fall back to plain copies for the rest of the tree
            hardlink = False
            shutil.copy(src, dst)
677 677
class path_auditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository'''

    def __init__(self, root):
        # caches of already-validated paths and directory prefixes
        self.audited = set()
        self.auditeddir = set()
        self.root = root

    def __call__(self, path):
        """Raise Abort if *path* (relative to root) is unsafe."""
        if path in self.audited:
            return
        normpath = os.path.normcase(path)
        parts = splitpath(normpath)
        if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
            or os.pardir in parts):
            raise Abort(_("path contains illegal component: %s") % path)
        def check(prefix):
            # reject a prefix that is a symlink or a nested repository
            curpath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(curpath)
            except OSError, err:
                # EINVAL can be raised as invalid path syntax under win32.
                # They must be ignored for patterns can be checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise Abort(_('path %r traverses symbolic link %r') %
                                (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(curpath, '.hg'))):
                    raise Abort(_('path %r is inside repo %r') %
                                (path, prefix))
        # walk the directory prefixes from longest to shortest,
        # stopping at the first one already known to be good
        parts.pop()
        prefixes = []
        for n in range(len(parts)):
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()

        self.audited.add(path)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(prefixes)
732 732
733 733 def _makelock_file(info, pathname):
734 734 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
735 735 os.write(ld, info)
736 736 os.close(ld)
737 737
def _readlock_file(pathname):
    # lock files are tiny; read the whole contents in one go
    return posixfile(pathname).read()

def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.lstat(pathname).st_nlink

if hasattr(os, 'link'):
    os_link = os.link
else:
    # platform without hardlink support: raise a plain OSError so
    # callers can fall back to copying (see copyfiles)
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))
750 750
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fileno = fp.fileno
    except AttributeError:
        # no file descriptor available: stat by name instead
        return os.stat(fp.name)
    return os.fstat(fileno())
757 757
# on posix, the builtin file type already has posix semantics;
# the win32 import below may replace this with posixfile_nt
posixfile = file

def openhardlinks():
    '''return true if it is safe to hold open file handles to hardlinks'''
    return True

# replaced by platform code (win32) when getpass cannot be used
getuser_fallback = None
765 765
def getuser():
    '''return name of current user'''
    try:
        return getpass.getuser()
    except ImportError:
        # import of pwd fails on windows - try the win32 fallback below
        pass
    if getuser_fallback:
        return getuser_fallback()
    # raised if win32api not available
    raise Abort(_('user name not available - set USERNAME '
                  'environment variable'))
777 777
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    try:
        import pwd
    except ImportError:
        # no passwd database (e.g. windows)
        return None
    if uid is None:
        uid = os.getuid()
    try:
        return pwd.getpwuid(uid)[0]
    except KeyError:
        # unknown uid: fall back to its numeric form
        return str(uid)
792 792
def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    try:
        import grp
    except ImportError:
        # no group database (e.g. windows)
        return None
    if gid is None:
        gid = os.getgid()
    try:
        return grp.getgrgid(gid)[0]
    except KeyError:
        # unknown gid: fall back to its numeric form
        return str(gid)
807 807
808 808 # File system features
809 809
def checkfolding(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st1 = os.stat(path)
    dirname, base = os.path.split(path)
    # build the same name with the opposite case
    other = os.path.join(dirname, base.upper())
    if other == path:
        other = os.path.join(dirname, base.lower())
    try:
        st2 = os.stat(other)
    except:
        # the folded name does not exist: case-sensitive filesystem
        return True
    return st1 != st2
829 829
def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        fh, fn = tempfile.mkstemp("", "", path)
        try:
            os.close(fh)
            # probe: flip the exec bits and see whether the change sticks
            m = os.stat(fn).st_mode & 0777
            new_file_has_exec = m & EXECFLAGS
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    return not (new_file_has_exec or exec_flags_cannot_flip)
856 856
def execfunc(path, fallback):
    '''return an is_exec() function with default to fallback'''
    if not checkexec(path):
        # filesystem cannot store the exec bit: use the caller's fallback
        return fallback
    def f(x):
        return is_exec(os.path.join(path, x))
    return f
862 862
def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy here: symlink creation will fail if the
    # name already exists
    probe = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", probe)
        os.unlink(probe)
    except (OSError, AttributeError):
        return False
    return True
874 874
def linkfunc(path, fallback):
    '''return an is_link() function with default to fallback'''
    if not checklink(path):
        # filesystem cannot hold symlinks: use the caller's fallback
        return fallback
    def f(x):
        return os.path.islink(os.path.join(path, x))
    return f
880 880
# capture the process umask without changing it (set, read, restore)
_umask = os.umask(0)
os.umask(_umask)

def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    return os.name == 'nt'
887 887
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    # os.altsep is None on posix; short-circuit keeps that behaviour
    return os.altsep and path.endswith(os.altsep)
891 891
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)
899 899
def gui():
    '''Are we running in a GUI?'''
    if os.name in ("nt", "mac"):
        return True
    # on other platforms assume a GUI when an X display is available
    return os.environ.get("DISPLAY")
903 903
def lookup_reg(key, name=None, scope=None):
    # stub: replaced by a registry-reading version via util_win32 on windows
    return None
906 906
# Platform specific variants
if os.name == 'nt':
    import msvcrt
    # the null device on windows
    nulldev = 'NUL:'
911 911
    class winstdout:
        '''stdout on windows misbehaves if sent through a pipe'''

        def __init__(self, fp):
            self.fp = fp

        def __getattr__(self, key):
            # delegate everything else to the wrapped file
            return getattr(self.fp, key)

        def close(self):
            try:
                self.fp.close()
            except: pass

        def write(self, s):
            try:
                # This is workaround for "Not enough space" error on
                # writing large size of data to console.
                limit = 16000
                l = len(s)
                start = 0
                while start < l:
                    end = start + limit
                    self.fp.write(s[start:end])
                    start = end
            except IOError, inst:
                # NOTE(review): windows appears to report a broken pipe as
                # IOError with errno 0 -- confirm; anything else is re-raised
                if inst.errno != 0: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')

        def flush(self):
            try:
                return self.fp.flush()
            except IOError, inst:
                if inst.errno != errno.EINVAL: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')

    sys.stdout = winstdout(sys.stdout)
951 951
952 952 def _is_win_9x():
953 953 '''return true if run on windows 95, 98 or me.'''
954 954 try:
955 955 return sys.getwindowsversion()[3] == 1
956 956 except AttributeError:
957 957 return 'command' in os.environ.get('comspec', '')
958 958
def openhardlinks():
    '''return true if it is safe to hold open file handles to hardlinks'''
    # Bug fix: _is_win_9x was not being called -- a function object is
    # always truthy, so `not _is_win_9x` was always False and this
    # function unconditionally returned False.  Also, "win32api" can
    # never appear in locals() of this function; the name is bound at
    # module level by `from util_win32 import *`, so globals() is the
    # right namespace to check.
    return not _is_win_9x() and "win32api" in globals()
961 961
def system_rcpath():
    # the registry-based lookup needs the win32 helpers from util_win32;
    # fall back to a fixed path when they are unavailable
    try:
        rcpath = system_rcpath_win32()
    except:
        rcpath = [r'c:\mercurial\mercurial.ini']
    return rcpath
967 967
def user_rcpath():
    '''return os-specific hgrc search path to the user dir'''
    try:
        rcpath = [user_rcpath_win32()]
    except:
        # win32 helpers unavailable: use the home directory
        rcpath = [os.path.join(os.path.expanduser('~'), 'mercurial.ini')]
    profile = os.environ.get('USERPROFILE')
    if profile:
        rcpath.append(os.path.join(profile, 'mercurial.ini'))
    return rcpath
979 979
def parse_patch_output(output_line):
    """parses the output produced by patch and returns the file name"""
    # strip the fixed "patching file " prefix (14 characters)
    pf = output_line[14:]
    if pf[0] == '`':
        # patch quotes odd names like `foo'
        pf = pf[1:-1]
    return pf
986 986
def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh or Plink'''
    # Plink spells the port flag -P; openssh uses -p
    if 'plink' in sshcmd.lower():
        pflag = '-P'
    else:
        pflag = '-p'
    if user:
        args = "%s@%s" % (user, host)
    else:
        args = host
    if port:
        return "%s %s %s" % (args, pflag, port)
    return args
992 992
    def testpid(pid):
        '''return False if pid dead, True if running or not known'''
        # no cheap way to probe a pid on windows: assume alive
        return True

    def set_flags(f, flags):
        # windows has no exec bit or symlink mode to set
        pass

    def set_binary(fd):
        # stop the C runtime from translating \n <-> \r\n on this fd
        msvcrt.setmode(fd.fileno(), os.O_BINARY)

    def pconvert(path):
        # normalize directory separators to forward slashes
        return '/'.join(splitpath(path))

    def localpath(path):
        # convert forward slashes to windows-style backslashes
        return path.replace('/', '\\')

    def normpath(path):
        return pconvert(os.path.normpath(path))

    # symlink-based locks are unavailable: always use lock files
    makelock = _makelock_file
    readlock = _readlock_file

    def samestat(s1, s2):
        # st_ino/st_dev are not reliable on windows, so never claim a match
        return False
1017 1017
1018 1018 # A sequence of backslashes is special iff it precedes a double quote:
1019 1019 # - if there's an even number of backslashes, the double quote is not
1020 1020 # quoted (i.e. it ends the quoted region)
1021 1021 # - if there's an odd number of backslashes, the double quote is quoted
1022 1022 # - in both cases, every pair of backslashes is unquoted into a single
1023 1023 # backslash
1024 1024 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1025 1025 # So, to quote a string, we must surround it in double quotes, double
1026 1026 # the number of backslashes that preceed double quotes and add another
1027 1027 # backslash before every double quote (being careful with the double
1028 1028 # quote we've appended to the end)
_quotere = None
def shellquote(s):
    """Quote *s* for the windows shell: double every run of backslashes
    that precedes a double quote (or the end of the string), escape the
    quote itself, and wrap the whole thing in double quotes."""
    global _quotere
    if _quotere is None:
        # compiled lazily and cached at module level
        _quotere = re.compile(r'(\\*)("|\\$)')
    escaped = _quotere.sub(r'\1\1\\\2', s)
    return '"%s"' % escaped
1035 1035
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    # The extra quotes are needed because popen* runs the command
    # through the current COMSPEC, and cmd.exe strips one level of
    # enclosing quotes.
    return '"%s"' % cmd
1041 1041
    def popen(command):
        # Work around "popen spawned process may not write to stdout
        # under windows"
        # http://bugs.python.org/issue1366
        command += " 2> %s" % nulldev
        return os.popen(quotecommand(command))

    def explain_exit(code):
        # on windows os.system returns the raw status
        return _("exited with status %d") % code, code

    # if you change this stub into a real check, please try to implement the
    # username and groupname functions above, too.
    def isowner(fp, st=None):
        return True
1056 1056
def find_in_path(name, path, default=None):
    '''find name in search path. path can be string (will be split
    with os.pathsep), or iterable thing that returns strings. if name
    found, return path to name. else return default. name is looked up
    using cmd.exe rules, using PATHEXT.'''
    if isinstance(path, str):
        path = path.split(os.pathsep)

    exts = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    exts = exts.lower().split(os.pathsep)
    # a name that already carries an executable extension is tried as-is
    isexec = os.path.splitext(name)[1].lower() in exts

    for p in path:
        candidate = os.path.join(p, name)
        if isexec and os.path.exists(candidate):
            return candidate
        for ext in exts:
            if os.path.exists(candidate + ext):
                return candidate + ext
    return default
1080 1080
def set_signal_handler():
    # the win32 handler only exists when util_win32 imported cleanly
    try:
        set_signal_handler_win32()
    except NameError:
        pass
1086 1086
    try:
        # override functions with win32 versions if possible
        from util_win32 import *
        if not _is_win_9x():
            posixfile = posixfile_nt
    except ImportError:
        # pywin32 not installed: keep the pure-python fallbacks
        pass

else:
    # the null device on posix systems
    nulldev = '/dev/null'
1097 1097
def rcfiles(path):
    """Return hgrc plus any *.rc files under path/hgrc.d."""
    rcs = [os.path.join(path, 'hgrc')]
    rcdir = os.path.join(path, 'hgrc.d')
    try:
        entries = osutil.listdir(rcdir)
    except OSError:
        # no hgrc.d directory
        return rcs
    rcs.extend([os.path.join(rcdir, f)
                for f, kind in entries
                if f.endswith(".rc")])
    return rcs
1108 1108
def system_rcpath():
    path = []
    # old mod_python does not set sys.argv
    argv = getattr(sys, 'argv', [])
    if len(argv) > 0:
        # config files installed next to the executable
        path.extend(rcfiles(os.path.dirname(argv[0]) +
                            '/../etc/mercurial'))
    path.extend(rcfiles('/etc/mercurial'))
    return path
1117 1117
def user_rcpath():
    '''return the per-user hgrc search path'''
    rc = os.path.expanduser('~/.hgrc')
    return [rc]
1120 1120
def parse_patch_output(output_line):
    """parses the output produced by patch and returns the file name"""
    # strip the fixed "patching file " prefix (14 characters)
    pf = output_line[14:]
    if os.sys.platform == 'OpenVMS':
        if pf[0] == '`':
            pf = pf[1:-1] # Remove the quotes
        return pf
    # GNU patch single-quotes names that contain spaces
    if pf.startswith("'") and pf.endswith("'") and " " in pf:
        pf = pf[1:-1]
    return pf
1131 1131
def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh'''
    if user:
        args = "%s@%s" % (user, host)
    else:
        args = host
    if port:
        return "%s -p %s" % (args, port)
    return args
1136 1136
    def is_exec(f):
        """check whether a file is executable (owner exec bit)"""
        return (os.lstat(f).st_mode & 0100 != 0)

    def set_flags(f, flags):
        """apply flag string ("x" exec, "l" symlink) to file f,
        converting between regular file and symlink as needed"""
        s = os.lstat(f).st_mode
        x = "x" in flags
        l = "l" in flags
        if l:
            if not stat.S_ISLNK(s):
                # switch file to link
                data = file(f).read()
                os.unlink(f)
                os.symlink(data, f)
            # no chmod needed at this point
            return
        if stat.S_ISLNK(s):
            # switch link to file
            data = os.readlink(f)
            os.unlink(f)
            file(f, "w").write(data)
            s = 0666 & ~_umask # avoid restatting for chmod

        sx = s & 0100
        if x and not sx:
            # Turn on +x for every +r bit when making a file executable
            # and obey umask.
            os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
        elif not x and sx:
            # Turn off all +x bits
            os.chmod(f, s & 0666)

    def set_binary(fd):
        # posix makes no text/binary distinction
        pass
1171 1171
    def pconvert(path):
        # posix paths already use forward slashes
        return path

    def localpath(path):
        return path

    normpath = os.path.normpath
    samestat = os.path.samestat
1180 1180
    def makelock(info, pathname):
        """create a lock: a symlink whose target encodes the lock info,
        falling back to a regular lock file where symlinks don't work"""
        try:
            os.symlink(info, pathname)
        except OSError, why:
            if why.errno == errno.EEXIST:
                # lock already held: propagate
                raise
            else:
                _makelock_file(info, pathname)

    def readlock(pathname):
        try:
            return os.readlink(pathname)
        except OSError, why:
            if why.errno in (errno.EINVAL, errno.ENOSYS):
                # not a symlink (or no symlink support): plain lock file
                return _readlock_file(pathname)
            else:
                raise
1198 1198
def shellquote(s):
    if os.sys.platform == 'OpenVMS':
        # DCL quoting uses double quotes
        return '"%s"' % s
    # close the single-quoted region, emit an escaped quote, reopen it
    quoted = s.replace("'", "'\\''")
    return "'%s'" % quoted
1204 1204
    def quotecommand(cmd):
        # posix shells need no extra quoting around the whole command
        return cmd

    def popen(command):
        return os.popen(command)

    def testpid(pid):
        '''return False if pid dead, True if running or not sure'''
        if os.sys.platform == 'OpenVMS':
            return True
        try:
            # signal 0 performs permission/existence checks only
            os.kill(pid, 0)
            return True
        except OSError, inst:
            # EPERM means the pid exists but belongs to someone else
            return inst.errno != errno.ESRCH
1220 1220
def explain_exit(code):
    """return a 2-tuple (desc, code) describing a process's status"""
    if os.WIFEXITED(code):
        status = os.WEXITSTATUS(code)
        return _("exited with status %d") % status, status
    if os.WIFSIGNALED(code):
        signum = os.WTERMSIG(code)
        return _("killed by signal %d") % signum, signum
    if os.WIFSTOPPED(code):
        signum = os.WSTOPSIG(code)
        return _("stopped by signal %d") % signum, signum
    raise ValueError(_("invalid exit code"))
1233 1233
def isowner(fp, st=None):
    """Return True if the file object f belongs to the current user.

    The return value of a util.fstat(f) may be passed as the st argument.
    """
    if st is None:
        st = fstat(fp)
    return os.getuid() == st.st_uid
1242 1242
def find_in_path(name, path, default=None):
    '''find name in search path. path can be string (will be split
    with os.pathsep), or iterable thing that returns strings. if name
    found, return path to name. else return default.'''
    if isinstance(path, str):
        path = path.split(os.pathsep)
    for directory in path:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return default
1254 1254
    def set_signal_handler():
        # nothing to do on posix; windows installs a console handler
        pass
1257 1257
def find_exe(name, default=None):
    '''find path of an executable.
    if name contains a path component, return it as is. otherwise,
    use normal executable search path.'''
    if os.sep in name or sys.platform == 'OpenVMS':
        # don't check the executable bit: if the file isn't executable,
        # whoever tries to actually run it will get a much more useful
        # error message.
        return name
    return find_in_path(name, os.environ.get('PATH', ''), default=default)
1269 1269
def _buildencodefun():
    """build the (encode, decode) pair used for store filenames:
    windows-reserved and non-ascii bytes become ~XX escapes, and
    uppercase letters (plus '_') become '_' followed by lowercase"""
    e = '_'
    win_reserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
    for x in (range(32) + range(126, 256) + win_reserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    # dmap is the exact inverse of cmap
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            # encoded tokens are 1 to 3 characters long; try each length
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i+l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in s]),
            lambda s: "".join(list(decode(s))))

encodefilename, decodefilename = _buildencodefun()
1297 1297
def encodedopener(openerfn, fn):
    """Wrap *openerfn* so every path is first translated through *fn*."""
    def wrapped(path, *args, **kw):
        return openerfn(fn(path), *args, **kw)
    return wrapped
1302 1302
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    # create the temp file in the same directory so a later rename is atomic
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want.  If the original file already exists, just copy
    # its mode.  Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = createmode
        if st_mode is None:
            st_mode = ~_umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # source vanished: an empty temp file is the right result
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        # don't leave a half-written temp file behind
        try: os.unlink(temp)
        except: pass
        raise
    return temp
1350 1350
class atomictempfile(posixfile):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file.  When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode, createmode):
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        posixfile.__init__(self, self.temp, mode)

    def rename(self):
        """make the pending writes visible under the original name"""
        if not self.closed:
            posixfile.close(self)
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        # discarded without rename(): drop the temporary copy
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            posixfile.close(self)
1375 1375
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            # already there: nothing to do
            return
        if err.errno != errno.ENOENT:
            raise
    # parent is missing: create it first, then retry
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)
1391 1391
class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always
        # mode applied to newly created files; None means leave the default
        self.createmode = None

    def __getattr__(self, name):
        # _can_symlink is probed lazily on first use and then cached
        if name == '_can_symlink':
            self._can_symlink = checklink(self.base)
            return self._can_symlink
        raise AttributeError(name)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        """open base-relative *path* after auditing it"""
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode[0] != "r":
            try:
                nlink = nlinks(f)
            except OSError:
                # file does not exist yet; make sure its directory does
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                # break up the hardlink before writing (copy-on-write)
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # no symlink support: store the link target in a regular file
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)
1465 1465
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                # iterator exhausted; mark it so we never pull again
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            # str() because buf may currently be a buffer object
            s, self.buf = str(self.buf), ''
        else:
            # keep the tail as a zero-copy buffer slice
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
1499 1499
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1516 1516
def makedate():
    """Return the current local time as a (unixtime, offset) tuple."""
    lt = time.localtime()
    # pick the offset that matches whether DST is in effect (lt[8])
    if lt[8] == 1 and time.daylight:
        offset = time.altzone
    else:
        offset = time.timezone
    return time.mktime(lt), offset
1524 1524
def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True, timezone_format=" %+03d%02d"):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    if date:
        t, tz = date
    else:
        t, tz = makedate()
    # shift to local wall-clock time, then format as if it were UTC
    result = time.strftime(format, time.gmtime(float(t) - tz))
    if timezone:
        result += timezone_format % (-tz / 3600, ((-tz % 3600) / 60))
    return result
1535 1535
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8601 date."""
    return datestr(date, format='%Y-%m-%d', timezone=False)
1539 1539
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        # recognise a trailing "+HHMM"/"-HHMM" numeric offset, or GMT/UTC;
        # return the offset in seconds, or None if none is present
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            tz = int(tz)
            offset = - 3600 * (tz / 100) - 60 * (tz % 100)
            return offset
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        # an explicit timezone was given: strip it before strptime
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone: derive the offset from mktime vs timegm
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1574 1574
def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed; pass through unchanged
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        # fast path: the internal "unixtime offset" representation
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults for any date fields the formats may omit
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                elif part[0] in "dm":
                    defaults[part] = "1"
                else:
                    # default year/month-style fields to "now"
                    defaults[part] = datestr(now, "%" + part[0], False)

        for fmt in formats:  # renamed from "format": don't shadow the builtin
            try:
                when, offset = strdate(date, fmt, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1618 1622
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        # earliest timestamp the (possibly partial) date can denote
        return parsedate(date, extendeddateformats)[0]

    def upper(date):
        # latest timestamp: fill missing fields with their maxima; try
        # month lengths from 31 downwards so e.g. a bare "feb" resolves
        # to the last valid day of that month
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            # was a bare "except:": that also swallowed SystemExit and
            # KeyboardInterrupt. Exception still covers the expected
            # parse failures (ValueError/OverflowError/Abort).
            except Exception:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # "-N": everything from N days ago onwards
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # inclusive range between two (partial) dates
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # a single date matches the whole span it denotes
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1666 1670
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop everything from the '@' on, then keep what follows a '<'
    at = user.find('@')
    if at != -1:
        user = user[:at]
    lt = user.find('<')
    if lt != -1:
        user = user[lt + 1:]
    # finally truncate at the first space or dot
    for sep in (' ', '.'):
        idx = user.find(sep)
        if idx != -1:
            user = user[:idx]
    return user
1682 1686
def email(author):
    '''get email of author.'''
    # take the text between '<' and '>'; with neither present this
    # degenerates to the whole string (find('<')+1 == 0, end None)
    end = author.find('>')
    if end == -1:
        end = None
    start = author.find('<') + 1
    return author[start:end]
1688 1692
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) > maxlength:
        # reserve three characters for the trailing dots
        return "%s..." % (text[:maxlength-3])
    return text
1695 1699
def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        # only errors on the root path itself are fatal; anything deeper
        # (unreadable subdirectory etc.) is silently skipped by os.walk
        if err.filename == path:
            raise err

    for root, subdirs, names in os.walk(path, onerror=errhandler):
        if '.hg' in subdirs:
            yield root
            # a repository found: do not descend into it
            subdirs[:] = []
1708 1712
1709 1713 _rcpath = None
1710 1714
def os_rcpath():
    '''return default os-specific hgrc search path'''
    # the platform hooks system_rcpath()/user_rcpath() supply the raw
    # candidate paths; normalize each before returning
    candidates = list(system_rcpath())
    candidates.extend(user_rcpath())
    return [os.path.normpath(entry) for entry in candidates]
1717 1721
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        # computed once per process, cached in the module global
        return _rcpath
    env = os.environ.get('HGRCPATH')
    if env is None:
        _rcpath = os_rcpath()
        return _rcpath
    _rcpath = []
    for entry in env.split(os.pathsep):
        if not entry:
            continue
        if os.path.isdir(entry):
            # a directory contributes every *.rc file it contains
            for name, kind in osutil.listdir(entry):
                if name.endswith('.rc'):
                    _rcpath.append(os.path.join(entry, name))
        else:
            _rcpath.append(entry)
    return _rcpath
1739 1743
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    # (threshold, divisor, format): pick the first unit for which the
    # value has at least `threshold` whole units, choosing the precision
    # that yields roughly three significant digits
    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
    )

    for threshold, divisor, fmt in units:
        if nbytes >= divisor * threshold:
            return fmt % (nbytes / float(divisor))
    # smaller than 1 KB (or negative): plain byte count
    return units[-1][2] % nbytes
1760 1764
def drop_scheme(scheme, path):
    """Strip a leading 'scheme:' (and an optional following '//') from path.

    Paths that do not start with the scheme are returned unchanged.
    """
    prefix = scheme + ':'
    if not path.startswith(prefix):
        return path
    remainder = path[len(prefix):]
    if remainder.startswith('//'):
        remainder = remainder[2:]
    return remainder
1768 1772
def uirepr(s):
    # repr() doubles every backslash, which makes Windows path output
    # unreadable; collapse them back to single backslashes
    text = repr(s)
    return text.replace('\\\\', '\\')
1772 1776
def hidepassword(url):
    '''hide user credential in a url string'''
    parts = urlparse.urlparse(url)
    # mask only the password portion of "user:password@host"
    netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', parts[1])
    return urlparse.urlunparse(
        (parts[0], netloc, parts[2], parts[3], parts[4], parts[5]))
1778 1782
def removeauth(url):
    '''remove all authentication information from a url string'''
    parts = urlparse.urlparse(url)
    netloc = parts[1]
    # keep everything after the first '@'; find() returning -1 makes
    # this a no-op slice when no credentials are present
    netloc = netloc[netloc.find('@') + 1:]
    return urlparse.urlunparse(
        (parts[0], netloc, parts[2], parts[3], parts[4], parts[5]))
@@ -1,100 +1,94 b''
1 1 % commit date test
2 2 transaction abort!
3 3 rollback completed
4 4 abort: empty commit message
5 transaction abort!
6 rollback completed
7 5 abort: impossible time zone offset: 4444444
8 transaction abort!
9 rollback completed
10 6 abort: invalid date: '1\t15.1'
11 transaction abort!
12 rollback completed
13 7 abort: invalid date: 'foo bar'
14 nothing changed
8 abort: date exceeds 32 bits: 111111111111
15 9 % commit added file that has been deleted
16 10 nothing changed
17 11 abort: file bar not found!
18 12 adding dir/file
19 13 dir/file
20 14 adding dir.file
21 15 abort: no match under directory dir!
22 16 abort: no match under directory .!
23 17 abort: no match under directory ../dir2!
24 18 dir/file
25 19 does-not-exist: No such file or directory
26 20 abort: file does-not-exist not found!
27 21 abort: file baz not tracked!
28 22 abort: file quux not tracked!
29 23 dir/file
30 24 % partial subdir commit test
31 25 adding bar/bar
32 26 adding foo/foo
33 27 % subdir log 1
34 28 changeset: 0:6ef3cb06bb80
35 29 user: test
36 30 date: Mon Jan 12 13:46:40 1970 +0000
37 31 files: foo/foo
38 32 description:
39 33 commit-subdir-1
40 34
41 35
42 36 % subdir log 2
43 37 changeset: 1:f2e51572cf5a
44 38 tag: tip
45 39 user: test
46 40 date: Mon Jan 12 13:46:41 1970 +0000
47 41 files: bar/bar
48 42 description:
49 43 commit-subdir-2
50 44
51 45
52 46 % full log
53 47 changeset: 1:f2e51572cf5a
54 48 tag: tip
55 49 user: test
56 50 date: Mon Jan 12 13:46:41 1970 +0000
57 51 files: bar/bar
58 52 description:
59 53 commit-subdir-2
60 54
61 55
62 56 changeset: 0:6ef3cb06bb80
63 57 user: test
64 58 date: Mon Jan 12 13:46:40 1970 +0000
65 59 files: foo/foo
66 60 description:
67 61 commit-subdir-1
68 62
69 63
70 64 % dot and subdir commit test
71 65 % full log
72 66 changeset: 1:d9180e04fa8a
73 67 tag: tip
74 68 user: test
75 69 date: Sat Jan 24 03:33:20 1970 +0000
76 70 files: foo/plain-file
77 71 description:
78 72 commit-foo-dot
79 73
80 74
81 75 changeset: 0:80b572aaf098
82 76 user: test
83 77 date: Mon Jan 12 13:46:40 1970 +0000
84 78 files: foo/plain-file
85 79 description:
86 80 commit-foo-subdir
87 81
88 82
89 83 % subdir log
90 84 changeset: 1:d9180e04fa8a
91 85 tag: tip
92 86 user: test
93 87 date: Sat Jan 24 03:33:20 1970 +0000
94 88 summary: commit-foo-dot
95 89
96 90 changeset: 0:80b572aaf098
97 91 user: test
98 92 date: Mon Jan 12 13:46:40 1970 +0000
99 93 summary: commit-foo-subdir
100 94
@@ -1,23 +1,17 b''
1 1 reverting a
2 2 changeset 3:107ce1ee2b43 backs out changeset 1:25a1420a55f8
3 3 merging with changeset 2:e6c3abc120e7
4 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
5 5 (branch merge, don't forget to commit)
6 transaction abort!
7 rollback completed
8 6 abort: invalid date: 'should fail'
9 transaction abort!
10 rollback completed
11 7 abort: date exceeds 32 bits: 100000000000000000
12 transaction abort!
13 rollback completed
14 8 abort: impossible time zone offset: 1400000
15 9 Sun Jan 15 13:30:00 2006 +0500
16 10 Sun Jan 15 13:30:00 2006 -0800
17 11 Sat Jul 15 13:30:00 2006 +0500
18 12 Sat Jul 15 13:30:00 2006 -0700
19 13 Sun Jun 11 00:26:40 2006 -0400
20 14 Sat Apr 15 13:30:00 2006 +0200
21 15 Sat Apr 15 13:30:00 2006 +0000
22 16 Wed Feb 01 13:00:30 2006 -0500
23 17 Wed Feb 01 13:00:30 2006 +0000
General Comments 0
You need to be logged in to leave comments. Login now