##// END OF EJS Templates
Make repo locks recursive, eliminate all passing of lock/wlock
Matt Mackall -
r4917:126f527b default
parent child Browse files
Show More
@@ -1,98 +1,98 b''
1 1 # fetch.py - pull and merge remote changes
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 from mercurial.node import *
10 10 from mercurial import commands, cmdutil, hg, node, util
11 11
12 12 def fetch(ui, repo, source='default', **opts):
13 13 '''Pull changes from a remote repository, merge new changes if needed.
14 14
15 15 This finds all changes from the repository at the specified path
16 16 or URL and adds them to the local repository.
17 17
18 18 If the pulled changes add a new head, the head is automatically
19 19 merged, and the result of the merge is committed. Otherwise, the
20 20 working directory is updated.'''
21 21
22 def postincoming(other, modheads, lock, wlock):
22 def postincoming(other, modheads):
23 23 if modheads == 0:
24 24 return 0
25 25 if modheads == 1:
26 return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
26 return hg.clean(repo, repo.changelog.tip())
27 27 newheads = repo.heads(parent)
28 28 newchildren = [n for n in repo.heads(parent) if n != parent]
29 29 newparent = parent
30 30 if newchildren:
31 31 newparent = newchildren[0]
32 hg.clean(repo, newparent, wlock=wlock)
32 hg.clean(repo, newparent)
33 33 newheads = [n for n in repo.heads() if n != newparent]
34 34 err = False
35 35 if newheads:
36 36 ui.status(_('merging with new head %d:%s\n') %
37 37 (repo.changelog.rev(newheads[0]), short(newheads[0])))
38 err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
38 err = hg.merge(repo, newheads[0], remind=False)
39 39 if not err and len(newheads) > 1:
40 40 ui.status(_('not merging with %d other new heads '
41 41 '(use "hg heads" and "hg merge" to merge them)') %
42 42 (len(newheads) - 1))
43 43 if not err:
44 mod, add, rem = repo.status(wlock=wlock)[:3]
44 mod, add, rem = repo.status()[:3]
45 45 message = (cmdutil.logmessage(opts) or
46 46 (_('Automated merge with %s') % other.url()))
47 47 n = repo.commit(mod + add + rem, message,
48 opts['user'], opts['date'], lock=lock, wlock=wlock,
48 opts['user'], opts['date'],
49 49 force_editor=opts.get('force_editor'))
50 50 ui.status(_('new changeset %d:%s merges remote changes '
51 51 'with local\n') % (repo.changelog.rev(n),
52 52 short(n)))
53 def pull(lock, wlock):
53 def pull():
54 54 cmdutil.setremoteconfig(ui, opts)
55 55
56 56 other = hg.repository(ui, ui.expandpath(source))
57 57 ui.status(_('pulling from %s\n') % ui.expandpath(source))
58 58 revs = None
59 59 if opts['rev'] and not other.local():
60 60 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
61 61 elif opts['rev']:
62 62 revs = [other.lookup(rev) for rev in opts['rev']]
63 modheads = repo.pull(other, heads=revs, lock=lock)
64 return postincoming(other, modheads, lock, wlock)
63 modheads = repo.pull(other, heads=revs)
64 return postincoming(other, modheads)
65 65
66 66 parent, p2 = repo.dirstate.parents()
67 67 if parent != repo.changelog.tip():
68 68 raise util.Abort(_('working dir not at tip '
69 69 '(use "hg update" to check out tip)'))
70 70 if p2 != nullid:
71 71 raise util.Abort(_('outstanding uncommitted merge'))
72 72 wlock = lock = None
73 73 try:
74 74 wlock = repo.wlock()
75 75 lock = repo.lock()
76 mod, add, rem = repo.status(wlock=wlock)[:3]
76 mod, add, rem = repo.status()[:3]
77 77 if mod or add or rem:
78 78 raise util.Abort(_('outstanding uncommitted changes'))
79 79 if len(repo.heads()) > 1:
80 80 raise util.Abort(_('multiple heads in this repository '
81 81 '(use "hg heads" and "hg merge" to merge)'))
82 return pull(lock, wlock)
82 return pull()
83 83 finally:
84 84 del lock, wlock
85 85
86 86 cmdtable = {
87 87 'fetch':
88 88 (fetch,
89 89 [('e', 'ssh', '', _('specify ssh command to use')),
90 90 ('m', 'message', '', _('use <text> as commit message')),
91 91 ('l', 'logfile', '', _('read the commit message from <file>')),
92 92 ('d', 'date', '', _('record datecode as commit date')),
93 93 ('u', 'user', '', _('record user as committer')),
94 94 ('r', 'rev', [], _('a specific revision you would like to pull')),
95 95 ('f', 'force-editor', None, _('edit commit message')),
96 96 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
97 97 _('hg fetch [SOURCE]')),
98 98 }
@@ -1,2262 +1,2249 b''
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 34 from mercurial import repair
35 35 import os, sys, re, errno
36 36
37 37 commands.norepo += " qclone qversion"
38 38
39 39 # Patch names look like unix-file names.
40 40 # They must be joinable with queue directory and result in the patch path.
41 41 normname = util.normpath
42 42
43 43 class statusentry:
44 44 def __init__(self, rev, name=None):
45 45 if not name:
46 46 fields = rev.split(':', 1)
47 47 if len(fields) == 2:
48 48 self.rev, self.name = fields
49 49 else:
50 50 self.rev, self.name = None, None
51 51 else:
52 52 self.rev, self.name = rev, name
53 53
54 54 def __str__(self):
55 55 return self.rev + ':' + self.name
56 56
57 57 class queue:
58 58 def __init__(self, ui, path, patchdir=None):
59 59 self.basepath = path
60 60 self.path = patchdir or os.path.join(path, "patches")
61 61 self.opener = util.opener(self.path)
62 62 self.ui = ui
63 63 self.applied = []
64 64 self.full_series = []
65 65 self.applied_dirty = 0
66 66 self.series_dirty = 0
67 67 self.series_path = "series"
68 68 self.status_path = "status"
69 69 self.guards_path = "guards"
70 70 self.active_guards = None
71 71 self.guards_dirty = False
72 72 self._diffopts = None
73 73
74 74 if os.path.exists(self.join(self.series_path)):
75 75 self.full_series = self.opener(self.series_path).read().splitlines()
76 76 self.parse_series()
77 77
78 78 if os.path.exists(self.join(self.status_path)):
79 79 lines = self.opener(self.status_path).read().splitlines()
80 80 self.applied = [statusentry(l) for l in lines]
81 81
82 82 def diffopts(self):
83 83 if self._diffopts is None:
84 84 self._diffopts = patch.diffopts(self.ui)
85 85 return self._diffopts
86 86
87 87 def join(self, *p):
88 88 return os.path.join(self.path, *p)
89 89
90 90 def find_series(self, patch):
91 91 pre = re.compile("(\s*)([^#]+)")
92 92 index = 0
93 93 for l in self.full_series:
94 94 m = pre.match(l)
95 95 if m:
96 96 s = m.group(2)
97 97 s = s.rstrip()
98 98 if s == patch:
99 99 return index
100 100 index += 1
101 101 return None
102 102
103 103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104 104
105 105 def parse_series(self):
106 106 self.series = []
107 107 self.series_guards = []
108 108 for l in self.full_series:
109 109 h = l.find('#')
110 110 if h == -1:
111 111 patch = l
112 112 comment = ''
113 113 elif h == 0:
114 114 continue
115 115 else:
116 116 patch = l[:h]
117 117 comment = l[h:]
118 118 patch = patch.strip()
119 119 if patch:
120 120 if patch in self.series:
121 121 raise util.Abort(_('%s appears more than once in %s') %
122 122 (patch, self.join(self.series_path)))
123 123 self.series.append(patch)
124 124 self.series_guards.append(self.guard_re.findall(comment))
125 125
126 126 def check_guard(self, guard):
127 127 bad_chars = '# \t\r\n\f'
128 128 first = guard[0]
129 129 for c in '-+':
130 130 if first == c:
131 131 return (_('guard %r starts with invalid character: %r') %
132 132 (guard, c))
133 133 for c in bad_chars:
134 134 if c in guard:
135 135 return _('invalid character in guard %r: %r') % (guard, c)
136 136
137 137 def set_active(self, guards):
138 138 for guard in guards:
139 139 bad = self.check_guard(guard)
140 140 if bad:
141 141 raise util.Abort(bad)
142 142 guards = dict.fromkeys(guards).keys()
143 143 guards.sort()
144 144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 145 self.active_guards = guards
146 146 self.guards_dirty = True
147 147
148 148 def active(self):
149 149 if self.active_guards is None:
150 150 self.active_guards = []
151 151 try:
152 152 guards = self.opener(self.guards_path).read().split()
153 153 except IOError, err:
154 154 if err.errno != errno.ENOENT: raise
155 155 guards = []
156 156 for i, guard in enumerate(guards):
157 157 bad = self.check_guard(guard)
158 158 if bad:
159 159 self.ui.warn('%s:%d: %s\n' %
160 160 (self.join(self.guards_path), i + 1, bad))
161 161 else:
162 162 self.active_guards.append(guard)
163 163 return self.active_guards
164 164
165 165 def set_guards(self, idx, guards):
166 166 for g in guards:
167 167 if len(g) < 2:
168 168 raise util.Abort(_('guard %r too short') % g)
169 169 if g[0] not in '-+':
170 170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 171 bad = self.check_guard(g[1:])
172 172 if bad:
173 173 raise util.Abort(bad)
174 174 drop = self.guard_re.sub('', self.full_series[idx])
175 175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 176 self.parse_series()
177 177 self.series_dirty = True
178 178
179 179 def pushable(self, idx):
180 180 if isinstance(idx, str):
181 181 idx = self.series.index(idx)
182 182 patchguards = self.series_guards[idx]
183 183 if not patchguards:
184 184 return True, None
185 185 default = False
186 186 guards = self.active()
187 187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 188 if exactneg:
189 189 return False, exactneg[0]
190 190 pos = [g for g in patchguards if g[0] == '+']
191 191 exactpos = [g for g in pos if g[1:] in guards]
192 192 if pos:
193 193 if exactpos:
194 194 return True, exactpos[0]
195 195 return False, pos
196 196 return True, ''
197 197
198 198 def explain_pushable(self, idx, all_patches=False):
199 199 write = all_patches and self.ui.write or self.ui.warn
200 200 if all_patches or self.ui.verbose:
201 201 if isinstance(idx, str):
202 202 idx = self.series.index(idx)
203 203 pushable, why = self.pushable(idx)
204 204 if all_patches and pushable:
205 205 if why is None:
206 206 write(_('allowing %s - no guards in effect\n') %
207 207 self.series[idx])
208 208 else:
209 209 if not why:
210 210 write(_('allowing %s - no matching negative guards\n') %
211 211 self.series[idx])
212 212 else:
213 213 write(_('allowing %s - guarded by %r\n') %
214 214 (self.series[idx], why))
215 215 if not pushable:
216 216 if why:
217 217 write(_('skipping %s - guarded by %r\n') %
218 218 (self.series[idx], why))
219 219 else:
220 220 write(_('skipping %s - no matching guards\n') %
221 221 self.series[idx])
222 222
223 223 def save_dirty(self):
224 224 def write_list(items, path):
225 225 fp = self.opener(path, 'w')
226 226 for i in items:
227 227 print >> fp, i
228 228 fp.close()
229 229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232 232
233 233 def readheaders(self, patch):
234 234 def eatdiff(lines):
235 235 while lines:
236 236 l = lines[-1]
237 237 if (l.startswith("diff -") or
238 238 l.startswith("Index:") or
239 239 l.startswith("===========")):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243 def eatempty(lines):
244 244 while lines:
245 245 l = lines[-1]
246 246 if re.match('\s*$', l):
247 247 del lines[-1]
248 248 else:
249 249 break
250 250
251 251 pf = self.join(patch)
252 252 message = []
253 253 comments = []
254 254 user = None
255 255 date = None
256 256 format = None
257 257 subject = None
258 258 diffstart = 0
259 259
260 260 for line in file(pf):
261 261 line = line.rstrip()
262 262 if line.startswith('diff --git'):
263 263 diffstart = 2
264 264 break
265 265 if diffstart:
266 266 if line.startswith('+++ '):
267 267 diffstart = 2
268 268 break
269 269 if line.startswith("--- "):
270 270 diffstart = 1
271 271 continue
272 272 elif format == "hgpatch":
273 273 # parse values when importing the result of an hg export
274 274 if line.startswith("# User "):
275 275 user = line[7:]
276 276 elif line.startswith("# Date "):
277 277 date = line[7:]
278 278 elif not line.startswith("# ") and line:
279 279 message.append(line)
280 280 format = None
281 281 elif line == '# HG changeset patch':
282 282 format = "hgpatch"
283 283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 284 line.startswith("subject: "))):
285 285 subject = line[9:]
286 286 format = "tag"
287 287 elif (format != "tagdone" and (line.startswith("From: ") or
288 288 line.startswith("from: "))):
289 289 user = line[6:]
290 290 format = "tag"
291 291 elif format == "tag" and line == "":
292 292 # when looking for tags (subject: from: etc) they
293 293 # end once you find a blank line in the source
294 294 format = "tagdone"
295 295 elif message or line:
296 296 message.append(line)
297 297 comments.append(line)
298 298
299 299 eatdiff(message)
300 300 eatdiff(comments)
301 301 eatempty(message)
302 302 eatempty(comments)
303 303
304 304 # make sure message isn't empty
305 305 if format and format.startswith("tag") and subject:
306 306 message.insert(0, "")
307 307 message.insert(0, subject)
308 308 return (message, comments, user, date, diffstart > 1)
309 309
310 310 def removeundo(self, repo):
311 311 undo = repo.sjoin('undo')
312 312 if not os.path.exists(undo):
313 313 return
314 314 try:
315 315 os.unlink(undo)
316 316 except OSError, inst:
317 317 self.ui.warn('error removing undo: %s\n' % str(inst))
318 318
319 319 def printdiff(self, repo, node1, node2=None, files=None,
320 320 fp=None, changes=None, opts={}):
321 321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322 322
323 323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 324 fp=fp, changes=changes, opts=self.diffopts())
325 325
326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
326 def mergeone(self, repo, mergeq, head, patch, rev):
327 327 # first try just applying the patch
328 328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 strict=True, merge=rev, wlock=wlock)
329 strict=True, merge=rev)
330 330
331 331 if err == 0:
332 332 return (err, n)
333 333
334 334 if n is None:
335 335 raise util.Abort(_("apply failed for patch %s") % patch)
336 336
337 337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 338
339 339 # apply failed, strip away that rev and merge.
340 hg.clean(repo, head, wlock=wlock)
341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
340 hg.clean(repo, head)
341 self.strip(repo, n, update=False, backup='strip')
342 342
343 343 ctx = repo.changectx(rev)
344 ret = hg.merge(repo, rev, wlock=wlock)
344 ret = hg.merge(repo, rev)
345 345 if ret:
346 346 raise util.Abort(_("update returned %d") % ret)
347 n = repo.commit(None, ctx.description(), ctx.user(),
348 force=1, wlock=wlock)
347 n = repo.commit(None, ctx.description(), ctx.user(), force=1)
349 348 if n == None:
350 349 raise util.Abort(_("repo commit failed"))
351 350 try:
352 351 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 352 except:
354 353 raise util.Abort(_("unable to read %s") % patch)
355 354
356 355 patchf = self.opener(patch, "w")
357 356 if comments:
358 357 comments = "\n".join(comments) + '\n\n'
359 358 patchf.write(comments)
360 359 self.printdiff(repo, head, n, fp=patchf)
361 360 patchf.close()
362 361 self.removeundo(repo)
363 362 return (0, n)
364 363
365 364 def qparents(self, repo, rev=None):
366 365 if rev is None:
367 366 (p1, p2) = repo.dirstate.parents()
368 367 if p2 == revlog.nullid:
369 368 return p1
370 369 if len(self.applied) == 0:
371 370 return None
372 371 return revlog.bin(self.applied[-1].rev)
373 372 pp = repo.changelog.parents(rev)
374 373 if pp[1] != revlog.nullid:
375 374 arevs = [ x.rev for x in self.applied ]
376 375 p0 = revlog.hex(pp[0])
377 376 p1 = revlog.hex(pp[1])
378 377 if p0 in arevs:
379 378 return pp[0]
380 379 if p1 in arevs:
381 380 return pp[1]
382 381 return pp[0]
383 382
384 def mergepatch(self, repo, mergeq, series, wlock):
383 def mergepatch(self, repo, mergeq, series):
385 384 if len(self.applied) == 0:
386 385 # each of the patches merged in will have two parents. This
387 386 # can confuse the qrefresh, qdiff, and strip code because it
388 387 # needs to know which parent is actually in the patch queue.
389 388 # so, we insert a merge marker with only one parent. This way
390 389 # the first patch in the queue is never a merge patch
391 390 #
392 391 pname = ".hg.patches.merge.marker"
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
394 wlock=wlock)
392 n = repo.commit(None, '[mq]: merge marker', user=None, force=1)
395 393 self.removeundo(repo)
396 394 self.applied.append(statusentry(revlog.hex(n), pname))
397 395 self.applied_dirty = 1
398 396
399 397 head = self.qparents(repo)
400 398
401 399 for patch in series:
402 400 patch = mergeq.lookup(patch, strict=True)
403 401 if not patch:
404 402 self.ui.warn("patch %s does not exist\n" % patch)
405 403 return (1, None)
406 404 pushable, reason = self.pushable(patch)
407 405 if not pushable:
408 406 self.explain_pushable(patch, all_patches=True)
409 407 continue
410 408 info = mergeq.isapplied(patch)
411 409 if not info:
412 410 self.ui.warn("patch %s is not applied\n" % patch)
413 411 return (1, None)
414 412 rev = revlog.bin(info[1])
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
413 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
416 414 if head:
417 415 self.applied.append(statusentry(revlog.hex(head), patch))
418 416 self.applied_dirty = 1
419 417 if err:
420 418 return (err, head)
421 419 self.save_dirty()
422 420 return (0, head)
423 421
424 422 def patch(self, repo, patchfile):
425 423 '''Apply patchfile to the working directory.
426 424 patchfile: file name of patch'''
427 425 files = {}
428 426 try:
429 427 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 428 files=files)
431 429 except Exception, inst:
432 430 self.ui.note(str(inst) + '\n')
433 431 if not self.ui.verbose:
434 432 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 433 return (False, files, False)
436 434
437 435 return (True, files, fuzz)
438 436
439 437 def apply(self, repo, series, list=False, update_status=True,
440 strict=False, patchdir=None, merge=None, wlock=None,
441 all_files={}):
442 lock = tr = None
438 strict=False, patchdir=None, merge=None, all_files={}):
439 wlock = lock = tr = None
443 440 try:
444 if not wlock:
445 wlock = repo.wlock()
441 wlock = repo.wlock()
446 442 lock = repo.lock()
447 443 tr = repo.transaction()
448 444 try:
449 445 ret = self._apply(tr, repo, series, list, update_status,
450 strict, patchdir, merge, wlock,
451 lock=lock, all_files=all_files)
446 strict, patchdir, merge, all_files=all_files)
452 447 tr.close()
453 448 self.save_dirty()
454 449 return ret
455 450 except:
456 451 try:
457 452 tr.abort()
458 453 finally:
459 454 repo.invalidate()
460 455 repo.dirstate.invalidate()
461 456 raise
462 457 finally:
463 458 del lock, wlock, tr
464 459
465 460 def _apply(self, tr, repo, series, list=False, update_status=True,
466 strict=False, patchdir=None, merge=None, wlock=None,
467 lock=None, all_files={}):
461 strict=False, patchdir=None, merge=None, all_files={}):
468 462 # TODO unify with commands.py
469 463 if not patchdir:
470 464 patchdir = self.path
471 465 err = 0
472 466 n = None
473 467 for patchname in series:
474 468 pushable, reason = self.pushable(patchname)
475 469 if not pushable:
476 470 self.explain_pushable(patchname, all_patches=True)
477 471 continue
478 472 self.ui.warn("applying %s\n" % patchname)
479 473 pf = os.path.join(patchdir, patchname)
480 474
481 475 try:
482 476 message, comments, user, date, patchfound = self.readheaders(patchname)
483 477 except:
484 478 self.ui.warn("Unable to read %s\n" % patchname)
485 479 err = 1
486 480 break
487 481
488 482 if not message:
489 483 message = "imported patch %s\n" % patchname
490 484 else:
491 485 if list:
492 486 message.append("\nimported patch %s" % patchname)
493 487 message = '\n'.join(message)
494 488
495 489 (patcherr, files, fuzz) = self.patch(repo, pf)
496 490 all_files.update(files)
497 491 patcherr = not patcherr
498 492
499 493 if merge and files:
500 494 # Mark as removed/merged and update dirstate parent info
501 495 removed = []
502 496 merged = []
503 497 for f in files:
504 498 if os.path.exists(repo.wjoin(f)):
505 499 merged.append(f)
506 500 else:
507 501 removed.append(f)
508 502 for f in removed:
509 503 repo.dirstate.remove(f)
510 504 for f in merged:
511 505 repo.dirstate.merge(f)
512 506 p1, p2 = repo.dirstate.parents()
513 507 repo.dirstate.setparents(p1, merge)
514 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
515 n = repo.commit(files, message, user, date, force=1, lock=lock,
516 wlock=wlock)
508 files = patch.updatedir(self.ui, repo, files)
509 n = repo.commit(files, message, user, date, force=1)
517 510
518 511 if n == None:
519 512 raise util.Abort(_("repo commit failed"))
520 513
521 514 if update_status:
522 515 self.applied.append(statusentry(revlog.hex(n), patchname))
523 516
524 517 if patcherr:
525 518 if not patchfound:
526 519 self.ui.warn("patch %s is empty\n" % patchname)
527 520 err = 0
528 521 else:
529 522 self.ui.warn("patch failed, rejects left in working dir\n")
530 523 err = 1
531 524 break
532 525
533 526 if fuzz and strict:
534 527 self.ui.warn("fuzz found when applying patch, stopping\n")
535 528 err = 1
536 529 break
537 530 self.removeundo(repo)
538 531 return (err, n)
539 532
540 533 def delete(self, repo, patches, opts):
541 534 if not patches and not opts.get('rev'):
542 535 raise util.Abort(_('qdelete requires at least one revision or '
543 536 'patch name'))
544 537
545 538 realpatches = []
546 539 for patch in patches:
547 540 patch = self.lookup(patch, strict=True)
548 541 info = self.isapplied(patch)
549 542 if info:
550 543 raise util.Abort(_("cannot delete applied patch %s") % patch)
551 544 if patch not in self.series:
552 545 raise util.Abort(_("patch %s not in series file") % patch)
553 546 realpatches.append(patch)
554 547
555 548 appliedbase = 0
556 549 if opts.get('rev'):
557 550 if not self.applied:
558 551 raise util.Abort(_('no patches applied'))
559 552 revs = cmdutil.revrange(repo, opts['rev'])
560 553 if len(revs) > 1 and revs[0] > revs[1]:
561 554 revs.reverse()
562 555 for rev in revs:
563 556 if appliedbase >= len(self.applied):
564 557 raise util.Abort(_("revision %d is not managed") % rev)
565 558
566 559 base = revlog.bin(self.applied[appliedbase].rev)
567 560 node = repo.changelog.node(rev)
568 561 if node != base:
569 562 raise util.Abort(_("cannot delete revision %d above "
570 563 "applied patches") % rev)
571 564 realpatches.append(self.applied[appliedbase].name)
572 565 appliedbase += 1
573 566
574 567 if not opts.get('keep'):
575 568 r = self.qrepo()
576 569 if r:
577 570 r.remove(realpatches, True)
578 571 else:
579 572 for p in realpatches:
580 573 os.unlink(self.join(p))
581 574
582 575 if appliedbase:
583 576 del self.applied[:appliedbase]
584 577 self.applied_dirty = 1
585 578 indices = [self.find_series(p) for p in realpatches]
586 579 indices.sort()
587 580 for i in indices[-1::-1]:
588 581 del self.full_series[i]
589 582 self.parse_series()
590 583 self.series_dirty = 1
591 584
592 585 def check_toppatch(self, repo):
593 586 if len(self.applied) > 0:
594 587 top = revlog.bin(self.applied[-1].rev)
595 588 pp = repo.dirstate.parents()
596 589 if top not in pp:
597 590 raise util.Abort(_("queue top not at same revision as working directory"))
598 591 return top
599 592 return None
600 593 def check_localchanges(self, repo, force=False, refresh=True):
601 594 m, a, r, d = repo.status()[:4]
602 595 if m or a or r or d:
603 596 if not force:
604 597 if refresh:
605 598 raise util.Abort(_("local changes found, refresh first"))
606 599 else:
607 600 raise util.Abort(_("local changes found"))
608 601 return m, a, r, d
609 602
610 603 def new(self, repo, patch, *pats, **opts):
611 604 msg = opts.get('msg')
612 605 force = opts.get('force')
613 606 if os.path.exists(self.join(patch)):
614 607 raise util.Abort(_('patch "%s" already exists') % patch)
615 608 if opts.get('include') or opts.get('exclude') or pats:
616 609 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
617 610 m, a, r, d = repo.status(files=fns, match=match)[:4]
618 611 else:
619 612 m, a, r, d = self.check_localchanges(repo, force)
620 613 commitfiles = m + a + r
621 614 self.check_toppatch(repo)
622 615 wlock = repo.wlock()
623 616 try:
624 617 insert = self.full_series_end()
625 618 if msg:
626 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
619 n = repo.commit(commitfiles, msg, force=True)
627 620 else:
628 n = repo.commit(commitfiles,
629 "[mq]: %s" % patch, force=True, wlock=wlock)
621 n = repo.commit(commitfiles, "[mq]: %s" % patch, force=True)
630 622 if n == None:
631 623 raise util.Abort(_("repo commit failed"))
632 624 self.full_series[insert:insert] = [patch]
633 625 self.applied.append(statusentry(revlog.hex(n), patch))
634 626 self.parse_series()
635 627 self.series_dirty = 1
636 628 self.applied_dirty = 1
637 629 p = self.opener(patch, "w")
638 630 if msg:
639 631 msg = msg + "\n"
640 632 p.write(msg)
641 633 p.close()
642 634 wlock = None
643 635 r = self.qrepo()
644 636 if r: r.add([patch])
645 637 if commitfiles:
646 638 self.refresh(repo, short=True)
647 639 self.removeundo(repo)
648 640 finally:
649 641 del wlock
650 642
651 def strip(self, repo, rev, update=True, backup="all", wlock=None):
652 lock = None
643 def strip(self, repo, rev, update=True, backup="all"):
644 wlock = lock = None
653 645 try:
654 if not wlock:
655 wlock = repo.wlock()
646 wlock = repo.wlock()
656 647 lock = repo.lock()
657 648
658 649 if update:
659 650 self.check_localchanges(repo, refresh=False)
660 651 urev = self.qparents(repo, rev)
661 hg.clean(repo, urev, wlock=wlock)
652 hg.clean(repo, urev)
662 653 repo.dirstate.write()
663 654
664 655 self.removeundo(repo)
665 656 repair.strip(self.ui, repo, rev, backup)
666 657 finally:
667 658 del lock, wlock
668 659
669 660 def isapplied(self, patch):
670 661 """returns (index, rev, patch)"""
671 662 for i in xrange(len(self.applied)):
672 663 a = self.applied[i]
673 664 if a.name == patch:
674 665 return (i, a.rev, a.name)
675 666 return None
676 667
677 668 # if the exact patch name does not exist, we try a few
678 669 # variations. If strict is passed, we try only #1
679 670 #
680 671 # 1) a number to indicate an offset in the series file
681 672 # 2) a unique substring of the patch name was given
682 673 # 3) patchname[-+]num to indicate an offset in the series file
683 674 def lookup(self, patch, strict=False):
684 675 patch = patch and str(patch)
685 676
686 677 def partial_name(s):
687 678 if s in self.series:
688 679 return s
689 680 matches = [x for x in self.series if s in x]
690 681 if len(matches) > 1:
691 682 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
692 683 for m in matches:
693 684 self.ui.warn(' %s\n' % m)
694 685 return None
695 686 if matches:
696 687 return matches[0]
697 688 if len(self.series) > 0 and len(self.applied) > 0:
698 689 if s == 'qtip':
699 690 return self.series[self.series_end(True)-1]
700 691 if s == 'qbase':
701 692 return self.series[0]
702 693 return None
703 694 if patch == None:
704 695 return None
705 696
706 697 # we don't want to return a partial match until we make
707 698 # sure the file name passed in does not exist (checked below)
708 699 res = partial_name(patch)
709 700 if res and res == patch:
710 701 return res
711 702
712 703 if not os.path.isfile(self.join(patch)):
713 704 try:
714 705 sno = int(patch)
715 706 except(ValueError, OverflowError):
716 707 pass
717 708 else:
718 709 if sno < len(self.series):
719 710 return self.series[sno]
720 711 if not strict:
721 712 # return any partial match made above
722 713 if res:
723 714 return res
724 715 minus = patch.rfind('-')
725 716 if minus >= 0:
726 717 res = partial_name(patch[:minus])
727 718 if res:
728 719 i = self.series.index(res)
729 720 try:
730 721 off = int(patch[minus+1:] or 1)
731 722 except(ValueError, OverflowError):
732 723 pass
733 724 else:
734 725 if i - off >= 0:
735 726 return self.series[i - off]
736 727 plus = patch.rfind('+')
737 728 if plus >= 0:
738 729 res = partial_name(patch[:plus])
739 730 if res:
740 731 i = self.series.index(res)
741 732 try:
742 733 off = int(patch[plus+1:] or 1)
743 734 except(ValueError, OverflowError):
744 735 pass
745 736 else:
746 737 if i + off < len(self.series):
747 738 return self.series[i + off]
748 739 raise util.Abort(_("patch %s not in series") % patch)
749 740
750 741 def push(self, repo, patch=None, force=False, list=False,
751 mergeq=None, wlock=None):
752 if not wlock:
753 wlock = repo.wlock()
742 mergeq=None):
743 wlock = repo.wlock()
754 744 try:
755 745 patch = self.lookup(patch)
756 746 # Suppose our series file is: A B C and the current 'top'
757 747 # patch is B. qpush C should be performed (moving forward)
758 748 # qpush B is a NOP (no change) qpush A is an error (can't
759 749 # go backwards with qpush)
760 750 if patch:
761 751 info = self.isapplied(patch)
762 752 if info:
763 753 if info[0] < len(self.applied) - 1:
764 754 raise util.Abort(
765 755 _("cannot push to a previous patch: %s") % patch)
766 756 if info[0] < len(self.series) - 1:
767 757 self.ui.warn(
768 758 _('qpush: %s is already at the top\n') % patch)
769 759 else:
770 760 self.ui.warn(_('all patches are currently applied\n'))
771 761 return
772 762
773 763 # Following the above example, starting at 'top' of B:
774 764 # qpush should be performed (pushes C), but a subsequent
775 765 # qpush without an argument is an error (nothing to
776 766 # apply). This allows a loop of "...while hg qpush..." to
777 767 # work as it detects an error when done
778 768 if self.series_end() == len(self.series):
779 769 self.ui.warn(_('patch series already fully applied\n'))
780 770 return 1
781 771 if not force:
782 772 self.check_localchanges(repo)
783 773
784 774 self.applied_dirty = 1;
785 775 start = self.series_end()
786 776 if start > 0:
787 777 self.check_toppatch(repo)
788 778 if not patch:
789 779 patch = self.series[start]
790 780 end = start + 1
791 781 else:
792 782 end = self.series.index(patch, start) + 1
793 783 s = self.series[start:end]
794 784 all_files = {}
795 785 try:
796 786 if mergeq:
797 ret = self.mergepatch(repo, mergeq, s, wlock)
787 ret = self.mergepatch(repo, mergeq, s)
798 788 else:
799 ret = self.apply(repo, s, list, wlock=wlock,
800 all_files=all_files)
789 ret = self.apply(repo, s, list, all_files=all_files)
801 790 except:
802 791 self.ui.warn(_('cleaning up working directory...'))
803 792 node = repo.dirstate.parents()[0]
804 hg.revert(repo, node, None, wlock)
805 unknown = repo.status(wlock=wlock)[4]
793 hg.revert(repo, node, None)
794 unknown = repo.status()[4]
806 795 # only remove unknown files that we know we touched or
807 796 # created while patching
808 797 for f in unknown:
809 798 if f in all_files:
810 799 util.unlink(repo.wjoin(f))
811 800 self.ui.warn(_('done\n'))
812 801 raise
813 802 top = self.applied[-1].name
814 803 if ret[0]:
815 804 self.ui.write(
816 805 "Errors during apply, please fix and refresh %s\n" % top)
817 806 else:
818 807 self.ui.write("Now at: %s\n" % top)
819 808 return ret[0]
820 809 finally:
821 810 del wlock
822 811
823 def pop(self, repo, patch=None, force=False, update=True, all=False,
824 wlock=None):
812 def pop(self, repo, patch=None, force=False, update=True, all=False):
825 813 def getfile(f, rev):
826 814 t = repo.file(f).read(rev)
827 815 repo.wfile(f, "w").write(t)
828 816
829 if not wlock:
830 wlock = repo.wlock()
817 wlock = repo.wlock()
831 818 try:
832 819 if patch:
833 820 # index, rev, patch
834 821 info = self.isapplied(patch)
835 822 if not info:
836 823 patch = self.lookup(patch)
837 824 info = self.isapplied(patch)
838 825 if not info:
839 826 raise util.Abort(_("patch %s is not applied") % patch)
840 827
841 828 if len(self.applied) == 0:
842 829 # Allow qpop -a to work repeatedly,
843 830 # but not qpop without an argument
844 831 self.ui.warn(_("no patches applied\n"))
845 832 return not all
846 833
847 834 if not update:
848 835 parents = repo.dirstate.parents()
849 836 rr = [ revlog.bin(x.rev) for x in self.applied ]
850 837 for p in parents:
851 838 if p in rr:
852 839 self.ui.warn("qpop: forcing dirstate update\n")
853 840 update = True
854 841
855 842 if not force and update:
856 843 self.check_localchanges(repo)
857 844
858 845 self.applied_dirty = 1;
859 846 end = len(self.applied)
860 847 if not patch:
861 848 if all:
862 849 popi = 0
863 850 else:
864 851 popi = len(self.applied) - 1
865 852 else:
866 853 popi = info[0] + 1
867 854 if popi >= end:
868 855 self.ui.warn("qpop: %s is already at the top\n" % patch)
869 856 return
870 857 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
871 858
872 859 start = info[0]
873 860 rev = revlog.bin(info[1])
874 861
875 862 # we know there are no local changes, so we can make a simplified
876 863 # form of hg.update.
877 864 if update:
878 865 top = self.check_toppatch(repo)
879 866 qp = self.qparents(repo, rev)
880 867 changes = repo.changelog.read(qp)
881 868 mmap = repo.manifest.read(changes[0])
882 869 m, a, r, d, u = repo.status(qp, top)[:5]
883 870 if d:
884 871 raise util.Abort("deletions found between repo revs")
885 872 for f in m:
886 873 getfile(f, mmap[f])
887 874 for f in r:
888 875 getfile(f, mmap[f])
889 876 util.set_exec(repo.wjoin(f), mmap.execf(f))
890 877 for f in m + r:
891 878 repo.dirstate.normal(f)
892 879 for f in a:
893 880 try:
894 881 os.unlink(repo.wjoin(f))
895 882 except OSError, e:
896 883 if e.errno != errno.ENOENT:
897 884 raise
898 885 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
899 886 except: pass
900 887 repo.dirstate.forget(f)
901 888 repo.dirstate.setparents(qp, revlog.nullid)
902 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
889 self.strip(repo, rev, update=False, backup='strip')
903 890 del self.applied[start:end]
904 891 if len(self.applied):
905 892 self.ui.write("Now at: %s\n" % self.applied[-1].name)
906 893 else:
907 894 self.ui.write("Patch queue now empty\n")
908 895 finally:
909 896 del wlock
910 897
911 898 def diff(self, repo, pats, opts):
912 899 top = self.check_toppatch(repo)
913 900 if not top:
914 901 self.ui.write("No patches applied\n")
915 902 return
916 903 qp = self.qparents(repo, top)
917 904 if opts.get('git'):
918 905 self.diffopts().git = True
919 906 self.printdiff(repo, qp, files=pats, opts=opts)
920 907
921 908 def refresh(self, repo, pats=None, **opts):
922 909 if len(self.applied) == 0:
923 910 self.ui.write("No patches applied\n")
924 911 return 1
925 912 wlock = repo.wlock()
926 913 try:
927 914 self.check_toppatch(repo)
928 915 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
929 916 top = revlog.bin(top)
930 917 cparents = repo.changelog.parents(top)
931 918 patchparent = self.qparents(repo, top)
932 919 message, comments, user, date, patchfound = self.readheaders(patchfn)
933 920
934 921 patchf = self.opener(patchfn, 'r+')
935 922
936 923 # if the patch was a git patch, refresh it as a git patch
937 924 for line in patchf:
938 925 if line.startswith('diff --git'):
939 926 self.diffopts().git = True
940 927 break
941 928 patchf.seek(0)
942 929 patchf.truncate()
943 930
944 931 msg = opts.get('msg', '').rstrip()
945 932 if msg:
946 933 if comments:
947 934 # Remove existing message.
948 935 ci = 0
949 936 subj = None
950 937 for mi in xrange(len(message)):
951 938 if comments[ci].lower().startswith('subject: '):
952 939 subj = comments[ci][9:]
953 940 while message[mi] != comments[ci] and message[mi] != subj:
954 941 ci += 1
955 942 del comments[ci]
956 943 comments.append(msg)
957 944 if comments:
958 945 comments = "\n".join(comments) + '\n\n'
959 946 patchf.write(comments)
960 947
961 948 if opts.get('git'):
962 949 self.diffopts().git = True
963 950 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
964 951 tip = repo.changelog.tip()
965 952 if top == tip:
966 953 # if the top of our patch queue is also the tip, there is an
967 954 # optimization here. We update the dirstate in place and strip
968 955 # off the tip commit. Then just commit the current directory
969 956 # tree. We can also send repo.commit the list of files
970 957 # changed to speed up the diff
971 958 #
972 959 # in short mode, we only diff the files included in the
973 960 # patch already
974 961 #
975 962 # this should really read:
976 963 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
977 964 # but we do it backwards to take advantage of manifest/chlog
978 965 # caching against the next repo.status call
979 966 #
980 967 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
981 968 changes = repo.changelog.read(tip)
982 969 man = repo.manifest.read(changes[0])
983 970 aaa = aa[:]
984 971 if opts.get('short'):
985 972 filelist = mm + aa + dd
986 973 match = dict.fromkeys(filelist).__contains__
987 974 else:
988 975 filelist = None
989 976 match = util.always
990 977 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
991 978
992 979 # we might end up with files that were added between
993 980 # tip and the dirstate parent, but then changed in the
994 981 # local dirstate. in this case, we want them to only
995 982 # show up in the added section
996 983 for x in m:
997 984 if x not in aa:
998 985 mm.append(x)
999 986 # we might end up with files added by the local dirstate that
1000 987 # were deleted by the patch. In this case, they should only
1001 988 # show up in the changed section.
1002 989 for x in a:
1003 990 if x in dd:
1004 991 del dd[dd.index(x)]
1005 992 mm.append(x)
1006 993 else:
1007 994 aa.append(x)
1008 995 # make sure any files deleted in the local dirstate
1009 996 # are not in the add or change column of the patch
1010 997 forget = []
1011 998 for x in d + r:
1012 999 if x in aa:
1013 1000 del aa[aa.index(x)]
1014 1001 forget.append(x)
1015 1002 continue
1016 1003 elif x in mm:
1017 1004 del mm[mm.index(x)]
1018 1005 dd.append(x)
1019 1006
1020 1007 m = util.unique(mm)
1021 1008 r = util.unique(dd)
1022 1009 a = util.unique(aa)
1023 1010 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1024 1011 filelist = util.unique(c[0] + c[1] + c[2])
1025 1012 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1026 1013 fp=patchf, changes=c, opts=self.diffopts())
1027 1014 patchf.close()
1028 1015
1029 1016 repo.dirstate.setparents(*cparents)
1030 1017 copies = {}
1031 1018 for dst in a:
1032 1019 src = repo.dirstate.copied(dst)
1033 1020 if src is None:
1034 1021 continue
1035 1022 copies.setdefault(src, []).append(dst)
1036 1023 repo.dirstate.add(dst)
1037 1024 # remember the copies between patchparent and tip
1038 1025 # this may be slow, so don't do it if we're not tracking copies
1039 1026 if self.diffopts().git:
1040 1027 for dst in aaa:
1041 1028 f = repo.file(dst)
1042 1029 src = f.renamed(man[dst])
1043 1030 if src:
1044 1031 copies[src[0]] = copies.get(dst, [])
1045 1032 if dst in a:
1046 1033 copies[src[0]].append(dst)
1047 1034 # we can't copy a file created by the patch itself
1048 1035 if dst in copies:
1049 1036 del copies[dst]
1050 1037 for src, dsts in copies.iteritems():
1051 1038 for dst in dsts:
1052 1039 repo.dirstate.copy(src, dst)
1053 1040 for f in r:
1054 1041 repo.dirstate.remove(f)
1055 1042 # if the patch excludes a modified file, mark that
1056 1043 # file with mtime=0 so status can see it.
1057 1044 mm = []
1058 1045 for i in xrange(len(m)-1, -1, -1):
1059 1046 if not matchfn(m[i]):
1060 1047 mm.append(m[i])
1061 1048 del m[i]
1062 1049 for f in m:
1063 1050 repo.dirstate.normal(f)
1064 1051 for f in mm:
1065 1052 repo.dirstate.normaldirty(f)
1066 1053 for f in forget:
1067 1054 repo.dirstate.forget(f)
1068 1055
1069 1056 if not msg:
1070 1057 if not message:
1071 1058 message = "[mq]: %s\n" % patchfn
1072 1059 else:
1073 1060 message = "\n".join(message)
1074 1061 else:
1075 1062 message = msg
1076 1063
1077 1064 self.strip(repo, top, update=False,
1078 backup='strip', wlock=wlock)
1065 backup='strip')
1079 1066 n = repo.commit(filelist, message, changes[1], match=matchfn,
1080 force=1, wlock=wlock)
1067 force=1)
1081 1068 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1082 1069 self.applied_dirty = 1
1083 1070 self.removeundo(repo)
1084 1071 else:
1085 1072 self.printdiff(repo, patchparent, fp=patchf)
1086 1073 patchf.close()
1087 1074 added = repo.status()[1]
1088 1075 for a in added:
1089 1076 f = repo.wjoin(a)
1090 1077 try:
1091 1078 os.unlink(f)
1092 1079 except OSError, e:
1093 1080 if e.errno != errno.ENOENT:
1094 1081 raise
1095 1082 try: os.removedirs(os.path.dirname(f))
1096 1083 except: pass
1097 1084 # forget the file copies in the dirstate
1098 1085 # push should readd the files later on
1099 1086 repo.dirstate.forget(a)
1100 self.pop(repo, force=True, wlock=wlock)
1101 self.push(repo, force=True, wlock=wlock)
1087 self.pop(repo, force=True)
1088 self.push(repo, force=True)
1102 1089 finally:
1103 1090 del wlock
1104 1091
1105 1092 def init(self, repo, create=False):
1106 1093 if not create and os.path.isdir(self.path):
1107 1094 raise util.Abort(_("patch queue directory already exists"))
1108 1095 try:
1109 1096 os.mkdir(self.path)
1110 1097 except OSError, inst:
1111 1098 if inst.errno != errno.EEXIST or not create:
1112 1099 raise
1113 1100 if create:
1114 1101 return self.qrepo(create=True)
1115 1102
1116 1103 def unapplied(self, repo, patch=None):
1117 1104 if patch and patch not in self.series:
1118 1105 raise util.Abort(_("patch %s is not in series file") % patch)
1119 1106 if not patch:
1120 1107 start = self.series_end()
1121 1108 else:
1122 1109 start = self.series.index(patch) + 1
1123 1110 unapplied = []
1124 1111 for i in xrange(start, len(self.series)):
1125 1112 pushable, reason = self.pushable(i)
1126 1113 if pushable:
1127 1114 unapplied.append((i, self.series[i]))
1128 1115 self.explain_pushable(i)
1129 1116 return unapplied
1130 1117
1131 1118 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1132 1119 summary=False):
1133 1120 def displayname(patchname):
1134 1121 if summary:
1135 1122 msg = self.readheaders(patchname)[0]
1136 1123 msg = msg and ': ' + msg[0] or ': '
1137 1124 else:
1138 1125 msg = ''
1139 1126 return '%s%s' % (patchname, msg)
1140 1127
1141 1128 applied = dict.fromkeys([p.name for p in self.applied])
1142 1129 if length is None:
1143 1130 length = len(self.series) - start
1144 1131 if not missing:
1145 1132 for i in xrange(start, start+length):
1146 1133 patch = self.series[i]
1147 1134 if patch in applied:
1148 1135 stat = 'A'
1149 1136 elif self.pushable(i)[0]:
1150 1137 stat = 'U'
1151 1138 else:
1152 1139 stat = 'G'
1153 1140 pfx = ''
1154 1141 if self.ui.verbose:
1155 1142 pfx = '%d %s ' % (i, stat)
1156 1143 elif status and status != stat:
1157 1144 continue
1158 1145 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1159 1146 else:
1160 1147 msng_list = []
1161 1148 for root, dirs, files in os.walk(self.path):
1162 1149 d = root[len(self.path) + 1:]
1163 1150 for f in files:
1164 1151 fl = os.path.join(d, f)
1165 1152 if (fl not in self.series and
1166 1153 fl not in (self.status_path, self.series_path,
1167 1154 self.guards_path)
1168 1155 and not fl.startswith('.')):
1169 1156 msng_list.append(fl)
1170 1157 msng_list.sort()
1171 1158 for x in msng_list:
1172 1159 pfx = self.ui.verbose and ('D ') or ''
1173 1160 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1174 1161
1175 1162 def issaveline(self, l):
1176 1163 if l.name == '.hg.patches.save.line':
1177 1164 return True
1178 1165
1179 1166 def qrepo(self, create=False):
1180 1167 if create or os.path.isdir(self.join(".hg")):
1181 1168 return hg.repository(self.ui, path=self.path, create=create)
1182 1169
1183 1170 def restore(self, repo, rev, delete=None, qupdate=None):
1184 1171 c = repo.changelog.read(rev)
1185 1172 desc = c[4].strip()
1186 1173 lines = desc.splitlines()
1187 1174 i = 0
1188 1175 datastart = None
1189 1176 series = []
1190 1177 applied = []
1191 1178 qpp = None
1192 1179 for i in xrange(0, len(lines)):
1193 1180 if lines[i] == 'Patch Data:':
1194 1181 datastart = i + 1
1195 1182 elif lines[i].startswith('Dirstate:'):
1196 1183 l = lines[i].rstrip()
1197 1184 l = l[10:].split(' ')
1198 1185 qpp = [ hg.bin(x) for x in l ]
1199 1186 elif datastart != None:
1200 1187 l = lines[i].rstrip()
1201 1188 se = statusentry(l)
1202 1189 file_ = se.name
1203 1190 if se.rev:
1204 1191 applied.append(se)
1205 1192 else:
1206 1193 series.append(file_)
1207 1194 if datastart == None:
1208 1195 self.ui.warn("No saved patch data found\n")
1209 1196 return 1
1210 1197 self.ui.warn("restoring status: %s\n" % lines[0])
1211 1198 self.full_series = series
1212 1199 self.applied = applied
1213 1200 self.parse_series()
1214 1201 self.series_dirty = 1
1215 1202 self.applied_dirty = 1
1216 1203 heads = repo.changelog.heads()
1217 1204 if delete:
1218 1205 if rev not in heads:
1219 1206 self.ui.warn("save entry has children, leaving it alone\n")
1220 1207 else:
1221 1208 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1222 1209 pp = repo.dirstate.parents()
1223 1210 if rev in pp:
1224 1211 update = True
1225 1212 else:
1226 1213 update = False
1227 1214 self.strip(repo, rev, update=update, backup='strip')
1228 1215 if qpp:
1229 1216 self.ui.warn("saved queue repository parents: %s %s\n" %
1230 1217 (hg.short(qpp[0]), hg.short(qpp[1])))
1231 1218 if qupdate:
1232 1219 print "queue directory updating"
1233 1220 r = self.qrepo()
1234 1221 if not r:
1235 1222 self.ui.warn("Unable to load queue repository\n")
1236 1223 return 1
1237 1224 hg.clean(r, qpp[0])
1238 1225
1239 1226 def save(self, repo, msg=None):
1240 1227 if len(self.applied) == 0:
1241 1228 self.ui.warn("save: no patches applied, exiting\n")
1242 1229 return 1
1243 1230 if self.issaveline(self.applied[-1]):
1244 1231 self.ui.warn("status is already saved\n")
1245 1232 return 1
1246 1233
1247 1234 ar = [ ':' + x for x in self.full_series ]
1248 1235 if not msg:
1249 1236 msg = "hg patches saved state"
1250 1237 else:
1251 1238 msg = "hg patches: " + msg.rstrip('\r\n')
1252 1239 r = self.qrepo()
1253 1240 if r:
1254 1241 pp = r.dirstate.parents()
1255 1242 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1256 1243 msg += "\n\nPatch Data:\n"
1257 1244 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1258 1245 "\n".join(ar) + '\n' or "")
1259 1246 n = repo.commit(None, text, user=None, force=1)
1260 1247 if not n:
1261 1248 self.ui.warn("repo commit failed\n")
1262 1249 return 1
1263 1250 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1264 1251 self.applied_dirty = 1
1265 1252 self.removeundo(repo)
1266 1253
1267 1254 def full_series_end(self):
1268 1255 if len(self.applied) > 0:
1269 1256 p = self.applied[-1].name
1270 1257 end = self.find_series(p)
1271 1258 if end == None:
1272 1259 return len(self.full_series)
1273 1260 return end + 1
1274 1261 return 0
1275 1262
1276 1263 def series_end(self, all_patches=False):
1277 1264 """If all_patches is False, return the index of the next pushable patch
1278 1265 in the series, or the series length. If all_patches is True, return the
1279 1266 index of the first patch past the last applied one.
1280 1267 """
1281 1268 end = 0
1282 1269 def next(start):
1283 1270 if all_patches:
1284 1271 return start
1285 1272 i = start
1286 1273 while i < len(self.series):
1287 1274 p, reason = self.pushable(i)
1288 1275 if p:
1289 1276 break
1290 1277 self.explain_pushable(i)
1291 1278 i += 1
1292 1279 return i
1293 1280 if len(self.applied) > 0:
1294 1281 p = self.applied[-1].name
1295 1282 try:
1296 1283 end = self.series.index(p)
1297 1284 except ValueError:
1298 1285 return 0
1299 1286 return next(end + 1)
1300 1287 return next(end)
1301 1288
1302 1289 def appliedname(self, index):
1303 1290 pname = self.applied[index].name
1304 1291 if not self.ui.verbose:
1305 1292 p = pname
1306 1293 else:
1307 1294 p = str(self.series.index(pname)) + " " + pname
1308 1295 return p
1309 1296
1310 1297 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1311 1298 force=None, git=False):
1312 1299 def checkseries(patchname):
1313 1300 if patchname in self.series:
1314 1301 raise util.Abort(_('patch %s is already in the series file')
1315 1302 % patchname)
1316 1303 def checkfile(patchname):
1317 1304 if not force and os.path.exists(self.join(patchname)):
1318 1305 raise util.Abort(_('patch "%s" already exists')
1319 1306 % patchname)
1320 1307
1321 1308 if rev:
1322 1309 if files:
1323 1310 raise util.Abort(_('option "-r" not valid when importing '
1324 1311 'files'))
1325 1312 rev = cmdutil.revrange(repo, rev)
1326 1313 rev.sort(lambda x, y: cmp(y, x))
1327 1314 if (len(files) > 1 or len(rev) > 1) and patchname:
1328 1315 raise util.Abort(_('option "-n" not valid when importing multiple '
1329 1316 'patches'))
1330 1317 i = 0
1331 1318 added = []
1332 1319 if rev:
1333 1320 # If mq patches are applied, we can only import revisions
1334 1321 # that form a linear path to qbase.
1335 1322 # Otherwise, they should form a linear path to a head.
1336 1323 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1337 1324 if len(heads) > 1:
1338 1325 raise util.Abort(_('revision %d is the root of more than one '
1339 1326 'branch') % rev[-1])
1340 1327 if self.applied:
1341 1328 base = revlog.hex(repo.changelog.node(rev[0]))
1342 1329 if base in [n.rev for n in self.applied]:
1343 1330 raise util.Abort(_('revision %d is already managed')
1344 1331 % rev[0])
1345 1332 if heads != [revlog.bin(self.applied[-1].rev)]:
1346 1333 raise util.Abort(_('revision %d is not the parent of '
1347 1334 'the queue') % rev[0])
1348 1335 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1349 1336 lastparent = repo.changelog.parentrevs(base)[0]
1350 1337 else:
1351 1338 if heads != [repo.changelog.node(rev[0])]:
1352 1339 raise util.Abort(_('revision %d has unmanaged children')
1353 1340 % rev[0])
1354 1341 lastparent = None
1355 1342
1356 1343 if git:
1357 1344 self.diffopts().git = True
1358 1345
1359 1346 for r in rev:
1360 1347 p1, p2 = repo.changelog.parentrevs(r)
1361 1348 n = repo.changelog.node(r)
1362 1349 if p2 != revlog.nullrev:
1363 1350 raise util.Abort(_('cannot import merge revision %d') % r)
1364 1351 if lastparent and lastparent != r:
1365 1352 raise util.Abort(_('revision %d is not the parent of %d')
1366 1353 % (r, lastparent))
1367 1354 lastparent = p1
1368 1355
1369 1356 if not patchname:
1370 1357 patchname = normname('%d.diff' % r)
1371 1358 checkseries(patchname)
1372 1359 checkfile(patchname)
1373 1360 self.full_series.insert(0, patchname)
1374 1361
1375 1362 patchf = self.opener(patchname, "w")
1376 1363 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1377 1364 patchf.close()
1378 1365
1379 1366 se = statusentry(revlog.hex(n), patchname)
1380 1367 self.applied.insert(0, se)
1381 1368
1382 1369 added.append(patchname)
1383 1370 patchname = None
1384 1371 self.parse_series()
1385 1372 self.applied_dirty = 1
1386 1373
1387 1374 for filename in files:
1388 1375 if existing:
1389 1376 if filename == '-':
1390 1377 raise util.Abort(_('-e is incompatible with import from -'))
1391 1378 if not patchname:
1392 1379 patchname = normname(filename)
1393 1380 if not os.path.isfile(self.join(patchname)):
1394 1381 raise util.Abort(_("patch %s does not exist") % patchname)
1395 1382 else:
1396 1383 try:
1397 1384 if filename == '-':
1398 1385 if not patchname:
1399 1386 raise util.Abort(_('need --name to import a patch from -'))
1400 1387 text = sys.stdin.read()
1401 1388 else:
1402 1389 text = file(filename).read()
1403 1390 except IOError:
1404 1391 raise util.Abort(_("unable to read %s") % patchname)
1405 1392 if not patchname:
1406 1393 patchname = normname(os.path.basename(filename))
1407 1394 checkfile(patchname)
1408 1395 patchf = self.opener(patchname, "w")
1409 1396 patchf.write(text)
1410 1397 checkseries(patchname)
1411 1398 index = self.full_series_end() + i
1412 1399 self.full_series[index:index] = [patchname]
1413 1400 self.parse_series()
1414 1401 self.ui.warn("adding %s to series file\n" % patchname)
1415 1402 i += 1
1416 1403 added.append(patchname)
1417 1404 patchname = None
1418 1405 self.series_dirty = 1
1419 1406 qrepo = self.qrepo()
1420 1407 if qrepo:
1421 1408 qrepo.add(added)
1422 1409
1423 1410 def delete(ui, repo, *patches, **opts):
1424 1411 """remove patches from queue
1425 1412
1426 1413 The patches must not be applied, unless they are arguments to
1427 1414 the --rev parameter. At least one patch or revision is required.
1428 1415
1429 1416 With --rev, mq will stop managing the named revisions (converting
1430 1417 them to regular mercurial changesets). The patches must be applied
1431 1418 and at the base of the stack. This option is useful when the patches
1432 1419 have been applied upstream.
1433 1420
1434 1421 With --keep, the patch files are preserved in the patch directory."""
1435 1422 q = repo.mq
1436 1423 q.delete(repo, patches, opts)
1437 1424 q.save_dirty()
1438 1425 return 0
1439 1426
1440 1427 def applied(ui, repo, patch=None, **opts):
1441 1428 """print the patches already applied"""
1442 1429 q = repo.mq
1443 1430 if patch:
1444 1431 if patch not in q.series:
1445 1432 raise util.Abort(_("patch %s is not in series file") % patch)
1446 1433 end = q.series.index(patch) + 1
1447 1434 else:
1448 1435 end = q.series_end(True)
1449 1436 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1450 1437
1451 1438 def unapplied(ui, repo, patch=None, **opts):
1452 1439 """print the patches not yet applied"""
1453 1440 q = repo.mq
1454 1441 if patch:
1455 1442 if patch not in q.series:
1456 1443 raise util.Abort(_("patch %s is not in series file") % patch)
1457 1444 start = q.series.index(patch) + 1
1458 1445 else:
1459 1446 start = q.series_end(True)
1460 1447 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1461 1448
1462 1449 def qimport(ui, repo, *filename, **opts):
1463 1450 """import a patch
1464 1451
1465 1452 The patch will have the same name as its source file unless you
1466 1453 give it a new one with --name.
1467 1454
1468 1455 You can register an existing patch inside the patch directory
1469 1456 with the --existing flag.
1470 1457
1471 1458 With --force, an existing patch of the same name will be overwritten.
1472 1459
1473 1460 An existing changeset may be placed under mq control with --rev
1474 1461 (e.g. qimport --rev tip -n patch will place tip under mq control).
1475 1462 With --git, patches imported with --rev will use the git diff
1476 1463 format.
1477 1464 """
1478 1465 q = repo.mq
1479 1466 q.qimport(repo, filename, patchname=opts['name'],
1480 1467 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1481 1468 git=opts['git'])
1482 1469 q.save_dirty()
1483 1470 return 0
1484 1471
1485 1472 def init(ui, repo, **opts):
1486 1473 """init a new queue repository
1487 1474
1488 1475 The queue repository is unversioned by default. If -c is
1489 1476 specified, qinit will create a separate nested repository
1490 1477 for patches (qinit -c may also be run later to convert
1491 1478 an unversioned patch repository into a versioned one).
1492 1479 You can use qcommit to commit changes to this queue repository."""
1493 1480 q = repo.mq
1494 1481 r = q.init(repo, create=opts['create_repo'])
1495 1482 q.save_dirty()
1496 1483 if r:
1497 1484 if not os.path.exists(r.wjoin('.hgignore')):
1498 1485 fp = r.wopener('.hgignore', 'w')
1499 1486 fp.write('syntax: glob\n')
1500 1487 fp.write('status\n')
1501 1488 fp.write('guards\n')
1502 1489 fp.close()
1503 1490 if not os.path.exists(r.wjoin('series')):
1504 1491 r.wopener('series', 'w').close()
1505 1492 r.add(['.hgignore', 'series'])
1506 1493 commands.add(ui, r)
1507 1494 return 0
1508 1495
1509 1496 def clone(ui, source, dest=None, **opts):
1510 1497 '''clone main and patch repository at same time
1511 1498
1512 1499 If source is local, destination will have no patches applied. If
1513 1500 source is remote, this command can not check if patches are
1514 1501 applied in source, so cannot guarantee that patches are not
1515 1502 applied in destination. If you clone remote repository, be sure
1516 1503 before that it has no patches applied.
1517 1504
1518 1505 Source patch repository is looked for in <src>/.hg/patches by
1519 1506 default. Use -p <url> to change.
1520 1507
1521 1508 The patch directory must be a nested mercurial repository, as
1522 1509 would be created by qinit -c.
1523 1510 '''
1524 1511 cmdutil.setremoteconfig(ui, opts)
1525 1512 if dest is None:
1526 1513 dest = hg.defaultdest(source)
1527 1514 sr = hg.repository(ui, ui.expandpath(source))
1528 1515 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1529 1516 try:
1530 1517 pr = hg.repository(ui, patchdir)
1531 1518 except hg.RepoError:
1532 1519 raise util.Abort(_('versioned patch repository not found'
1533 1520 ' (see qinit -c)'))
1534 1521 qbase, destrev = None, None
1535 1522 if sr.local():
1536 1523 if sr.mq.applied:
1537 1524 qbase = revlog.bin(sr.mq.applied[0].rev)
1538 1525 if not hg.islocal(dest):
1539 1526 heads = dict.fromkeys(sr.heads())
1540 1527 for h in sr.heads(qbase):
1541 1528 del heads[h]
1542 1529 destrev = heads.keys()
1543 1530 destrev.append(sr.changelog.parents(qbase)[0])
1544 1531 ui.note(_('cloning main repo\n'))
1545 1532 sr, dr = hg.clone(ui, sr.url(), dest,
1546 1533 pull=opts['pull'],
1547 1534 rev=destrev,
1548 1535 update=False,
1549 1536 stream=opts['uncompressed'])
1550 1537 ui.note(_('cloning patch repo\n'))
1551 1538 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1552 1539 dr.url() + '/.hg/patches',
1553 1540 pull=opts['pull'],
1554 1541 update=not opts['noupdate'],
1555 1542 stream=opts['uncompressed'])
1556 1543 if dr.local():
1557 1544 if qbase:
1558 1545 ui.note(_('stripping applied patches from destination repo\n'))
1559 1546 dr.mq.strip(dr, qbase, update=False, backup=None)
1560 1547 if not opts['noupdate']:
1561 1548 ui.note(_('updating destination repo\n'))
1562 1549 hg.update(dr, dr.changelog.tip())
1563 1550
1564 1551 def commit(ui, repo, *pats, **opts):
1565 1552 """commit changes in the queue repository"""
1566 1553 q = repo.mq
1567 1554 r = q.qrepo()
1568 1555 if not r: raise util.Abort('no queue repository')
1569 1556 commands.commit(r.ui, r, *pats, **opts)
1570 1557
1571 1558 def series(ui, repo, **opts):
1572 1559 """print the entire series file"""
1573 1560 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1574 1561 return 0
1575 1562
1576 1563 def top(ui, repo, **opts):
1577 1564 """print the name of the current patch"""
1578 1565 q = repo.mq
1579 1566 t = q.applied and q.series_end(True) or 0
1580 1567 if t:
1581 1568 return q.qseries(repo, start=t-1, length=1, status='A',
1582 1569 summary=opts.get('summary'))
1583 1570 else:
1584 1571 ui.write("No patches applied\n")
1585 1572 return 1
1586 1573
1587 1574 def next(ui, repo, **opts):
1588 1575 """print the name of the next patch"""
1589 1576 q = repo.mq
1590 1577 end = q.series_end()
1591 1578 if end == len(q.series):
1592 1579 ui.write("All patches applied\n")
1593 1580 return 1
1594 1581 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1595 1582
1596 1583 def prev(ui, repo, **opts):
1597 1584 """print the name of the previous patch"""
1598 1585 q = repo.mq
1599 1586 l = len(q.applied)
1600 1587 if l == 1:
1601 1588 ui.write("Only one patch applied\n")
1602 1589 return 1
1603 1590 if not l:
1604 1591 ui.write("No patches applied\n")
1605 1592 return 1
1606 1593 return q.qseries(repo, start=l-2, length=1, status='A',
1607 1594 summary=opts.get('summary'))
1608 1595
1609 1596 def new(ui, repo, patch, *args, **opts):
1610 1597 """create a new patch
1611 1598
1612 1599 qnew creates a new patch on top of the currently-applied patch
1613 1600 (if any). It will refuse to run if there are any outstanding
1614 1601 changes unless -f is specified, in which case the patch will
1615 1602 be initialised with them. You may also use -I, -X, and/or a list of
1616 1603 files after the patch name to add only changes to matching files
1617 1604 to the new patch, leaving the rest as uncommitted modifications.
1618 1605
1619 1606 -e, -m or -l set the patch header as well as the commit message.
1620 1607 If none is specified, the patch header is empty and the
1621 1608 commit message is '[mq]: PATCH'"""
1622 1609 q = repo.mq
1623 1610 message = cmdutil.logmessage(opts)
1624 1611 if opts['edit']:
1625 1612 message = ui.edit(message, ui.username())
1626 1613 opts['msg'] = message
1627 1614 q.new(repo, patch, *args, **opts)
1628 1615 q.save_dirty()
1629 1616 return 0
1630 1617
1631 1618 def refresh(ui, repo, *pats, **opts):
1632 1619 """update the current patch
1633 1620
1634 1621 If any file patterns are provided, the refreshed patch will contain only
1635 1622 the modifications that match those patterns; the remaining modifications
1636 1623 will remain in the working directory.
1637 1624
1638 1625 hg add/remove/copy/rename work as usual, though you might want to use
1639 1626 git-style patches (--git or [diff] git=1) to track copies and renames.
1640 1627 """
1641 1628 q = repo.mq
1642 1629 message = cmdutil.logmessage(opts)
1643 1630 if opts['edit']:
1644 1631 if message:
1645 1632 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1646 1633 patch = q.applied[-1].name
1647 1634 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1648 1635 message = ui.edit('\n'.join(message), user or ui.username())
1649 1636 ret = q.refresh(repo, pats, msg=message, **opts)
1650 1637 q.save_dirty()
1651 1638 return ret
1652 1639
1653 1640 def diff(ui, repo, *pats, **opts):
1654 1641 """diff of the current patch"""
1655 1642 repo.mq.diff(repo, pats, opts)
1656 1643 return 0
1657 1644
1658 1645 def fold(ui, repo, *files, **opts):
1659 1646 """fold the named patches into the current patch
1660 1647
1661 1648 Patches must not yet be applied. Each patch will be successively
1662 1649 applied to the current patch in the order given. If all the
1663 1650 patches apply successfully, the current patch will be refreshed
1664 1651 with the new cumulative patch, and the folded patches will
1665 1652 be deleted. With -k/--keep, the folded patch files will not
1666 1653 be removed afterwards.
1667 1654
1668 1655 The header for each folded patch will be concatenated with
1669 1656 the current patch header, separated by a line of '* * *'."""
1670 1657
1671 1658 q = repo.mq
1672 1659
1673 1660 if not files:
1674 1661 raise util.Abort(_('qfold requires at least one patch name'))
1675 1662 if not q.check_toppatch(repo):
1676 1663 raise util.Abort(_('No patches applied'))
1677 1664
1678 1665 message = cmdutil.logmessage(opts)
1679 1666 if opts['edit']:
1680 1667 if message:
1681 1668 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1682 1669
1683 1670 parent = q.lookup('qtip')
1684 1671 patches = []
1685 1672 messages = []
1686 1673 for f in files:
1687 1674 p = q.lookup(f)
1688 1675 if p in patches or p == parent:
1689 1676 ui.warn(_('Skipping already folded patch %s') % p)
1690 1677 if q.isapplied(p):
1691 1678 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1692 1679 patches.append(p)
1693 1680
1694 1681 for p in patches:
1695 1682 if not message:
1696 1683 messages.append(q.readheaders(p)[0])
1697 1684 pf = q.join(p)
1698 1685 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1699 1686 if not patchsuccess:
1700 1687 raise util.Abort(_('Error folding patch %s') % p)
1701 1688 patch.updatedir(ui, repo, files)
1702 1689
1703 1690 if not message:
1704 1691 message, comments, user = q.readheaders(parent)[0:3]
1705 1692 for msg in messages:
1706 1693 message.append('* * *')
1707 1694 message.extend(msg)
1708 1695 message = '\n'.join(message)
1709 1696
1710 1697 if opts['edit']:
1711 1698 message = ui.edit(message, user or ui.username())
1712 1699
1713 1700 q.refresh(repo, msg=message)
1714 1701 q.delete(repo, patches, opts)
1715 1702 q.save_dirty()
1716 1703
1717 1704 def goto(ui, repo, patch, **opts):
1718 1705 '''push or pop patches until named patch is at top of stack'''
1719 1706 q = repo.mq
1720 1707 patch = q.lookup(patch)
1721 1708 if q.isapplied(patch):
1722 1709 ret = q.pop(repo, patch, force=opts['force'])
1723 1710 else:
1724 1711 ret = q.push(repo, patch, force=opts['force'])
1725 1712 q.save_dirty()
1726 1713 return ret
1727 1714
1728 1715 def guard(ui, repo, *args, **opts):
1729 1716 '''set or print guards for a patch
1730 1717
1731 1718 Guards control whether a patch can be pushed. A patch with no
1732 1719 guards is always pushed. A patch with a positive guard ("+foo") is
1733 1720 pushed only if the qselect command has activated it. A patch with
1734 1721 a negative guard ("-foo") is never pushed if the qselect command
1735 1722 has activated it.
1736 1723
1737 1724 With no arguments, print the currently active guards.
1738 1725 With arguments, set guards for the named patch.
1739 1726
1740 1727 To set a negative guard "-foo" on topmost patch ("--" is needed so
1741 1728 hg will not interpret "-foo" as an option):
1742 1729 hg qguard -- -foo
1743 1730
1744 1731 To set guards on another patch:
1745 1732 hg qguard other.patch +2.6.17 -stable
1746 1733 '''
1747 1734 def status(idx):
1748 1735 guards = q.series_guards[idx] or ['unguarded']
1749 1736 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1750 1737 q = repo.mq
1751 1738 patch = None
1752 1739 args = list(args)
1753 1740 if opts['list']:
1754 1741 if args or opts['none']:
1755 1742 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1756 1743 for i in xrange(len(q.series)):
1757 1744 status(i)
1758 1745 return
1759 1746 if not args or args[0][0:1] in '-+':
1760 1747 if not q.applied:
1761 1748 raise util.Abort(_('no patches applied'))
1762 1749 patch = q.applied[-1].name
1763 1750 if patch is None and args[0][0:1] not in '-+':
1764 1751 patch = args.pop(0)
1765 1752 if patch is None:
1766 1753 raise util.Abort(_('no patch to work with'))
1767 1754 if args or opts['none']:
1768 1755 idx = q.find_series(patch)
1769 1756 if idx is None:
1770 1757 raise util.Abort(_('no patch named %s') % patch)
1771 1758 q.set_guards(idx, args)
1772 1759 q.save_dirty()
1773 1760 else:
1774 1761 status(q.series.index(q.lookup(patch)))
1775 1762
1776 1763 def header(ui, repo, patch=None):
1777 1764 """Print the header of the topmost or specified patch"""
1778 1765 q = repo.mq
1779 1766
1780 1767 if patch:
1781 1768 patch = q.lookup(patch)
1782 1769 else:
1783 1770 if not q.applied:
1784 1771 ui.write('No patches applied\n')
1785 1772 return 1
1786 1773 patch = q.lookup('qtip')
1787 1774 message = repo.mq.readheaders(patch)[0]
1788 1775
1789 1776 ui.write('\n'.join(message) + '\n')
1790 1777
1791 1778 def lastsavename(path):
1792 1779 (directory, base) = os.path.split(path)
1793 1780 names = os.listdir(directory)
1794 1781 namere = re.compile("%s.([0-9]+)" % base)
1795 1782 maxindex = None
1796 1783 maxname = None
1797 1784 for f in names:
1798 1785 m = namere.match(f)
1799 1786 if m:
1800 1787 index = int(m.group(1))
1801 1788 if maxindex == None or index > maxindex:
1802 1789 maxindex = index
1803 1790 maxname = f
1804 1791 if maxname:
1805 1792 return (os.path.join(directory, maxname), maxindex)
1806 1793 return (None, None)
1807 1794
1808 1795 def savename(path):
1809 1796 (last, index) = lastsavename(path)
1810 1797 if last is None:
1811 1798 index = 0
1812 1799 newpath = path + ".%d" % (index + 1)
1813 1800 return newpath
1814 1801
1815 1802 def push(ui, repo, patch=None, **opts):
1816 1803 """push the next patch onto the stack"""
1817 1804 q = repo.mq
1818 1805 mergeq = None
1819 1806
1820 1807 if opts['all']:
1821 1808 if not q.series:
1822 1809 ui.warn(_('no patches in series\n'))
1823 1810 return 0
1824 1811 patch = q.series[-1]
1825 1812 if opts['merge']:
1826 1813 if opts['name']:
1827 1814 newpath = opts['name']
1828 1815 else:
1829 1816 newpath, i = lastsavename(q.path)
1830 1817 if not newpath:
1831 1818 ui.warn("no saved queues found, please use -n\n")
1832 1819 return 1
1833 1820 mergeq = queue(ui, repo.join(""), newpath)
1834 1821 ui.warn("merging with queue at: %s\n" % mergeq.path)
1835 1822 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1836 1823 mergeq=mergeq)
1837 1824 return ret
1838 1825
1839 1826 def pop(ui, repo, patch=None, **opts):
1840 1827 """pop the current patch off the stack"""
1841 1828 localupdate = True
1842 1829 if opts['name']:
1843 1830 q = queue(ui, repo.join(""), repo.join(opts['name']))
1844 1831 ui.warn('using patch queue: %s\n' % q.path)
1845 1832 localupdate = False
1846 1833 else:
1847 1834 q = repo.mq
1848 1835 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1849 1836 all=opts['all'])
1850 1837 q.save_dirty()
1851 1838 return ret
1852 1839
1853 1840 def rename(ui, repo, patch, name=None, **opts):
1854 1841 """rename a patch
1855 1842
1856 1843 With one argument, renames the current patch to PATCH1.
1857 1844 With two arguments, renames PATCH1 to PATCH2."""
1858 1845
1859 1846 q = repo.mq
1860 1847
1861 1848 if not name:
1862 1849 name = patch
1863 1850 patch = None
1864 1851
1865 1852 if patch:
1866 1853 patch = q.lookup(patch)
1867 1854 else:
1868 1855 if not q.applied:
1869 1856 ui.write(_('No patches applied\n'))
1870 1857 return
1871 1858 patch = q.lookup('qtip')
1872 1859 absdest = q.join(name)
1873 1860 if os.path.isdir(absdest):
1874 1861 name = normname(os.path.join(name, os.path.basename(patch)))
1875 1862 absdest = q.join(name)
1876 1863 if os.path.exists(absdest):
1877 1864 raise util.Abort(_('%s already exists') % absdest)
1878 1865
1879 1866 if name in q.series:
1880 1867 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1881 1868
1882 1869 if ui.verbose:
1883 1870 ui.write('Renaming %s to %s\n' % (patch, name))
1884 1871 i = q.find_series(patch)
1885 1872 guards = q.guard_re.findall(q.full_series[i])
1886 1873 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1887 1874 q.parse_series()
1888 1875 q.series_dirty = 1
1889 1876
1890 1877 info = q.isapplied(patch)
1891 1878 if info:
1892 1879 q.applied[info[0]] = statusentry(info[1], name)
1893 1880 q.applied_dirty = 1
1894 1881
1895 1882 util.rename(q.join(patch), absdest)
1896 1883 r = q.qrepo()
1897 1884 if r:
1898 1885 wlock = r.wlock()
1899 1886 try:
1900 1887 if r.dirstate[name] == 'r':
1901 r.undelete([name], wlock)
1902 r.copy(patch, name, wlock)
1903 r.remove([patch], False, wlock)
1888 r.undelete([name])
1889 r.copy(patch, name)
1890 r.remove([patch], False)
1904 1891 finally:
1905 1892 del wlock
1906 1893
1907 1894 q.save_dirty()
1908 1895
1909 1896 def restore(ui, repo, rev, **opts):
1910 1897 """restore the queue state saved by a rev"""
1911 1898 rev = repo.lookup(rev)
1912 1899 q = repo.mq
1913 1900 q.restore(repo, rev, delete=opts['delete'],
1914 1901 qupdate=opts['update'])
1915 1902 q.save_dirty()
1916 1903 return 0
1917 1904
1918 1905 def save(ui, repo, **opts):
1919 1906 """save current queue state"""
1920 1907 q = repo.mq
1921 1908 message = cmdutil.logmessage(opts)
1922 1909 ret = q.save(repo, msg=message)
1923 1910 if ret:
1924 1911 return ret
1925 1912 q.save_dirty()
1926 1913 if opts['copy']:
1927 1914 path = q.path
1928 1915 if opts['name']:
1929 1916 newpath = os.path.join(q.basepath, opts['name'])
1930 1917 if os.path.exists(newpath):
1931 1918 if not os.path.isdir(newpath):
1932 1919 raise util.Abort(_('destination %s exists and is not '
1933 1920 'a directory') % newpath)
1934 1921 if not opts['force']:
1935 1922 raise util.Abort(_('destination %s exists, '
1936 1923 'use -f to force') % newpath)
1937 1924 else:
1938 1925 newpath = savename(path)
1939 1926 ui.warn("copy %s to %s\n" % (path, newpath))
1940 1927 util.copyfiles(path, newpath)
1941 1928 if opts['empty']:
1942 1929 try:
1943 1930 os.unlink(q.join(q.status_path))
1944 1931 except:
1945 1932 pass
1946 1933 return 0
1947 1934
1948 1935 def strip(ui, repo, rev, **opts):
1949 1936 """strip a revision and all later revs on the same branch"""
1950 1937 rev = repo.lookup(rev)
1951 1938 backup = 'all'
1952 1939 if opts['backup']:
1953 1940 backup = 'strip'
1954 1941 elif opts['nobackup']:
1955 1942 backup = 'none'
1956 1943 update = repo.dirstate.parents()[0] != revlog.nullid
1957 1944 repo.mq.strip(repo, rev, backup=backup, update=update)
1958 1945 return 0
1959 1946
1960 1947 def select(ui, repo, *args, **opts):
1961 1948 '''set or print guarded patches to push
1962 1949
1963 1950 Use the qguard command to set or print guards on patch, then use
1964 1951 qselect to tell mq which guards to use. A patch will be pushed if it
1965 1952 has no guards or any positive guards match the currently selected guard,
1966 1953 but will not be pushed if any negative guards match the current guard.
1967 1954 For example:
1968 1955
1969 1956 qguard foo.patch -stable (negative guard)
1970 1957 qguard bar.patch +stable (positive guard)
1971 1958 qselect stable
1972 1959
1973 1960 This activates the "stable" guard. mq will skip foo.patch (because
1974 1961 it has a negative match) but push bar.patch (because it
1975 1962 has a positive match).
1976 1963
1977 1964 With no arguments, prints the currently active guards.
1978 1965 With one argument, sets the active guard.
1979 1966
1980 1967 Use -n/--none to deactivate guards (no other arguments needed).
1981 1968 When no guards are active, patches with positive guards are skipped
1982 1969 and patches with negative guards are pushed.
1983 1970
1984 1971 qselect can change the guards on applied patches. It does not pop
1985 1972 guarded patches by default. Use --pop to pop back to the last applied
1986 1973 patch that is not guarded. Use --reapply (which implies --pop) to push
1987 1974 back to the current patch afterwards, but skip guarded patches.
1988 1975
1989 1976 Use -s/--series to print a list of all guards in the series file (no
1990 1977 other arguments needed). Use -v for more information.'''
1991 1978
1992 1979 q = repo.mq
1993 1980 guards = q.active()
1994 1981 if args or opts['none']:
1995 1982 old_unapplied = q.unapplied(repo)
1996 1983 old_guarded = [i for i in xrange(len(q.applied)) if
1997 1984 not q.pushable(i)[0]]
1998 1985 q.set_active(args)
1999 1986 q.save_dirty()
2000 1987 if not args:
2001 1988 ui.status(_('guards deactivated\n'))
2002 1989 if not opts['pop'] and not opts['reapply']:
2003 1990 unapplied = q.unapplied(repo)
2004 1991 guarded = [i for i in xrange(len(q.applied))
2005 1992 if not q.pushable(i)[0]]
2006 1993 if len(unapplied) != len(old_unapplied):
2007 1994 ui.status(_('number of unguarded, unapplied patches has '
2008 1995 'changed from %d to %d\n') %
2009 1996 (len(old_unapplied), len(unapplied)))
2010 1997 if len(guarded) != len(old_guarded):
2011 1998 ui.status(_('number of guarded, applied patches has changed '
2012 1999 'from %d to %d\n') %
2013 2000 (len(old_guarded), len(guarded)))
2014 2001 elif opts['series']:
2015 2002 guards = {}
2016 2003 noguards = 0
2017 2004 for gs in q.series_guards:
2018 2005 if not gs:
2019 2006 noguards += 1
2020 2007 for g in gs:
2021 2008 guards.setdefault(g, 0)
2022 2009 guards[g] += 1
2023 2010 if ui.verbose:
2024 2011 guards['NONE'] = noguards
2025 2012 guards = guards.items()
2026 2013 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2027 2014 if guards:
2028 2015 ui.note(_('guards in series file:\n'))
2029 2016 for guard, count in guards:
2030 2017 ui.note('%2d ' % count)
2031 2018 ui.write(guard, '\n')
2032 2019 else:
2033 2020 ui.note(_('no guards in series file\n'))
2034 2021 else:
2035 2022 if guards:
2036 2023 ui.note(_('active guards:\n'))
2037 2024 for g in guards:
2038 2025 ui.write(g, '\n')
2039 2026 else:
2040 2027 ui.write(_('no active guards\n'))
2041 2028 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2042 2029 popped = False
2043 2030 if opts['pop'] or opts['reapply']:
2044 2031 for i in xrange(len(q.applied)):
2045 2032 pushable, reason = q.pushable(i)
2046 2033 if not pushable:
2047 2034 ui.status(_('popping guarded patches\n'))
2048 2035 popped = True
2049 2036 if i == 0:
2050 2037 q.pop(repo, all=True)
2051 2038 else:
2052 2039 q.pop(repo, i-1)
2053 2040 break
2054 2041 if popped:
2055 2042 try:
2056 2043 if reapply:
2057 2044 ui.status(_('reapplying unguarded patches\n'))
2058 2045 q.push(repo, reapply)
2059 2046 finally:
2060 2047 q.save_dirty()
2061 2048
2062 2049 def reposetup(ui, repo):
2063 2050 class mqrepo(repo.__class__):
2064 2051 def abort_if_wdir_patched(self, errmsg, force=False):
2065 2052 if self.mq.applied and not force:
2066 2053 parent = revlog.hex(self.dirstate.parents()[0])
2067 2054 if parent in [s.rev for s in self.mq.applied]:
2068 2055 raise util.Abort(errmsg)
2069 2056
2070 2057 def commit(self, *args, **opts):
2071 2058 if len(args) >= 6:
2072 2059 force = args[5]
2073 2060 else:
2074 2061 force = opts.get('force')
2075 2062 self.abort_if_wdir_patched(
2076 2063 _('cannot commit over an applied mq patch'),
2077 2064 force)
2078 2065
2079 2066 return super(mqrepo, self).commit(*args, **opts)
2080 2067
2081 2068 def push(self, remote, force=False, revs=None):
2082 2069 if self.mq.applied and not force and not revs:
2083 2070 raise util.Abort(_('source has mq patches applied'))
2084 2071 return super(mqrepo, self).push(remote, force, revs)
2085 2072
2086 2073 def tags(self):
2087 2074 if self.tagscache:
2088 2075 return self.tagscache
2089 2076
2090 2077 tagscache = super(mqrepo, self).tags()
2091 2078
2092 2079 q = self.mq
2093 2080 if not q.applied:
2094 2081 return tagscache
2095 2082
2096 2083 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2097 2084 mqtags.append((mqtags[-1][0], 'qtip'))
2098 2085 mqtags.append((mqtags[0][0], 'qbase'))
2099 2086 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2100 2087 for patch in mqtags:
2101 2088 if patch[1] in tagscache:
2102 2089 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2103 2090 else:
2104 2091 tagscache[patch[1]] = patch[0]
2105 2092
2106 2093 return tagscache
2107 2094
2108 2095 def _branchtags(self):
2109 2096 q = self.mq
2110 2097 if not q.applied:
2111 2098 return super(mqrepo, self)._branchtags()
2112 2099
2113 2100 self.branchcache = {} # avoid recursion in changectx
2114 2101 cl = self.changelog
2115 2102 partial, last, lrev = self._readbranchcache()
2116 2103
2117 2104 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2118 2105 start = lrev + 1
2119 2106 if start < qbase:
2120 2107 # update the cache (excluding the patches) and save it
2121 2108 self._updatebranchcache(partial, lrev+1, qbase)
2122 2109 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2123 2110 start = qbase
2124 2111 # if start = qbase, the cache is as updated as it should be.
2125 2112 # if start > qbase, the cache includes (part of) the patches.
2126 2113 # we might as well use it, but we won't save it.
2127 2114
2128 2115 # update the cache up to the tip
2129 2116 self._updatebranchcache(partial, start, cl.count())
2130 2117
2131 2118 return partial
2132 2119
2133 2120 if repo.local():
2134 2121 repo.__class__ = mqrepo
2135 2122 repo.mq = queue(ui, repo.join(""))
2136 2123
2137 2124 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2138 2125
2139 2126 cmdtable = {
2140 2127 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2141 2128 "qclone":
2142 2129 (clone,
2143 2130 [('', 'pull', None, _('use pull protocol to copy metadata')),
2144 2131 ('U', 'noupdate', None, _('do not update the new working directories')),
2145 2132 ('', 'uncompressed', None,
2146 2133 _('use uncompressed transfer (fast over LAN)')),
2147 2134 ('e', 'ssh', '', _('specify ssh command to use')),
2148 2135 ('p', 'patches', '', _('location of source patch repo')),
2149 2136 ('', 'remotecmd', '',
2150 2137 _('specify hg command to run on the remote side'))],
2151 2138 _('hg qclone [OPTION]... SOURCE [DEST]')),
2152 2139 "qcommit|qci":
2153 2140 (commit,
2154 2141 commands.table["^commit|ci"][1],
2155 2142 _('hg qcommit [OPTION]... [FILE]...')),
2156 2143 "^qdiff":
2157 2144 (diff,
2158 2145 [('g', 'git', None, _('use git extended diff format')),
2159 2146 ('I', 'include', [], _('include names matching the given patterns')),
2160 2147 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2161 2148 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2162 2149 "qdelete|qremove|qrm":
2163 2150 (delete,
2164 2151 [('k', 'keep', None, _('keep patch file')),
2165 2152 ('r', 'rev', [], _('stop managing a revision'))],
2166 2153 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2167 2154 'qfold':
2168 2155 (fold,
2169 2156 [('e', 'edit', None, _('edit patch header')),
2170 2157 ('k', 'keep', None, _('keep folded patch files')),
2171 2158 ] + commands.commitopts,
2172 2159 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2173 2160 'qgoto':
2174 2161 (goto,
2175 2162 [('f', 'force', None, _('overwrite any local changes'))],
2176 2163 _('hg qgoto [OPTION]... PATCH')),
2177 2164 'qguard':
2178 2165 (guard,
2179 2166 [('l', 'list', None, _('list all patches and guards')),
2180 2167 ('n', 'none', None, _('drop all guards'))],
2181 2168 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2182 2169 'qheader': (header, [], _('hg qheader [PATCH]')),
2183 2170 "^qimport":
2184 2171 (qimport,
2185 2172 [('e', 'existing', None, 'import file in patch dir'),
2186 2173 ('n', 'name', '', 'patch file name'),
2187 2174 ('f', 'force', None, 'overwrite existing files'),
2188 2175 ('r', 'rev', [], 'place existing revisions under mq control'),
2189 2176 ('g', 'git', None, _('use git extended diff format'))],
2190 2177 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2191 2178 "^qinit":
2192 2179 (init,
2193 2180 [('c', 'create-repo', None, 'create queue repository')],
2194 2181 _('hg qinit [-c]')),
2195 2182 "qnew":
2196 2183 (new,
2197 2184 [('e', 'edit', None, _('edit commit message')),
2198 2185 ('f', 'force', None, _('import uncommitted changes into patch')),
2199 2186 ('I', 'include', [], _('include names matching the given patterns')),
2200 2187 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2201 2188 ] + commands.commitopts,
2202 2189 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2203 2190 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2204 2191 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2205 2192 "^qpop":
2206 2193 (pop,
2207 2194 [('a', 'all', None, _('pop all patches')),
2208 2195 ('n', 'name', '', _('queue name to pop')),
2209 2196 ('f', 'force', None, _('forget any local changes'))],
2210 2197 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2211 2198 "^qpush":
2212 2199 (push,
2213 2200 [('f', 'force', None, _('apply if the patch has rejects')),
2214 2201 ('l', 'list', None, _('list patch name in commit text')),
2215 2202 ('a', 'all', None, _('apply all patches')),
2216 2203 ('m', 'merge', None, _('merge from another queue')),
2217 2204 ('n', 'name', '', _('merge queue name'))],
2218 2205 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2219 2206 "^qrefresh":
2220 2207 (refresh,
2221 2208 [('e', 'edit', None, _('edit commit message')),
2222 2209 ('g', 'git', None, _('use git extended diff format')),
2223 2210 ('s', 'short', None, _('refresh only files already in the patch')),
2224 2211 ('I', 'include', [], _('include names matching the given patterns')),
2225 2212 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2226 2213 ] + commands.commitopts,
2227 2214 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2228 2215 'qrename|qmv':
2229 2216 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2230 2217 "qrestore":
2231 2218 (restore,
2232 2219 [('d', 'delete', None, _('delete save entry')),
2233 2220 ('u', 'update', None, _('update queue working dir'))],
2234 2221 _('hg qrestore [-d] [-u] REV')),
2235 2222 "qsave":
2236 2223 (save,
2237 2224 [('c', 'copy', None, _('copy patch directory')),
2238 2225 ('n', 'name', '', _('copy directory name')),
2239 2226 ('e', 'empty', None, _('clear queue status file')),
2240 2227 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2241 2228 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2242 2229 "qselect":
2243 2230 (select,
2244 2231 [('n', 'none', None, _('disable all guards')),
2245 2232 ('s', 'series', None, _('list all guards in series file')),
2246 2233 ('', 'pop', None, _('pop to before first guarded applied patch')),
2247 2234 ('', 'reapply', None, _('pop, then reapply patches'))],
2248 2235 _('hg qselect [OPTION]... [GUARD]...')),
2249 2236 "qseries":
2250 2237 (series,
2251 2238 [('m', 'missing', None, _('print patches not in series')),
2252 2239 ] + seriesopts,
2253 2240 _('hg qseries [-ms]')),
2254 2241 "^strip":
2255 2242 (strip,
2256 2243 [('f', 'force', None, _('force multi-head removal')),
2257 2244 ('b', 'backup', None, _('bundle unrelated changesets')),
2258 2245 ('n', 'nobackup', None, _('no backups'))],
2259 2246 _('hg strip [-f] [-b] [-n] REV')),
2260 2247 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2261 2248 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2262 2249 }
@@ -1,600 +1,597 b''
1 1 # Patch transplanting extension for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 import os, tempfile
10 10 from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
11 11 from mercurial import patch, revlog, util
12 12
13 13 '''patch transplanting tool
14 14
15 15 This extension allows you to transplant patches from another branch.
16 16
17 17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
18 18 from a changeset hash to its hash in the source repository.
19 19 '''
20 20
21 21 class transplantentry:
22 22 def __init__(self, lnode, rnode):
23 23 self.lnode = lnode
24 24 self.rnode = rnode
25 25
26 26 class transplants:
27 27 def __init__(self, path=None, transplantfile=None, opener=None):
28 28 self.path = path
29 29 self.transplantfile = transplantfile
30 30 self.opener = opener
31 31
32 32 if not opener:
33 33 self.opener = util.opener(self.path)
34 34 self.transplants = []
35 35 self.dirty = False
36 36 self.read()
37 37
38 38 def read(self):
39 39 abspath = os.path.join(self.path, self.transplantfile)
40 40 if self.transplantfile and os.path.exists(abspath):
41 41 for line in self.opener(self.transplantfile).read().splitlines():
42 42 lnode, rnode = map(revlog.bin, line.split(':'))
43 43 self.transplants.append(transplantentry(lnode, rnode))
44 44
45 45 def write(self):
46 46 if self.dirty and self.transplantfile:
47 47 if not os.path.isdir(self.path):
48 48 os.mkdir(self.path)
49 49 fp = self.opener(self.transplantfile, 'w')
50 50 for c in self.transplants:
51 51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 52 fp.write(l + ':' + r + '\n')
53 53 fp.close()
54 54 self.dirty = False
55 55
56 56 def get(self, rnode):
57 57 return [t for t in self.transplants if t.rnode == rnode]
58 58
59 59 def set(self, lnode, rnode):
60 60 self.transplants.append(transplantentry(lnode, rnode))
61 61 self.dirty = True
62 62
63 63 def remove(self, transplant):
64 64 del self.transplants[self.transplants.index(transplant)]
65 65 self.dirty = True
66 66
67 67 class transplanter:
68 68 def __init__(self, ui, repo):
69 69 self.ui = ui
70 70 self.path = repo.join('transplant')
71 71 self.opener = util.opener(self.path)
72 72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
73 73
74 74 def applied(self, repo, node, parent):
75 75 '''returns True if a node is already an ancestor of parent
76 76 or has already been transplanted'''
77 77 if hasnode(repo, node):
78 78 if node in repo.changelog.reachable(parent, stop=node):
79 79 return True
80 80 for t in self.transplants.get(node):
81 81 # it might have been stripped
82 82 if not hasnode(repo, t.lnode):
83 83 self.transplants.remove(t)
84 84 return False
85 85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 86 return True
87 87 return False
88 88
89 89 def apply(self, repo, source, revmap, merges, opts={}):
90 90 '''apply the revisions in revmap one by one in revision order'''
91 91 revs = revmap.keys()
92 92 revs.sort()
93 93
94 94 p1, p2 = repo.dirstate.parents()
95 95 pulls = []
96 96 diffopts = patch.diffopts(self.ui, opts)
97 97 diffopts.git = True
98 98
99 99 lock = wlock = None
100 100 try:
101 101 wlock = repo.wlock()
102 102 lock = repo.lock()
103 103 for rev in revs:
104 104 node = revmap[rev]
105 105 revstr = '%s:%s' % (rev, revlog.short(node))
106 106
107 107 if self.applied(repo, node, p1):
108 108 self.ui.warn(_('skipping already applied revision %s\n') %
109 109 revstr)
110 110 continue
111 111
112 112 parents = source.changelog.parents(node)
113 113 if not opts.get('filter'):
114 114 # If the changeset parent is the same as the wdir's parent,
115 115 # just pull it.
116 116 if parents[0] == p1:
117 117 pulls.append(node)
118 118 p1 = node
119 119 continue
120 120 if pulls:
121 121 if source != repo:
122 repo.pull(source, heads=pulls, lock=lock)
123 merge.update(repo, pulls[-1], False, False, None,
124 wlock=wlock)
122 repo.pull(source, heads=pulls)
123 merge.update(repo, pulls[-1], False, False, None)
125 124 p1, p2 = repo.dirstate.parents()
126 125 pulls = []
127 126
128 127 domerge = False
129 128 if node in merges:
130 129 # pulling all the merge revs at once would mean we couldn't
131 130 # transplant after the latest even if transplants before them
132 131 # fail.
133 132 domerge = True
134 133 if not hasnode(repo, node):
135 repo.pull(source, heads=[node], lock=lock)
134 repo.pull(source, heads=[node])
136 135
137 136 if parents[1] != revlog.nullid:
138 137 self.ui.note(_('skipping merge changeset %s:%s\n')
139 138 % (rev, revlog.short(node)))
140 139 patchfile = None
141 140 else:
142 141 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
143 142 fp = os.fdopen(fd, 'w')
144 143 patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
145 144 fp.close()
146 145
147 146 del revmap[rev]
148 147 if patchfile or domerge:
149 148 try:
150 n = self.applyone(repo, node, source.changelog.read(node),
149 n = self.applyone(repo, node,
150 source.changelog.read(node),
151 151 patchfile, merge=domerge,
152 152 log=opts.get('log'),
153 filter=opts.get('filter'),
154 lock=lock, wlock=wlock)
153 filter=opts.get('filter'))
155 154 if n and domerge:
156 155 self.ui.status(_('%s merged at %s\n') % (revstr,
157 156 revlog.short(n)))
158 157 elif n:
159 158 self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
160 159 revlog.short(n)))
161 160 finally:
162 161 if patchfile:
163 162 os.unlink(patchfile)
164 163 if pulls:
165 repo.pull(source, heads=pulls, lock=lock)
166 merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
164 repo.pull(source, heads=pulls)
165 merge.update(repo, pulls[-1], False, False, None)
167 166 finally:
168 167 self.saveseries(revmap, merges)
169 168 self.transplants.write()
170 169 del lock, wlock
171 170
172 171 def filter(self, filter, changelog, patchfile):
173 172 '''arbitrarily rewrite changeset before applying it'''
174 173
175 174 self.ui.status('filtering %s\n' % patchfile)
176 175 user, date, msg = (changelog[1], changelog[2], changelog[4])
177 176
178 177 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
179 178 fp = os.fdopen(fd, 'w')
180 179 fp.write("# HG changeset patch\n")
181 180 fp.write("# User %s\n" % user)
182 181 fp.write("# Date %d %d\n" % date)
183 182 fp.write(changelog[4])
184 183 fp.close()
185 184
186 185 try:
187 186 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
188 187 util.shellquote(patchfile)),
189 188 environ={'HGUSER': changelog[1]},
190 189 onerr=util.Abort, errprefix=_('filter failed'))
191 190 user, date, msg = self.parselog(file(headerfile))[1:4]
192 191 finally:
193 192 os.unlink(headerfile)
194 193
195 194 return (user, date, msg)
196 195
197 196 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
198 filter=None, lock=None, wlock=None):
197 filter=None):
199 198 '''apply the patch in patchfile to the repository as a transplant'''
200 199 (manifest, user, (time, timezone), files, message) = cl[:5]
201 200 date = "%d %d" % (time, timezone)
202 201 extra = {'transplant_source': node}
203 202 if filter:
204 203 (user, date, message) = self.filter(filter, cl, patchfile)
205 204
206 205 if log:
207 206 message += '\n(transplanted from %s)' % revlog.hex(node)
208 207
209 208 self.ui.status(_('applying %s\n') % revlog.short(node))
210 209 self.ui.note('%s %s\n%s\n' % (user, date, message))
211 210
212 211 if not patchfile and not merge:
213 212 raise util.Abort(_('can only omit patchfile if merging'))
214 213 if patchfile:
215 214 try:
216 215 files = {}
217 216 try:
218 217 fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
219 218 files=files)
220 219 if not files:
221 220 self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
222 221 return None
223 222 finally:
224 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
223 files = patch.updatedir(self.ui, repo, files)
225 224 except Exception, inst:
226 225 if filter:
227 226 os.unlink(patchfile)
228 227 seriespath = os.path.join(self.path, 'series')
229 228 if os.path.exists(seriespath):
230 229 os.unlink(seriespath)
231 230 p1 = repo.dirstate.parents()[0]
232 231 p2 = node
233 232 self.log(user, date, message, p1, p2, merge=merge)
234 233 self.ui.write(str(inst) + '\n')
235 234 raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
236 235 else:
237 236 files = None
238 237 if merge:
239 238 p1, p2 = repo.dirstate.parents()
240 239 repo.dirstate.setparents(p1, node)
241 240
242 n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
243 extra=extra)
241 n = repo.commit(files, message, user, date, extra=extra)
244 242 if not merge:
245 243 self.transplants.set(n, node)
246 244
247 245 return n
248 246
249 247 def resume(self, repo, source, opts=None):
250 248 '''recover last transaction and apply remaining changesets'''
251 249 if os.path.exists(os.path.join(self.path, 'journal')):
252 250 n, node = self.recover(repo)
253 251 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
254 252 revlog.short(n)))
255 253 seriespath = os.path.join(self.path, 'series')
256 254 if not os.path.exists(seriespath):
257 255 self.transplants.write()
258 256 return
259 257 nodes, merges = self.readseries()
260 258 revmap = {}
261 259 for n in nodes:
262 260 revmap[source.changelog.rev(n)] = n
263 261 os.unlink(seriespath)
264 262
265 263 self.apply(repo, source, revmap, merges, opts)
266 264
267 265 def recover(self, repo):
268 266 '''commit working directory using journal metadata'''
269 267 node, user, date, message, parents = self.readlog()
270 268 merge = len(parents) == 2
271 269
272 270 if not user or not date or not message or not parents[0]:
273 271 raise util.Abort(_('transplant log file is corrupt'))
274 272
275 273 extra = {'transplant_source': node}
276 274 wlock = repo.wlock()
277 275 try:
278 276 p1, p2 = repo.dirstate.parents()
279 277 if p1 != parents[0]:
280 278 raise util.Abort(
281 279 _('working dir not at transplant parent %s') %
282 280 revlog.hex(parents[0]))
283 281 if merge:
284 282 repo.dirstate.setparents(p1, parents[1])
285 n = repo.commit(None, message, user, date, wlock=wlock,
286 extra=extra)
283 n = repo.commit(None, message, user, date, extra=extra)
287 284 if not n:
288 285 raise util.Abort(_('commit failed'))
289 286 if not merge:
290 287 self.transplants.set(n, node)
291 288 self.unlog()
292 289
293 290 return n, node
294 291 finally:
295 292 del wlock
296 293
297 294 def readseries(self):
298 295 nodes = []
299 296 merges = []
300 297 cur = nodes
301 298 for line in self.opener('series').read().splitlines():
302 299 if line.startswith('# Merges'):
303 300 cur = merges
304 301 continue
305 302 cur.append(revlog.bin(line))
306 303
307 304 return (nodes, merges)
308 305
309 306 def saveseries(self, revmap, merges):
310 307 if not revmap:
311 308 return
312 309
313 310 if not os.path.isdir(self.path):
314 311 os.mkdir(self.path)
315 312 series = self.opener('series', 'w')
316 313 revs = revmap.keys()
317 314 revs.sort()
318 315 for rev in revs:
319 316 series.write(revlog.hex(revmap[rev]) + '\n')
320 317 if merges:
321 318 series.write('# Merges\n')
322 319 for m in merges:
323 320 series.write(revlog.hex(m) + '\n')
324 321 series.close()
325 322
326 323 def parselog(self, fp):
327 324 parents = []
328 325 message = []
329 326 node = revlog.nullid
330 327 inmsg = False
331 328 for line in fp.read().splitlines():
332 329 if inmsg:
333 330 message.append(line)
334 331 elif line.startswith('# User '):
335 332 user = line[7:]
336 333 elif line.startswith('# Date '):
337 334 date = line[7:]
338 335 elif line.startswith('# Node ID '):
339 336 node = revlog.bin(line[10:])
340 337 elif line.startswith('# Parent '):
341 338 parents.append(revlog.bin(line[9:]))
342 339 elif not line.startswith('#'):
343 340 inmsg = True
344 341 message.append(line)
345 342 return (node, user, date, '\n'.join(message), parents)
346 343
347 344 def log(self, user, date, message, p1, p2, merge=False):
348 345 '''journal changelog metadata for later recover'''
349 346
350 347 if not os.path.isdir(self.path):
351 348 os.mkdir(self.path)
352 349 fp = self.opener('journal', 'w')
353 350 fp.write('# User %s\n' % user)
354 351 fp.write('# Date %s\n' % date)
355 352 fp.write('# Node ID %s\n' % revlog.hex(p2))
356 353 fp.write('# Parent ' + revlog.hex(p1) + '\n')
357 354 if merge:
358 355 fp.write('# Parent ' + revlog.hex(p2) + '\n')
359 356 fp.write(message.rstrip() + '\n')
360 357 fp.close()
361 358
362 359 def readlog(self):
363 360 return self.parselog(self.opener('journal'))
364 361
365 362 def unlog(self):
366 363 '''remove changelog journal'''
367 364 absdst = os.path.join(self.path, 'journal')
368 365 if os.path.exists(absdst):
369 366 os.unlink(absdst)
370 367
371 368 def transplantfilter(self, repo, source, root):
372 369 def matchfn(node):
373 370 if self.applied(repo, node, root):
374 371 return False
375 372 if source.changelog.parents(node)[1] != revlog.nullid:
376 373 return False
377 374 extra = source.changelog.read(node)[5]
378 375 cnode = extra.get('transplant_source')
379 376 if cnode and self.applied(repo, cnode, root):
380 377 return False
381 378 return True
382 379
383 380 return matchfn
384 381
385 382 def hasnode(repo, node):
386 383 try:
387 384 return repo.changelog.rev(node) != None
388 385 except revlog.RevlogError:
389 386 return False
390 387
391 388 def browserevs(ui, repo, nodes, opts):
392 389 '''interactively transplant changesets'''
393 390 def browsehelp(ui):
394 391 ui.write('y: transplant this changeset\n'
395 392 'n: skip this changeset\n'
396 393 'm: merge at this changeset\n'
397 394 'p: show patch\n'
398 395 'c: commit selected changesets\n'
399 396 'q: cancel transplant\n'
400 397 '?: show this help\n')
401 398
402 399 displayer = cmdutil.show_changeset(ui, repo, opts)
403 400 transplants = []
404 401 merges = []
405 402 for node in nodes:
406 403 displayer.show(changenode=node)
407 404 action = None
408 405 while not action:
409 406 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
410 407 if action == '?':
411 408 browsehelp(ui)
412 409 action = None
413 410 elif action == 'p':
414 411 parent = repo.changelog.parents(node)[0]
415 412 patch.diff(repo, parent, node)
416 413 action = None
417 414 elif action not in ('y', 'n', 'm', 'c', 'q'):
418 415 ui.write('no such option\n')
419 416 action = None
420 417 if action == 'y':
421 418 transplants.append(node)
422 419 elif action == 'm':
423 420 merges.append(node)
424 421 elif action == 'c':
425 422 break
426 423 elif action == 'q':
427 424 transplants = ()
428 425 merges = ()
429 426 break
430 427 return (transplants, merges)
431 428
432 429 def transplant(ui, repo, *revs, **opts):
433 430 '''transplant changesets from another branch
434 431
435 432 Selected changesets will be applied on top of the current working
436 433 directory with the log of the original changeset. If --log is
437 434 specified, log messages will have a comment appended of the form:
438 435
439 436 (transplanted from CHANGESETHASH)
440 437
441 438 You can rewrite the changelog message with the --filter option.
442 439 Its argument will be invoked with the current changelog message
443 440 as $1 and the patch as $2.
444 441
445 442 If --source is specified, selects changesets from the named
446 443 repository. If --branch is specified, selects changesets from the
447 444 branch holding the named revision, up to that revision. If --all
448 445 is specified, all changesets on the branch will be transplanted,
449 446 otherwise you will be prompted to select the changesets you want.
450 447
451 448 hg transplant --branch REVISION --all will rebase the selected branch
452 449 (up to the named revision) onto your current working directory.
453 450
454 451 You can optionally mark selected transplanted changesets as
455 452 merge changesets. You will not be prompted to transplant any
456 453 ancestors of a merged transplant, and you can merge descendants
457 454 of them normally instead of transplanting them.
458 455
459 456 If no merges or revisions are provided, hg transplant will start
460 457 an interactive changeset browser.
461 458
462 459 If a changeset application fails, you can fix the merge by hand and
463 460 then resume where you left off by calling hg transplant --continue.
464 461 '''
465 462 def getoneitem(opts, item, errmsg):
466 463 val = opts.get(item)
467 464 if val:
468 465 if len(val) > 1:
469 466 raise util.Abort(errmsg)
470 467 else:
471 468 return val[0]
472 469
473 470 def getremotechanges(repo, url):
474 471 sourcerepo = ui.expandpath(url)
475 472 source = hg.repository(ui, sourcerepo)
476 473 incoming = repo.findincoming(source, force=True)
477 474 if not incoming:
478 475 return (source, None, None)
479 476
480 477 bundle = None
481 478 if not source.local():
482 479 cg = source.changegroup(incoming, 'incoming')
483 480 bundle = changegroup.writebundle(cg, None, 'HG10UN')
484 481 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
485 482
486 483 return (source, incoming, bundle)
487 484
488 485 def incwalk(repo, incoming, branches, match=util.always):
489 486 if not branches:
490 487 branches=None
491 488 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
492 489 if match(node):
493 490 yield node
494 491
495 492 def transplantwalk(repo, root, branches, match=util.always):
496 493 if not branches:
497 494 branches = repo.heads()
498 495 ancestors = []
499 496 for branch in branches:
500 497 ancestors.append(repo.changelog.ancestor(root, branch))
501 498 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
502 499 if match(node):
503 500 yield node
504 501
505 502 def checkopts(opts, revs):
506 503 if opts.get('continue'):
507 504 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
508 505 raise util.Abort(_('--continue is incompatible with branch, all or merge'))
509 506 return
510 507 if not (opts.get('source') or revs or
511 508 opts.get('merge') or opts.get('branch')):
512 509 raise util.Abort(_('no source URL, branch tag or revision list provided'))
513 510 if opts.get('all'):
514 511 if not opts.get('branch'):
515 512 raise util.Abort(_('--all requires a branch revision'))
516 513 if revs:
517 514 raise util.Abort(_('--all is incompatible with a revision list'))
518 515
519 516 checkopts(opts, revs)
520 517
521 518 if not opts.get('log'):
522 519 opts['log'] = ui.config('transplant', 'log')
523 520 if not opts.get('filter'):
524 521 opts['filter'] = ui.config('transplant', 'filter')
525 522
526 523 tp = transplanter(ui, repo)
527 524
528 525 p1, p2 = repo.dirstate.parents()
529 526 if p1 == revlog.nullid:
530 527 raise util.Abort(_('no revision checked out'))
531 528 if not opts.get('continue'):
532 529 if p2 != revlog.nullid:
533 530 raise util.Abort(_('outstanding uncommitted merges'))
534 531 m, a, r, d = repo.status()[:4]
535 532 if m or a or r or d:
536 533 raise util.Abort(_('outstanding local changes'))
537 534
538 535 bundle = None
539 536 source = opts.get('source')
540 537 if source:
541 538 (source, incoming, bundle) = getremotechanges(repo, source)
542 539 else:
543 540 source = repo
544 541
545 542 try:
546 543 if opts.get('continue'):
547 544 tp.resume(repo, source, opts)
548 545 return
549 546
550 547 tf=tp.transplantfilter(repo, source, p1)
551 548 if opts.get('prune'):
552 549 prune = [source.lookup(r)
553 550 for r in cmdutil.revrange(source, opts.get('prune'))]
554 551 matchfn = lambda x: tf(x) and x not in prune
555 552 else:
556 553 matchfn = tf
557 554 branches = map(source.lookup, opts.get('branch', ()))
558 555 merges = map(source.lookup, opts.get('merge', ()))
559 556 revmap = {}
560 557 if revs:
561 558 for r in cmdutil.revrange(source, revs):
562 559 revmap[int(r)] = source.lookup(r)
563 560 elif opts.get('all') or not merges:
564 561 if source != repo:
565 562 alltransplants = incwalk(source, incoming, branches, match=matchfn)
566 563 else:
567 564 alltransplants = transplantwalk(source, p1, branches, match=matchfn)
568 565 if opts.get('all'):
569 566 revs = alltransplants
570 567 else:
571 568 revs, newmerges = browserevs(ui, source, alltransplants, opts)
572 569 merges.extend(newmerges)
573 570 for r in revs:
574 571 revmap[source.changelog.rev(r)] = r
575 572 for r in merges:
576 573 revmap[source.changelog.rev(r)] = r
577 574
578 575 revs = revmap.keys()
579 576 revs.sort()
580 577 pulls = []
581 578
582 579 tp.apply(repo, source, revmap, merges, opts)
583 580 finally:
584 581 if bundle:
585 582 source.close()
586 583 os.unlink(bundle)
587 584
588 585 cmdtable = {
589 586 "transplant":
590 587 (transplant,
591 588 [('s', 'source', '', _('pull patches from REPOSITORY')),
592 589 ('b', 'branch', [], _('pull patches from branch BRANCH')),
593 590 ('a', 'all', None, _('pull all changesets up to BRANCH')),
594 591 ('p', 'prune', [], _('skip over REV')),
595 592 ('m', 'merge', [], _('merge at REV')),
596 593 ('', 'log', None, _('append transplant info to log message')),
597 594 ('c', 'continue', None, _('continue last transplant session after repair')),
598 595 ('', 'filter', '', _('filter changesets through FILTER'))],
599 596 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] [-m REV] [REV]...'))
600 597 }
@@ -1,1278 +1,1277 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
11 11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
12 12 import fancyopts, revlog, version, extensions, hook
13 13
14 14 revrangesep = ':'
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18 class AmbiguousCommand(Exception):
19 19 """Exception raised if command shortcut matches more than one command."""
20 20 class ParseError(Exception):
21 21 """Exception raised on errors in parsing the command line."""
22 22
23 23 def runcatch(ui, args, argv0=None):
24 24 def catchterm(*args):
25 25 raise util.SignalInterrupt
26 26
27 27 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
28 28 num = getattr(signal, name, None)
29 29 if num: signal.signal(num, catchterm)
30 30
31 31 try:
32 32 try:
33 33 # enter the debugger before command execution
34 34 if '--debugger' in args:
35 35 pdb.set_trace()
36 36 try:
37 37 return dispatch(ui, args, argv0=argv0)
38 38 finally:
39 39 ui.flush()
40 40 except:
41 41 # enter the debugger when we hit an exception
42 42 if '--debugger' in args:
43 43 pdb.post_mortem(sys.exc_info()[2])
44 44 ui.print_exc()
45 45 raise
46 46
47 47 except ParseError, inst:
48 48 if inst.args[0]:
49 49 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
50 50 commands.help_(ui, inst.args[0])
51 51 else:
52 52 ui.warn(_("hg: %s\n") % inst.args[1])
53 53 commands.help_(ui, 'shortlist')
54 54 except AmbiguousCommand, inst:
55 55 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
56 56 (inst.args[0], " ".join(inst.args[1])))
57 57 except UnknownCommand, inst:
58 58 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
59 59 commands.help_(ui, 'shortlist')
60 60 except hg.RepoError, inst:
61 61 ui.warn(_("abort: %s!\n") % inst)
62 62 except lock.LockHeld, inst:
63 63 if inst.errno == errno.ETIMEDOUT:
64 64 reason = _('timed out waiting for lock held by %s') % inst.locker
65 65 else:
66 66 reason = _('lock held by %s') % inst.locker
67 67 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
68 68 except lock.LockUnavailable, inst:
69 69 ui.warn(_("abort: could not lock %s: %s\n") %
70 70 (inst.desc or inst.filename, inst.strerror))
71 71 except revlog.RevlogError, inst:
72 72 ui.warn(_("abort: %s!\n") % inst)
73 73 except util.SignalInterrupt:
74 74 ui.warn(_("killed!\n"))
75 75 except KeyboardInterrupt:
76 76 try:
77 77 ui.warn(_("interrupted!\n"))
78 78 except IOError, inst:
79 79 if inst.errno == errno.EPIPE:
80 80 if ui.debugflag:
81 81 ui.warn(_("\nbroken pipe\n"))
82 82 else:
83 83 raise
84 84 except socket.error, inst:
85 85 ui.warn(_("abort: %s\n") % inst[1])
86 86 except IOError, inst:
87 87 if hasattr(inst, "code"):
88 88 ui.warn(_("abort: %s\n") % inst)
89 89 elif hasattr(inst, "reason"):
90 90 try: # usually it is in the form (errno, strerror)
91 91 reason = inst.reason.args[1]
92 92 except: # it might be anything, for example a string
93 93 reason = inst.reason
94 94 ui.warn(_("abort: error: %s\n") % reason)
95 95 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
96 96 if ui.debugflag:
97 97 ui.warn(_("broken pipe\n"))
98 98 elif getattr(inst, "strerror", None):
99 99 if getattr(inst, "filename", None):
100 100 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
101 101 else:
102 102 ui.warn(_("abort: %s\n") % inst.strerror)
103 103 else:
104 104 raise
105 105 except OSError, inst:
106 106 if getattr(inst, "filename", None):
107 107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
108 108 else:
109 109 ui.warn(_("abort: %s\n") % inst.strerror)
110 110 except util.UnexpectedOutput, inst:
111 111 ui.warn(_("abort: %s") % inst[0])
112 112 if not isinstance(inst[1], basestring):
113 113 ui.warn(" %r\n" % (inst[1],))
114 114 elif not inst[1]:
115 115 ui.warn(_(" empty string\n"))
116 116 else:
117 117 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
118 118 except ImportError, inst:
119 119 m = str(inst).split()[-1]
120 120 ui.warn(_("abort: could not import module %s!\n" % m))
121 121 if m in "mpatch bdiff".split():
122 122 ui.warn(_("(did you forget to compile extensions?)\n"))
123 123 elif m in "zlib".split():
124 124 ui.warn(_("(is your Python install correct?)\n"))
125 125
126 126 except util.Abort, inst:
127 127 ui.warn(_("abort: %s\n") % inst)
128 128 except SystemExit, inst:
129 129 # Commands shouldn't sys.exit directly, but give a return code.
130 130 # Just in case catch this and and pass exit code to caller.
131 131 return inst.code
132 132 except:
133 133 ui.warn(_("** unknown exception encountered, details follow\n"))
134 134 ui.warn(_("** report bug details to "
135 135 "http://www.selenic.com/mercurial/bts\n"))
136 136 ui.warn(_("** or mercurial@selenic.com\n"))
137 137 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
138 138 % version.get_version())
139 139 raise
140 140
141 141 return -1
142 142
143 143 def findpossible(ui, cmd):
144 144 """
145 145 Return cmd -> (aliases, command table entry)
146 146 for each matching command.
147 147 Return debug commands (or their aliases) only if no normal command matches.
148 148 """
149 149 choice = {}
150 150 debugchoice = {}
151 151 for e in commands.table.keys():
152 152 aliases = e.lstrip("^").split("|")
153 153 found = None
154 154 if cmd in aliases:
155 155 found = cmd
156 156 elif not ui.config("ui", "strict"):
157 157 for a in aliases:
158 158 if a.startswith(cmd):
159 159 found = a
160 160 break
161 161 if found is not None:
162 162 if aliases[0].startswith("debug") or found.startswith("debug"):
163 163 debugchoice[found] = (aliases, commands.table[e])
164 164 else:
165 165 choice[found] = (aliases, commands.table[e])
166 166
167 167 if not choice and debugchoice:
168 168 choice = debugchoice
169 169
170 170 return choice
171 171
172 172 def findcmd(ui, cmd):
173 173 """Return (aliases, command table entry) for command string."""
174 174 choice = findpossible(ui, cmd)
175 175
176 176 if choice.has_key(cmd):
177 177 return choice[cmd]
178 178
179 179 if len(choice) > 1:
180 180 clist = choice.keys()
181 181 clist.sort()
182 182 raise AmbiguousCommand(cmd, clist)
183 183
184 184 if choice:
185 185 return choice.values()[0]
186 186
187 187 raise UnknownCommand(cmd)
188 188
189 189 def findrepo():
190 190 p = os.getcwd()
191 191 while not os.path.isdir(os.path.join(p, ".hg")):
192 192 oldp, p = p, os.path.dirname(p)
193 193 if p == oldp:
194 194 return None
195 195
196 196 return p
197 197
198 198 def parse(ui, args):
199 199 options = {}
200 200 cmdoptions = {}
201 201
202 202 try:
203 203 args = fancyopts.fancyopts(args, commands.globalopts, options)
204 204 except fancyopts.getopt.GetoptError, inst:
205 205 raise ParseError(None, inst)
206 206
207 207 if args:
208 208 cmd, args = args[0], args[1:]
209 209 aliases, i = findcmd(ui, cmd)
210 210 cmd = aliases[0]
211 211 defaults = ui.config("defaults", cmd)
212 212 if defaults:
213 213 args = shlex.split(defaults) + args
214 214 c = list(i[1])
215 215 else:
216 216 cmd = None
217 217 c = []
218 218
219 219 # combine global options into local
220 220 for o in commands.globalopts:
221 221 c.append((o[0], o[1], options[o[1]], o[3]))
222 222
223 223 try:
224 224 args = fancyopts.fancyopts(args, c, cmdoptions)
225 225 except fancyopts.getopt.GetoptError, inst:
226 226 raise ParseError(cmd, inst)
227 227
228 228 # separate global options back out
229 229 for o in commands.globalopts:
230 230 n = o[1]
231 231 options[n] = cmdoptions[n]
232 232 del cmdoptions[n]
233 233
234 234 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
235 235
236 236 def parseconfig(config):
237 237 """parse the --config options from the command line"""
238 238 parsed = []
239 239 for cfg in config:
240 240 try:
241 241 name, value = cfg.split('=', 1)
242 242 section, name = name.split('.', 1)
243 243 if not section or not name:
244 244 raise IndexError
245 245 parsed.append((section, name, value))
246 246 except (IndexError, ValueError):
247 247 raise util.Abort(_('malformed --config option: %s') % cfg)
248 248 return parsed
249 249
250 250 def earlygetopt(aliases, args):
251 251 """Return list of values for an option (or aliases).
252 252
253 253 The values are listed in the order they appear in args.
254 254 The options and values are removed from args.
255 255 """
256 256 try:
257 257 argcount = args.index("--")
258 258 except ValueError:
259 259 argcount = len(args)
260 260 shortopts = [opt for opt in aliases if len(opt) == 2]
261 261 values = []
262 262 pos = 0
263 263 while pos < argcount:
264 264 if args[pos] in aliases:
265 265 if pos + 1 >= argcount:
266 266 # ignore and let getopt report an error if there is no value
267 267 break
268 268 del args[pos]
269 269 values.append(args.pop(pos))
270 270 argcount -= 2
271 271 elif args[pos][:2] in shortopts:
272 272 # short option can have no following space, e.g. hg log -Rfoo
273 273 values.append(args.pop(pos)[2:])
274 274 argcount -= 1
275 275 else:
276 276 pos += 1
277 277 return values
278 278
279 279 def dispatch(ui, args, argv0=None):
280 280 # remember how to call 'hg' before changing the working dir
281 281 util.set_hgexecutable(argv0)
282 282
283 283 # read --config before doing anything else
284 284 # (e.g. to change trust settings for reading .hg/hgrc)
285 285 config = earlygetopt(['--config'], args)
286 286 if config:
287 287 ui.updateopts(config=parseconfig(config))
288 288
289 289 # check for cwd
290 290 cwd = earlygetopt(['--cwd'], args)
291 291 if cwd:
292 292 os.chdir(cwd[-1])
293 293
294 294 # read the local repository .hgrc into a local ui object
295 295 path = findrepo() or ""
296 296 if not path:
297 297 lui = ui
298 298 if path:
299 299 try:
300 300 lui = commands.ui.ui(parentui=ui)
301 301 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
302 302 except IOError:
303 303 pass
304 304
305 305 # now we can expand paths, even ones in .hg/hgrc
306 306 rpath = earlygetopt(["-R", "--repository", "--repo"], args)
307 307 if rpath:
308 308 path = lui.expandpath(rpath[-1])
309 309 lui = commands.ui.ui(parentui=ui)
310 310 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
311 311
312 312 extensions.loadall(lui)
313 313 # check for fallback encoding
314 314 fallback = lui.config('ui', 'fallbackencoding')
315 315 if fallback:
316 316 util._fallbackencoding = fallback
317 317
318 318 fullargs = args
319 319 cmd, func, args, options, cmdoptions = parse(ui, args)
320 320
321 321 if options["config"]:
322 322 raise util.Abort(_("Option --config may not be abbreviated!"))
323 323 if options["cwd"]:
324 324 raise util.Abort(_("Option --cwd may not be abbreviated!"))
325 325 if options["repository"]:
326 326 raise util.Abort(_(
327 327 "Option -R has to be separated from other options (i.e. not -qR) "
328 328 "and --repository may only be abbreviated as --repo!"))
329 329
330 330 if options["encoding"]:
331 331 util._encoding = options["encoding"]
332 332 if options["encodingmode"]:
333 333 util._encodingmode = options["encodingmode"]
334 334 if options["time"]:
335 335 def get_times():
336 336 t = os.times()
337 337 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
338 338 t = (t[0], t[1], t[2], t[3], time.clock())
339 339 return t
340 340 s = get_times()
341 341 def print_time():
342 342 t = get_times()
343 343 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
344 344 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
345 345 atexit.register(print_time)
346 346
347 347 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
348 348 not options["noninteractive"], options["traceback"])
349 349
350 350 if options['help']:
351 351 return commands.help_(ui, cmd, options['version'])
352 352 elif options['version']:
353 353 return commands.version_(ui)
354 354 elif not cmd:
355 355 return commands.help_(ui, 'shortlist')
356 356
357 357 repo = None
358 358 if cmd not in commands.norepo.split():
359 359 try:
360 360 repo = hg.repository(ui, path=path)
361 361 ui = repo.ui
362 362 if not repo.local():
363 363 raise util.Abort(_("repository '%s' is not local") % path)
364 364 except hg.RepoError:
365 365 if cmd not in commands.optionalrepo.split():
366 366 if not path:
367 367 raise hg.RepoError(_("There is no Mercurial repository here"
368 368 " (.hg not found)"))
369 369 raise
370 370 d = lambda: func(ui, repo, *args, **cmdoptions)
371 371 else:
372 372 d = lambda: func(ui, *args, **cmdoptions)
373 373
374 374 # run pre-hook, and abort if it fails
375 375 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
376 376 if ret:
377 377 return ret
378 378 ret = runcommand(ui, options, cmd, d)
379 379 # run post-hook, passing command result
380 380 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
381 381 result = ret)
382 382 return ret
383 383
384 384 def runcommand(ui, options, cmd, cmdfunc):
385 385 def checkargs():
386 386 try:
387 387 return cmdfunc()
388 388 except TypeError, inst:
389 389 # was this an argument error?
390 390 tb = traceback.extract_tb(sys.exc_info()[2])
391 391 if len(tb) != 2: # no
392 392 raise
393 393 raise ParseError(cmd, _("invalid arguments"))
394 394
395 395 if options['profile']:
396 396 import hotshot, hotshot.stats
397 397 prof = hotshot.Profile("hg.prof")
398 398 try:
399 399 try:
400 400 return prof.runcall(checkargs)
401 401 except:
402 402 try:
403 403 ui.warn(_('exception raised - generating '
404 404 'profile anyway\n'))
405 405 except:
406 406 pass
407 407 raise
408 408 finally:
409 409 prof.close()
410 410 stats = hotshot.stats.load("hg.prof")
411 411 stats.strip_dirs()
412 412 stats.sort_stats('time', 'calls')
413 413 stats.print_stats(40)
414 414 elif options['lsprof']:
415 415 try:
416 416 from mercurial import lsprof
417 417 except ImportError:
418 418 raise util.Abort(_(
419 419 'lsprof not available - install from '
420 420 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
421 421 p = lsprof.Profiler()
422 422 p.enable(subcalls=True)
423 423 try:
424 424 return checkargs()
425 425 finally:
426 426 p.disable()
427 427 stats = lsprof.Stats(p.getstats())
428 428 stats.sort()
429 429 stats.pprint(top=10, file=sys.stderr, climit=5)
430 430 else:
431 431 return checkargs()
432 432
433 433 def bail_if_changed(repo):
434 434 modified, added, removed, deleted = repo.status()[:4]
435 435 if modified or added or removed or deleted:
436 436 raise util.Abort(_("outstanding uncommitted changes"))
437 437
438 438 def logmessage(opts):
439 439 """ get the log message according to -m and -l option """
440 440 message = opts['message']
441 441 logfile = opts['logfile']
442 442
443 443 if message and logfile:
444 444 raise util.Abort(_('options --message and --logfile are mutually '
445 445 'exclusive'))
446 446 if not message and logfile:
447 447 try:
448 448 if logfile == '-':
449 449 message = sys.stdin.read()
450 450 else:
451 451 message = open(logfile).read()
452 452 except IOError, inst:
453 453 raise util.Abort(_("can't read commit message '%s': %s") %
454 454 (logfile, inst.strerror))
455 455 return message
456 456
457 457 def setremoteconfig(ui, opts):
458 458 "copy remote options to ui tree"
459 459 if opts.get('ssh'):
460 460 ui.setconfig("ui", "ssh", opts['ssh'])
461 461 if opts.get('remotecmd'):
462 462 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
463 463
464 464 def parseurl(url, revs):
465 465 '''parse url#branch, returning url, branch + revs'''
466 466
467 467 if '#' not in url:
468 468 return url, (revs or None)
469 469
470 470 url, rev = url.split('#', 1)
471 471 return url, revs + [rev]
472 472
473 473 def revpair(repo, revs):
474 474 '''return pair of nodes, given list of revisions. second item can
475 475 be None, meaning use working dir.'''
476 476
477 477 def revfix(repo, val, defval):
478 478 if not val and val != 0 and defval is not None:
479 479 val = defval
480 480 return repo.lookup(val)
481 481
482 482 if not revs:
483 483 return repo.dirstate.parents()[0], None
484 484 end = None
485 485 if len(revs) == 1:
486 486 if revrangesep in revs[0]:
487 487 start, end = revs[0].split(revrangesep, 1)
488 488 start = revfix(repo, start, 0)
489 489 end = revfix(repo, end, repo.changelog.count() - 1)
490 490 else:
491 491 start = revfix(repo, revs[0], None)
492 492 elif len(revs) == 2:
493 493 if revrangesep in revs[0] or revrangesep in revs[1]:
494 494 raise util.Abort(_('too many revisions specified'))
495 495 start = revfix(repo, revs[0], None)
496 496 end = revfix(repo, revs[1], None)
497 497 else:
498 498 raise util.Abort(_('too many revisions specified'))
499 499 return start, end
500 500
501 501 def revrange(repo, revs):
502 502 """Yield revision as strings from a list of revision specifications."""
503 503
504 504 def revfix(repo, val, defval):
505 505 if not val and val != 0 and defval is not None:
506 506 return defval
507 507 return repo.changelog.rev(repo.lookup(val))
508 508
509 509 seen, l = {}, []
510 510 for spec in revs:
511 511 if revrangesep in spec:
512 512 start, end = spec.split(revrangesep, 1)
513 513 start = revfix(repo, start, 0)
514 514 end = revfix(repo, end, repo.changelog.count() - 1)
515 515 step = start > end and -1 or 1
516 516 for rev in xrange(start, end+step, step):
517 517 if rev in seen:
518 518 continue
519 519 seen[rev] = 1
520 520 l.append(rev)
521 521 else:
522 522 rev = revfix(repo, spec, None)
523 523 if rev in seen:
524 524 continue
525 525 seen[rev] = 1
526 526 l.append(rev)
527 527
528 528 return l
529 529
530 530 def make_filename(repo, pat, node,
531 531 total=None, seqno=None, revwidth=None, pathname=None):
532 532 node_expander = {
533 533 'H': lambda: hex(node),
534 534 'R': lambda: str(repo.changelog.rev(node)),
535 535 'h': lambda: short(node),
536 536 }
537 537 expander = {
538 538 '%': lambda: '%',
539 539 'b': lambda: os.path.basename(repo.root),
540 540 }
541 541
542 542 try:
543 543 if node:
544 544 expander.update(node_expander)
545 545 if node:
546 546 expander['r'] = (lambda:
547 547 str(repo.changelog.rev(node)).zfill(revwidth or 0))
548 548 if total is not None:
549 549 expander['N'] = lambda: str(total)
550 550 if seqno is not None:
551 551 expander['n'] = lambda: str(seqno)
552 552 if total is not None and seqno is not None:
553 553 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
554 554 if pathname is not None:
555 555 expander['s'] = lambda: os.path.basename(pathname)
556 556 expander['d'] = lambda: os.path.dirname(pathname) or '.'
557 557 expander['p'] = lambda: pathname
558 558
559 559 newname = []
560 560 patlen = len(pat)
561 561 i = 0
562 562 while i < patlen:
563 563 c = pat[i]
564 564 if c == '%':
565 565 i += 1
566 566 c = pat[i]
567 567 c = expander[c]()
568 568 newname.append(c)
569 569 i += 1
570 570 return ''.join(newname)
571 571 except KeyError, inst:
572 572 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
573 573 inst.args[0])
574 574
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return the open file described by the output pattern pat.

    An empty pattern or '-' maps to the standard streams; a file-like
    object is passed straight through; anything else is expanded with
    make_filename() and opened with the given mode."""
    writable = 'w' in mode
    if not pat or pat == '-':
        if writable:
            return sys.stdout
        return sys.stdin
    if writable and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    name = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(name, mode)
586 586
def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
    """Return the (files, matchfn, anypats) triple for pats and opts."""
    return util.cmdmatcher(repo.root, repo.getcwd(), pats or [],
                           opts.get('include'), opts.get('exclude'),
                           globbed=globbed, default=default)
592 592
def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
         default=None):
    """Walk the repo, yielding (src, abs, rel, exact) for matched files.

    'exact' is true when the file was named explicitly on the command
    line rather than picked up by a pattern."""
    cwd = repo.getcwd()
    files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
                                        default=default)
    named = dict.fromkeys(files)
    for src, fn in repo.walk(node=node, files=files, match=matchfn,
                             badmatch=badmatch):
        yield src, fn, repo.pathto(fn, cwd), fn in named
602 602
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples'''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo.changectx()
    for new in added:
        newdata = repo.wread(new)
        best, bestscore = None, threshold
        for old in removed:
            olddata = ctx.filectx(old).data()

            # bdiff.blocks() reports runs of identical lines between the
            # two files; total up the bytes those runs cover
            same = 0
            newlines = mdiff.splitnewlines(newdata)
            for x1, x2, y1, y2 in bdiff.blocks(newdata, olddata):
                for line in newlines[x1:x2]:
                    same += len(line)

            combined = len(newdata) + len(olddata)
            if combined:
                # 2 * matched / total is 1.0 for identical files
                score = same * 2.0 / combined
                if score >= bestscore:
                    best, bestscore = old, score
        if best:
            yield best, new, bestscore
630 630
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal.

    With similarity > 0 (a 0..1 ratio), removals that closely match an
    added file are additionally recorded as renames."""
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    adds, removes = [], []
    names = {}

    def announce(msg, rel, abs, exact):
        # exact (explicitly named) files are only reported in verbose mode
        if repo.ui.verbose or not exact:
            repo.ui.status(msg % ((pats and rel) or abs))

    for src, abs, rel, exact in walk(repo, pats, opts):
        target = repo.wjoin(abs)
        if src == 'f' and abs not in repo.dirstate:
            adds.append(abs)
            names[abs] = rel, exact
            announce(_('adding %s\n'), rel, abs, exact)
        if repo.dirstate[abs] != 'r' and not util.lexists(target):
            removes.append(abs)
            names[abs] = rel, exact
            announce(_('removing %s\n'), rel, abs, exact)
    if not dry_run:
        repo.add(adds)
        repo.remove(removes)
    if similarity > 0:
        for old, new, score in findrenames(repo, adds, removes, similarity):
            oldrel, oldexact = names[old]
            newrel, newexact = names[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)
664 663
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.'''

    # Phase 1 (parent): respawn ourselves detached with the pipe fds on
    # the command line, wait for the child's ready byte, then exit (or
    # hand control to parentfn).
    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)      # blocks until the child writes 'y'
        if not parentfn:
            os._exit(0)
        return parentfn(pid)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    # Phase 2 (child): detach from the controlling terminal, signal the
    # waiting parent, and point the standard streams at the null device.
    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            pass             # no setsid on this platform (e.g. Windows)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        nullfd = os.open(util.nulldev, os.O_RDWR)
        for stdfd in (0, 1, 2):
            if nullfd != stdfd:
                os.dup2(nullfd, stdfd)
        if nullfd > 2:
            os.close(nullfd)

    if runfn:
        return runfn()
708 707
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        # patch: a match function selecting files to diff, or False
        # buffered: when true, show() collects output per-rev so callers
        # can replay it in a different order via flush()
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.patch = patch
        self.header = {}        # rev -> buffered header text
        self.hunk = {}          # rev -> buffered changeset text
        self.lastheader = None  # last header written, to suppress repeats

    def flush(self, rev):
        """Emit buffered output for rev; return 1 if a hunk was written."""
        if rev in self.header:
            h = self.header[rev]
            # only write a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        """Display one changeset, buffering the output when requested."""
        if self.buffered:
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # callers may pass either the rev or the node; derive the other
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        # full hex nodes in debug mode, short ones otherwise
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            # debug mode: show modified/added/removed files separately
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            extraitems = extra.items()
            extraitems.sort()
            for key, value in extraitems:
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        # emit the diff against the first parent when a patch match
        # function was supplied
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
834 833
835 834
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        filters = templater.common_filters.copy()
        # formatnode: full node in debug mode, 12-char short form otherwise
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # callers may pass either the rev or the node; derive the other
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                # no per-item template: yield raw strings or dicts
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                # merge v into the template args whether it is a dict,
                # a list of pairs, or a plain value
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            # the default branch name is deliberately suppressed
            branch = changes[5].get("branch")
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            extras = changes[5].items()
            extras.sort()
            for key, value in extras:
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        if self.ui.debugflag:
            # debug mode: split files into modified/added/removed and
            # expose the manifest
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            def showfiles(**args):
                return showlist('file', files[0], **args)
            def showadds(**args):
                return showlist('file_add', files[1], **args)
            def showdels(**args):
                return showlist('file_del', files[2], **args)
            def showmanifest(**args):
                args = args.copy()
                args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                                 node=hex(changes[0])))
                return self.t('manifest', **args)
        else:
            def showfiles(**args):
                return showlist('file', changes[3], **args)
            showadds = ''
            showdels = ''
            showmanifest = ''

        # template keywords available to user templates
        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        props = props.copy()
        props.update(defprops)

        try:
            # pick the most specific header template available
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            # pick the most specific changeset template available
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
1022 1021
def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    patch = False
    if opts.get('patch'):
        patch = matchfn or util.always

    tmpl = opts.get('template')
    mapfile = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        mapfile = opts.get('style')
        # ui settings
        if not mapfile:
            tmpl = ui.config('ui', 'logtemplate')
            if tmpl:
                tmpl = templater.parsestring(tmpl)
            else:
                mapfile = ui.config('ui', 'style')

    if tmpl or mapfile:
        if mapfile:
            # a bare style name is looked up on the template path
            if not os.path.split(mapfile)[0]:
                mapname = (templater.templatepath('map-cmdline.' + mapfile)
                           or templater.templatepath(mapfile))
                if mapname: mapfile = mapname
        try:
            t = changeset_templater(ui, repo, patch, mapfile, buffered)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo, patch, buffered)
1066 1065
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    datematch = util.matchdate(date + " to " + date)
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev': None})
    found = {}
    for state, rev, fns in changeiter:
        if state == 'add':
            # record revisions whose commit date falls in the window
            when = get(rev)[2]
            if datematch(when[0]):
                found[rev] = when
        elif state == 'iter' and rev in found:
            # in-order pass: the first recorded rev is the tipmost match
            ui.status("Found revision %s from %s\n" %
                      (rev, util.datestr(found[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))
1085 1084
def walkchangerevs(ui, repo, pats, change, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, matchfn) tuple. The iterator
    yields 3-tuples. They will be of one of the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        # yield (start, size) pairs covering start..end in either
        # direction, doubling the window size each step up to sizelimit
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    files, matchfn, anypats = matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if repo.changelog.count() == 0:
        return [], matchfn

    if follow:
        defrange = '%s:0' % repo.changectx().rev()
    else:
        defrange = 'tip:0'
    revs = revrange(repo, opts['rev'] or [defrange])
    wanted = {}          # rev -> wanted marker
    slowpath = anypats or opts.get('removed')
    fncache = {}         # rev -> list of files changed in that rev

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict.fromkeys(revs)
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, nullrev):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            # named files first, then copy sources discovered along the way
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1,
                                                nullrev):
                for j in xrange(i - window, i + 1):
                    yield j, change(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    class followfilter:
        # incremental ancestry/descendancy test, seeded on first match()
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x) and x in wanted:
                del wanted[x]

    def iterate():
        if follow and not files:
            # follow with no files: filter wanted revs through ancestry
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev)
                if not fns:
                    # lazily recompute the matched files for this rev
                    def fns_generator():
                        for f in change(rev)[3]:
                            if matchfn(f):
                                yield f
                    fns = fns_generator()
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), matchfn
@@ -1,3180 +1,3179 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, urllib, shlex, stat
12 12 import ui, hg, util, revlog, bundlerepo, extensions
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import errno, version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 # Commands start here, listed alphabetically
18 18
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        # files already tracked are skipped unless named explicitly
        if not exact and abs in repo.dirstate:
            continue
        # explicitly named files are only reported in verbose mode
        if not exact or ui.verbose:
            ui.status(_('adding %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)
41 41
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # -s is given as a percentage; cmdutil.addremove expects a 0..1 ratio
    percent = float(opts.get('similarity') or 0)
    if percent < 0 or percent > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=percent/100.)
60 60
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, column formatter) pairs, in display order
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    # default to showing revision numbers when nothing was requested
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts['changeset']) and (not opts['number'])):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # append the line number to the output of the last column
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        # skip binary files unless -a was given
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # render each requested column, right-aligned to its widest value
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                m = max(map(len, l))
                pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
120 120
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo.changectx(opts['rev'])
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    # expand %-escapes (e.g. %h) in the destination name
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # stream the archive to stdout; impossible for a plain directory
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
163 163
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    cmdutil.bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge requires choosing which parent to revert to
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    # update to the target revision, revert to its parent, then commit
    # the result as the backout changeset
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # op1 was the working dir parent before the backout started
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
236 236
237 237 def branch(ui, repo, label=None, **opts):
238 238 """set or show the current branch name
239 239
240 240 With no argument, show the current branch name. With one argument,
241 241 set the working directory branch name (the branch does not exist in
242 242 the repository until the next commit).
243 243
244 244 Unless --force is specified, branch will not let you set a
245 245 branch name that shadows an existing branch.
246 246 """
247 247
248 248 if label:
249 249 if not opts.get('force') and label in repo.branchtags():
250 250 if label not in [p.branch() for p in repo.workingctx().parents()]:
251 251 raise util.Abort(_('a branch of the same name already exists'
252 252 ' (use --force to override)'))
253 253 repo.dirstate.setbranch(util.fromlocal(label))
254 254 ui.status(_('marked working directory as branch %s\n') % label)
255 255 else:
256 256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
257 257
258 258 def branches(ui, repo, active=False):
259 259 """list repository named branches
260 260
261 261 List the repository's named branches, indicating which ones are
262 262 inactive. If active is specified, only show active branches.
263 263
264 264 A branch is considered active if it contains unmerged heads.
265 265 """
266 266 b = repo.branchtags()
267 267 heads = dict.fromkeys(repo.heads(), 1)
268 268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
269 269 l.sort()
270 270 l.reverse()
271 271 for ishead, r, n, t in l:
272 272 if active and not ishead:
273 273 # If we're only displaying active branches, abort the loop on
274 274 # encountering the first inactive head
275 275 break
276 276 else:
277 277 hexfunc = ui.debugflag and hex or short
278 278 if ui.quiet:
279 279 ui.write("%s\n" % t)
280 280 else:
281 281 spaces = " " * (30 - util.locallen(t))
282 282 # The code only gets here if inactive branches are being
283 283 # displayed or the branch is active.
284 284 isinactive = ((not ishead) and " (inactive)") or ''
285 285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
286 286
287 287 def bundle(ui, repo, fname, dest=None, **opts):
288 288 """create a changegroup file
289 289
290 290 Generate a compressed changegroup file collecting changesets not
291 291 found in the other repository.
292 292
293 293 If no destination repository is specified the destination is assumed
294 294 to have all the nodes specified by one or more --base parameters.
295 295
296 296 The bundle file can then be transferred using conventional means and
297 297 applied to another repository with the unbundle or pull command.
298 298 This is useful when direct push and pull are not available or when
299 299 exporting an entire repository is undesirable.
300 300
301 301 Applying bundles preserves all changeset contents including
302 302 permissions, copy/rename information, and revision history.
303 303 """
304 304 revs = opts.get('rev') or None
305 305 if revs:
306 306 revs = [repo.lookup(rev) for rev in revs]
307 307 base = opts.get('base')
308 308 if base:
309 309 if dest:
310 310 raise util.Abort(_("--base is incompatible with specifiying "
311 311 "a destination"))
312 312 base = [repo.lookup(rev) for rev in base]
313 313 # create the right base
314 314 # XXX: nodesbetween / changegroup* should be "fixed" instead
315 315 o = []
316 316 has = {nullid: None}
317 317 for n in base:
318 318 has.update(repo.changelog.reachable(n))
319 319 if revs:
320 320 visit = list(revs)
321 321 else:
322 322 visit = repo.changelog.heads()
323 323 seen = {}
324 324 while visit:
325 325 n = visit.pop(0)
326 326 parents = [p for p in repo.changelog.parents(n) if p not in has]
327 327 if len(parents) == 0:
328 328 o.insert(0, n)
329 329 else:
330 330 for p in parents:
331 331 if p not in seen:
332 332 seen[p] = 1
333 333 visit.append(p)
334 334 else:
335 335 cmdutil.setremoteconfig(ui, opts)
336 336 dest, revs = cmdutil.parseurl(
337 337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
338 338 other = hg.repository(ui, dest)
339 339 o = repo.findoutgoing(other, force=opts['force'])
340 340
341 341 if revs:
342 342 cg = repo.changegroupsubset(o, revs, 'bundle')
343 343 else:
344 344 cg = repo.changegroup(o, 'bundle')
345 345 changegroup.writebundle(cg, fname, "HG10BZ")
346 346
347 347 def cat(ui, repo, file1, *pats, **opts):
348 348 """output the current or given revision of files
349 349
350 350 Print the specified files as they were at the given revision.
351 351 If no revision is given, the parent of the working directory is used,
352 352 or tip if no revision is checked out.
353 353
354 354 Output may be to a file, in which case the name of the file is
355 355 given using a format string. The formatting rules are the same as
356 356 for the export command, with the following additions:
357 357
358 358 %s basename of file being printed
359 359 %d dirname of file being printed, or '.' if in repo root
360 360 %p root-relative path name of file being printed
361 361 """
362 362 ctx = repo.changectx(opts['rev'])
363 363 err = 1
364 364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
365 365 ctx.node()):
366 366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
367 367 fp.write(ctx.filectx(abs).data())
368 368 err = 0
369 369 return err
370 370
371 371 def clone(ui, source, dest=None, **opts):
372 372 """make a copy of an existing repository
373 373
374 374 Create a copy of an existing repository in a new directory.
375 375
376 376 If no destination directory name is specified, it defaults to the
377 377 basename of the source.
378 378
379 379 The location of the source is added to the new repository's
380 380 .hg/hgrc file, as the default to be used for future pulls.
381 381
382 382 For efficiency, hardlinks are used for cloning whenever the source
383 383 and destination are on the same filesystem (note this applies only
384 384 to the repository data, not to the checked out files). Some
385 385 filesystems, such as AFS, implement hardlinking incorrectly, but
386 386 do not report errors. In these cases, use the --pull option to
387 387 avoid hardlinking.
388 388
389 389 You can safely clone repositories and checked out files using full
390 390 hardlinks with
391 391
392 392 $ cp -al REPO REPOCLONE
393 393
394 394 which is the fastest way to clone. However, the operation is not
395 395 atomic (making sure REPO is not modified during the operation is
396 396 up to you) and you have to make sure your editor breaks hardlinks
397 397 (Emacs and most Linux Kernel tools do so).
398 398
399 399 If you use the -r option to clone up to a specific revision, no
400 400 subsequent revisions will be present in the cloned repository.
401 401 This option implies --pull, even on local repositories.
402 402
403 403 See pull for valid source format details.
404 404
405 405 It is possible to specify an ssh:// URL as the destination, but no
406 406 .hg/hgrc and working directory will be created on the remote side.
407 407 Look at the help text for the pull command for important details
408 408 about ssh:// URLs.
409 409 """
410 410 cmdutil.setremoteconfig(ui, opts)
411 411 hg.clone(ui, source, dest,
412 412 pull=opts['pull'],
413 413 stream=opts['uncompressed'],
414 414 rev=opts['rev'],
415 415 update=not opts['noupdate'])
416 416
417 417 def commit(ui, repo, *pats, **opts):
418 418 """commit the specified files or all outstanding changes
419 419
420 420 Commit changes to the given files into the repository.
421 421
422 422 If a list of files is omitted, all changes reported by "hg status"
423 423 will be committed.
424 424
425 425 If no commit message is specified, the editor configured in your hgrc
426 426 or in the EDITOR environment variable is started to enter a message.
427 427 """
428 428 message = cmdutil.logmessage(opts)
429 429
430 430 if opts['addremove']:
431 431 cmdutil.addremove(repo, pats, opts)
432 432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
433 433 if pats:
434 434 status = repo.status(files=fns, match=match)
435 435 modified, added, removed, deleted, unknown = status[:5]
436 436 files = modified + added + removed
437 437 slist = None
438 438 for f in fns:
439 439 if f == '.':
440 440 continue
441 441 if f not in files:
442 442 rf = repo.wjoin(f)
443 443 try:
444 444 mode = os.lstat(rf)[stat.ST_MODE]
445 445 except OSError:
446 446 raise util.Abort(_("file %s not found!") % rf)
447 447 if stat.S_ISDIR(mode):
448 448 name = f + '/'
449 449 if slist is None:
450 450 slist = list(files)
451 451 slist.sort()
452 452 i = bisect.bisect(slist, name)
453 453 if i >= len(slist) or not slist[i].startswith(name):
454 454 raise util.Abort(_("no match under directory %s!")
455 455 % rf)
456 456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
457 457 raise util.Abort(_("can't commit %s: "
458 458 "unsupported file type!") % rf)
459 459 elif f not in repo.dirstate:
460 460 raise util.Abort(_("file %s not tracked!") % rf)
461 461 else:
462 462 files = []
463 463 try:
464 464 repo.commit(files, message, opts['user'], opts['date'], match,
465 465 force_editor=opts.get('force_editor'))
466 466 except ValueError, inst:
467 467 raise util.Abort(str(inst))
468 468
469 def docopy(ui, repo, pats, opts, wlock):
469 def docopy(ui, repo, pats, opts):
470 470 # called with the repo lock held
471 471 #
472 472 # hgsep => pathname that uses "/" to separate directories
473 473 # ossep => pathname that uses os.sep to separate directories
474 474 cwd = repo.getcwd()
475 475 errors = 0
476 476 copied = []
477 477 targets = {}
478 478
479 479 # abs: hgsep
480 480 # rel: ossep
481 481 # return: hgsep
482 482 def okaytocopy(abs, rel, exact):
483 483 reasons = {'?': _('is not managed'),
484 484 'r': _('has been marked for remove')}
485 485 state = repo.dirstate[abs]
486 486 reason = reasons.get(state)
487 487 if reason:
488 488 if exact:
489 489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 490 else:
491 491 if state == 'a':
492 492 origsrc = repo.dirstate.copied(abs)
493 493 if origsrc is not None:
494 494 return origsrc
495 495 return abs
496 496
497 497 # origsrc: hgsep
498 498 # abssrc: hgsep
499 499 # relsrc: ossep
500 500 # otarget: ossep
501 501 def copy(origsrc, abssrc, relsrc, otarget, exact):
502 502 abstarget = util.canonpath(repo.root, cwd, otarget)
503 503 reltarget = repo.pathto(abstarget, cwd)
504 504 prevsrc = targets.get(abstarget)
505 505 src = repo.wjoin(abssrc)
506 506 target = repo.wjoin(abstarget)
507 507 if prevsrc is not None:
508 508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
509 509 (reltarget, repo.pathto(abssrc, cwd),
510 510 repo.pathto(prevsrc, cwd)))
511 511 return
512 512 if (not opts['after'] and os.path.exists(target) or
513 513 opts['after'] and repo.dirstate[abstarget] in 'mn'):
514 514 if not opts['force']:
515 515 ui.warn(_('%s: not overwriting - file exists\n') %
516 516 reltarget)
517 517 return
518 518 if not opts['after'] and not opts.get('dry_run'):
519 519 os.unlink(target)
520 520 if opts['after']:
521 521 if not os.path.exists(target):
522 522 return
523 523 else:
524 524 targetdir = os.path.dirname(target) or '.'
525 525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
526 526 os.makedirs(targetdir)
527 527 try:
528 528 restore = repo.dirstate[abstarget] == 'r'
529 529 if restore and not opts.get('dry_run'):
530 repo.undelete([abstarget], wlock)
530 repo.undelete([abstarget])
531 531 try:
532 532 if not opts.get('dry_run'):
533 533 util.copyfile(src, target)
534 534 restore = False
535 535 finally:
536 536 if restore:
537 repo.remove([abstarget], wlock=wlock)
537 repo.remove([abstarget])
538 538 except IOError, inst:
539 539 if inst.errno == errno.ENOENT:
540 540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
541 541 else:
542 542 ui.warn(_('%s: cannot copy - %s\n') %
543 543 (relsrc, inst.strerror))
544 544 errors += 1
545 545 return
546 546 if ui.verbose or not exact:
547 547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
548 548 targets[abstarget] = abssrc
549 549 if abstarget != origsrc:
550 550 if repo.dirstate[origsrc] == 'a':
551 551 if not ui.quiet:
552 552 ui.warn(_("%s has not been committed yet, so no copy "
553 553 "data will be stored for %s.\n")
554 554 % (repo.pathto(origsrc, cwd), reltarget))
555 555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
556 repo.add([abstarget], wlock)
556 repo.add([abstarget])
557 557 elif not opts.get('dry_run'):
558 repo.copy(origsrc, abstarget, wlock)
558 repo.copy(origsrc, abstarget)
559 559 copied.append((abssrc, relsrc, exact))
560 560
561 561 # pat: ossep
562 562 # dest ossep
563 563 # srcs: list of (hgsep, hgsep, ossep, bool)
564 564 # return: function that takes hgsep and returns ossep
565 565 def targetpathfn(pat, dest, srcs):
566 566 if os.path.isdir(pat):
567 567 abspfx = util.canonpath(repo.root, cwd, pat)
568 568 abspfx = util.localpath(abspfx)
569 569 if destdirexists:
570 570 striplen = len(os.path.split(abspfx)[0])
571 571 else:
572 572 striplen = len(abspfx)
573 573 if striplen:
574 574 striplen += len(os.sep)
575 575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
576 576 elif destdirexists:
577 577 res = lambda p: os.path.join(dest,
578 578 os.path.basename(util.localpath(p)))
579 579 else:
580 580 res = lambda p: dest
581 581 return res
582 582
583 583 # pat: ossep
584 584 # dest ossep
585 585 # srcs: list of (hgsep, hgsep, ossep, bool)
586 586 # return: function that takes hgsep and returns ossep
587 587 def targetpathafterfn(pat, dest, srcs):
588 588 if util.patkind(pat, None)[0]:
589 589 # a mercurial pattern
590 590 res = lambda p: os.path.join(dest,
591 591 os.path.basename(util.localpath(p)))
592 592 else:
593 593 abspfx = util.canonpath(repo.root, cwd, pat)
594 594 if len(abspfx) < len(srcs[0][0]):
595 595 # A directory. Either the target path contains the last
596 596 # component of the source path or it does not.
597 597 def evalpath(striplen):
598 598 score = 0
599 599 for s in srcs:
600 600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
601 601 if os.path.exists(t):
602 602 score += 1
603 603 return score
604 604
605 605 abspfx = util.localpath(abspfx)
606 606 striplen = len(abspfx)
607 607 if striplen:
608 608 striplen += len(os.sep)
609 609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
610 610 score = evalpath(striplen)
611 611 striplen1 = len(os.path.split(abspfx)[0])
612 612 if striplen1:
613 613 striplen1 += len(os.sep)
614 614 if evalpath(striplen1) > score:
615 615 striplen = striplen1
616 616 res = lambda p: os.path.join(dest,
617 617 util.localpath(p)[striplen:])
618 618 else:
619 619 # a file
620 620 if destdirexists:
621 621 res = lambda p: os.path.join(dest,
622 622 os.path.basename(util.localpath(p)))
623 623 else:
624 624 res = lambda p: dest
625 625 return res
626 626
627 627
628 628 pats = util.expand_glob(pats)
629 629 if not pats:
630 630 raise util.Abort(_('no source or destination specified'))
631 631 if len(pats) == 1:
632 632 raise util.Abort(_('no destination specified'))
633 633 dest = pats.pop()
634 634 destdirexists = os.path.isdir(dest)
635 635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
636 636 raise util.Abort(_('with multiple sources, destination must be an '
637 637 'existing directory'))
638 638 if opts['after']:
639 639 tfn = targetpathafterfn
640 640 else:
641 641 tfn = targetpathfn
642 642 copylist = []
643 643 for pat in pats:
644 644 srcs = []
645 645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
646 646 globbed=True):
647 647 origsrc = okaytocopy(abssrc, relsrc, exact)
648 648 if origsrc:
649 649 srcs.append((origsrc, abssrc, relsrc, exact))
650 650 if not srcs:
651 651 continue
652 652 copylist.append((tfn(pat, dest, srcs), srcs))
653 653 if not copylist:
654 654 raise util.Abort(_('no files to copy'))
655 655
656 656 for targetpath, srcs in copylist:
657 657 for origsrc, abssrc, relsrc, exact in srcs:
658 658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
659 659
660 660 if errors:
661 661 ui.warn(_('(consider using --after)\n'))
662 662 return errors, copied
663 663
664 664 def copy(ui, repo, *pats, **opts):
665 665 """mark files as copied for the next commit
666 666
667 667 Mark dest as having copies of source files. If dest is a
668 668 directory, copies are put in that directory. If dest is a file,
669 669 there can only be one source.
670 670
671 671 By default, this command copies the contents of files as they
672 672 stand in the working directory. If invoked with --after, the
673 673 operation is recorded, but no copying is performed.
674 674
675 675 This command takes effect in the next commit. To undo a copy
676 676 before that, see hg revert.
677 677 """
678 678 wlock = repo.wlock(False)
679 679 try:
680 errs, copied = docopy(ui, repo, pats, opts, wlock)
680 errs, copied = docopy(ui, repo, pats, opts)
681 681 finally:
682 682 del wlock
683 683 return errs
684 684
685 685 def debugancestor(ui, index, rev1, rev2):
686 686 """find the ancestor revision of two revisions in a given index"""
687 687 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
688 688 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
689 689 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
690 690
691 691 def debugcomplete(ui, cmd='', **opts):
692 692 """returns the completion list associated with the given command"""
693 693
694 694 if opts['options']:
695 695 options = []
696 696 otables = [globalopts]
697 697 if cmd:
698 698 aliases, entry = cmdutil.findcmd(ui, cmd)
699 699 otables.append(entry[1])
700 700 for t in otables:
701 701 for o in t:
702 702 if o[0]:
703 703 options.append('-%s' % o[0])
704 704 options.append('--%s' % o[1])
705 705 ui.write("%s\n" % "\n".join(options))
706 706 return
707 707
708 708 clist = cmdutil.findpossible(ui, cmd).keys()
709 709 clist.sort()
710 710 ui.write("%s\n" % "\n".join(clist))
711 711
712 712 def debugrebuildstate(ui, repo, rev=""):
713 713 """rebuild the dirstate as it would look like for the given revision"""
714 714 if rev == "":
715 715 rev = repo.changelog.tip()
716 716 ctx = repo.changectx(rev)
717 717 files = ctx.manifest()
718 718 wlock = repo.wlock()
719 719 try:
720 720 repo.dirstate.rebuild(rev, files)
721 721 finally:
722 722 del wlock
723 723
724 724 def debugcheckstate(ui, repo):
725 725 """validate the correctness of the current dirstate"""
726 726 parent1, parent2 = repo.dirstate.parents()
727 727 m1 = repo.changectx(parent1).manifest()
728 728 m2 = repo.changectx(parent2).manifest()
729 729 errors = 0
730 730 for f in repo.dirstate:
731 731 state = repo.dirstate[f]
732 732 if state in "nr" and f not in m1:
733 733 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
734 734 errors += 1
735 735 if state in "a" and f in m1:
736 736 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
737 737 errors += 1
738 738 if state in "m" and f not in m1 and f not in m2:
739 739 ui.warn(_("%s in state %s, but not in either manifest\n") %
740 740 (f, state))
741 741 errors += 1
742 742 for f in m1:
743 743 state = repo.dirstate[f]
744 744 if state not in "nrm":
745 745 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
746 746 errors += 1
747 747 if errors:
748 748 error = _(".hg/dirstate inconsistent with current parent's manifest")
749 749 raise util.Abort(error)
750 750
751 751 def showconfig(ui, repo, *values, **opts):
752 752 """show combined config settings from all hgrc files
753 753
754 754 With no args, print names and values of all config items.
755 755
756 756 With one arg of the form section.name, print just the value of
757 757 that config item.
758 758
759 759 With multiple args, print names and values of all config items
760 760 with matching section names."""
761 761
762 762 untrusted = bool(opts.get('untrusted'))
763 763 if values:
764 764 if len([v for v in values if '.' in v]) > 1:
765 765 raise util.Abort(_('only one config item permitted'))
766 766 for section, name, value in ui.walkconfig(untrusted=untrusted):
767 767 sectname = section + '.' + name
768 768 if values:
769 769 for v in values:
770 770 if v == section:
771 771 ui.write('%s=%s\n' % (sectname, value))
772 772 elif v == sectname:
773 773 ui.write(value, '\n')
774 774 else:
775 775 ui.write('%s=%s\n' % (sectname, value))
776 776
777 777 def debugsetparents(ui, repo, rev1, rev2=None):
778 778 """manually set the parents of the current working directory
779 779
780 780 This is useful for writing repository conversion tools, but should
781 781 be used with care.
782 782 """
783 783
784 784 if not rev2:
785 785 rev2 = hex(nullid)
786 786
787 787 wlock = repo.wlock()
788 788 try:
789 789 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
790 790 finally:
791 791 del wlock
792 792
793 793 def debugstate(ui, repo):
794 794 """show the contents of the current dirstate"""
795 795 dc = repo.dirstate._map
796 796 k = dc.keys()
797 797 k.sort()
798 798 for file_ in k:
799 799 if dc[file_][3] == -1:
800 800 # Pad or slice to locale representation
801 801 locale_len = len(time.strftime("%x %X", time.localtime(0)))
802 802 timestr = 'unset'
803 803 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
804 804 else:
805 805 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
806 806 ui.write("%c %3o %10d %s %s\n"
807 807 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
808 808 timestr, file_))
809 809 for f in repo.dirstate.copies():
810 810 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
811 811
812 812 def debugdata(ui, file_, rev):
813 813 """dump the contents of a data file revision"""
814 814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
815 815 try:
816 816 ui.write(r.revision(r.lookup(rev)))
817 817 except KeyError:
818 818 raise util.Abort(_('invalid revision identifier %s') % rev)
819 819
820 820 def debugdate(ui, date, range=None, **opts):
821 821 """parse and display a date"""
822 822 if opts["extended"]:
823 823 d = util.parsedate(date, util.extendeddateformats)
824 824 else:
825 825 d = util.parsedate(date)
826 826 ui.write("internal: %s %s\n" % d)
827 827 ui.write("standard: %s\n" % util.datestr(d))
828 828 if range:
829 829 m = util.matchdate(range)
830 830 ui.write("match: %s\n" % m(d[0]))
831 831
832 832 def debugindex(ui, file_):
833 833 """dump the contents of an index file"""
834 834 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
835 835 ui.write(" rev offset length base linkrev" +
836 836 " nodeid p1 p2\n")
837 837 for i in xrange(r.count()):
838 838 node = r.node(i)
839 839 pp = r.parents(node)
840 840 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
841 841 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
842 842 short(node), short(pp[0]), short(pp[1])))
843 843
844 844 def debugindexdot(ui, file_):
845 845 """dump an index DAG as a .dot file"""
846 846 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
847 847 ui.write("digraph G {\n")
848 848 for i in xrange(r.count()):
849 849 node = r.node(i)
850 850 pp = r.parents(node)
851 851 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
852 852 if pp[1] != nullid:
853 853 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
854 854 ui.write("}\n")
855 855
856 856 def debuginstall(ui):
857 857 '''test Mercurial installation'''
858 858
859 859 def writetemp(contents):
860 860 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
861 861 f = os.fdopen(fd, "wb")
862 862 f.write(contents)
863 863 f.close()
864 864 return name
865 865
866 866 problems = 0
867 867
868 868 # encoding
869 869 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
870 870 try:
871 871 util.fromlocal("test")
872 872 except util.Abort, inst:
873 873 ui.write(" %s\n" % inst)
874 874 ui.write(_(" (check that your locale is properly set)\n"))
875 875 problems += 1
876 876
877 877 # compiled modules
878 878 ui.status(_("Checking extensions...\n"))
879 879 try:
880 880 import bdiff, mpatch, base85
881 881 except Exception, inst:
882 882 ui.write(" %s\n" % inst)
883 883 ui.write(_(" One or more extensions could not be found"))
884 884 ui.write(_(" (check that you compiled the extensions)\n"))
885 885 problems += 1
886 886
887 887 # templates
888 888 ui.status(_("Checking templates...\n"))
889 889 try:
890 890 import templater
891 891 t = templater.templater(templater.templatepath("map-cmdline.default"))
892 892 except Exception, inst:
893 893 ui.write(" %s\n" % inst)
894 894 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
895 895 problems += 1
896 896
897 897 # patch
898 898 ui.status(_("Checking patch...\n"))
899 899 patcher = ui.config('ui', 'patch')
900 900 patcher = ((patcher and util.find_exe(patcher)) or
901 901 util.find_exe('gpatch') or
902 902 util.find_exe('patch'))
903 903 if not patcher:
904 904 ui.write(_(" Can't find patch or gpatch in PATH\n"))
905 905 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
906 906 problems += 1
907 907 else:
908 908 # actually attempt a patch here
909 909 a = "1\n2\n3\n4\n"
910 910 b = "1\n2\n3\ninsert\n4\n"
911 911 fa = writetemp(a)
912 912 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
913 913 fd = writetemp(d)
914 914
915 915 files = {}
916 916 try:
917 917 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
918 918 except util.Abort, e:
919 919 ui.write(_(" patch call failed:\n"))
920 920 ui.write(" " + str(e) + "\n")
921 921 problems += 1
922 922 else:
923 923 if list(files) != [os.path.basename(fa)]:
924 924 ui.write(_(" unexpected patch output!"))
925 925 ui.write(_(" (you may have an incompatible version of patch)\n"))
926 926 problems += 1
927 927 a = file(fa).read()
928 928 if a != b:
929 929 ui.write(_(" patch test failed!"))
930 930 ui.write(_(" (you may have an incompatible version of patch)\n"))
931 931 problems += 1
932 932
933 933 os.unlink(fa)
934 934 os.unlink(fd)
935 935
936 936 # merge helper
937 937 ui.status(_("Checking merge helper...\n"))
938 938 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
939 939 or "hgmerge")
940 940 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
941 941 if not cmdpath:
942 942 if cmd == 'hgmerge':
943 943 ui.write(_(" No merge helper set and can't find default"
944 944 " hgmerge script in PATH\n"))
945 945 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
946 946 else:
947 947 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
948 948 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
949 949 problems += 1
950 950 else:
951 951 # actually attempt a patch here
952 952 fa = writetemp("1\n2\n3\n4\n")
953 953 fl = writetemp("1\n2\n3\ninsert\n4\n")
954 954 fr = writetemp("begin\n1\n2\n3\n4\n")
955 955 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
956 956 if r:
957 957 ui.write(_(" Got unexpected merge error %d!\n") % r)
958 958 problems += 1
959 959 m = file(fl).read()
960 960 if m != "begin\n1\n2\n3\ninsert\n4\n":
961 961 ui.write(_(" Got unexpected merge results!\n"))
962 962 ui.write(_(" (your merge helper may have the"
963 963 " wrong argument order)\n"))
964 964 ui.write(_(" Result: %r\n") % m)
965 965 problems += 1
966 966 os.unlink(fa)
967 967 os.unlink(fl)
968 968 os.unlink(fr)
969 969
970 970 # editor
971 971 ui.status(_("Checking commit editor...\n"))
972 972 editor = (os.environ.get("HGEDITOR") or
973 973 ui.config("ui", "editor") or
974 974 os.environ.get("EDITOR", "vi"))
975 975 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
976 976 if not cmdpath:
977 977 if editor == 'vi':
978 978 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
979 979 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
980 980 else:
981 981 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
982 982 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
983 983 problems += 1
984 984
985 985 # check username
986 986 ui.status(_("Checking username...\n"))
987 987 user = os.environ.get("HGUSER")
988 988 if user is None:
989 989 user = ui.config("ui", "username")
990 990 if user is None:
991 991 user = os.environ.get("EMAIL")
992 992 if not user:
993 993 ui.warn(" ")
994 994 ui.username()
995 995 ui.write(_(" (specify a username in your .hgrc file)\n"))
996 996
997 997 if not problems:
998 998 ui.status(_("No problems detected\n"))
999 999 else:
1000 1000 ui.write(_("%s problems detected,"
1001 1001 " please check your install!\n") % problems)
1002 1002
1003 1003 return problems
1004 1004
1005 1005 def debugrename(ui, repo, file1, *pats, **opts):
1006 1006 """dump rename information"""
1007 1007
1008 1008 ctx = repo.changectx(opts.get('rev', 'tip'))
1009 1009 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1010 1010 ctx.node()):
1011 1011 m = ctx.filectx(abs).renamed()
1012 1012 if m:
1013 1013 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1014 1014 else:
1015 1015 ui.write(_("%s not renamed\n") % rel)
1016 1016
1017 1017 def debugwalk(ui, repo, *pats, **opts):
1018 1018 """show how files match on given patterns"""
1019 1019 items = list(cmdutil.walk(repo, pats, opts))
1020 1020 if not items:
1021 1021 return
1022 1022 fmt = '%%s %%-%ds %%-%ds %%s' % (
1023 1023 max([len(abs) for (src, abs, rel, exact) in items]),
1024 1024 max([len(rel) for (src, abs, rel, exact) in items]))
1025 1025 for src, abs, rel, exact in items:
1026 1026 line = fmt % (src, abs, rel, exact and 'exact' or '')
1027 1027 ui.write("%s\n" % line.rstrip())
1028 1028
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # Translate the --rev option(s) into a pair of changelog nodes.
    node1, node2 = cmdutil.revpair(repo, opts['rev'])
    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, fns, match=matchfn, opts=diffopts)
1056 1056
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # Pick singular/plural status message up front.
    if len(revs) > 1:
        note = _('exporting patches:\n')
    else:
        note = _('exporting patch:\n')
    ui.note(note)
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1097 1097
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        # Bad user-supplied regexp: report and bail out quietly.
        ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
        return None
    # --print0 switches both the field separator and the record
    # terminator to NUL, xargs-style.
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # One filelog object per filename, created lazily and cached.
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # Yield (linenum, colstart, colend, linetext) for each regexp
        # match in body; linenum is 1-based.
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # One matched line; equality compares text only, so that
        # difflib below diffs on line content across revisions.
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    # matches[rev][fn] -> list of linestate; copies[rev][fn] -> copy source.
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # Yield ('+'/'-', linestate) pairs for lines whose match status
        # changed between the two revisions' match lists.
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev[fn] -> last revision at which fn was seen by the main loop.
    prev = {}
    def display(fn, rev, states, prevstates):
        # Print match output for one file; returns True if anything
        # was written.
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                # Only one output line per (file, rev) pair.
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs emits a ('window'|'add'|'iter', rev, fns) protocol:
    # 'add' collects matches for a revision, 'iter' replays them in
    # display order, 'window' resets per-window state.
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    # File not in this revision's manifest; skip it.
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        # Without --all, stop after the first matching
                        # revision of this file (and its copy source).
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # Flush any files whose last seen state was never displayed.
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    # Unix-style exit code: 0 if something matched, 1 otherwise.
    return (not found and 1) or 0
1261 1261
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    start = None
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    if not branchrevs:
        # No branches/revs given: report repo-wide heads.
        heads = repo.heads(start)
    else:
        heads = []
        seen = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            # Each branch is only processed once.
            if branch in seen:
                continue
            seen.add(branch)
            bheads = repo.branchheads(branch, start)
            if bheads:
                heads.extend(bheads)
                continue
            if branch != branchrev:
                ui.warn(_("no changes on branch %s containing %s are "
                          "reachable from %s\n")
                        % (branch, branchrev, opts['rev']))
            else:
                ui.warn(_("no changes on branch %s are reachable from %s\n")
                        % (branch, opts['rev']))
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1309 1309
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # Accumulates (title, options) pairs; rendered at the very end so
    # the helpers below can contribute to it in any order.
    option_lists = []

    def addglobalopts(aliases):
        # Append either the full global-option table (verbose) or a
        # one-line hint about how to see more.
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # Help for a single command; raises cmdutil.UnknownCommand via
        # findcmd if name does not resolve.
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = cmdutil.findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(select=None):
        # List all commands from the module-level table, optionally
        # filtered by the select predicate.
        h = {}
        cmds = {}
        for c, e in table.items():
            # Table keys look like "name|alias|..."; a "^" prefix marks
            # a command for the short list.
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # Help for a named topic out of help.helptable; keys are
        # '|'-separated alias lists whose last element is the header.
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # Help for an extension: its docstring plus its command table.
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = None
        if not ct:
            ui.status(_('no commands defined\n'))
            return

        ui.status(_('list of commands:\n\n'))
        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        # Try command, then topic, then extension; re-raise the last
        # UnknownCommand only if all three fail.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands:\n\n'))
        else:
            ui.status(_('list of commands:\n\n'))

        helplist()

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # Align descriptions on the widest option column.
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1489 1489
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    # --debug shows full 40-char hashes; otherwise the short form.
    hexfunc = ui.debugflag and hex or short
    # "default" output (hash + tags + branch) when no specific field
    # was requested via --num/--id/--branch/--tags.
    default = not (num or id or branch or tags)
    output = []

    if source:
        # Remote lookup: only the changeset id can be queried remotely.
        source, revs = cmdutil.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # No rev: describe the working directory; a trailing "+" marks
        # uncommitted changes (changed or deleted files).
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        # Explicit local revision.
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # Only show non-default branch names in the summary.
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1555 1555
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    # Locks are recursive now, so take both once here and let the
    # helpers (hg.clean, patch.updatedir, repo.commit, repo.rollback)
    # re-acquire them as needed instead of threading them through.
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                data = patch.extract(ui, file(pf, 'rb'))

            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    # --exact: update to the patch's recorded parent
                    # before applying, so the resulting hash can match.
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # Best effort: adopt the patch's second parent when
                    # the first matches the working dir parent.
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # Record adds/removes even when patching failed partway.
                    files = patch.updatedir(ui, repo, files)
                n = repo.commit(files, message, user, date)
                if opts.get('exact'):
                    if hex(n) != nodeid:
                        # Hash mismatch: the patch did not reproduce the
                        # recorded changeset; undo the commit.
                        repo.rollback()
                        raise util.Abort(_('patch is damaged' +
                                           ' or loses information'))
            finally:
                os.unlink(tmpname)
    finally:
        del wlock, lock
1660 1659
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    if revs:
        # Specific revs require the remote to support name lookup.
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # Nothing to show; remove any stale --bundle file, best effort.
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    # cleanup holds the path of a temporary bundle to delete on exit;
    # it stays None when the user asked to keep the bundle.
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        # other may be a bundlerepo wrapping a temp file; close it
        # before unlinking the bundle.
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1727 1726
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # Apply remote-access options (presumably --ssh/--remotecmd) so an
    # ssh:// destination works — behavior defined in cmdutil.
    cmdutil.setremoteconfig(ui, opts)
    # create=1 makes repository() initialize rather than open.
    hg.repository(ui, dest, create=1)
1742 1741
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0 terminates each name with NUL instead of newline.
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    rev = opts['rev']
    node = None
    if rev:
        node = repo.lookup(rev)

    ret = 1
    walker = cmdutil.walk(repo, pats, opts, node=node,
                          badmatch=util.always, default='relglob')
    for src, abs, rel, exact in walker:
        if src == 'b':
            continue
        # Without a rev, only report tracked files.
        if not node and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            name = os.path.join(repo.root, abs)
        else:
            name = (pats and rel) or abs
        ui.write(name, end)
        ret = 0

    return ret
1783 1782
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # Memoized changeset accessor shared with walkchangerevs.
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    # Highest revision for which rename info may be needed by --copies.
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    # rcache[fn][changerev] -> rename info; ncache[fn][filenode] -> rename
    # info; dcache caches the last manifest delta read.
    rcache = {}
    ncache = {}
    dcache = []
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        # Fall back to the manifest: a full read when this manifest is
        # not a simple delta of its predecessor, otherwise the delta.
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # walkchangerevs protocol: 'add' buffers a revision, 'iter' flushes
    # it in display order.
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                # --date filter on the changeset's timestamp.
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # Every keyword must appear in user, description, or
                # the file list (case-insensitive).
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1905 1904
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    mf = repo.changectx(rev).manifest()
    names = mf.keys()
    names.sort()

    for name in names:
        if ui.debugflag:
            ui.write("%40s " % hex(mf[name]))
        if ui.verbose:
            perm = mf.execf(name) and "755" or "644"
            ui.write("%3s " % perm)
        ui.write("%s\n" % name)
1930 1929
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """
    # The revision may be given positionally or via --rev, not both.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # No revision given: merge with the single other head, if the
        # choice is unambiguous.
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # Merge with whichever head is not the working dir parent.
        if parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1966 1965
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    # Resolve the comparison target: explicit dest, default-push, or default.
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    # Find candidate outgoing roots; bail out early when in sync.
    candidates = repo.findoutgoing(other, force=opts['force'])
    if not candidates:
        ui.status(_("no changes found\n"))
        return 1
    nodes = repo.changelog.nodesbetween(candidates, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in nodes:
        if opts['no_merges']:
            # Skip merge changesets (two non-null parents) on request.
            parents = [p for p in repo.changelog.parents(n) if p != nullid]
            if len(parents) == 2:
                continue
        displayer.show(changenode=n)
1997 1996
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    # Choose the context whose parents are reported: a file context
    # when a single file is named, otherwise the requested or working
    # directory revision.
    if file_:
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        ctx = repo.filectx(files[0], changeid=rev)
    elif rev:
        ctx = repo.changectx(rev)
    else:
        ctx = repo.workingctx()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for parent in ctx.parents():
        n = parent.node()
        if n != nullid:
            displayer.show(changenode=n)
2023 2022
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # No name given: dump every configured path.
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
        return
    # Look for the single requested name.
    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
2043 2042
def postincoming(ui, repo, modheads, optupdate, wasempty):
    # Decide what to do with the working directory after changesets
    # were added (by pull or unbundle).
    if modheads == 0:
        return
    if optupdate:
        if wasempty:
            # Repository was empty before: check out the default branch.
            return hg.update(repo, repo.lookup('default'))
        if modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2058 2057
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    # The source URL may carry a '#rev' fragment selecting revisions.
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    if revs:
        # Pulling specific revisions requires lookup support on the remote.
        if 'lookup' not in other.capabilities:
            raise util.Abort(_("Other repository doesn't support revision lookup, so a rev cannot be specified."))
        revs = [other.lookup(rev) for rev in revs]

    # Remember whether the repo started empty so postincoming can pick
    # the right working-directory update strategy.
    wasempty = repo.changelog.count() == 0
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], wasempty)
2115 2114
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # Resolve the push target: explicit dest, [paths] default-push, or
    # [paths] default.  The URL may carry a '#rev' fragment.
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    # repo.push reports how many heads were pushed; the command's exit
    # status is 1 (True) when nothing was pushed.
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
2156 2155
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    # Commit message from -m/-l style options.
    message = cmdutil.logmessage(opts)

    # Files to commit: matched patterns, plus an optional newline-separated
    # list file given via --files.
    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        files += open(opts['files']).read().splitlines()

    # Explicit parents let importers attach the changeset anywhere.
    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))
2184 2183
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # When nothing was recovered, report failure; otherwise double-check
    # the repaired repository with verify.
    if not repo.recover():
        return 1
    return hg.verify(repo)
2196 2195
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # Classify the matched files; only the first five status lists
    # (modified, added, removed, deleted, unknown) are needed, turned
    # into dicts for O(1) membership tests.
    # (Two dead locals removed here: a 'names' list that was never read,
    # and an 'exact' dict that was immediately shadowed by the loop
    # variable of the walk below.)
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            # A file that was only added is simply forgotten when forced.
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs not in repo.dirstate:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            # --after without an exact name only removes missing files.
            continue
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    # Unlink from disk unless this is a pure --after marking.
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2247 2246
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(False)
    try:
        # First record the copies, then schedule the sources for removal.
        errs, copied = docopy(ui, repo, pats, opts)
        sources = []
        for abs, rel, exact in copied:
            sources.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
        if not opts.get('dry_run'):
            repo.remove(sources, True)
        return errs
    finally:
        del wlock
2275 2274
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    # --date is translated into a revision; the two are exclusive.
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    # Target changeset to revert to; defaults to the working dir parent.
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        # Parent manifest is loaded lazily later, only if needed.
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}          # abs path -> (rel path, exact match?)
    target_only = {}    # files present only in the target revision

    wlock = repo.wlock()
    try:
        # walk dirstate.
        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                                 badmatch=mf.has_key):
            names[abs] = (rel, exact)
            if src == 'b':
                target_only[abs] = True

        # walk target manifest.

        def badmatch(path):
            # Suppress errors for names (or directories of names) that
            # the dirstate walk already collected.
            if path in names:
                return True
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return True
            return False

        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                                 badmatch=badmatch):
            if abs in names or src == 'b':
                continue
            names[abs] = (rel, exact)
            target_only[abs] = True

        # Classify all collected names against the working directory.
        changes = repo.status(match=names.has_key)[:5]
        modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

        # Each action is (list of files, status message template).
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], _('removing %s\n'))
        forget = ([], _('forgetting %s\n'))
        undelete = ([], _('undeleting %s\n'))
        update = {}

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, forget, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            (unknown, add, None, True, False),
            (target_only, add, None, False, False),
            )

        entries = names.items()
        entries.sort()

        for abs, (rel, exact) in entries:
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # Queue the file for the chosen action, optionally saving
                # a .orig backup of the current working copy first.
                xlist[0].append(abs)
                update[abs] = 1
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    ui.status(xlist[1] % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                else:
                    if exact: ui.warn(_('file not managed: %s\n') % rel)
                    break
            else:
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo.changectx(parent).manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if pmf[abs] != mfentry:
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            # Apply the queued dirstate changes around the actual revert.
            for f in forget[0]:
                repo.dirstate.forget(f)
            r = hg.revert(repo, node, update.has_key)
            for f in add[0]:
                repo.dirstate.add(f)
            for f in undelete[0]:
                repo.dirstate.normal(f)
            for f in remove[0]:
                repo.dirstate.remove(f)
            return r
    finally:
        del wlock
2445 2444
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, which
    may lose subsequent dirstate changes.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All the safety caveats are in the help text above; the actual
    # work is delegated to the repository object.
    repo.rollback()
2476 2475
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # Emit the repository root path followed by a newline.
    ui.write(repo.root + "\n")
2483 2482
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: run the SSH wire-protocol server instead of HTTP.
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # Push web-related command line options into the [web] config
    # section of the outermost ui (and the repo ui, if distinct).
    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if repo.ui != parentui:
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    # Small adapter so cmdutil.service can daemonize init/run separately.
    class service:
        def init(self):
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            # Report the listening address; omit the port when it is 80.
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/\n') %
                          (self.httpd.addr, self.httpd.port))
            else:
                ui.status(_('listening at http://%s/\n') % self.httpd.addr)

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2535 2534
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    # Ignored and clean files are only computed when -i/-c/-A ask for them.
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=all or opts['ignored'],
                               list_clean=all or opts['clean'])]

    # (option name, status letter, file list) in display order.
    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    # --print0 terminates entries with NUL (for xargs -0 style use).
    end = opts['print0'] and '\0' or '\n'

    # Show only the explicitly requested categories, or the default set.
    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        # --no-status drops the leading status letter.
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % repo.pathto(f, cwd))
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                # Show the copy source for copied/renamed files.
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write('  %s%s' % (repo.pathto(copied, cwd), end))
2598 2597
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # These names have special meaning and can never be tags.
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        # Positional revision argument is the deprecated syntax.
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # Removing a tag is done by retagging it to the null revision.
        if not name in repo.tags():
            raise util.Abort(_('tag %s does not exist') % name)
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2647 2646
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    # --debug shows full hashes, otherwise the short form.
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            # Compute the hash first so the except clause below can
            # still display it when the node is not in the changelog.
            # (Also avoids computing hexfunc(n) twice, as the original did.)
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            r = "    ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            # Pad tag names to a 30-column field (locale-aware width).
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s\n" % (t, spaces, r))
2670 2669
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # The tip revision number is count() - 1, i.e. nullrev + count().
    displayer = cmdutil.show_changeset(ui, repo, opts)
    tiprev = nullrev + repo.changelog.count()
    displayer.show(tiprev)
2677 2676
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames
    # Remember whether the repo started empty so postincoming can pick
    # the right working-directory update strategy.
    # (Removed an unused 'result = None' local that was never read.)
    wasempty = repo.changelog.count() == 0
    for fname in fnames:
        # Local bundle files are opened directly; anything else is
        # treated as a URL.
        if os.path.exists(fname):
            f = open(fname, "rb")
        else:
            f = urllib.urlopen(fname)
        gen = changegroup.readbundle(f, fname)
        modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)

    # modheads from the last bundle decides whether to update or warn.
    return postincoming(ui, repo, modheads, opts['update'], wasempty)
2696 2695
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # The revision may be given positionally or via --rev, not both.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not rev:
        rev = node

    # --date resolves to a revision and is exclusive with --rev.
    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    # --clean discards local changes; otherwise do a regular update.
    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2728 2727
2729 2728 def verify(ui, repo):
2730 2729 """verify the integrity of the repository
2731 2730
2732 2731 Verify the integrity of the current repository.
2733 2732
2734 2733 This will perform an extensive check of the repository's
2735 2734 integrity, validating the hashes and checksums of each entry in
2736 2735 the changelog, manifest, and tracked files, as well as the
2737 2736 integrity of their crosslinks and indices.
2738 2737 """
2739 2738 return hg.verify(repo)
2740 2739
2741 2740 def version_(ui):
2742 2741 """output version and copyright information"""
2743 2742 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2744 2743 % version.get_version())
2745 2744 ui.status(_(
2746 2745 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2747 2746 "This is free software; see the source for copying conditions. "
2748 2747 "There is NO\nwarranty; "
2749 2748 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2750 2749 ))
2751 2750
2752 2751 # Command options and aliases are listed here, alphabetically
2753 2752
2754 2753 globalopts = [
2755 2754 ('R', 'repository', '',
2756 2755 _('repository root directory or symbolic path name')),
2757 2756 ('', 'cwd', '', _('change working directory')),
2758 2757 ('y', 'noninteractive', None,
2759 2758 _('do not prompt, assume \'yes\' for any required answers')),
2760 2759 ('q', 'quiet', None, _('suppress output')),
2761 2760 ('v', 'verbose', None, _('enable additional output')),
2762 2761 ('', 'config', [], _('set/override config option')),
2763 2762 ('', 'debug', None, _('enable debugging output')),
2764 2763 ('', 'debugger', None, _('start debugger')),
2765 2764 ('', 'encoding', util._encoding, _('set the charset encoding')),
2766 2765 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2767 2766 ('', 'lsprof', None, _('print improved command execution profile')),
2768 2767 ('', 'traceback', None, _('print traceback on exception')),
2769 2768 ('', 'time', None, _('time how long the command takes')),
2770 2769 ('', 'profile', None, _('print command execution profile')),
2771 2770 ('', 'version', None, _('output version information and exit')),
2772 2771 ('h', 'help', None, _('display help and exit')),
2773 2772 ]
2774 2773
2775 2774 dryrunopts = [('n', 'dry-run', None,
2776 2775 _('do not perform actions, just print output'))]
2777 2776
2778 2777 remoteopts = [
2779 2778 ('e', 'ssh', '', _('specify ssh command to use')),
2780 2779 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2781 2780 ]
2782 2781
2783 2782 walkopts = [
2784 2783 ('I', 'include', [], _('include names matching the given patterns')),
2785 2784 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2786 2785 ]
2787 2786
2788 2787 commitopts = [
2789 2788 ('m', 'message', '', _('use <text> as commit message')),
2790 2789 ('l', 'logfile', '', _('read commit message from <file>')),
2791 2790 ]
2792 2791
2793 2792 table = {
2794 2793 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2795 2794 "addremove":
2796 2795 (addremove,
2797 2796 [('s', 'similarity', '',
2798 2797 _('guess renamed files by similarity (0<=s<=100)')),
2799 2798 ] + walkopts + dryrunopts,
2800 2799 _('hg addremove [OPTION]... [FILE]...')),
2801 2800 "^annotate":
2802 2801 (annotate,
2803 2802 [('r', 'rev', '', _('annotate the specified revision')),
2804 2803 ('f', 'follow', None, _('follow file copies and renames')),
2805 2804 ('a', 'text', None, _('treat all files as text')),
2806 2805 ('u', 'user', None, _('list the author')),
2807 2806 ('d', 'date', None, _('list the date')),
2808 2807 ('n', 'number', None, _('list the revision number (default)')),
2809 2808 ('c', 'changeset', None, _('list the changeset')),
2810 2809 ('l', 'line-number', None,
2811 2810 _('show line number at the first appearance'))
2812 2811 ] + walkopts,
2813 2812 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2814 2813 "archive":
2815 2814 (archive,
2816 2815 [('', 'no-decode', None, _('do not pass files through decoders')),
2817 2816 ('p', 'prefix', '', _('directory prefix for files in archive')),
2818 2817 ('r', 'rev', '', _('revision to distribute')),
2819 2818 ('t', 'type', '', _('type of distribution to create')),
2820 2819 ] + walkopts,
2821 2820 _('hg archive [OPTION]... DEST')),
2822 2821 "backout":
2823 2822 (backout,
2824 2823 [('', 'merge', None,
2825 2824 _('merge with old dirstate parent after backout')),
2826 2825 ('d', 'date', '', _('record datecode as commit date')),
2827 2826 ('', 'parent', '', _('parent to choose when backing out merge')),
2828 2827 ('u', 'user', '', _('record user as committer')),
2829 2828 ('r', 'rev', '', _('revision to backout')),
2830 2829 ] + walkopts + commitopts,
2831 2830 _('hg backout [OPTION]... [-r] REV')),
2832 2831 "branch":
2833 2832 (branch,
2834 2833 [('f', 'force', None,
2835 2834 _('set branch name even if it shadows an existing branch'))],
2836 2835 _('hg branch [NAME]')),
2837 2836 "branches":
2838 2837 (branches,
2839 2838 [('a', 'active', False,
2840 2839 _('show only branches that have unmerged heads'))],
2841 2840 _('hg branches [-a]')),
2842 2841 "bundle":
2843 2842 (bundle,
2844 2843 [('f', 'force', None,
2845 2844 _('run even when remote repository is unrelated')),
2846 2845 ('r', 'rev', [],
2847 2846 _('a changeset you would like to bundle')),
2848 2847 ('', 'base', [],
2849 2848 _('a base changeset to specify instead of a destination')),
2850 2849 ] + remoteopts,
2851 2850 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2852 2851 "cat":
2853 2852 (cat,
2854 2853 [('o', 'output', '', _('print output to file with formatted name')),
2855 2854 ('r', 'rev', '', _('print the given revision')),
2856 2855 ] + walkopts,
2857 2856 _('hg cat [OPTION]... FILE...')),
2858 2857 "^clone":
2859 2858 (clone,
2860 2859 [('U', 'noupdate', None, _('do not update the new working directory')),
2861 2860 ('r', 'rev', [],
2862 2861 _('a changeset you would like to have after cloning')),
2863 2862 ('', 'pull', None, _('use pull protocol to copy metadata')),
2864 2863 ('', 'uncompressed', None,
2865 2864 _('use uncompressed transfer (fast over LAN)')),
2866 2865 ] + remoteopts,
2867 2866 _('hg clone [OPTION]... SOURCE [DEST]')),
2868 2867 "^commit|ci":
2869 2868 (commit,
2870 2869 [('A', 'addremove', None,
2871 2870 _('mark new/missing files as added/removed before committing')),
2872 2871 ('d', 'date', '', _('record datecode as commit date')),
2873 2872 ('u', 'user', '', _('record user as commiter')),
2874 2873 ] + walkopts + commitopts,
2875 2874 _('hg commit [OPTION]... [FILE]...')),
2876 2875 "copy|cp":
2877 2876 (copy,
2878 2877 [('A', 'after', None, _('record a copy that has already occurred')),
2879 2878 ('f', 'force', None,
2880 2879 _('forcibly copy over an existing managed file')),
2881 2880 ] + walkopts + dryrunopts,
2882 2881 _('hg copy [OPTION]... [SOURCE]... DEST')),
2883 2882 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2884 2883 "debugcomplete":
2885 2884 (debugcomplete,
2886 2885 [('o', 'options', None, _('show the command options'))],
2887 2886 _('debugcomplete [-o] CMD')),
2888 2887 "debuginstall": (debuginstall, [], _('debuginstall')),
2889 2888 "debugrebuildstate":
2890 2889 (debugrebuildstate,
2891 2890 [('r', 'rev', '', _('revision to rebuild to'))],
2892 2891 _('debugrebuildstate [-r REV] [REV]')),
2893 2892 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2894 2893 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2895 2894 "debugstate": (debugstate, [], _('debugstate')),
2896 2895 "debugdate":
2897 2896 (debugdate,
2898 2897 [('e', 'extended', None, _('try extended date formats'))],
2899 2898 _('debugdate [-e] DATE [RANGE]')),
2900 2899 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2901 2900 "debugindex": (debugindex, [], _('debugindex FILE')),
2902 2901 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2903 2902 "debugrename":
2904 2903 (debugrename,
2905 2904 [('r', 'rev', '', _('revision to debug'))],
2906 2905 _('debugrename [-r REV] FILE')),
2907 2906 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2908 2907 "^diff":
2909 2908 (diff,
2910 2909 [('r', 'rev', [], _('revision')),
2911 2910 ('a', 'text', None, _('treat all files as text')),
2912 2911 ('p', 'show-function', None,
2913 2912 _('show which function each change is in')),
2914 2913 ('g', 'git', None, _('use git extended diff format')),
2915 2914 ('', 'nodates', None, _("don't include dates in diff headers")),
2916 2915 ('w', 'ignore-all-space', None,
2917 2916 _('ignore white space when comparing lines')),
2918 2917 ('b', 'ignore-space-change', None,
2919 2918 _('ignore changes in the amount of white space')),
2920 2919 ('B', 'ignore-blank-lines', None,
2921 2920 _('ignore changes whose lines are all blank')),
2922 2921 ] + walkopts,
2923 2922 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2924 2923 "^export":
2925 2924 (export,
2926 2925 [('o', 'output', '', _('print output to file with formatted name')),
2927 2926 ('a', 'text', None, _('treat all files as text')),
2928 2927 ('g', 'git', None, _('use git extended diff format')),
2929 2928 ('', 'nodates', None, _("don't include dates in diff headers")),
2930 2929 ('', 'switch-parent', None, _('diff against the second parent'))],
2931 2930 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2932 2931 "grep":
2933 2932 (grep,
2934 2933 [('0', 'print0', None, _('end fields with NUL')),
2935 2934 ('', 'all', None, _('print all revisions that match')),
2936 2935 ('f', 'follow', None,
2937 2936 _('follow changeset history, or file history across copies and renames')),
2938 2937 ('i', 'ignore-case', None, _('ignore case when matching')),
2939 2938 ('l', 'files-with-matches', None,
2940 2939 _('print only filenames and revs that match')),
2941 2940 ('n', 'line-number', None, _('print matching line numbers')),
2942 2941 ('r', 'rev', [], _('search in given revision range')),
2943 2942 ('u', 'user', None, _('print user who committed change')),
2944 2943 ] + walkopts,
2945 2944 _('hg grep [OPTION]... PATTERN [FILE]...')),
2946 2945 "heads":
2947 2946 (heads,
2948 2947 [('', 'style', '', _('display using template map file')),
2949 2948 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2950 2949 ('', 'template', '', _('display with template'))],
2951 2950 _('hg heads [-r REV] [REV]...')),
2952 2951 "help": (help_, [], _('hg help [COMMAND]')),
2953 2952 "identify|id":
2954 2953 (identify,
2955 2954 [('r', 'rev', '', _('identify the specified rev')),
2956 2955 ('n', 'num', None, _('show local revision number')),
2957 2956 ('i', 'id', None, _('show global revision id')),
2958 2957 ('b', 'branch', None, _('show branch')),
2959 2958 ('t', 'tags', None, _('show tags'))],
2960 2959 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2961 2960 "import|patch":
2962 2961 (import_,
2963 2962 [('p', 'strip', 1,
2964 2963 _('directory strip option for patch. This has the same\n'
2965 2964 'meaning as the corresponding patch option')),
2966 2965 ('b', 'base', '', _('base path')),
2967 2966 ('f', 'force', None,
2968 2967 _('skip check for outstanding uncommitted changes')),
2969 2968 ('', 'exact', None,
2970 2969 _('apply patch to the nodes from which it was generated')),
2971 2970 ('', 'import-branch', None,
2972 2971 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2973 2972 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2974 2973 "incoming|in": (incoming,
2975 2974 [('M', 'no-merges', None, _('do not show merges')),
2976 2975 ('f', 'force', None,
2977 2976 _('run even when remote repository is unrelated')),
2978 2977 ('', 'style', '', _('display using template map file')),
2979 2978 ('n', 'newest-first', None, _('show newest record first')),
2980 2979 ('', 'bundle', '', _('file to store the bundles into')),
2981 2980 ('p', 'patch', None, _('show patch')),
2982 2981 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2983 2982 ('', 'template', '', _('display with template')),
2984 2983 ] + remoteopts,
2985 2984 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2986 2985 ' [--bundle FILENAME] [SOURCE]')),
2987 2986 "^init":
2988 2987 (init,
2989 2988 remoteopts,
2990 2989 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2991 2990 "locate":
2992 2991 (locate,
2993 2992 [('r', 'rev', '', _('search the repository as it stood at rev')),
2994 2993 ('0', 'print0', None,
2995 2994 _('end filenames with NUL, for use with xargs')),
2996 2995 ('f', 'fullpath', None,
2997 2996 _('print complete paths from the filesystem root')),
2998 2997 ] + walkopts,
2999 2998 _('hg locate [OPTION]... [PATTERN]...')),
3000 2999 "^log|history":
3001 3000 (log,
3002 3001 [('f', 'follow', None,
3003 3002 _('follow changeset history, or file history across copies and renames')),
3004 3003 ('', 'follow-first', None,
3005 3004 _('only follow the first parent of merge changesets')),
3006 3005 ('d', 'date', '', _('show revs matching date spec')),
3007 3006 ('C', 'copies', None, _('show copied files')),
3008 3007 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3009 3008 ('l', 'limit', '', _('limit number of changes displayed')),
3010 3009 ('r', 'rev', [], _('show the specified revision or range')),
3011 3010 ('', 'removed', None, _('include revs where files were removed')),
3012 3011 ('M', 'no-merges', None, _('do not show merges')),
3013 3012 ('', 'style', '', _('display using template map file')),
3014 3013 ('m', 'only-merges', None, _('show only merges')),
3015 3014 ('p', 'patch', None, _('show patch')),
3016 3015 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3017 3016 ('', 'template', '', _('display with template')),
3018 3017 ] + walkopts,
3019 3018 _('hg log [OPTION]... [FILE]')),
3020 3019 "manifest": (manifest, [], _('hg manifest [REV]')),
3021 3020 "^merge":
3022 3021 (merge,
3023 3022 [('f', 'force', None, _('force a merge with outstanding changes')),
3024 3023 ('r', 'rev', '', _('revision to merge')),
3025 3024 ],
3026 3025 _('hg merge [-f] [[-r] REV]')),
3027 3026 "outgoing|out": (outgoing,
3028 3027 [('M', 'no-merges', None, _('do not show merges')),
3029 3028 ('f', 'force', None,
3030 3029 _('run even when remote repository is unrelated')),
3031 3030 ('p', 'patch', None, _('show patch')),
3032 3031 ('', 'style', '', _('display using template map file')),
3033 3032 ('r', 'rev', [], _('a specific revision you would like to push')),
3034 3033 ('n', 'newest-first', None, _('show newest record first')),
3035 3034 ('', 'template', '', _('display with template')),
3036 3035 ] + remoteopts,
3037 3036 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3038 3037 "^parents":
3039 3038 (parents,
3040 3039 [('r', 'rev', '', _('show parents from the specified rev')),
3041 3040 ('', 'style', '', _('display using template map file')),
3042 3041 ('', 'template', '', _('display with template'))],
3043 3042 _('hg parents [-r REV] [FILE]')),
3044 3043 "paths": (paths, [], _('hg paths [NAME]')),
3045 3044 "^pull":
3046 3045 (pull,
3047 3046 [('u', 'update', None,
3048 3047 _('update to new tip if changesets were pulled')),
3049 3048 ('f', 'force', None,
3050 3049 _('run even when remote repository is unrelated')),
3051 3050 ('r', 'rev', [],
3052 3051 _('a specific revision up to which you would like to pull')),
3053 3052 ] + remoteopts,
3054 3053 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3055 3054 "^push":
3056 3055 (push,
3057 3056 [('f', 'force', None, _('force push')),
3058 3057 ('r', 'rev', [], _('a specific revision you would like to push')),
3059 3058 ] + remoteopts,
3060 3059 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3061 3060 "debugrawcommit|rawcommit":
3062 3061 (rawcommit,
3063 3062 [('p', 'parent', [], _('parent')),
3064 3063 ('d', 'date', '', _('date code')),
3065 3064 ('u', 'user', '', _('user')),
3066 3065 ('F', 'files', '', _('file list'))
3067 3066 ] + commitopts,
3068 3067 _('hg debugrawcommit [OPTION]... [FILE]...')),
3069 3068 "recover": (recover, [], _('hg recover')),
3070 3069 "^remove|rm":
3071 3070 (remove,
3072 3071 [('A', 'after', None, _('record remove that has already occurred')),
3073 3072 ('f', 'force', None, _('remove file even if modified')),
3074 3073 ] + walkopts,
3075 3074 _('hg remove [OPTION]... FILE...')),
3076 3075 "rename|mv":
3077 3076 (rename,
3078 3077 [('A', 'after', None, _('record a rename that has already occurred')),
3079 3078 ('f', 'force', None,
3080 3079 _('forcibly copy over an existing managed file')),
3081 3080 ] + walkopts + dryrunopts,
3082 3081 _('hg rename [OPTION]... SOURCE... DEST')),
3083 3082 "^revert":
3084 3083 (revert,
3085 3084 [('a', 'all', None, _('revert all changes when no arguments given')),
3086 3085 ('d', 'date', '', _('tipmost revision matching date')),
3087 3086 ('r', 'rev', '', _('revision to revert to')),
3088 3087 ('', 'no-backup', None, _('do not save backup copies of files')),
3089 3088 ] + walkopts + dryrunopts,
3090 3089 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3091 3090 "rollback": (rollback, [], _('hg rollback')),
3092 3091 "root": (root, [], _('hg root')),
3093 3092 "showconfig|debugconfig":
3094 3093 (showconfig,
3095 3094 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3096 3095 _('showconfig [-u] [NAME]...')),
3097 3096 "^serve":
3098 3097 (serve,
3099 3098 [('A', 'accesslog', '', _('name of access log file to write to')),
3100 3099 ('d', 'daemon', None, _('run server in background')),
3101 3100 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3102 3101 ('E', 'errorlog', '', _('name of error log file to write to')),
3103 3102 ('p', 'port', 0, _('port to use (default: 8000)')),
3104 3103 ('a', 'address', '', _('address to use')),
3105 3104 ('n', 'name', '',
3106 3105 _('name to show in web pages (default: working dir)')),
3107 3106 ('', 'webdir-conf', '', _('name of the webdir config file'
3108 3107 ' (serve more than one repo)')),
3109 3108 ('', 'pid-file', '', _('name of file to write process ID to')),
3110 3109 ('', 'stdio', None, _('for remote clients')),
3111 3110 ('t', 'templates', '', _('web templates to use')),
3112 3111 ('', 'style', '', _('template style to use')),
3113 3112 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3114 3113 ('', 'certificate', '', _('SSL certificate file'))],
3115 3114 _('hg serve [OPTION]...')),
3116 3115 "^status|st":
3117 3116 (status,
3118 3117 [('A', 'all', None, _('show status of all files')),
3119 3118 ('m', 'modified', None, _('show only modified files')),
3120 3119 ('a', 'added', None, _('show only added files')),
3121 3120 ('r', 'removed', None, _('show only removed files')),
3122 3121 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3123 3122 ('c', 'clean', None, _('show only files without changes')),
3124 3123 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3125 3124 ('i', 'ignored', None, _('show only ignored files')),
3126 3125 ('n', 'no-status', None, _('hide status prefix')),
3127 3126 ('C', 'copies', None, _('show source of copied files')),
3128 3127 ('0', 'print0', None,
3129 3128 _('end filenames with NUL, for use with xargs')),
3130 3129 ('', 'rev', [], _('show difference from revision')),
3131 3130 ] + walkopts,
3132 3131 _('hg status [OPTION]... [FILE]...')),
3133 3132 "tag":
3134 3133 (tag,
3135 3134 [('f', 'force', None, _('replace existing tag')),
3136 3135 ('l', 'local', None, _('make the tag local')),
3137 3136 ('m', 'message', '', _('message for tag commit log entry')),
3138 3137 ('d', 'date', '', _('record datecode as commit date')),
3139 3138 ('u', 'user', '', _('record user as commiter')),
3140 3139 ('r', 'rev', '', _('revision to tag')),
3141 3140 ('', 'remove', None, _('remove a tag'))],
3142 3141 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3143 3142 "tags": (tags, [], _('hg tags')),
3144 3143 "tip":
3145 3144 (tip,
3146 3145 [('', 'style', '', _('display using template map file')),
3147 3146 ('p', 'patch', None, _('show patch')),
3148 3147 ('', 'template', '', _('display with template'))],
3149 3148 _('hg tip [-p]')),
3150 3149 "unbundle":
3151 3150 (unbundle,
3152 3151 [('u', 'update', None,
3153 3152 _('update to new tip if changesets were unbundled'))],
3154 3153 _('hg unbundle [-u] FILE...')),
3155 3154 "^update|up|checkout|co":
3156 3155 (update,
3157 3156 [('C', 'clean', None, _('overwrite locally modified files')),
3158 3157 ('d', 'date', '', _('tipmost revision matching date')),
3159 3158 ('r', 'rev', '', _('revision'))],
3160 3159 _('hg update [-C] [-d DATE] [[-r] REV]')),
3161 3160 "verify": (verify, [], _('hg verify')),
3162 3161 "version": (version_, [], _('hg version')),
3163 3162 }
3164 3163
3165 3164 extensions.commandtable = table
3166 3165
3167 3166 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3168 3167 " debugindex debugindexdot debugdate debuginstall")
3169 3168 optionalrepo = ("paths serve showconfig")
3170 3169
3171 3170 def dispatch(args, argv0=None):
3172 3171 try:
3173 3172 u = ui.ui(traceback='--traceback' in args)
3174 3173 except util.Abort, inst:
3175 3174 sys.stderr.write(_("abort: %s\n") % inst)
3176 3175 return -1
3177 3176 return cmdutil.runcatch(u, args, argv0=argv0)
3178 3177
3179 3178 def run():
3180 3179 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
@@ -1,281 +1,281 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import *
10 10 from repo import *
11 11 from i18n import _
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 13 import errno, lock, os, shutil, util, cmdutil, extensions
14 14 import merge as _merge
15 15 import verify as _verify
16 16
17 17 def _local(path):
18 18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 19 bundlerepo or localrepo)
20 20
21 21 schemes = {
22 22 'bundle': bundlerepo,
23 23 'file': _local,
24 24 'http': httprepo,
25 25 'https': httprepo,
26 26 'ssh': sshrepo,
27 27 'static-http': statichttprepo,
28 28 }
29 29
30 30 def _lookup(path):
31 31 scheme = 'file'
32 32 if path:
33 33 c = path.find(':')
34 34 if c > 0:
35 35 scheme = path[:c]
36 36 thing = schemes.get(scheme) or schemes['file']
37 37 try:
38 38 return thing(path)
39 39 except TypeError:
40 40 return thing
41 41
42 42 def islocal(repo):
43 43 '''return true if repo or path is local'''
44 44 if isinstance(repo, str):
45 45 try:
46 46 return _lookup(repo).islocal(repo)
47 47 except AttributeError:
48 48 return False
49 49 return repo.local()
50 50
51 51 def repository(ui, path='', create=False):
52 52 """return a repository object for the specified path"""
53 53 repo = _lookup(path).instance(ui, path, create)
54 54 ui = getattr(repo, "ui", ui)
55 55 for hook in extensions.setuphooks:
56 56 hook(ui, repo)
57 57 return repo
58 58
59 59 def defaultdest(source):
60 60 '''return default destination of clone if none is given'''
61 61 return os.path.basename(os.path.normpath(source))
62 62
63 63 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
64 64 stream=False):
65 65 """Make a copy of an existing repository.
66 66
67 67 Create a copy of an existing repository in a new directory. The
68 68 source and destination are URLs, as passed to the repository
69 69 function. Returns a pair of repository objects, the source and
70 70 newly created destination.
71 71
72 72 The location of the source is added to the new repository's
73 73 .hg/hgrc file, as the default to be used for future pulls and
74 74 pushes.
75 75
76 76 If an exception is raised, the partly cloned/updated destination
77 77 repository will be deleted.
78 78
79 79 Arguments:
80 80
81 81 source: repository object or URL
82 82
83 83 dest: URL of destination repository to create (defaults to base
84 84 name of source repository)
85 85
86 86 pull: always pull from source repository, even in local case
87 87
88 88 stream: stream raw data uncompressed from repository (fast over
89 89 LAN, slow over WAN)
90 90
91 91 rev: revision to clone up to (implies pull=True)
92 92
93 93 update: update working directory after clone completes, if
94 94 destination is local repository
95 95 """
96 96
97 97 origsource = source
98 98 source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
99 99
100 100 if isinstance(source, str):
101 101 src_repo = repository(ui, source)
102 102 else:
103 103 src_repo = source
104 104 source = src_repo.url()
105 105
106 106 if dest is None:
107 107 dest = defaultdest(source)
108 108 ui.status(_("destination directory: %s\n") % dest)
109 109
110 110 def localpath(path):
111 111 if path.startswith('file://'):
112 112 return path[7:]
113 113 if path.startswith('file:'):
114 114 return path[5:]
115 115 return path
116 116
117 117 dest = localpath(dest)
118 118 source = localpath(source)
119 119
120 120 if os.path.exists(dest):
121 121 raise util.Abort(_("destination '%s' already exists") % dest)
122 122
123 123 class DirCleanup(object):
124 124 def __init__(self, dir_):
125 125 self.rmtree = shutil.rmtree
126 126 self.dir_ = dir_
127 127 def close(self):
128 128 self.dir_ = None
129 129 def __del__(self):
130 130 if self.dir_:
131 131 self.rmtree(self.dir_, True)
132 132
133 133 src_lock = dest_lock = dir_cleanup = None
134 134 try:
135 135 if islocal(dest):
136 136 dir_cleanup = DirCleanup(dest)
137 137
138 138 abspath = origsource
139 139 copy = False
140 140 if src_repo.local() and islocal(dest):
141 141 abspath = os.path.abspath(origsource)
142 142 copy = not pull and not rev
143 143
144 144 if copy:
145 145 try:
146 146 # we use a lock here because if we race with commit, we
147 147 # can end up with extra data in the cloned revlogs that's
148 148 # not pointed to by changesets, thus causing verify to
149 149 # fail
150 150 src_lock = src_repo.lock()
151 151 except lock.LockException:
152 152 copy = False
153 153
154 154 if copy:
155 155 def force_copy(src, dst):
156 156 try:
157 157 util.copyfiles(src, dst)
158 158 except OSError, inst:
159 159 if inst.errno != errno.ENOENT:
160 160 raise
161 161
162 162 src_store = os.path.realpath(src_repo.spath)
163 163 if not os.path.exists(dest):
164 164 os.mkdir(dest)
165 165 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
166 166 os.mkdir(dest_path)
167 167 if src_repo.spath != src_repo.path:
168 168 dest_store = os.path.join(dest_path, "store")
169 169 os.mkdir(dest_store)
170 170 else:
171 171 dest_store = dest_path
172 172 # copy the requires file
173 173 force_copy(src_repo.join("requires"),
174 174 os.path.join(dest_path, "requires"))
175 175 # we lock here to avoid premature writing to the target
176 176 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
177 177
178 178 files = ("data",
179 179 "00manifest.d", "00manifest.i",
180 180 "00changelog.d", "00changelog.i")
181 181 for f in files:
182 182 src = os.path.join(src_store, f)
183 183 dst = os.path.join(dest_store, f)
184 184 force_copy(src, dst)
185 185
186 186 # we need to re-init the repo after manually copying the data
187 187 # into it
188 188 dest_repo = repository(ui, dest)
189 189
190 190 else:
191 191 dest_repo = repository(ui, dest, create=True)
192 192
193 193 revs = None
194 194 if rev:
195 195 if 'lookup' not in src_repo.capabilities:
196 196 raise util.Abort(_("src repository does not support revision "
197 197 "lookup and so doesn't support clone by "
198 198 "revision"))
199 199 revs = [src_repo.lookup(r) for r in rev]
200 200
201 201 if dest_repo.local():
202 202 dest_repo.clone(src_repo, heads=revs, stream=stream)
203 203 elif src_repo.local():
204 204 src_repo.push(dest_repo, revs=revs)
205 205 else:
206 206 raise util.Abort(_("clone from remote to remote not supported"))
207 207
208 208 if dest_repo.local():
209 209 fp = dest_repo.opener("hgrc", "w", text=True)
210 210 fp.write("[paths]\n")
211 211 fp.write("default = %s\n" % abspath)
212 212 fp.close()
213 213
214 214 if update:
215 215 try:
216 216 checkout = dest_repo.lookup("default")
217 217 except:
218 218 checkout = dest_repo.changelog.tip()
219 219 _update(dest_repo, checkout)
220 220 if dir_cleanup:
221 221 dir_cleanup.close()
222 222
223 223 return src_repo, dest_repo
224 224 finally:
225 225 del src_lock, dest_lock, dir_cleanup
226 226
227 227 def _showstats(repo, stats):
228 228 stats = ((stats[0], _("updated")),
229 229 (stats[1], _("merged")),
230 230 (stats[2], _("removed")),
231 231 (stats[3], _("unresolved")))
232 232 note = ", ".join([_("%d files %s") % s for s in stats])
233 233 repo.ui.status("%s\n" % note)
234 234
235 235 def _update(repo, node): return update(repo, node)
236 236
237 237 def update(repo, node):
238 238 """update the working directory to node, merging linear changes"""
239 239 pl = repo.parents()
240 stats = _merge.update(repo, node, False, False, None, None)
240 stats = _merge.update(repo, node, False, False, None)
241 241 _showstats(repo, stats)
242 242 if stats[3]:
243 243 repo.ui.status(_("There are unresolved merges with"
244 244 " locally modified files.\n"))
245 245 if stats[1]:
246 246 repo.ui.status(_("You can finish the partial merge using:\n"))
247 247 else:
248 248 repo.ui.status(_("You can redo the full merge using:\n"))
249 249 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
250 250 repo.ui.status(_(" hg update %s\n hg update %s\n")
251 251 % (pl[0].rev(), repo.changectx(node).rev()))
252 252 return stats[3]
253 253
254 def clean(repo, node, wlock=None, show_stats=True):
254 def clean(repo, node, show_stats=True):
255 255 """forcibly switch the working directory to node, clobbering changes"""
256 stats = _merge.update(repo, node, False, True, None, wlock)
256 stats = _merge.update(repo, node, False, True, None)
257 257 if show_stats: _showstats(repo, stats)
258 258 return stats[3]
259 259
260 def merge(repo, node, force=None, remind=True, wlock=None):
260 def merge(repo, node, force=None, remind=True):
261 261 """branch merge with node, resolving changes"""
262 stats = _merge.update(repo, node, True, force, False, wlock)
262 stats = _merge.update(repo, node, True, force, False)
263 263 _showstats(repo, stats)
264 264 if stats[3]:
265 265 pl = repo.parents()
266 266 repo.ui.status(_("There are unresolved merges,"
267 267 " you can redo the full merge using:\n"
268 268 " hg update -C %s\n"
269 269 " hg merge %s\n")
270 270 % (pl[0].rev(), pl[1].rev()))
271 271 elif remind:
272 272 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
273 273 return stats[3]
274 274
275 def revert(repo, node, choose, wlock):
275 def revert(repo, node, choose):
276 276 """revert changes to revision in node without updating dirstate"""
277 return _merge.update(repo, node, False, True, choose, wlock)[3]
277 return _merge.update(repo, node, False, True, choose)[3]
278 278
279 279 def verify(repo):
280 280 """verify the consistency of a repository"""
281 281 return _verify.verify(repo)
@@ -1,1978 +1,1981 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util, extensions, hook
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = ('lookup', 'changegroupsubset')
17 17 supported = ('revlogv1', 'store')
18 18
19 19 def __init__(self, parentui, path=None, create=0):
20 20 repo.repository.__init__(self)
21 21 self.path = path
22 22 self.root = os.path.realpath(path)
23 23 self.path = os.path.join(self.root, ".hg")
24 24 self.origroot = path
25 25 self.opener = util.opener(self.path)
26 26 self.wopener = util.opener(self.root)
27 27
28 28 if not os.path.isdir(self.path):
29 29 if create:
30 30 if not os.path.exists(path):
31 31 os.mkdir(path)
32 32 os.mkdir(self.path)
33 33 requirements = ["revlogv1"]
34 34 if parentui.configbool('format', 'usestore', True):
35 35 os.mkdir(os.path.join(self.path, "store"))
36 36 requirements.append("store")
37 37 # create an invalid changelog
38 38 self.opener("00changelog.i", "a").write(
39 39 '\0\0\0\2' # represents revlogv2
40 40 ' dummy changelog to prevent using the old repo layout'
41 41 )
42 42 reqfile = self.opener("requires", "w")
43 43 for r in requirements:
44 44 reqfile.write("%s\n" % r)
45 45 reqfile.close()
46 46 else:
47 47 raise repo.RepoError(_("repository %s not found") % path)
48 48 elif create:
49 49 raise repo.RepoError(_("repository %s already exists") % path)
50 50 else:
51 51 # find requirements
52 52 try:
53 53 requirements = self.opener("requires").read().splitlines()
54 54 except IOError, inst:
55 55 if inst.errno != errno.ENOENT:
56 56 raise
57 57 requirements = []
58 58 # check them
59 59 for r in requirements:
60 60 if r not in self.supported:
61 61 raise repo.RepoError(_("requirement '%s' not supported") % r)
62 62
63 63 # setup store
64 64 if "store" in requirements:
65 65 self.encodefn = util.encodefilename
66 66 self.decodefn = util.decodefilename
67 67 self.spath = os.path.join(self.path, "store")
68 68 else:
69 69 self.encodefn = lambda x: x
70 70 self.decodefn = lambda x: x
71 71 self.spath = self.path
72 72 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
73 73
74 74 self.ui = ui.ui(parentui=parentui)
75 75 try:
76 76 self.ui.readconfig(self.join("hgrc"), self.root)
77 77 extensions.loadall(self.ui)
78 78 except IOError:
79 79 pass
80 80
81 81 self.tagscache = None
82 82 self.branchcache = None
83 83 self.nodetagscache = None
84 84 self.filterpats = {}
85 85 self._transref = self._lockref = self._wlockref = None
86 86
87 87 def __getattr__(self, name):
88 88 if name == 'changelog':
89 89 self.changelog = changelog.changelog(self.sopener)
90 90 self.sopener.defversion = self.changelog.version
91 91 return self.changelog
92 92 if name == 'manifest':
93 93 self.changelog
94 94 self.manifest = manifest.manifest(self.sopener)
95 95 return self.manifest
96 96 if name == 'dirstate':
97 97 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 98 return self.dirstate
99 99 else:
100 100 raise AttributeError, name
101 101
102 102 def url(self):
103 103 return 'file:' + self.root
104 104
105 105 def hook(self, name, throw=False, **args):
106 106 return hook.hook(self.ui, self, name, throw, **args)
107 107
108 108 tag_disallowed = ':\r\n'
109 109
110 110 def _tag(self, name, node, message, local, user, date, parent=None,
111 111 extra={}):
112 112 use_dirstate = parent is None
113 113
114 114 for c in self.tag_disallowed:
115 115 if c in name:
116 116 raise util.Abort(_('%r cannot be used in a tag name') % c)
117 117
118 118 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
119 119
120 120 def writetag(fp, name, munge, prevtags):
121 121 if prevtags and prevtags[-1] != '\n':
122 122 fp.write('\n')
123 123 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
124 124 fp.close()
125 125 self.hook('tag', node=hex(node), tag=name, local=local)
126 126
127 127 prevtags = ''
128 128 if local:
129 129 try:
130 130 fp = self.opener('localtags', 'r+')
131 131 except IOError, err:
132 132 fp = self.opener('localtags', 'a')
133 133 else:
134 134 prevtags = fp.read()
135 135
136 136 # local tags are stored in the current charset
137 137 writetag(fp, name, None, prevtags)
138 138 return
139 139
140 140 if use_dirstate:
141 141 try:
142 142 fp = self.wfile('.hgtags', 'rb+')
143 143 except IOError, err:
144 144 fp = self.wfile('.hgtags', 'ab')
145 145 else:
146 146 prevtags = fp.read()
147 147 else:
148 148 try:
149 149 prevtags = self.filectx('.hgtags', parent).data()
150 150 except revlog.LookupError:
151 151 pass
152 152 fp = self.wfile('.hgtags', 'wb')
153 153
154 154 # committed tags are stored in UTF-8
155 155 writetag(fp, name, util.fromlocal, prevtags)
156 156
157 157 if use_dirstate and '.hgtags' not in self.dirstate:
158 158 self.add(['.hgtags'])
159 159
160 160 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
161 161 extra=extra)
162 162
163 163 self.hook('tag', node=hex(node), tag=name, local=local)
164 164
165 165 return tagnode
166 166
167 167 def tag(self, name, node, message, local, user, date):
168 168 '''tag a revision with a symbolic name.
169 169
170 170 if local is True, the tag is stored in a per-repository file.
171 171 otherwise, it is stored in the .hgtags file, and a new
172 172 changeset is committed with the change.
173 173
174 174 keyword arguments:
175 175
176 176 local: whether to store tag in non-version-controlled file
177 177 (default False)
178 178
179 179 message: commit message to use if committing
180 180
181 181 user: name of user to use if committing
182 182
183 183 date: date tuple to use if committing'''
184 184
185 185 for x in self.status()[:5]:
186 186 if '.hgtags' in x:
187 187 raise util.Abort(_('working copy of .hgtags is changed '
188 188 '(please commit .hgtags manually)'))
189 189
190 190
191 191 self._tag(name, node, message, local, user, date)
192 192
193 193 def tags(self):
194 194 '''return a mapping of tag to node'''
195 195 if self.tagscache:
196 196 return self.tagscache
197 197
198 198 globaltags = {}
199 199
200 200 def readtags(lines, fn):
201 201 filetags = {}
202 202 count = 0
203 203
204 204 def warn(msg):
205 205 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
206 206
207 207 for l in lines:
208 208 count += 1
209 209 if not l:
210 210 continue
211 211 s = l.split(" ", 1)
212 212 if len(s) != 2:
213 213 warn(_("cannot parse entry"))
214 214 continue
215 215 node, key = s
216 216 key = util.tolocal(key.strip()) # stored in UTF-8
217 217 try:
218 218 bin_n = bin(node)
219 219 except TypeError:
220 220 warn(_("node '%s' is not well formed") % node)
221 221 continue
222 222 if bin_n not in self.changelog.nodemap:
223 223 warn(_("tag '%s' refers to unknown node") % key)
224 224 continue
225 225
226 226 h = []
227 227 if key in filetags:
228 228 n, h = filetags[key]
229 229 h.append(n)
230 230 filetags[key] = (bin_n, h)
231 231
232 232 for k, nh in filetags.items():
233 233 if k not in globaltags:
234 234 globaltags[k] = nh
235 235 continue
236 236 # we prefer the global tag if:
237 237 # it supercedes us OR
238 238 # mutual supercedes and it has a higher rank
239 239 # otherwise we win because we're tip-most
240 240 an, ah = nh
241 241 bn, bh = globaltags[k]
242 242 if (bn != an and an in bh and
243 243 (bn not in ah or len(bh) > len(ah))):
244 244 an = bn
245 245 ah.extend([n for n in bh if n not in ah])
246 246 globaltags[k] = an, ah
247 247
248 248 # read the tags file from each head, ending with the tip
249 249 f = None
250 250 for rev, node, fnode in self._hgtagsnodes():
251 251 f = (f and f.filectx(fnode) or
252 252 self.filectx('.hgtags', fileid=fnode))
253 253 readtags(f.data().splitlines(), f)
254 254
255 255 try:
256 256 data = util.fromlocal(self.opener("localtags").read())
257 257 # localtags are stored in the local character set
258 258 # while the internal tag table is stored in UTF-8
259 259 readtags(data.splitlines(), "localtags")
260 260 except IOError:
261 261 pass
262 262
263 263 self.tagscache = {}
264 264 for k,nh in globaltags.items():
265 265 n = nh[0]
266 266 if n != nullid:
267 267 self.tagscache[k] = n
268 268 self.tagscache['tip'] = self.changelog.tip()
269 269
270 270 return self.tagscache
271 271
272 272 def _hgtagsnodes(self):
273 273 heads = self.heads()
274 274 heads.reverse()
275 275 last = {}
276 276 ret = []
277 277 for node in heads:
278 278 c = self.changectx(node)
279 279 rev = c.rev()
280 280 try:
281 281 fnode = c.filenode('.hgtags')
282 282 except revlog.LookupError:
283 283 continue
284 284 ret.append((rev, node, fnode))
285 285 if fnode in last:
286 286 ret[last[fnode]] = None
287 287 last[fnode] = len(ret) - 1
288 288 return [item for item in ret if item]
289 289
290 290 def tagslist(self):
291 291 '''return a list of tags ordered by revision'''
292 292 l = []
293 293 for t, n in self.tags().items():
294 294 try:
295 295 r = self.changelog.rev(n)
296 296 except:
297 297 r = -2 # sort to the beginning of the list if unknown
298 298 l.append((r, t, n))
299 299 l.sort()
300 300 return [(t, n) for r, t, n in l]
301 301
302 302 def nodetags(self, node):
303 303 '''return the tags associated with a node'''
304 304 if not self.nodetagscache:
305 305 self.nodetagscache = {}
306 306 for t, n in self.tags().items():
307 307 self.nodetagscache.setdefault(n, []).append(t)
308 308 return self.nodetagscache.get(node, [])
309 309
310 310 def _branchtags(self):
311 311 partial, last, lrev = self._readbranchcache()
312 312
313 313 tiprev = self.changelog.count() - 1
314 314 if lrev != tiprev:
315 315 self._updatebranchcache(partial, lrev+1, tiprev+1)
316 316 self._writebranchcache(partial, self.changelog.tip(), tiprev)
317 317
318 318 return partial
319 319
320 320 def branchtags(self):
321 321 if self.branchcache is not None:
322 322 return self.branchcache
323 323
324 324 self.branchcache = {} # avoid recursion in changectx
325 325 partial = self._branchtags()
326 326
327 327 # the branch cache is stored on disk as UTF-8, but in the local
328 328 # charset internally
329 329 for k, v in partial.items():
330 330 self.branchcache[util.tolocal(k)] = v
331 331 return self.branchcache
332 332
333 333 def _readbranchcache(self):
334 334 partial = {}
335 335 try:
336 336 f = self.opener("branch.cache")
337 337 lines = f.read().split('\n')
338 338 f.close()
339 339 except (IOError, OSError):
340 340 return {}, nullid, nullrev
341 341
342 342 try:
343 343 last, lrev = lines.pop(0).split(" ", 1)
344 344 last, lrev = bin(last), int(lrev)
345 345 if not (lrev < self.changelog.count() and
346 346 self.changelog.node(lrev) == last): # sanity check
347 347 # invalidate the cache
348 348 raise ValueError('Invalid branch cache: unknown tip')
349 349 for l in lines:
350 350 if not l: continue
351 351 node, label = l.split(" ", 1)
352 352 partial[label.strip()] = bin(node)
353 353 except (KeyboardInterrupt, util.SignalInterrupt):
354 354 raise
355 355 except Exception, inst:
356 356 if self.ui.debugflag:
357 357 self.ui.warn(str(inst), '\n')
358 358 partial, last, lrev = {}, nullid, nullrev
359 359 return partial, last, lrev
360 360
361 361 def _writebranchcache(self, branches, tip, tiprev):
362 362 try:
363 363 f = self.opener("branch.cache", "w", atomictemp=True)
364 364 f.write("%s %s\n" % (hex(tip), tiprev))
365 365 for label, node in branches.iteritems():
366 366 f.write("%s %s\n" % (hex(node), label))
367 367 f.rename()
368 368 except (IOError, OSError):
369 369 pass
370 370
371 371 def _updatebranchcache(self, partial, start, end):
372 372 for r in xrange(start, end):
373 373 c = self.changectx(r)
374 374 b = c.branch()
375 375 partial[b] = c.node()
376 376
377 377 def lookup(self, key):
378 378 if key == '.':
379 379 key, second = self.dirstate.parents()
380 380 if key == nullid:
381 381 raise repo.RepoError(_("no revision checked out"))
382 382 if second != nullid:
383 383 self.ui.warn(_("warning: working directory has two parents, "
384 384 "tag '.' uses the first\n"))
385 385 elif key == 'null':
386 386 return nullid
387 387 n = self.changelog._match(key)
388 388 if n:
389 389 return n
390 390 if key in self.tags():
391 391 return self.tags()[key]
392 392 if key in self.branchtags():
393 393 return self.branchtags()[key]
394 394 n = self.changelog._partialmatch(key)
395 395 if n:
396 396 return n
397 397 try:
398 398 if len(key) == 20:
399 399 key = hex(key)
400 400 except:
401 401 pass
402 402 raise repo.RepoError(_("unknown revision '%s'") % key)
403 403
404 404 def dev(self):
405 405 return os.lstat(self.path).st_dev
406 406
407 407 def local(self):
408 408 return True
409 409
410 410 def join(self, f):
411 411 return os.path.join(self.path, f)
412 412
413 413 def sjoin(self, f):
414 414 f = self.encodefn(f)
415 415 return os.path.join(self.spath, f)
416 416
417 417 def wjoin(self, f):
418 418 return os.path.join(self.root, f)
419 419
420 420 def file(self, f):
421 421 if f[0] == '/':
422 422 f = f[1:]
423 423 return filelog.filelog(self.sopener, f)
424 424
425 425 def changectx(self, changeid=None):
426 426 return context.changectx(self, changeid)
427 427
428 428 def workingctx(self):
429 429 return context.workingctx(self)
430 430
431 431 def parents(self, changeid=None):
432 432 '''
433 433 get list of changectxs for parents of changeid or working directory
434 434 '''
435 435 if changeid is None:
436 436 pl = self.dirstate.parents()
437 437 else:
438 438 n = self.changelog.lookup(changeid)
439 439 pl = self.changelog.parents(n)
440 440 if pl[1] == nullid:
441 441 return [self.changectx(pl[0])]
442 442 return [self.changectx(pl[0]), self.changectx(pl[1])]
443 443
444 444 def filectx(self, path, changeid=None, fileid=None):
445 445 """changeid can be a changeset revision, node, or tag.
446 446 fileid can be a file revision or node."""
447 447 return context.filectx(self, path, changeid, fileid)
448 448
449 449 def getcwd(self):
450 450 return self.dirstate.getcwd()
451 451
452 452 def pathto(self, f, cwd=None):
453 453 return self.dirstate.pathto(f, cwd)
454 454
455 455 def wfile(self, f, mode='r'):
456 456 return self.wopener(f, mode)
457 457
458 458 def _link(self, f):
459 459 return os.path.islink(self.wjoin(f))
460 460
461 461 def _filter(self, filter, filename, data):
462 462 if filter not in self.filterpats:
463 463 l = []
464 464 for pat, cmd in self.ui.configitems(filter):
465 465 mf = util.matcher(self.root, "", [pat], [], [])[1]
466 466 l.append((mf, cmd))
467 467 self.filterpats[filter] = l
468 468
469 469 for mf, cmd in self.filterpats[filter]:
470 470 if mf(filename):
471 471 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
472 472 data = util.filter(data, cmd)
473 473 break
474 474
475 475 return data
476 476
477 477 def wread(self, filename):
478 478 if self._link(filename):
479 479 data = os.readlink(self.wjoin(filename))
480 480 else:
481 481 data = self.wopener(filename, 'r').read()
482 482 return self._filter("encode", filename, data)
483 483
484 484 def wwrite(self, filename, data, flags):
485 485 data = self._filter("decode", filename, data)
486 486 if "l" in flags:
487 487 self.wopener.symlink(data, filename)
488 488 else:
489 489 try:
490 490 if self._link(filename):
491 491 os.unlink(self.wjoin(filename))
492 492 except OSError:
493 493 pass
494 494 self.wopener(filename, 'w').write(data)
495 495 util.set_exec(self.wjoin(filename), "x" in flags)
496 496
497 497 def wwritedata(self, filename, data):
498 498 return self._filter("decode", filename, data)
499 499
500 500 def transaction(self):
501 501 if self._transref and self._transref():
502 502 return self._transref().nest()
503 503
504 504 # save dirstate for rollback
505 505 try:
506 506 ds = self.opener("dirstate").read()
507 507 except IOError:
508 508 ds = ""
509 509 self.opener("journal.dirstate", "w").write(ds)
510 510
511 511 renames = [(self.sjoin("journal"), self.sjoin("undo")),
512 512 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
513 513 tr = transaction.transaction(self.ui.warn, self.sopener,
514 514 self.sjoin("journal"),
515 515 aftertrans(renames))
516 516 self._transref = weakref.ref(tr)
517 517 return tr
518 518
519 519 def recover(self):
520 520 l = self.lock()
521 521 try:
522 522 if os.path.exists(self.sjoin("journal")):
523 523 self.ui.status(_("rolling back interrupted transaction\n"))
524 524 transaction.rollback(self.sopener, self.sjoin("journal"))
525 525 self.invalidate()
526 526 return True
527 527 else:
528 528 self.ui.warn(_("no interrupted transaction available\n"))
529 529 return False
530 530 finally:
531 531 del l
532 532
533 def rollback(self, wlock=None, lock=None):
533 def rollback(self):
534 wlock = lock = None
534 535 try:
535 if not wlock:
536 wlock = self.wlock()
537 if not lock:
538 lock = self.lock()
536 wlock = self.wlock()
537 lock = self.lock()
539 538 if os.path.exists(self.sjoin("undo")):
540 539 self.ui.status(_("rolling back last transaction\n"))
541 540 transaction.rollback(self.sopener, self.sjoin("undo"))
542 541 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
543 542 self.invalidate()
544 543 self.dirstate.invalidate()
545 544 else:
546 545 self.ui.warn(_("no rollback information available\n"))
547 546 finally:
548 547 del wlock, lock
549 548
550 549 def invalidate(self):
551 550 for a in "changelog manifest".split():
552 551 if hasattr(self, a):
553 552 self.__delattr__(a)
554 553 self.tagscache = None
555 554 self.nodetagscache = None
556 555
557 556 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
558 557 try:
559 558 l = lock.lock(lockname, 0, releasefn, desc=desc)
560 559 except lock.LockHeld, inst:
561 560 if not wait:
562 561 raise
563 562 self.ui.warn(_("waiting for lock on %s held by %r\n") %
564 563 (desc, inst.locker))
565 564 # default to 600 seconds timeout
566 565 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
567 566 releasefn, desc=desc)
568 567 if acquirefn:
569 568 acquirefn()
570 569 return l
571 570
572 571 def lock(self, wait=True):
573 return self._lock(self.sjoin("lock"), wait, None, self.invalidate,
574 _('repository %s') % self.origroot)
572 if self._lockref and self._lockref():
573 return self._lockref()
574
575 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
576 _('repository %s') % self.origroot)
577 self._lockref = weakref.ref(l)
578 return l
575 579
576 580 def wlock(self, wait=True):
577 return self._lock(self.join("wlock"), wait, self.dirstate.write,
578 self.dirstate.invalidate,
579 _('working directory of %s') % self.origroot)
581 if self._wlockref and self._wlockref():
582 return self._wlockref()
583
584 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
585 self.dirstate.invalidate, _('working directory of %s') %
586 self.origroot)
587 self._wlockref = weakref.ref(l)
588 return l
580 589
581 590 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
582 591 """
583 592 commit an individual file as part of a larger transaction
584 593 """
585 594
586 595 t = self.wread(fn)
587 596 fl = self.file(fn)
588 597 fp1 = manifest1.get(fn, nullid)
589 598 fp2 = manifest2.get(fn, nullid)
590 599
591 600 meta = {}
592 601 cp = self.dirstate.copied(fn)
593 602 if cp:
594 603 # Mark the new revision of this file as a copy of another
595 604 # file. This copy data will effectively act as a parent
596 605 # of this new revision. If this is a merge, the first
597 606 # parent will be the nullid (meaning "look up the copy data")
598 607 # and the second one will be the other parent. For example:
599 608 #
600 609 # 0 --- 1 --- 3 rev1 changes file foo
601 610 # \ / rev2 renames foo to bar and changes it
602 611 # \- 2 -/ rev3 should have bar with all changes and
603 612 # should record that bar descends from
604 613 # bar in rev2 and foo in rev1
605 614 #
606 615 # this allows this merge to succeed:
607 616 #
608 617 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
609 618 # \ / merging rev3 and rev4 should use bar@rev2
610 619 # \- 2 --- 4 as the merge base
611 620 #
612 621 meta["copy"] = cp
613 622 if not manifest2: # not a branch merge
614 623 meta["copyrev"] = hex(manifest1.get(cp, nullid))
615 624 fp2 = nullid
616 625 elif fp2 != nullid: # copied on remote side
617 626 meta["copyrev"] = hex(manifest1.get(cp, nullid))
618 627 elif fp1 != nullid: # copied on local side, reversed
619 628 meta["copyrev"] = hex(manifest2.get(cp))
620 629 fp2 = fp1
621 630 else: # directory rename
622 631 meta["copyrev"] = hex(manifest1.get(cp, nullid))
623 632 self.ui.debug(_(" %s: copy %s:%s\n") %
624 633 (fn, cp, meta["copyrev"]))
625 634 fp1 = nullid
626 635 elif fp2 != nullid:
627 636 # is one parent an ancestor of the other?
628 637 fpa = fl.ancestor(fp1, fp2)
629 638 if fpa == fp1:
630 639 fp1, fp2 = fp2, nullid
631 640 elif fpa == fp2:
632 641 fp2 = nullid
633 642
634 643 # is the file unmodified from the parent? report existing entry
635 644 if fp2 == nullid and not fl.cmp(fp1, t):
636 645 return fp1
637 646
638 647 changelist.append(fn)
639 648 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
640 649
641 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
650 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
642 651 if p1 is None:
643 652 p1, p2 = self.dirstate.parents()
644 653 return self.commit(files=files, text=text, user=user, date=date,
645 p1=p1, p2=p2, wlock=wlock, extra=extra)
654 p1=p1, p2=p2, extra=extra)
646 655
647 656 def commit(self, files=None, text="", user=None, date=None,
648 match=util.always, force=False, lock=None, wlock=None,
649 force_editor=False, p1=None, p2=None, extra={}):
650 tr = None
657 match=util.always, force=False, force_editor=False,
658 p1=None, p2=None, extra={}):
659 wlock = lock = tr = None
651 660 try:
652 661 commit = []
653 662 remove = []
654 663 changed = []
655 664 use_dirstate = (p1 is None) # not rawcommit
656 665 extra = extra.copy()
657 666
658 667 if use_dirstate:
659 668 if files:
660 669 for f in files:
661 670 s = self.dirstate[f]
662 671 if s in 'nma':
663 672 commit.append(f)
664 673 elif s == 'r':
665 674 remove.append(f)
666 675 else:
667 676 self.ui.warn(_("%s not tracked!\n") % f)
668 677 else:
669 678 changes = self.status(match=match)[:5]
670 679 modified, added, removed, deleted, unknown = changes
671 680 commit = modified + added
672 681 remove = removed
673 682 else:
674 683 commit = files
675 684
676 685 if use_dirstate:
677 686 p1, p2 = self.dirstate.parents()
678 687 update_dirstate = True
679 688 else:
680 689 p1, p2 = p1, p2 or nullid
681 690 update_dirstate = (self.dirstate.parents()[0] == p1)
682 691
683 692 c1 = self.changelog.read(p1)
684 693 c2 = self.changelog.read(p2)
685 694 m1 = self.manifest.read(c1[0]).copy()
686 695 m2 = self.manifest.read(c2[0])
687 696
688 697 if use_dirstate:
689 698 branchname = self.workingctx().branch()
690 699 try:
691 700 branchname = branchname.decode('UTF-8').encode('UTF-8')
692 701 except UnicodeDecodeError:
693 702 raise util.Abort(_('branch name not in UTF-8!'))
694 703 else:
695 704 branchname = ""
696 705
697 706 if use_dirstate:
698 707 oldname = c1[5].get("branch") # stored in UTF-8
699 708 if (not commit and not remove and not force and p2 == nullid
700 709 and branchname == oldname):
701 710 self.ui.status(_("nothing changed\n"))
702 711 return None
703 712
704 713 xp1 = hex(p1)
705 714 if p2 == nullid: xp2 = ''
706 715 else: xp2 = hex(p2)
707 716
708 717 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
709 718
710 if not wlock:
711 wlock = self.wlock()
712 if not lock:
713 lock = self.lock()
719 wlock = self.wlock()
720 lock = self.lock()
714 721 tr = self.transaction()
715 722
716 723 # check in files
717 724 new = {}
718 725 linkrev = self.changelog.count()
719 726 commit.sort()
720 727 is_exec = util.execfunc(self.root, m1.execf)
721 728 is_link = util.linkfunc(self.root, m1.linkf)
722 729 for f in commit:
723 730 self.ui.note(f + "\n")
724 731 try:
725 732 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
726 733 new_exec = is_exec(f)
727 734 new_link = is_link(f)
728 735 if not changed or changed[-1] != f:
729 736 # mention the file in the changelog if some
730 737 # flag changed, even if there was no content
731 738 # change.
732 739 old_exec = m1.execf(f)
733 740 old_link = m1.linkf(f)
734 741 if old_exec != new_exec or old_link != new_link:
735 742 changed.append(f)
736 743 m1.set(f, new_exec, new_link)
737 744 except (OSError, IOError):
738 745 if use_dirstate:
739 746 self.ui.warn(_("trouble committing %s!\n") % f)
740 747 raise
741 748 else:
742 749 remove.append(f)
743 750
744 751 # update manifest
745 752 m1.update(new)
746 753 remove.sort()
747 754 removed = []
748 755
749 756 for f in remove:
750 757 if f in m1:
751 758 del m1[f]
752 759 removed.append(f)
753 760 elif f in m2:
754 761 removed.append(f)
755 762 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
756 763 (new, removed))
757 764
758 765 # add changeset
759 766 new = new.keys()
760 767 new.sort()
761 768
762 769 user = user or self.ui.username()
763 770 if not text or force_editor:
764 771 edittext = []
765 772 if text:
766 773 edittext.append(text)
767 774 edittext.append("")
768 775 edittext.append("HG: user: %s" % user)
769 776 if p2 != nullid:
770 777 edittext.append("HG: branch merge")
771 778 if branchname:
772 779 edittext.append("HG: branch %s" % util.tolocal(branchname))
773 780 edittext.extend(["HG: changed %s" % f for f in changed])
774 781 edittext.extend(["HG: removed %s" % f for f in removed])
775 782 if not changed and not remove:
776 783 edittext.append("HG: no files changed")
777 784 edittext.append("")
778 785 # run editor in the repository root
779 786 olddir = os.getcwd()
780 787 os.chdir(self.root)
781 788 text = self.ui.edit("\n".join(edittext), user)
782 789 os.chdir(olddir)
783 790
784 791 lines = [line.rstrip() for line in text.rstrip().splitlines()]
785 792 while lines and not lines[0]:
786 793 del lines[0]
787 794 if not lines:
788 795 return None
789 796 text = '\n'.join(lines)
790 797 if branchname:
791 798 extra["branch"] = branchname
792 799 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
793 800 user, date, extra)
794 801 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
795 802 parent2=xp2)
796 803 tr.close()
797 804
798 805 if self.branchcache and "branch" in extra:
799 806 self.branchcache[util.tolocal(extra["branch"])] = n
800 807
801 808 if use_dirstate or update_dirstate:
802 809 self.dirstate.setparents(n)
803 810 if use_dirstate:
804 811 for f in new:
805 812 self.dirstate.normal(f)
806 813 for f in removed:
807 814 self.dirstate.forget(f)
808 815
809 816 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
810 817 return n
811 818 finally:
812 819 del lock, wlock, tr
813 820
814 821 def walk(self, node=None, files=[], match=util.always, badmatch=None):
815 822 '''
816 823 walk recursively through the directory tree or a given
817 824 changeset, finding all files matched by the match
818 825 function
819 826
820 827 results are yielded in a tuple (src, filename), where src
821 828 is one of:
822 829 'f' the file was found in the directory tree
823 830 'm' the file was only in the dirstate and not in the tree
824 831 'b' file was not found and matched badmatch
825 832 '''
826 833
827 834 if node:
828 835 fdict = dict.fromkeys(files)
829 836 # for dirstate.walk, files=['.'] means "walk the whole tree".
830 837 # follow that here, too
831 838 fdict.pop('.', None)
832 839 mdict = self.manifest.read(self.changelog.read(node)[0])
833 840 mfiles = mdict.keys()
834 841 mfiles.sort()
835 842 for fn in mfiles:
836 843 for ffn in fdict:
837 844 # match if the file is the exact name or a directory
838 845 if ffn == fn or fn.startswith("%s/" % ffn):
839 846 del fdict[ffn]
840 847 break
841 848 if match(fn):
842 849 yield 'm', fn
843 850 ffiles = fdict.keys()
844 851 ffiles.sort()
845 852 for fn in ffiles:
846 853 if badmatch and badmatch(fn):
847 854 if match(fn):
848 855 yield 'b', fn
849 856 else:
850 857 self.ui.warn(_('%s: No such file in rev %s\n')
851 858 % (self.pathto(fn), short(node)))
852 859 else:
853 860 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
854 861 yield src, fn
855 862
856 863 def status(self, node1=None, node2=None, files=[], match=util.always,
857 wlock=None, list_ignored=False, list_clean=False):
864 list_ignored=False, list_clean=False):
858 865 """return status of files between two nodes or node and working directory
859 866
860 867 If node1 is None, use the first dirstate parent instead.
861 868 If node2 is None, compare node1 with working directory.
862 869 """
863 870
864 871 def fcmp(fn, getnode):
865 872 t1 = self.wread(fn)
866 873 return self.file(fn).cmp(getnode(fn), t1)
867 874
868 875 def mfmatches(node):
869 876 change = self.changelog.read(node)
870 877 mf = self.manifest.read(change[0]).copy()
871 878 for fn in mf.keys():
872 879 if not match(fn):
873 880 del mf[fn]
874 881 return mf
875 882
876 883 modified, added, removed, deleted, unknown = [], [], [], [], []
877 884 ignored, clean = [], []
878 885
879 886 compareworking = False
880 887 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
881 888 compareworking = True
882 889
883 890 if not compareworking:
884 891 # read the manifest from node1 before the manifest from node2,
885 892 # so that we'll hit the manifest cache if we're going through
886 893 # all the revisions in parent->child order.
887 894 mf1 = mfmatches(node1)
888 895
889 896 # are we comparing the working directory?
890 897 if not node2:
891 898 (lookup, modified, added, removed, deleted, unknown,
892 899 ignored, clean) = self.dirstate.status(files, match,
893 900 list_ignored, list_clean)
894 901
895 902 # are we comparing working dir against its parent?
896 903 if compareworking:
897 904 if lookup:
898 905 fixup = []
899 906 # do a full compare of any files that might have changed
900 907 ctx = self.changectx()
901 908 for f in lookup:
902 909 if f not in ctx or ctx[f].cmp(self.wread(f)):
903 910 modified.append(f)
904 911 else:
905 912 fixup.append(f)
906 913 if list_clean:
907 914 clean.append(f)
908 915
909 916 # update dirstate for files that are actually clean
910 917 if fixup:
911 fixlock = wlock
918 wlock = None
912 919 try:
913 if not fixlock:
914 try:
915 fixlock = self.wlock(False)
916 except lock.LockException:
917 pass
918 if fixlock:
920 try:
921 wlock = self.wlock(False)
922 except lock.LockException:
923 pass
924 if wlock:
919 925 for f in fixup:
920 926 self.dirstate.normal(f)
921 927 finally:
922 del fixlock
928 del wlock
923 929 else:
924 930 # we are comparing working dir against non-parent
925 931 # generate a pseudo-manifest for the working dir
926 932 # XXX: create it in dirstate.py ?
927 933 mf2 = mfmatches(self.dirstate.parents()[0])
928 934 is_exec = util.execfunc(self.root, mf2.execf)
929 935 is_link = util.linkfunc(self.root, mf2.linkf)
930 936 for f in lookup + modified + added:
931 937 mf2[f] = ""
932 938 mf2.set(f, is_exec(f), is_link(f))
933 939 for f in removed:
934 940 if f in mf2:
935 941 del mf2[f]
936 942
937 943 else:
938 944 # we are comparing two revisions
939 945 mf2 = mfmatches(node2)
940 946
941 947 if not compareworking:
942 948 # flush lists from dirstate before comparing manifests
943 949 modified, added, clean = [], [], []
944 950
945 951 # make sure to sort the files so we talk to the disk in a
946 952 # reasonable order
947 953 mf2keys = mf2.keys()
948 954 mf2keys.sort()
949 955 getnode = lambda fn: mf1.get(fn, nullid)
950 956 for fn in mf2keys:
951 957 if mf1.has_key(fn):
952 958 if (mf1.flags(fn) != mf2.flags(fn) or
953 959 (mf1[fn] != mf2[fn] and
954 960 (mf2[fn] != "" or fcmp(fn, getnode)))):
955 961 modified.append(fn)
956 962 elif list_clean:
957 963 clean.append(fn)
958 964 del mf1[fn]
959 965 else:
960 966 added.append(fn)
961 967
962 968 removed = mf1.keys()
963 969
964 970 # sort and return results:
965 971 for l in modified, added, removed, deleted, unknown, ignored, clean:
966 972 l.sort()
967 973 return (modified, added, removed, deleted, unknown, ignored, clean)
968 974
969 def add(self, list, wlock=None):
975 def add(self, list):
976 wlock = self.wlock()
970 977 try:
971 if not wlock:
972 wlock = self.wlock()
973 978 for f in list:
974 979 p = self.wjoin(f)
975 980 try:
976 981 st = os.lstat(p)
977 982 except:
978 983 self.ui.warn(_("%s does not exist!\n") % f)
979 984 continue
980 985 if st.st_size > 10000000:
981 986 self.ui.warn(_("%s: files over 10MB may cause memory and"
982 987 " performance problems\n"
983 988 "(use 'hg revert %s' to unadd the file)\n")
984 989 % (f, f))
985 990 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
986 991 self.ui.warn(_("%s not added: only files and symlinks "
987 992 "supported currently\n") % f)
988 993 elif self.dirstate[f] in 'an':
989 994 self.ui.warn(_("%s already tracked!\n") % f)
990 995 else:
991 996 self.dirstate.add(f)
992 997 finally:
993 998 del wlock
994 999
995 def forget(self, list, wlock=None):
1000 def forget(self, list):
1001 wlock = self.wlock()
996 1002 try:
997 if not wlock:
998 wlock = self.wlock()
999 1003 for f in list:
1000 1004 if self.dirstate[f] != 'a':
1001 1005 self.ui.warn(_("%s not added!\n") % f)
1002 1006 else:
1003 1007 self.dirstate.forget(f)
1004 1008 finally:
1005 1009 del wlock
1006 1010
1007 def remove(self, list, unlink=False, wlock=None):
1011 def remove(self, list, unlink=False):
1012 wlock = None
1008 1013 try:
1009 1014 if unlink:
1010 1015 for f in list:
1011 1016 try:
1012 1017 util.unlink(self.wjoin(f))
1013 1018 except OSError, inst:
1014 1019 if inst.errno != errno.ENOENT:
1015 1020 raise
1016 if not wlock:
1017 wlock = self.wlock()
1021 wlock = self.wlock()
1018 1022 for f in list:
1019 1023 if unlink and os.path.exists(self.wjoin(f)):
1020 1024 self.ui.warn(_("%s still exists!\n") % f)
1021 1025 elif self.dirstate[f] == 'a':
1022 1026 self.dirstate.forget(f)
1023 1027 elif f not in self.dirstate:
1024 1028 self.ui.warn(_("%s not tracked!\n") % f)
1025 1029 else:
1026 1030 self.dirstate.remove(f)
1027 1031 finally:
1028 1032 del wlock
1029 1033
1030 def undelete(self, list, wlock=None):
1034 def undelete(self, list):
1035 wlock = None
1031 1036 try:
1032 1037 p = self.dirstate.parents()[0]
1033 1038 mn = self.changelog.read(p)[0]
1034 1039 m = self.manifest.read(mn)
1035 if not wlock:
1036 wlock = self.wlock()
1040 wlock = self.wlock()
1037 1041 for f in list:
1038 1042 if self.dirstate[f] != 'r':
1039 1043 self.ui.warn("%s not removed!\n" % f)
1040 1044 else:
1041 1045 t = self.file(f).read(m[f])
1042 1046 self.wwrite(f, t, m.flags(f))
1043 1047 self.dirstate.normal(f)
1044 1048 finally:
1045 1049 del wlock
1046 1050
1047 def copy(self, source, dest, wlock=None):
1051 def copy(self, source, dest):
1052 wlock = None
1048 1053 try:
1049 1054 p = self.wjoin(dest)
1050 1055 if not (os.path.exists(p) or os.path.islink(p)):
1051 1056 self.ui.warn(_("%s does not exist!\n") % dest)
1052 1057 elif not (os.path.isfile(p) or os.path.islink(p)):
1053 1058 self.ui.warn(_("copy failed: %s is not a file or a "
1054 1059 "symbolic link\n") % dest)
1055 1060 else:
1056 if not wlock:
1057 wlock = self.wlock()
1061 wlock = self.wlock()
1058 1062 if dest not in self.dirstate:
1059 1063 self.dirstate.add(dest)
1060 1064 self.dirstate.copy(source, dest)
1061 1065 finally:
1062 1066 del wlock
1063 1067
1064 1068 def heads(self, start=None):
1065 1069 heads = self.changelog.heads(start)
1066 1070 # sort the output in rev descending order
1067 1071 heads = [(-self.changelog.rev(h), h) for h in heads]
1068 1072 heads.sort()
1069 1073 return [n for (r, n) in heads]
1070 1074
1071 1075 def branchheads(self, branch, start=None):
1072 1076 branches = self.branchtags()
1073 1077 if branch not in branches:
1074 1078 return []
1075 1079 # The basic algorithm is this:
1076 1080 #
1077 1081 # Start from the branch tip since there are no later revisions that can
1078 1082 # possibly be in this branch, and the tip is a guaranteed head.
1079 1083 #
1080 1084 # Remember the tip's parents as the first ancestors, since these by
1081 1085 # definition are not heads.
1082 1086 #
1083 1087 # Step backwards from the brach tip through all the revisions. We are
1084 1088 # guaranteed by the rules of Mercurial that we will now be visiting the
1085 1089 # nodes in reverse topological order (children before parents).
1086 1090 #
1087 1091 # If a revision is one of the ancestors of a head then we can toss it
1088 1092 # out of the ancestors set (we've already found it and won't be
1089 1093 # visiting it again) and put its parents in the ancestors set.
1090 1094 #
1091 1095 # Otherwise, if a revision is in the branch it's another head, since it
1092 1096 # wasn't in the ancestor list of an existing head. So add it to the
1093 1097 # head list, and add its parents to the ancestor list.
1094 1098 #
1095 1099 # If it is not in the branch ignore it.
1096 1100 #
1097 1101 # Once we have a list of heads, use nodesbetween to filter out all the
1098 1102 # heads that cannot be reached from startrev. There may be a more
1099 1103 # efficient way to do this as part of the previous algorithm.
1100 1104
1101 1105 set = util.set
1102 1106 heads = [self.changelog.rev(branches[branch])]
1103 1107 # Don't care if ancestors contains nullrev or not.
1104 1108 ancestors = set(self.changelog.parentrevs(heads[0]))
1105 1109 for rev in xrange(heads[0] - 1, nullrev, -1):
1106 1110 if rev in ancestors:
1107 1111 ancestors.update(self.changelog.parentrevs(rev))
1108 1112 ancestors.remove(rev)
1109 1113 elif self.changectx(rev).branch() == branch:
1110 1114 heads.append(rev)
1111 1115 ancestors.update(self.changelog.parentrevs(rev))
1112 1116 heads = [self.changelog.node(rev) for rev in heads]
1113 1117 if start is not None:
1114 1118 heads = self.changelog.nodesbetween([start], heads)[2]
1115 1119 return heads
1116 1120
1117 1121 def branches(self, nodes):
1118 1122 if not nodes:
1119 1123 nodes = [self.changelog.tip()]
1120 1124 b = []
1121 1125 for n in nodes:
1122 1126 t = n
1123 1127 while 1:
1124 1128 p = self.changelog.parents(n)
1125 1129 if p[1] != nullid or p[0] == nullid:
1126 1130 b.append((t, n, p[0], p[1]))
1127 1131 break
1128 1132 n = p[0]
1129 1133 return b
1130 1134
1131 1135 def between(self, pairs):
1132 1136 r = []
1133 1137
1134 1138 for top, bottom in pairs:
1135 1139 n, l, i = top, [], 0
1136 1140 f = 1
1137 1141
1138 1142 while n != bottom:
1139 1143 p = self.changelog.parents(n)[0]
1140 1144 if i == f:
1141 1145 l.append(n)
1142 1146 f = f * 2
1143 1147 n = p
1144 1148 i += 1
1145 1149
1146 1150 r.append(l)
1147 1151
1148 1152 return r
1149 1153
1150 1154 def findincoming(self, remote, base=None, heads=None, force=False):
1151 1155 """Return list of roots of the subsets of missing nodes from remote
1152 1156
1153 1157 If base dict is specified, assume that these nodes and their parents
1154 1158 exist on the remote side and that no child of a node of base exists
1155 1159 in both remote and self.
1156 1160 Furthermore base will be updated to include the nodes that exists
1157 1161 in self and remote but no children exists in self and remote.
1158 1162 If a list of heads is specified, return only nodes which are heads
1159 1163 or ancestors of these heads.
1160 1164
1161 1165 All the ancestors of base are in self and in remote.
1162 1166 All the descendants of the list returned are missing in self.
1163 1167 (and so we know that the rest of the nodes are missing in remote, see
1164 1168 outgoing)
1165 1169 """
1166 1170 m = self.changelog.nodemap
1167 1171 search = []
1168 1172 fetch = {}
1169 1173 seen = {}
1170 1174 seenbranch = {}
1171 1175 if base == None:
1172 1176 base = {}
1173 1177
1174 1178 if not heads:
1175 1179 heads = remote.heads()
1176 1180
1177 1181 if self.changelog.tip() == nullid:
1178 1182 base[nullid] = 1
1179 1183 if heads != [nullid]:
1180 1184 return [nullid]
1181 1185 return []
1182 1186
1183 1187 # assume we're closer to the tip than the root
1184 1188 # and start by examining the heads
1185 1189 self.ui.status(_("searching for changes\n"))
1186 1190
1187 1191 unknown = []
1188 1192 for h in heads:
1189 1193 if h not in m:
1190 1194 unknown.append(h)
1191 1195 else:
1192 1196 base[h] = 1
1193 1197
1194 1198 if not unknown:
1195 1199 return []
1196 1200
1197 1201 req = dict.fromkeys(unknown)
1198 1202 reqcnt = 0
1199 1203
1200 1204 # search through remote branches
1201 1205 # a 'branch' here is a linear segment of history, with four parts:
1202 1206 # head, root, first parent, second parent
1203 1207 # (a branch always has two parents (or none) by definition)
1204 1208 unknown = remote.branches(unknown)
1205 1209 while unknown:
1206 1210 r = []
1207 1211 while unknown:
1208 1212 n = unknown.pop(0)
1209 1213 if n[0] in seen:
1210 1214 continue
1211 1215
1212 1216 self.ui.debug(_("examining %s:%s\n")
1213 1217 % (short(n[0]), short(n[1])))
1214 1218 if n[0] == nullid: # found the end of the branch
1215 1219 pass
1216 1220 elif n in seenbranch:
1217 1221 self.ui.debug(_("branch already found\n"))
1218 1222 continue
1219 1223 elif n[1] and n[1] in m: # do we know the base?
1220 1224 self.ui.debug(_("found incomplete branch %s:%s\n")
1221 1225 % (short(n[0]), short(n[1])))
1222 1226 search.append(n) # schedule branch range for scanning
1223 1227 seenbranch[n] = 1
1224 1228 else:
1225 1229 if n[1] not in seen and n[1] not in fetch:
1226 1230 if n[2] in m and n[3] in m:
1227 1231 self.ui.debug(_("found new changeset %s\n") %
1228 1232 short(n[1]))
1229 1233 fetch[n[1]] = 1 # earliest unknown
1230 1234 for p in n[2:4]:
1231 1235 if p in m:
1232 1236 base[p] = 1 # latest known
1233 1237
1234 1238 for p in n[2:4]:
1235 1239 if p not in req and p not in m:
1236 1240 r.append(p)
1237 1241 req[p] = 1
1238 1242 seen[n[0]] = 1
1239 1243
1240 1244 if r:
1241 1245 reqcnt += 1
1242 1246 self.ui.debug(_("request %d: %s\n") %
1243 1247 (reqcnt, " ".join(map(short, r))))
1244 1248 for p in xrange(0, len(r), 10):
1245 1249 for b in remote.branches(r[p:p+10]):
1246 1250 self.ui.debug(_("received %s:%s\n") %
1247 1251 (short(b[0]), short(b[1])))
1248 1252 unknown.append(b)
1249 1253
1250 1254 # do binary search on the branches we found
1251 1255 while search:
1252 1256 n = search.pop(0)
1253 1257 reqcnt += 1
1254 1258 l = remote.between([(n[0], n[1])])[0]
1255 1259 l.append(n[1])
1256 1260 p = n[0]
1257 1261 f = 1
1258 1262 for i in l:
1259 1263 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1260 1264 if i in m:
1261 1265 if f <= 2:
1262 1266 self.ui.debug(_("found new branch changeset %s\n") %
1263 1267 short(p))
1264 1268 fetch[p] = 1
1265 1269 base[i] = 1
1266 1270 else:
1267 1271 self.ui.debug(_("narrowed branch search to %s:%s\n")
1268 1272 % (short(p), short(i)))
1269 1273 search.append((p, i))
1270 1274 break
1271 1275 p, f = i, f * 2
1272 1276
1273 1277 # sanity check our fetch list
1274 1278 for f in fetch.keys():
1275 1279 if f in m:
1276 1280 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1277 1281
1278 1282 if base.keys() == [nullid]:
1279 1283 if force:
1280 1284 self.ui.warn(_("warning: repository is unrelated\n"))
1281 1285 else:
1282 1286 raise util.Abort(_("repository is unrelated"))
1283 1287
1284 1288 self.ui.debug(_("found new changesets starting at ") +
1285 1289 " ".join([short(f) for f in fetch]) + "\n")
1286 1290
1287 1291 self.ui.debug(_("%d total queries\n") % reqcnt)
1288 1292
1289 1293 return fetch.keys()
1290 1294
1291 1295 def findoutgoing(self, remote, base=None, heads=None, force=False):
1292 1296 """Return list of nodes that are roots of subsets not in remote
1293 1297
1294 1298 If base dict is specified, assume that these nodes and their parents
1295 1299 exist on the remote side.
1296 1300 If a list of heads is specified, return only nodes which are heads
1297 1301 or ancestors of these heads, and return a second element which
1298 1302 contains all remote heads which get new children.
1299 1303 """
1300 1304 if base == None:
1301 1305 base = {}
1302 1306 self.findincoming(remote, base, heads, force=force)
1303 1307
1304 1308 self.ui.debug(_("common changesets up to ")
1305 1309 + " ".join(map(short, base.keys())) + "\n")
1306 1310
1307 1311 remain = dict.fromkeys(self.changelog.nodemap)
1308 1312
1309 1313 # prune everything remote has from the tree
1310 1314 del remain[nullid]
1311 1315 remove = base.keys()
1312 1316 while remove:
1313 1317 n = remove.pop(0)
1314 1318 if n in remain:
1315 1319 del remain[n]
1316 1320 for p in self.changelog.parents(n):
1317 1321 remove.append(p)
1318 1322
1319 1323 # find every node whose parents have been pruned
1320 1324 subset = []
1321 1325 # find every remote head that will get new children
1322 1326 updated_heads = {}
1323 1327 for n in remain:
1324 1328 p1, p2 = self.changelog.parents(n)
1325 1329 if p1 not in remain and p2 not in remain:
1326 1330 subset.append(n)
1327 1331 if heads:
1328 1332 if p1 in heads:
1329 1333 updated_heads[p1] = True
1330 1334 if p2 in heads:
1331 1335 updated_heads[p2] = True
1332 1336
1333 1337 # this is the set of all roots we have to push
1334 1338 if heads:
1335 1339 return subset, updated_heads.keys()
1336 1340 else:
1337 1341 return subset
1338 1342
1339 def pull(self, remote, heads=None, force=False, lock=None):
1343 def pull(self, remote, heads=None, force=False):
1344 lock = self.lock()
1340 1345 try:
1341 if not lock:
1342 lock = self.lock()
1343 1346 fetch = self.findincoming(remote, force=force)
1344 1347 if fetch == [nullid]:
1345 1348 self.ui.status(_("requesting all changes\n"))
1346 1349
1347 1350 if not fetch:
1348 1351 self.ui.status(_("no changes found\n"))
1349 1352 return 0
1350 1353
1351 1354 if heads is None:
1352 1355 cg = remote.changegroup(fetch, 'pull')
1353 1356 else:
1354 1357 if 'changegroupsubset' not in remote.capabilities:
1355 1358 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1356 1359 cg = remote.changegroupsubset(fetch, heads, 'pull')
1357 1360 return self.addchangegroup(cg, 'pull', remote.url())
1358 1361 finally:
1359 1362 del lock
1360 1363
1361 1364 def push(self, remote, force=False, revs=None):
1362 1365 # there are two ways to push to remote repo:
1363 1366 #
1364 1367 # addchangegroup assumes local user can lock remote
1365 1368 # repo (local filesystem, old ssh servers).
1366 1369 #
1367 1370 # unbundle assumes local user cannot lock remote repo (new ssh
1368 1371 # servers, http servers).
1369 1372
1370 1373 if remote.capable('unbundle'):
1371 1374 return self.push_unbundle(remote, force, revs)
1372 1375 return self.push_addchangegroup(remote, force, revs)
1373 1376
1374 1377 def prepush(self, remote, force, revs):
1375 1378 base = {}
1376 1379 remote_heads = remote.heads()
1377 1380 inc = self.findincoming(remote, base, remote_heads, force=force)
1378 1381
1379 1382 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1380 1383 if revs is not None:
1381 1384 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1382 1385 else:
1383 1386 bases, heads = update, self.changelog.heads()
1384 1387
1385 1388 if not bases:
1386 1389 self.ui.status(_("no changes found\n"))
1387 1390 return None, 1
1388 1391 elif not force:
1389 1392 # check if we're creating new remote heads
1390 1393 # to be a remote head after push, node must be either
1391 1394 # - unknown locally
1392 1395 # - a local outgoing head descended from update
1393 1396 # - a remote head that's known locally and not
1394 1397 # ancestral to an outgoing head
1395 1398
1396 1399 warn = 0
1397 1400
1398 1401 if remote_heads == [nullid]:
1399 1402 warn = 0
1400 1403 elif not revs and len(heads) > len(remote_heads):
1401 1404 warn = 1
1402 1405 else:
1403 1406 newheads = list(heads)
1404 1407 for r in remote_heads:
1405 1408 if r in self.changelog.nodemap:
1406 1409 desc = self.changelog.heads(r, heads)
1407 1410 l = [h for h in heads if h in desc]
1408 1411 if not l:
1409 1412 newheads.append(r)
1410 1413 else:
1411 1414 newheads.append(r)
1412 1415 if len(newheads) > len(remote_heads):
1413 1416 warn = 1
1414 1417
1415 1418 if warn:
1416 1419 self.ui.warn(_("abort: push creates new remote branches!\n"))
1417 1420 self.ui.status(_("(did you forget to merge?"
1418 1421 " use push -f to force)\n"))
1419 1422 return None, 1
1420 1423 elif inc:
1421 1424 self.ui.warn(_("note: unsynced remote changes!\n"))
1422 1425
1423 1426
1424 1427 if revs is None:
1425 1428 cg = self.changegroup(update, 'push')
1426 1429 else:
1427 1430 cg = self.changegroupsubset(update, revs, 'push')
1428 1431 return cg, remote_heads
1429 1432
1430 1433 def push_addchangegroup(self, remote, force, revs):
1431 1434 lock = remote.lock()
1432 1435 try:
1433 1436 ret = self.prepush(remote, force, revs)
1434 1437 if ret[0] is not None:
1435 1438 cg, remote_heads = ret
1436 1439 return remote.addchangegroup(cg, 'push', self.url())
1437 1440 return ret[1]
1438 1441 finally:
1439 1442 del lock
1440 1443
1441 1444 def push_unbundle(self, remote, force, revs):
1442 1445 # local repo finds heads on server, finds out what revs it
1443 1446 # must push. once revs transferred, if server finds it has
1444 1447 # different heads (someone else won commit/push race), server
1445 1448 # aborts.
1446 1449
1447 1450 ret = self.prepush(remote, force, revs)
1448 1451 if ret[0] is not None:
1449 1452 cg, remote_heads = ret
1450 1453 if force: remote_heads = ['force']
1451 1454 return remote.unbundle(cg, remote_heads, 'push')
1452 1455 return ret[1]
1453 1456
1454 1457 def changegroupinfo(self, nodes):
1455 1458 self.ui.note(_("%d changesets found\n") % len(nodes))
1456 1459 if self.ui.debugflag:
1457 1460 self.ui.debug(_("List of changesets:\n"))
1458 1461 for node in nodes:
1459 1462 self.ui.debug("%s\n" % hex(node))
1460 1463
1461 1464 def changegroupsubset(self, bases, heads, source):
1462 1465 """This function generates a changegroup consisting of all the nodes
1463 1466 that are descendents of any of the bases, and ancestors of any of
1464 1467 the heads.
1465 1468
1466 1469 It is fairly complex as determining which filenodes and which
1467 1470 manifest nodes need to be included for the changeset to be complete
1468 1471 is non-trivial.
1469 1472
1470 1473 Another wrinkle is doing the reverse, figuring out which changeset in
1471 1474 the changegroup a particular filenode or manifestnode belongs to."""
1472 1475
1473 1476 self.hook('preoutgoing', throw=True, source=source)
1474 1477
1475 1478 # Set up some initial variables
1476 1479 # Make it easy to refer to self.changelog
1477 1480 cl = self.changelog
1478 1481 # msng is short for missing - compute the list of changesets in this
1479 1482 # changegroup.
1480 1483 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1481 1484 self.changegroupinfo(msng_cl_lst)
1482 1485 # Some bases may turn out to be superfluous, and some heads may be
1483 1486 # too. nodesbetween will return the minimal set of bases and heads
1484 1487 # necessary to re-create the changegroup.
1485 1488
1486 1489 # Known heads are the list of heads that it is assumed the recipient
1487 1490 # of this changegroup will know about.
1488 1491 knownheads = {}
1489 1492 # We assume that all parents of bases are known heads.
1490 1493 for n in bases:
1491 1494 for p in cl.parents(n):
1492 1495 if p != nullid:
1493 1496 knownheads[p] = 1
1494 1497 knownheads = knownheads.keys()
1495 1498 if knownheads:
1496 1499 # Now that we know what heads are known, we can compute which
1497 1500 # changesets are known. The recipient must know about all
1498 1501 # changesets required to reach the known heads from the null
1499 1502 # changeset.
1500 1503 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1501 1504 junk = None
1502 1505 # Transform the list into an ersatz set.
1503 1506 has_cl_set = dict.fromkeys(has_cl_set)
1504 1507 else:
1505 1508 # If there were no known heads, the recipient cannot be assumed to
1506 1509 # know about any changesets.
1507 1510 has_cl_set = {}
1508 1511
1509 1512 # Make it easy to refer to self.manifest
1510 1513 mnfst = self.manifest
1511 1514 # We don't know which manifests are missing yet
1512 1515 msng_mnfst_set = {}
1513 1516 # Nor do we know which filenodes are missing.
1514 1517 msng_filenode_set = {}
1515 1518
1516 1519 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1517 1520 junk = None
1518 1521
1519 1522 # A changeset always belongs to itself, so the changenode lookup
1520 1523 # function for a changenode is identity.
1521 1524 def identity(x):
1522 1525 return x
1523 1526
1524 1527 # A function generating function. Sets up an environment for the
1525 1528 # inner function.
1526 1529 def cmp_by_rev_func(revlog):
1527 1530 # Compare two nodes by their revision number in the environment's
1528 1531 # revision history. Since the revision number both represents the
1529 1532 # most efficient order to read the nodes in, and represents a
1530 1533 # topological sorting of the nodes, this function is often useful.
1531 1534 def cmp_by_rev(a, b):
1532 1535 return cmp(revlog.rev(a), revlog.rev(b))
1533 1536 return cmp_by_rev
1534 1537
1535 1538 # If we determine that a particular file or manifest node must be a
1536 1539 # node that the recipient of the changegroup will already have, we can
1537 1540 # also assume the recipient will have all the parents. This function
1538 1541 # prunes them from the set of missing nodes.
1539 1542 def prune_parents(revlog, hasset, msngset):
1540 1543 haslst = hasset.keys()
1541 1544 haslst.sort(cmp_by_rev_func(revlog))
1542 1545 for node in haslst:
1543 1546 parentlst = [p for p in revlog.parents(node) if p != nullid]
1544 1547 while parentlst:
1545 1548 n = parentlst.pop()
1546 1549 if n not in hasset:
1547 1550 hasset[n] = 1
1548 1551 p = [p for p in revlog.parents(n) if p != nullid]
1549 1552 parentlst.extend(p)
1550 1553 for n in hasset:
1551 1554 msngset.pop(n, None)
1552 1555
1553 1556 # This is a function generating function used to set up an environment
1554 1557 # for the inner function to execute in.
1555 1558 def manifest_and_file_collector(changedfileset):
1556 1559 # This is an information gathering function that gathers
1557 1560 # information from each changeset node that goes out as part of
1558 1561 # the changegroup. The information gathered is a list of which
1559 1562 # manifest nodes are potentially required (the recipient may
1560 1563 # already have them) and total list of all files which were
1561 1564 # changed in any changeset in the changegroup.
1562 1565 #
1563 1566 # We also remember the first changenode we saw any manifest
1564 1567 # referenced by so we can later determine which changenode 'owns'
1565 1568 # the manifest.
1566 1569 def collect_manifests_and_files(clnode):
1567 1570 c = cl.read(clnode)
1568 1571 for f in c[3]:
1569 1572 # This is to make sure we only have one instance of each
1570 1573 # filename string for each filename.
1571 1574 changedfileset.setdefault(f, f)
1572 1575 msng_mnfst_set.setdefault(c[0], clnode)
1573 1576 return collect_manifests_and_files
1574 1577
1575 1578 # Figure out which manifest nodes (of the ones we think might be part
1576 1579 # of the changegroup) the recipient must know about and remove them
1577 1580 # from the changegroup.
1578 1581 def prune_manifests():
1579 1582 has_mnfst_set = {}
1580 1583 for n in msng_mnfst_set:
1581 1584 # If a 'missing' manifest thinks it belongs to a changenode
1582 1585 # the recipient is assumed to have, obviously the recipient
1583 1586 # must have that manifest.
1584 1587 linknode = cl.node(mnfst.linkrev(n))
1585 1588 if linknode in has_cl_set:
1586 1589 has_mnfst_set[n] = 1
1587 1590 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1588 1591
1589 1592 # Use the information collected in collect_manifests_and_files to say
1590 1593 # which changenode any manifestnode belongs to.
1591 1594 def lookup_manifest_link(mnfstnode):
1592 1595 return msng_mnfst_set[mnfstnode]
1593 1596
1594 1597 # A function generating function that sets up the initial environment
1595 1598 # the inner function.
1596 1599 def filenode_collector(changedfiles):
1597 1600 next_rev = [0]
1598 1601 # This gathers information from each manifestnode included in the
1599 1602 # changegroup about which filenodes the manifest node references
1600 1603 # so we can include those in the changegroup too.
1601 1604 #
1602 1605 # It also remembers which changenode each filenode belongs to. It
1603 1606 # does this by assuming the a filenode belongs to the changenode
1604 1607 # the first manifest that references it belongs to.
1605 1608 def collect_msng_filenodes(mnfstnode):
1606 1609 r = mnfst.rev(mnfstnode)
1607 1610 if r == next_rev[0]:
1608 1611 # If the last rev we looked at was the one just previous,
1609 1612 # we only need to see a diff.
1610 1613 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1611 1614 # For each line in the delta
1612 1615 for dline in delta.splitlines():
1613 1616 # get the filename and filenode for that line
1614 1617 f, fnode = dline.split('\0')
1615 1618 fnode = bin(fnode[:40])
1616 1619 f = changedfiles.get(f, None)
1617 1620 # And if the file is in the list of files we care
1618 1621 # about.
1619 1622 if f is not None:
1620 1623 # Get the changenode this manifest belongs to
1621 1624 clnode = msng_mnfst_set[mnfstnode]
1622 1625 # Create the set of filenodes for the file if
1623 1626 # there isn't one already.
1624 1627 ndset = msng_filenode_set.setdefault(f, {})
1625 1628 # And set the filenode's changelog node to the
1626 1629 # manifest's if it hasn't been set already.
1627 1630 ndset.setdefault(fnode, clnode)
1628 1631 else:
1629 1632 # Otherwise we need a full manifest.
1630 1633 m = mnfst.read(mnfstnode)
1631 1634 # For every file in we care about.
1632 1635 for f in changedfiles:
1633 1636 fnode = m.get(f, None)
1634 1637 # If it's in the manifest
1635 1638 if fnode is not None:
1636 1639 # See comments above.
1637 1640 clnode = msng_mnfst_set[mnfstnode]
1638 1641 ndset = msng_filenode_set.setdefault(f, {})
1639 1642 ndset.setdefault(fnode, clnode)
1640 1643 # Remember the revision we hope to see next.
1641 1644 next_rev[0] = r + 1
1642 1645 return collect_msng_filenodes
1643 1646
1644 1647 # We have a list of filenodes we think we need for a file, lets remove
1645 1648 # all those we now the recipient must have.
1646 1649 def prune_filenodes(f, filerevlog):
1647 1650 msngset = msng_filenode_set[f]
1648 1651 hasset = {}
1649 1652 # If a 'missing' filenode thinks it belongs to a changenode we
1650 1653 # assume the recipient must have, then the recipient must have
1651 1654 # that filenode.
1652 1655 for n in msngset:
1653 1656 clnode = cl.node(filerevlog.linkrev(n))
1654 1657 if clnode in has_cl_set:
1655 1658 hasset[n] = 1
1656 1659 prune_parents(filerevlog, hasset, msngset)
1657 1660
1658 1661 # A function generator function that sets up the a context for the
1659 1662 # inner function.
1660 1663 def lookup_filenode_link_func(fname):
1661 1664 msngset = msng_filenode_set[fname]
1662 1665 # Lookup the changenode the filenode belongs to.
1663 1666 def lookup_filenode_link(fnode):
1664 1667 return msngset[fnode]
1665 1668 return lookup_filenode_link
1666 1669
1667 1670 # Now that we have all theses utility functions to help out and
1668 1671 # logically divide up the task, generate the group.
1669 1672 def gengroup():
1670 1673 # The set of changed files starts empty.
1671 1674 changedfiles = {}
1672 1675 # Create a changenode group generator that will call our functions
1673 1676 # back to lookup the owning changenode and collect information.
1674 1677 group = cl.group(msng_cl_lst, identity,
1675 1678 manifest_and_file_collector(changedfiles))
1676 1679 for chnk in group:
1677 1680 yield chnk
1678 1681
1679 1682 # The list of manifests has been collected by the generator
1680 1683 # calling our functions back.
1681 1684 prune_manifests()
1682 1685 msng_mnfst_lst = msng_mnfst_set.keys()
1683 1686 # Sort the manifestnodes by revision number.
1684 1687 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1685 1688 # Create a generator for the manifestnodes that calls our lookup
1686 1689 # and data collection functions back.
1687 1690 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1688 1691 filenode_collector(changedfiles))
1689 1692 for chnk in group:
1690 1693 yield chnk
1691 1694
1692 1695 # These are no longer needed, dereference and toss the memory for
1693 1696 # them.
1694 1697 msng_mnfst_lst = None
1695 1698 msng_mnfst_set.clear()
1696 1699
1697 1700 changedfiles = changedfiles.keys()
1698 1701 changedfiles.sort()
1699 1702 # Go through all our files in order sorted by name.
1700 1703 for fname in changedfiles:
1701 1704 filerevlog = self.file(fname)
1702 1705 # Toss out the filenodes that the recipient isn't really
1703 1706 # missing.
1704 1707 if msng_filenode_set.has_key(fname):
1705 1708 prune_filenodes(fname, filerevlog)
1706 1709 msng_filenode_lst = msng_filenode_set[fname].keys()
1707 1710 else:
1708 1711 msng_filenode_lst = []
1709 1712 # If any filenodes are left, generate the group for them,
1710 1713 # otherwise don't bother.
1711 1714 if len(msng_filenode_lst) > 0:
1712 1715 yield changegroup.genchunk(fname)
1713 1716 # Sort the filenodes by their revision #
1714 1717 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1715 1718 # Create a group generator and only pass in a changenode
1716 1719 # lookup function as we need to collect no information
1717 1720 # from filenodes.
1718 1721 group = filerevlog.group(msng_filenode_lst,
1719 1722 lookup_filenode_link_func(fname))
1720 1723 for chnk in group:
1721 1724 yield chnk
1722 1725 if msng_filenode_set.has_key(fname):
1723 1726 # Don't need this anymore, toss it to free memory.
1724 1727 del msng_filenode_set[fname]
1725 1728 # Signal that no more groups are left.
1726 1729 yield changegroup.closechunk()
1727 1730
1728 1731 if msng_cl_lst:
1729 1732 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1730 1733
1731 1734 return util.chunkbuffer(gengroup())
1732 1735
1733 1736 def changegroup(self, basenodes, source):
1734 1737 """Generate a changegroup of all nodes that we have that a recipient
1735 1738 doesn't.
1736 1739
1737 1740 This is much easier than the previous function as we can assume that
1738 1741 the recipient has any changenode we aren't sending them."""
1739 1742
1740 1743 self.hook('preoutgoing', throw=True, source=source)
1741 1744
1742 1745 cl = self.changelog
1743 1746 nodes = cl.nodesbetween(basenodes, None)[0]
1744 1747 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1745 1748 self.changegroupinfo(nodes)
1746 1749
1747 1750 def identity(x):
1748 1751 return x
1749 1752
1750 1753 def gennodelst(revlog):
1751 1754 for r in xrange(0, revlog.count()):
1752 1755 n = revlog.node(r)
1753 1756 if revlog.linkrev(n) in revset:
1754 1757 yield n
1755 1758
1756 1759 def changed_file_collector(changedfileset):
1757 1760 def collect_changed_files(clnode):
1758 1761 c = cl.read(clnode)
1759 1762 for fname in c[3]:
1760 1763 changedfileset[fname] = 1
1761 1764 return collect_changed_files
1762 1765
1763 1766 def lookuprevlink_func(revlog):
1764 1767 def lookuprevlink(n):
1765 1768 return cl.node(revlog.linkrev(n))
1766 1769 return lookuprevlink
1767 1770
1768 1771 def gengroup():
1769 1772 # construct a list of all changed files
1770 1773 changedfiles = {}
1771 1774
1772 1775 for chnk in cl.group(nodes, identity,
1773 1776 changed_file_collector(changedfiles)):
1774 1777 yield chnk
1775 1778 changedfiles = changedfiles.keys()
1776 1779 changedfiles.sort()
1777 1780
1778 1781 mnfst = self.manifest
1779 1782 nodeiter = gennodelst(mnfst)
1780 1783 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1781 1784 yield chnk
1782 1785
1783 1786 for fname in changedfiles:
1784 1787 filerevlog = self.file(fname)
1785 1788 nodeiter = gennodelst(filerevlog)
1786 1789 nodeiter = list(nodeiter)
1787 1790 if nodeiter:
1788 1791 yield changegroup.genchunk(fname)
1789 1792 lookup = lookuprevlink_func(filerevlog)
1790 1793 for chnk in filerevlog.group(nodeiter, lookup):
1791 1794 yield chnk
1792 1795
1793 1796 yield changegroup.closechunk()
1794 1797
1795 1798 if nodes:
1796 1799 self.hook('outgoing', node=hex(nodes[0]), source=source)
1797 1800
1798 1801 return util.chunkbuffer(gengroup())
1799 1802
1800 1803 def addchangegroup(self, source, srctype, url):
1801 1804 """add changegroup to repo.
1802 1805
1803 1806 return values:
1804 1807 - nothing changed or no source: 0
1805 1808 - more heads than before: 1+added heads (2..n)
1806 1809 - less heads than before: -1-removed heads (-2..-n)
1807 1810 - number of heads stays the same: 1
1808 1811 """
1809 1812 def csmap(x):
1810 1813 self.ui.debug(_("add changeset %s\n") % short(x))
1811 1814 return cl.count()
1812 1815
1813 1816 def revmap(x):
1814 1817 return cl.rev(x)
1815 1818
1816 1819 if not source:
1817 1820 return 0
1818 1821
1819 1822 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1820 1823
1821 1824 changesets = files = revisions = 0
1822 1825
1823 1826 # write changelog data to temp files so concurrent readers will not see
1824 1827 # inconsistent view
1825 1828 cl = self.changelog
1826 1829 cl.delayupdate()
1827 1830 oldheads = len(cl.heads())
1828 1831
1829 1832 tr = self.transaction()
1830 1833 try:
1831 1834 # pull off the changeset group
1832 1835 self.ui.status(_("adding changesets\n"))
1833 1836 cor = cl.count() - 1
1834 1837 chunkiter = changegroup.chunkiter(source)
1835 1838 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1836 1839 raise util.Abort(_("received changelog group is empty"))
1837 1840 cnr = cl.count() - 1
1838 1841 changesets = cnr - cor
1839 1842
1840 1843 # pull off the manifest group
1841 1844 self.ui.status(_("adding manifests\n"))
1842 1845 chunkiter = changegroup.chunkiter(source)
1843 1846 # no need to check for empty manifest group here:
1844 1847 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1845 1848 # no new manifest will be created and the manifest group will
1846 1849 # be empty during the pull
1847 1850 self.manifest.addgroup(chunkiter, revmap, tr)
1848 1851
1849 1852 # process the files
1850 1853 self.ui.status(_("adding file changes\n"))
1851 1854 while 1:
1852 1855 f = changegroup.getchunk(source)
1853 1856 if not f:
1854 1857 break
1855 1858 self.ui.debug(_("adding %s revisions\n") % f)
1856 1859 fl = self.file(f)
1857 1860 o = fl.count()
1858 1861 chunkiter = changegroup.chunkiter(source)
1859 1862 if fl.addgroup(chunkiter, revmap, tr) is None:
1860 1863 raise util.Abort(_("received file revlog group is empty"))
1861 1864 revisions += fl.count() - o
1862 1865 files += 1
1863 1866
1864 1867 # make changelog see real files again
1865 1868 cl.finalize(tr)
1866 1869
1867 1870 newheads = len(self.changelog.heads())
1868 1871 heads = ""
1869 1872 if oldheads and newheads != oldheads:
1870 1873 heads = _(" (%+d heads)") % (newheads - oldheads)
1871 1874
1872 1875 self.ui.status(_("added %d changesets"
1873 1876 " with %d changes to %d files%s\n")
1874 1877 % (changesets, revisions, files, heads))
1875 1878
1876 1879 if changesets > 0:
1877 1880 self.hook('pretxnchangegroup', throw=True,
1878 1881 node=hex(self.changelog.node(cor+1)), source=srctype,
1879 1882 url=url)
1880 1883
1881 1884 tr.close()
1882 1885 finally:
1883 1886 del tr
1884 1887
1885 1888 if changesets > 0:
1886 1889 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1887 1890 source=srctype, url=url)
1888 1891
1889 1892 for i in xrange(cor + 1, cnr + 1):
1890 1893 self.hook("incoming", node=hex(self.changelog.node(i)),
1891 1894 source=srctype, url=url)
1892 1895
1893 1896 # never return 0 here:
1894 1897 if newheads < oldheads:
1895 1898 return newheads - oldheads - 1
1896 1899 else:
1897 1900 return newheads - oldheads + 1
1898 1901
1899 1902
1900 1903 def stream_in(self, remote):
1901 1904 fp = remote.stream_out()
1902 1905 l = fp.readline()
1903 1906 try:
1904 1907 resp = int(l)
1905 1908 except ValueError:
1906 1909 raise util.UnexpectedOutput(
1907 1910 _('Unexpected response from remote server:'), l)
1908 1911 if resp == 1:
1909 1912 raise util.Abort(_('operation forbidden by server'))
1910 1913 elif resp == 2:
1911 1914 raise util.Abort(_('locking the remote repository failed'))
1912 1915 elif resp != 0:
1913 1916 raise util.Abort(_('the server sent an unknown error code'))
1914 1917 self.ui.status(_('streaming all changes\n'))
1915 1918 l = fp.readline()
1916 1919 try:
1917 1920 total_files, total_bytes = map(int, l.split(' ', 1))
1918 1921 except ValueError, TypeError:
1919 1922 raise util.UnexpectedOutput(
1920 1923 _('Unexpected response from remote server:'), l)
1921 1924 self.ui.status(_('%d files to transfer, %s of data\n') %
1922 1925 (total_files, util.bytecount(total_bytes)))
1923 1926 start = time.time()
1924 1927 for i in xrange(total_files):
1925 1928 # XXX doesn't support '\n' or '\r' in filenames
1926 1929 l = fp.readline()
1927 1930 try:
1928 1931 name, size = l.split('\0', 1)
1929 1932 size = int(size)
1930 1933 except ValueError, TypeError:
1931 1934 raise util.UnexpectedOutput(
1932 1935 _('Unexpected response from remote server:'), l)
1933 1936 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1934 1937 ofp = self.sopener(name, 'w')
1935 1938 for chunk in util.filechunkiter(fp, limit=size):
1936 1939 ofp.write(chunk)
1937 1940 ofp.close()
1938 1941 elapsed = time.time() - start
1939 1942 if elapsed <= 0:
1940 1943 elapsed = 0.001
1941 1944 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1942 1945 (util.bytecount(total_bytes), elapsed,
1943 1946 util.bytecount(total_bytes / elapsed)))
1944 1947 self.invalidate()
1945 1948 return len(self.heads()) + 1
1946 1949
1947 1950 def clone(self, remote, heads=[], stream=False):
1948 1951 '''clone remote repository.
1949 1952
1950 1953 keyword arguments:
1951 1954 heads: list of revs to clone (forces use of pull)
1952 1955 stream: use streaming clone if possible'''
1953 1956
1954 1957 # now, all clients that can request uncompressed clones can
1955 1958 # read repo formats supported by all servers that can serve
1956 1959 # them.
1957 1960
1958 1961 # if revlog format changes, client will have to check version
1959 1962 # and format flags on "stream" capability, and use
1960 1963 # uncompressed only if compatible.
1961 1964
1962 1965 if stream and not heads and remote.capable('stream'):
1963 1966 return self.stream_in(remote)
1964 1967 return self.pull(remote, heads)
1965 1968
1966 1969 # used to avoid circular references so destructors work
1967 1970 def aftertrans(files):
1968 1971 renamefiles = [tuple(t) for t in files]
1969 1972 def a():
1970 1973 for src, dest in renamefiles:
1971 1974 util.rename(src, dest)
1972 1975 return a
1973 1976
1974 1977 def instance(ui, path, create):
1975 1978 return localrepository(ui, util.drop_scheme('file', path), create)
1976 1979
1977 1980 def islocal(path):
1978 1981 return True
@@ -1,572 +1,569 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import errno, util, os, tempfile, context
11 11
12 12 def filemerge(repo, fw, fo, wctx, mctx):
13 13 """perform a 3-way merge in the working directory
14 14
15 15 fw = filename in the working directory
16 16 fo = filename in other parent
17 17 wctx, mctx = working and merge changecontexts
18 18 """
19 19
20 20 def temp(prefix, ctx):
21 21 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
22 22 (fd, name) = tempfile.mkstemp(prefix=pre)
23 23 data = repo.wwritedata(ctx.path(), ctx.data())
24 24 f = os.fdopen(fd, "wb")
25 25 f.write(data)
26 26 f.close()
27 27 return name
28 28
29 29 fcm = wctx.filectx(fw)
30 30 fco = mctx.filectx(fo)
31 31
32 32 if not fco.cmp(fcm.data()): # files identical?
33 33 return None
34 34
35 35 fca = fcm.ancestor(fco)
36 36 if not fca:
37 37 fca = repo.filectx(fw, fileid=nullrev)
38 38 a = repo.wjoin(fw)
39 39 b = temp("base", fca)
40 40 c = temp("other", fco)
41 41
42 42 if fw != fo:
43 43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 44 else:
45 45 repo.ui.status(_("merging %s\n") % fw)
46 46
47 47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48 48
49 49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 50 or "hgmerge")
51 51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 52 environ={'HG_FILE': fw,
53 53 'HG_MY_NODE': str(wctx.parents()[0]),
54 54 'HG_OTHER_NODE': str(mctx)})
55 55 if r:
56 56 repo.ui.warn(_("merging %s failed!\n") % fw)
57 57
58 58 os.unlink(b)
59 59 os.unlink(c)
60 60 return r
61 61
62 62 def checkunknown(wctx, mctx):
63 63 "check for collisions between unknown files and files in mctx"
64 64 man = mctx.manifest()
65 65 for f in wctx.unknown():
66 66 if f in man:
67 67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 68 raise util.Abort(_("untracked local file '%s' differs"
69 69 " from remote version") % f)
70 70
71 71 def checkcollision(mctx):
72 72 "check for case folding collisions in the destination context"
73 73 folded = {}
74 74 for fn in mctx.manifest():
75 75 fold = fn.lower()
76 76 if fold in folded:
77 77 raise util.Abort(_("case-folding collision between %s and %s")
78 78 % (fn, folded[fold]))
79 79 folded[fold] = fn
80 80
81 81 def forgetremoved(wctx, mctx):
82 82 """
83 83 Forget removed files
84 84
85 85 If we're jumping between revisions (as opposed to merging), and if
86 86 neither the working directory nor the target rev has the file,
87 87 then we need to remove it from the dirstate, to prevent the
88 88 dirstate from listing the file when it is no longer in the
89 89 manifest.
90 90 """
91 91
92 92 action = []
93 93 man = mctx.manifest()
94 94 for f in wctx.deleted() + wctx.removed():
95 95 if f not in man:
96 96 action.append((f, "f"))
97 97
98 98 return action
99 99
100 100 def findcopies(repo, m1, m2, ma, limit):
101 101 """
102 102 Find moves and copies between m1 and m2 back to limit linkrev
103 103 """
104 104
105 105 def nonoverlap(d1, d2, d3):
106 106 "Return list of elements in d1 not in d2 or d3"
107 107 l = [d for d in d1 if d not in d3 and d not in d2]
108 108 l.sort()
109 109 return l
110 110
111 111 def dirname(f):
112 112 s = f.rfind("/")
113 113 if s == -1:
114 114 return ""
115 115 return f[:s]
116 116
117 117 def dirs(files):
118 118 d = {}
119 119 for f in files:
120 120 f = dirname(f)
121 121 while f not in d:
122 122 d[f] = True
123 123 f = dirname(f)
124 124 return d
125 125
126 126 wctx = repo.workingctx()
127 127
128 128 def makectx(f, n):
129 129 if len(n) == 20:
130 130 return repo.filectx(f, fileid=n)
131 131 return wctx.filectx(f)
132 132 ctx = util.cachefunc(makectx)
133 133
134 134 def findold(fctx):
135 135 "find files that path was copied from, back to linkrev limit"
136 136 old = {}
137 137 seen = {}
138 138 orig = fctx.path()
139 139 visit = [fctx]
140 140 while visit:
141 141 fc = visit.pop()
142 142 s = str(fc)
143 143 if s in seen:
144 144 continue
145 145 seen[s] = 1
146 146 if fc.path() != orig and fc.path() not in old:
147 147 old[fc.path()] = 1
148 148 if fc.rev() < limit:
149 149 continue
150 150 visit += fc.parents()
151 151
152 152 old = old.keys()
153 153 old.sort()
154 154 return old
155 155
156 156 copy = {}
157 157 fullcopy = {}
158 158 diverge = {}
159 159
160 160 def checkcopies(c, man, aman):
161 161 '''check possible copies for filectx c'''
162 162 for of in findold(c):
163 163 fullcopy[c.path()] = of # remember for dir rename detection
164 164 if of not in man: # original file not in other manifest?
165 165 if of in ma:
166 166 diverge.setdefault(of, []).append(c.path())
167 167 continue
168 168 # if the original file is unchanged on the other branch,
169 169 # no merge needed
170 170 if man[of] == aman.get(of):
171 171 continue
172 172 c2 = ctx(of, man[of])
173 173 ca = c.ancestor(c2)
174 174 if not ca: # unrelated?
175 175 continue
176 176 # named changed on only one side?
177 177 if ca.path() == c.path() or ca.path() == c2.path():
178 178 if c == ca or c2 == ca: # no merge needed, ignore copy
179 179 continue
180 180 copy[c.path()] = of
181 181
182 182 if not repo.ui.configbool("merge", "followcopies", True):
183 183 return {}, {}
184 184
185 185 # avoid silly behavior for update from empty dir
186 186 if not m1 or not m2 or not ma:
187 187 return {}, {}
188 188
189 189 u1 = nonoverlap(m1, m2, ma)
190 190 u2 = nonoverlap(m2, m1, ma)
191 191
192 192 for f in u1:
193 193 checkcopies(ctx(f, m1[f]), m2, ma)
194 194
195 195 for f in u2:
196 196 checkcopies(ctx(f, m2[f]), m1, ma)
197 197
198 198 d2 = {}
199 199 for of, fl in diverge.items():
200 200 for f in fl:
201 201 fo = list(fl)
202 202 fo.remove(f)
203 203 d2[f] = (of, fo)
204 204
205 205 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
206 206 return copy, diverge
207 207
208 208 # generate a directory move map
209 209 d1, d2 = dirs(m1), dirs(m2)
210 210 invalid = {}
211 211 dirmove = {}
212 212
213 213 # examine each file copy for a potential directory move, which is
214 214 # when all the files in a directory are moved to a new directory
215 215 for dst, src in fullcopy.items():
216 216 dsrc, ddst = dirname(src), dirname(dst)
217 217 if dsrc in invalid:
218 218 # already seen to be uninteresting
219 219 continue
220 220 elif dsrc in d1 and ddst in d1:
221 221 # directory wasn't entirely moved locally
222 222 invalid[dsrc] = True
223 223 elif dsrc in d2 and ddst in d2:
224 224 # directory wasn't entirely moved remotely
225 225 invalid[dsrc] = True
226 226 elif dsrc in dirmove and dirmove[dsrc] != ddst:
227 227 # files from the same directory moved to two different places
228 228 invalid[dsrc] = True
229 229 else:
230 230 # looks good so far
231 231 dirmove[dsrc + "/"] = ddst + "/"
232 232
233 233 for i in invalid:
234 234 if i in dirmove:
235 235 del dirmove[i]
236 236
237 237 del d1, d2, invalid
238 238
239 239 if not dirmove:
240 240 return copy, diverge
241 241
242 242 # check unaccounted nonoverlapping files against directory moves
243 243 for f in u1 + u2:
244 244 if f not in fullcopy:
245 245 for d in dirmove:
246 246 if f.startswith(d):
247 247 # new file added in a directory that was moved, move it
248 248 copy[f] = dirmove[d] + f[len(d):]
249 249 break
250 250
251 251 return copy, diverge
252 252
253 253 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
254 254 """
255 255 Merge p1 and p2 with ancestor ma and generate merge action list
256 256
257 257 overwrite = whether we clobber working files
258 258 partial = function to filter file lists
259 259 """
260 260
261 261 repo.ui.note(_("resolving manifests\n"))
262 262 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
263 263 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
264 264
265 265 m1 = p1.manifest()
266 266 m2 = p2.manifest()
267 267 ma = pa.manifest()
268 268 backwards = (pa == p2)
269 269 action = []
270 270 copy = {}
271 271 diverge = {}
272 272
273 273 def fmerge(f, f2=None, fa=None):
274 274 """merge flags"""
275 275 if not f2:
276 276 f2 = f
277 277 fa = f
278 278 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
279 279 if ((a^b) | (a^c)) ^ a:
280 280 return 'x'
281 281 a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
282 282 if ((a^b) | (a^c)) ^ a:
283 283 return 'l'
284 284 return ''
285 285
286 286 def act(msg, m, f, *args):
287 287 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
288 288 action.append((f, m) + args)
289 289
290 290 if not (backwards or overwrite):
291 291 copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
292 292
293 293 for of, fl in diverge.items():
294 294 act("divergent renames", "dr", of, fl)
295 295
296 296 copied = dict.fromkeys(copy.values())
297 297
298 298 # Compare manifests
299 299 for f, n in m1.iteritems():
300 300 if partial and not partial(f):
301 301 continue
302 302 if f in m2:
303 303 # are files different?
304 304 if n != m2[f]:
305 305 a = ma.get(f, nullid)
306 306 # are both different from the ancestor?
307 307 if not overwrite and n != a and m2[f] != a:
308 308 act("versions differ", "m", f, f, f, fmerge(f), False)
309 309 # are we clobbering?
310 310 # is remote's version newer?
311 311 # or are we going back in time and clean?
312 312 elif overwrite or m2[f] != a or (backwards and not n[20:]):
313 313 act("remote is newer", "g", f, m2.flags(f))
314 314 # local is newer, not overwrite, check mode bits
315 315 elif fmerge(f) != m1.flags(f):
316 316 act("update permissions", "e", f, m2.flags(f))
317 317 # contents same, check mode bits
318 318 elif m1.flags(f) != m2.flags(f):
319 319 if overwrite or fmerge(f) != m1.flags(f):
320 320 act("update permissions", "e", f, m2.flags(f))
321 321 elif f in copied:
322 322 continue
323 323 elif f in copy:
324 324 f2 = copy[f]
325 325 if f2 not in m2: # directory rename
326 326 act("remote renamed directory to " + f2, "d",
327 327 f, None, f2, m1.flags(f))
328 328 elif f2 in m1: # case 2 A,B/B/B
329 329 act("local copied to " + f2, "m",
330 330 f, f2, f, fmerge(f, f2, f2), False)
331 331 else: # case 4,21 A/B/B
332 332 act("local moved to " + f2, "m",
333 333 f, f2, f, fmerge(f, f2, f2), False)
334 334 elif f in ma:
335 335 if n != ma[f] and not overwrite:
336 336 if repo.ui.prompt(
337 337 (_(" local changed %s which remote deleted\n") % f) +
338 338 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
339 339 act("prompt delete", "r", f)
340 340 else:
341 341 act("other deleted", "r", f)
342 342 else:
343 343 # file is created on branch or in working directory
344 344 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
345 345 act("remote deleted", "r", f)
346 346
347 347 for f, n in m2.iteritems():
348 348 if partial and not partial(f):
349 349 continue
350 350 if f in m1:
351 351 continue
352 352 if f in copied:
353 353 continue
354 354 if f in copy:
355 355 f2 = copy[f]
356 356 if f2 not in m1: # directory rename
357 357 act("local renamed directory to " + f2, "d",
358 358 None, f, f2, m2.flags(f))
359 359 elif f2 in m2: # rename case 1, A/A,B/A
360 360 act("remote copied to " + f, "m",
361 361 f2, f, f, fmerge(f2, f, f2), False)
362 362 else: # case 3,20 A/B/A
363 363 act("remote moved to " + f, "m",
364 364 f2, f, f, fmerge(f2, f, f2), True)
365 365 elif f in ma:
366 366 if overwrite or backwards:
367 367 act("recreating", "g", f, m2.flags(f))
368 368 elif n != ma[f]:
369 369 if repo.ui.prompt(
370 370 (_("remote changed %s which local deleted\n") % f) +
371 371 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
372 372 act("prompt recreating", "g", f, m2.flags(f))
373 373 else:
374 374 act("remote created", "g", f, m2.flags(f))
375 375
376 376 return action
377 377
378 378 def applyupdates(repo, action, wctx, mctx):
379 379 "apply the merge action list to the working directory"
380 380
381 381 updated, merged, removed, unresolved = 0, 0, 0, 0
382 382 action.sort()
383 383 for a in action:
384 384 f, m = a[:2]
385 385 if f and f[0] == "/":
386 386 continue
387 387 if m == "r": # remove
388 388 repo.ui.note(_("removing %s\n") % f)
389 389 util.audit_path(f)
390 390 try:
391 391 util.unlink(repo.wjoin(f))
392 392 except OSError, inst:
393 393 if inst.errno != errno.ENOENT:
394 394 repo.ui.warn(_("update failed to remove %s: %s!\n") %
395 395 (f, inst.strerror))
396 396 removed += 1
397 397 elif m == "m": # merge
398 398 f2, fd, flags, move = a[2:]
399 399 r = filemerge(repo, f, f2, wctx, mctx)
400 400 if r > 0:
401 401 unresolved += 1
402 402 else:
403 403 if r is None:
404 404 updated += 1
405 405 else:
406 406 merged += 1
407 407 if f != fd:
408 408 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
409 409 repo.wwrite(fd, repo.wread(f), flags)
410 410 if move:
411 411 repo.ui.debug(_("removing %s\n") % f)
412 412 os.unlink(repo.wjoin(f))
413 413 util.set_exec(repo.wjoin(fd), "x" in flags)
414 414 elif m == "g": # get
415 415 flags = a[2]
416 416 repo.ui.note(_("getting %s\n") % f)
417 417 t = mctx.filectx(f).data()
418 418 repo.wwrite(f, t, flags)
419 419 updated += 1
420 420 elif m == "d": # directory rename
421 421 f2, fd, flags = a[2:]
422 422 if f:
423 423 repo.ui.note(_("moving %s to %s\n") % (f, fd))
424 424 t = wctx.filectx(f).data()
425 425 repo.wwrite(fd, t, flags)
426 426 util.unlink(repo.wjoin(f))
427 427 if f2:
428 428 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
429 429 t = mctx.filectx(f2).data()
430 430 repo.wwrite(fd, t, flags)
431 431 updated += 1
432 432 elif m == "dr": # divergent renames
433 433 fl = a[2]
434 434 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
435 435 for nf in fl:
436 436 repo.ui.warn(" %s\n" % nf)
437 437 elif m == "e": # exec
438 438 flags = a[2]
439 439 util.set_exec(repo.wjoin(f), flags)
440 440
441 441 return updated, merged, removed, unresolved
442 442
443 443 def recordupdates(repo, action, branchmerge):
444 444 "record merge actions to the dirstate"
445 445
446 446 for a in action:
447 447 f, m = a[:2]
448 448 if m == "r": # remove
449 449 if branchmerge:
450 450 repo.dirstate.remove(f)
451 451 else:
452 452 repo.dirstate.forget(f)
453 453 elif m == "f": # forget
454 454 repo.dirstate.forget(f)
455 455 elif m == "g": # get
456 456 if branchmerge:
457 457 repo.dirstate.normaldirty(f)
458 458 else:
459 459 repo.dirstate.normal(f)
460 460 elif m == "m": # merge
461 461 f2, fd, flag, move = a[2:]
462 462 if branchmerge:
463 463 # We've done a branch merge, mark this file as merged
464 464 # so that we properly record the merger later
465 465 repo.dirstate.merge(fd)
466 466 if f != f2: # copy/rename
467 467 if move:
468 468 repo.dirstate.remove(f)
469 469 if f != fd:
470 470 repo.dirstate.copy(f, fd)
471 471 else:
472 472 repo.dirstate.copy(f2, fd)
473 473 else:
474 474 # We've update-merged a locally modified file, so
475 475 # we set the dirstate to emulate a normal checkout
476 476 # of that file some time in the past. Thus our
477 477 # merge will appear as a normal local file
478 478 # modification.
479 479 repo.dirstate.normaldirty(fd)
480 480 if move:
481 481 repo.dirstate.forget(f)
482 482 elif m == "d": # directory rename
483 483 f2, fd, flag = a[2:]
484 484 if not f2 and f not in repo.dirstate:
485 485 # untracked file moved
486 486 continue
487 487 if branchmerge:
488 488 repo.dirstate.add(fd)
489 489 if f:
490 490 repo.dirstate.remove(f)
491 491 repo.dirstate.copy(f, fd)
492 492 if f2:
493 493 repo.dirstate.copy(f2, fd)
494 494 else:
495 495 repo.dirstate.normal(fd)
496 496 if f:
497 497 repo.dirstate.forget(f)
498 498
499 def update(repo, node, branchmerge, force, partial, wlock):
499 def update(repo, node, branchmerge, force, partial):
500 500 """
501 501 Perform a merge between the working directory and the given node
502 502
503 503 branchmerge = whether to merge between branches
504 504 force = whether to force branch merging or file overwriting
505 505 partial = a function to filter file lists (dirstate not updated)
506 wlock = working dir lock, if already held
507 506 """
508 507
508 wlock = repo.wlock()
509 509 try:
510 if not wlock:
511 wlock = repo.wlock()
512
513 510 wc = repo.workingctx()
514 511 if node is None:
515 512 # tip of current branch
516 513 try:
517 514 node = repo.branchtags()[wc.branch()]
518 515 except KeyError:
519 516 raise util.Abort(_("branch %s not found") % wc.branch())
520 517 overwrite = force and not branchmerge
521 518 forcemerge = force and branchmerge
522 519 pl = wc.parents()
523 520 p1, p2 = pl[0], repo.changectx(node)
524 521 pa = p1.ancestor(p2)
525 522 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
526 523 fastforward = False
527 524
528 525 ### check phase
529 526 if not overwrite and len(pl) > 1:
530 527 raise util.Abort(_("outstanding uncommitted merges"))
531 528 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
532 529 if branchmerge:
533 530 if p1.branch() != p2.branch() and pa != p2:
534 531 fastforward = True
535 532 else:
536 533 raise util.Abort(_("there is nothing to merge, just use "
537 534 "'hg update' or look at 'hg heads'"))
538 535 elif not (overwrite or branchmerge):
539 536 raise util.Abort(_("update spans branches, use 'hg merge' "
540 537 "or 'hg update -C' to lose changes"))
541 538 if branchmerge and not forcemerge:
542 539 if wc.files():
543 540 raise util.Abort(_("outstanding uncommitted changes"))
544 541
545 542 ### calculate phase
546 543 action = []
547 544 if not force:
548 545 checkunknown(wc, p2)
549 546 if not util.checkfolding(repo.path):
550 547 checkcollision(p2)
551 548 if not branchmerge:
552 549 action += forgetremoved(wc, p2)
553 550 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
554 551
555 552 ### apply phase
556 553 if not branchmerge: # just jump to the new rev
557 554 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
558 555 if not partial:
559 556 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
560 557
561 558 stats = applyupdates(repo, action, wc, p2)
562 559
563 560 if not partial:
564 561 recordupdates(repo, action, branchmerge)
565 562 repo.dirstate.setparents(fp1, fp2)
566 563 if not branchmerge and not fastforward:
567 564 repo.dirstate.setbranch(p2.branch())
568 565 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
569 566
570 567 return stats
571 568 finally:
572 569 del wlock
@@ -1,1319 +1,1319 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import *
11 11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
12 12 import cStringIO, email.Parser, os, popen2, re, sha
13 13 import sys, tempfile, zlib
14 14
15 15 class PatchError(Exception):
16 16 pass
17 17
18 18 class NoHunks(PatchError):
19 19 pass
20 20
21 21 # helper functions
22 22
23 23 def copyfile(src, dst, basedir=None):
24 24 if not basedir:
25 25 basedir = os.getcwd()
26 26
27 27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
28 28 if os.path.exists(absdst):
29 29 raise util.Abort(_("cannot create %s: destination already exists") %
30 30 dst)
31 31
32 32 targetdir = os.path.dirname(absdst)
33 33 if not os.path.isdir(targetdir):
34 34 os.makedirs(targetdir)
35 35
36 36 util.copyfile(abssrc, absdst)
37 37
38 38 # public functions
39 39
40 40 def extract(ui, fileobj):
41 41 '''extract patch from data read from fileobj.
42 42
43 43 patch can be a normal patch or contained in an email message.
44 44
45 45 return tuple (filename, message, user, date, node, p1, p2).
46 46 Any item in the returned tuple can be None. If filename is None,
47 47 fileobj did not contain a patch. Caller must unlink filename when done.'''
48 48
49 49 # attempt to detect the start of a patch
50 50 # (this heuristic is borrowed from quilt)
51 51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
52 52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
53 53 '(---|\*\*\*)[ \t])', re.MULTILINE)
54 54
55 55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
56 56 tmpfp = os.fdopen(fd, 'w')
57 57 try:
58 58 msg = email.Parser.Parser().parse(fileobj)
59 59
60 60 subject = msg['Subject']
61 61 user = msg['From']
62 62 # should try to parse msg['Date']
63 63 date = None
64 64 nodeid = None
65 65 branch = None
66 66 parents = []
67 67
68 68 if subject:
69 69 if subject.startswith('[PATCH'):
70 70 pend = subject.find(']')
71 71 if pend >= 0:
72 72 subject = subject[pend+1:].lstrip()
73 73 subject = subject.replace('\n\t', ' ')
74 74 ui.debug('Subject: %s\n' % subject)
75 75 if user:
76 76 ui.debug('From: %s\n' % user)
77 77 diffs_seen = 0
78 78 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
79 79 message = ''
80 80 for part in msg.walk():
81 81 content_type = part.get_content_type()
82 82 ui.debug('Content-Type: %s\n' % content_type)
83 83 if content_type not in ok_types:
84 84 continue
85 85 payload = part.get_payload(decode=True)
86 86 m = diffre.search(payload)
87 87 if m:
88 88 hgpatch = False
89 89 ignoretext = False
90 90
91 91 ui.debug(_('found patch at byte %d\n') % m.start(0))
92 92 diffs_seen += 1
93 93 cfp = cStringIO.StringIO()
94 94 for line in payload[:m.start(0)].splitlines():
95 95 if line.startswith('# HG changeset patch'):
96 96 ui.debug(_('patch generated by hg export\n'))
97 97 hgpatch = True
98 98 # drop earlier commit message content
99 99 cfp.seek(0)
100 100 cfp.truncate()
101 101 subject = None
102 102 elif hgpatch:
103 103 if line.startswith('# User '):
104 104 user = line[7:]
105 105 ui.debug('From: %s\n' % user)
106 106 elif line.startswith("# Date "):
107 107 date = line[7:]
108 108 elif line.startswith("# Branch "):
109 109 branch = line[9:]
110 110 elif line.startswith("# Node ID "):
111 111 nodeid = line[10:]
112 112 elif line.startswith("# Parent "):
113 113 parents.append(line[10:])
114 114 elif line == '---' and 'git-send-email' in msg['X-Mailer']:
115 115 ignoretext = True
116 116 if not line.startswith('# ') and not ignoretext:
117 117 cfp.write(line)
118 118 cfp.write('\n')
119 119 message = cfp.getvalue()
120 120 if tmpfp:
121 121 tmpfp.write(payload)
122 122 if not payload.endswith('\n'):
123 123 tmpfp.write('\n')
124 124 elif not diffs_seen and message and content_type == 'text/plain':
125 125 message += '\n' + payload
126 126 except:
127 127 tmpfp.close()
128 128 os.unlink(tmpname)
129 129 raise
130 130
131 131 if subject and not message.startswith(subject):
132 132 message = '%s\n%s' % (subject, message)
133 133 tmpfp.close()
134 134 if not diffs_seen:
135 135 os.unlink(tmpname)
136 136 return None, message, user, date, branch, None, None, None
137 137 p1 = parents and parents.pop(0) or None
138 138 p2 = parents and parents.pop(0) or None
139 139 return tmpname, message, user, date, branch, nodeid, p1, p2
140 140
141 141 GP_PATCH = 1 << 0 # we have to run patch
142 142 GP_FILTER = 1 << 1 # there's some copy/rename operation
143 143 GP_BINARY = 1 << 2 # there's a binary patch
144 144
def readgitpatch(fp, firstline):
    """extract git-style metadata about patches from <patchname>"""
    class gitpatch:
        "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
        def __init__(self, path):
            self.path = path
            self.oldpath = None
            self.mode = None
            self.op = 'MODIFY'
            self.copymod = False
            self.lineno = 0
            self.binary = False

    def lines(fp, firstline):
        # re-attach the already-consumed first line in front of the stream
        yield firstline
        for l in fp:
            yield l

    gitre = re.compile('diff --git a/(.*) b/(.*)')
    cur = None        # the gitpatch currently being assembled, if any
    gitpatches = []
    # Can have a git patch with only metadata, causing patch to complain
    dopatch = 0

    lineno = 0
    for line in lines(fp, firstline):
        lineno += 1
        if line.startswith('diff --git'):
            m = gitre.match(line)
            if m:
                # a new file header closes out the previous entry
                if cur:
                    gitpatches.append(cur)
                cur = gitpatch(m.group(2))
                cur.lineno = lineno
        elif cur:
            if line.startswith('--- '):
                # start of the textual diff body ends the metadata section
                if cur.op in ('COPY', 'RENAME'):
                    cur.copymod = True
                    dopatch |= GP_FILTER
                gitpatches.append(cur)
                cur = None
                dopatch |= GP_PATCH
                continue
            if line.startswith('rename from '):
                cur.op = 'RENAME'
                cur.oldpath = line[12:].rstrip()
            elif line.startswith('rename to '):
                cur.path = line[10:].rstrip()
            elif line.startswith('copy from '):
                cur.op = 'COPY'
                cur.oldpath = line[10:].rstrip()
            elif line.startswith('copy to '):
                cur.path = line[8:].rstrip()
            elif line.startswith('deleted file'):
                cur.op = 'DELETE'
            elif line.startswith('new file mode '):
                cur.op = 'ADD'
                cur.mode = int(line.rstrip()[-3:], 8)
            elif line.startswith('new mode '):
                cur.mode = int(line.rstrip()[-3:], 8)
            elif line.startswith('GIT binary patch'):
                dopatch |= GP_BINARY
                cur.binary = True
    if cur:
        gitpatches.append(cur)

    if not gitpatches:
        # no git metadata at all: a plain patch still needs running
        dopatch = GP_PATCH

    return (dopatch, gitpatches)
217 217
218 218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 219 """apply <patchname> to the working directory.
220 220 returns whether patch was applied with fuzz factor."""
221 221 patcher = ui.config('ui', 'patch')
222 222 args = []
223 223 try:
224 224 if patcher:
225 225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 226 files)
227 227 else:
228 228 try:
229 229 return internalpatch(patchname, ui, strip, cwd, files)
230 230 except NoHunks:
231 231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 233 patcher)
234 234 if util.needbinarypatch():
235 235 args.append('--binary')
236 236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 237 files)
238 238 except PatchError, err:
239 239 s = str(err)
240 240 if s:
241 241 raise util.Abort(s)
242 242 else:
243 243 raise util.Abort(_('patch failed to apply'))
244 244
def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    Runs `patcher` through os.popen, scrapes its stdout for progress and
    failure messages, records touched filenames in `files`, and raises
    PatchError if the command exits non-zero."""

    fuzz = False
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                       util.shellquote(patchname)))

    for line in fp:
        line = line.rstrip()
        ui.note(line + '\n')
        if line.startswith('patching file '):
            pf = util.parse_patch_output(line)
            printed_file = False
            files.setdefault(pf, (None, None))
        elif line.find('with fuzz') >= 0:
            # NOTE(review): pf/printed_file are only bound once a
            # 'patching file' line has been seen; output starting with a
            # fuzz/FAILED line would raise NameError here — confirm the
            # external patch tools always emit the header first.
            fuzz = True
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
        elif line.find('saving rejects to file') >= 0:
            ui.warn(line + '\n')
        elif line.find('FAILED') >= 0:
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
    # popen close() returns None on success, the exit status otherwise
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explain_exit(code)[0])
    return fuzz
280 280
def internalpatch(patchname, ui, strip, cwd, files):
    """use builtin patch to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor.

    Raises PatchError when applydiff reports rejects (negative return);
    the open patch file is left for the garbage collector to close."""
    fp = file(patchname)
    if cwd:
        # applydiff works relative to the current directory; restore it
        # afterwards via the finally block below
        curdir = os.getcwd()
        os.chdir(cwd)
    try:
        ret = applydiff(ui, fp, files, strip=strip)
    finally:
        if cwd:
            os.chdir(curdir)
    if ret < 0:
        raise PatchError
    # applydiff returns 1 for fuzz, 0 for a clean apply
    return ret > 0
296 296
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
# (raw strings: the patterns contain \d and \* regex escapes)
unidesc = re.compile(r'@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
# --- start,end --- / *** start,end **** headers of context diffs
contextdesc = re.compile(r'(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
300 300
class patchfile:
    """One target file of a patch, held in memory as a list of lines.

    Tracks the cumulative line offset of already-applied hunks, applies
    further hunks with offset and fuzz matching, collects failed hunks
    in self.rej, and writes the result plus a .rej file back to disk.
    """
    def __init__(self, ui, fname):
        self.fname = fname
        self.ui = ui
        try:
            fp = file(fname, 'r')
            self.lines = fp.readlines()
            self.exists = True
        except IOError:
            # the target doesn't exist yet (a hunk may create it);
            # pre-create missing parent directories so write() succeeds
            dirname = os.path.dirname(fname)
            if dirname and not os.path.isdir(dirname):
                dirs = dirname.split(os.path.sep)
                d = ""
                for x in dirs:
                    d = os.path.join(d, x)
                    if not os.path.isdir(d):
                        os.mkdir(d)
            self.lines = []
            self.exists = False

        self.hash = {}         # line content -> line numbers (hashlines)
        self.dirty = 0         # set once any hunk changed self.lines
        self.offset = 0        # line drift caused by earlier hunks
        self.rej = []          # hunks that failed to apply
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0         # hunks attempted so far

    def printfile(self, warn):
        """Print the file name once: warn() if warn, else note()."""
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines.  The
        # result is a list of line numbers sorted based on distance
        # from linenum
        def sorter(a, b):
            vala = abs(a - linenum)
            valb = abs(b - linenum)
            return cmp(vala, valb)

        try:
            cand = self.hash[l]
        except:
            # line content never seen: no candidates
            return []

        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(cmp=sorter)
        return cand

    def hashlines(self):
        """Index self.lines by content for fuzzy hunk placement."""
        self.hash = {}
        for x in xrange(len(self.lines)):
            s = self.lines[x]
            self.hash.setdefault(s, []).append(x)

    def write_rej(self):
        # our rejects are a little different from patch(1).  This always
        # creates rejects in the same form as the original patch.  A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.

        if not self.rej:
            return
        if self.hunks != 1:
            hunkstr = "s"
        else:
            hunkstr = ""

        fname = self.fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
            (len(self.rej), self.hunks, hunkstr, fname))
        try: os.unlink(fname)
        except:
            pass
        fp = file(fname, 'w')
        base = os.path.basename(self.fname)
        fp.write("--- %s\n+++ %s\n" % (base, base))
        for x in self.rej:
            for l in x.hunk:
                fp.write(l)
                if l[-1] != '\n':
                    fp.write("\n\ No newline at end of file\n")

    def write(self, dest=None):
        """Write the patched lines to dest (default: the original file)."""
        if self.dirty:
            if not dest:
                dest = self.fname
            st = None
            try:
                # break hard links before rewriting, keep the old mode
                st = os.lstat(dest)
                if st.st_nlink > 1:
                    os.unlink(dest)
            except: pass
            fp = file(dest, 'w')
            if st:
                os.chmod(dest, st.st_mode)
            fp.writelines(self.lines)
            fp.close()

    def close(self):
        """Flush the patched content and any rejects to disk."""
        self.write()
        self.write_rej()

    def apply(self, h, reverse):
        """Apply hunk h; return 0 on exact apply, the fuzz amount (>0)
        on a fuzzy apply, or -1 when the hunk is rejected."""
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                             (h.number, h.desc, len(h.a), h.lena, len(h.b),
                              h.lenb))

        self.hunks += 1
        if reverse:
            h.reverse()

        if self.exists and h.createfile():
            self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # binary patches replace (or delete) the file wholesale
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[:] = h.new()
                self.offset += len(h.new())
                self.dirty = 1
            return 0

        # fast case first, no offsets, no fuzz
        old = h.old()
        # patch starts counting at 1 unless we are adding the file
        if h.starta == 0:
            start = 0
        else:
            start = h.starta + self.offset - 1
        orig_start = start
        if diffhelpers.testhunk(old, self.lines, start) == 0:
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[start : start + h.lena] = h.new()
                self.offset += h.lenb - h.lena
                self.dirty = 1
            return 0

        # ok, we couldn't match the hunk.  Lets look for offsets and fuzz it
        self.hashlines()
        if h.hunk[-1][0] != ' ':
            # if the hunk tried to put something at the bottom of the file
            # override the start line and use eof here
            search_start = len(self.lines)
        else:
            search_start = orig_start

        # try increasing fuzz, trimming context from the top only first
        for fuzzlen in xrange(3):
            for toponly in [ True, False ]:
                old = h.old(fuzzlen, toponly)

                cand = self.findlines(old[0][1:], search_start)
                for l in cand:
                    if diffhelpers.testhunk(old, self.lines, l) == 0:
                        newlines = h.new(fuzzlen, toponly)
                        self.lines[l : l + len(old)] = newlines
                        self.offset += len(newlines) - len(old)
                        self.dirty = 1
                        if fuzzlen:
                            fuzzstr = "with fuzz %d " % fuzzlen
                            f = self.ui.warn
                            self.printfile(True)
                        else:
                            fuzzstr = ""
                            f = self.ui.note
                        offset = l - orig_start - fuzzlen
                        if offset == 1:
                            linestr = "line"
                        else:
                            linestr = "lines"
                        f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
                          (h.number, l+1, fuzzstr, offset, linestr))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(h)
        return -1
495 495
class hunk:
    """A single text hunk, parsed from either unified or context format.

    self.hunk holds the raw lines (description first); self.a holds the
    old side with its '-'/' ' control characters, self.b the new side
    with control characters stripped.
    """
    def __init__(self, desc, num, lr, context):
        self.number = num
        self.desc = desc
        self.hunk = [ desc ]
        self.a = []
        self.b = []
        if context:
            self.read_context_hunk(lr)
        else:
            self.read_unified_hunk(lr)

    def read_unified_hunk(self, lr):
        """Parse a '@@ -a,l +b,l @@' hunk from linereader lr."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
        # a missing length defaults to 1 in unified diff headers
        if self.lena == None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb == None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length.  Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1

    def read_context_hunk(self, lr):
        """Parse a '*** a,b **** / --- c,d ----' context-format hunk and
        convert it into the unified representation (a, b, hunk, desc)."""
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.starta, foo2, aend, foo3 = m.groups()
        self.starta = int(self.starta)
        if aend == None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # old side: '- ' (removed) and '! ' (changed) become '-' lines
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': drop the trailing newline
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.startb, foo2, bend, foo3 = m.groups()
        self.startb = int(self.startb)
        if bend == None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        # new side: '+ ' (added) and '! ' (changed) become '+' lines,
        # merged into self.hunk at the right positions
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki-1] = s
                continue
            if not l:
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # this can happen when the hunk does not add any lines
                lr.push(l)
                break
            else:
                # NOTE(review): message says "old text line" although this
                # loop reads the new side — likely a copy/paste of the
                # message above
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            # advance hunki past matching context/removed lines and
            # insert the new line where it belongs in self.hunk
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki-1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc

    def reverse(self):
        """Swap the old and new sides so the hunk can be applied backwards."""
        origlena = self.lena
        origstarta = self.starta
        self.lena = self.lenb
        self.starta = self.startb
        self.lenb = origlena
        self.startb = origstarta
        self.a = []
        self.b = []
        # self.hunk[0] is the @@ description
        for x in xrange(1, len(self.hunk)):
            o = self.hunk[x]
            if o.startswith('-'):
                n = '+' + o[1:]
                self.b.append(o[1:])
            elif o.startswith('+'):
                n = '-' + o[1:]
                self.a.append(n)
            else:
                n = o
                self.b.append(o[1:])
                self.a.append(o)
            # NOTE(review): this stores the original line `o`, not the
            # sign-flipped `n`, so self.hunk keeps its old +/- markers —
            # possibly deliberate (write_rej wants the original form),
            # but worth confirming
            self.hunk[x] = o

    def fix_newline(self):
        """Handle a trailing '\\ No newline at end of file' marker."""
        diffhelpers.fix_newline(self.hunk, self.a, self.b)

    def complete(self):
        # true when both sides contain exactly the advertised line counts
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def createfile(self):
        # an old side of 0,0 means the hunk creates the file
        return self.starta == 0 and self.lena == 0

    def rmfile(self):
        # a new side of 0,0 means the hunk deletes the file
        return self.startb == 0 and self.lenb == 0

    def fuzzit(self, l, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'.  It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(l)-1)
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen-1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x+1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen-1):
                    if self.hunk[hlen-bot-1][0] == ' ':
                        bot += 1
                    else:
                        break

            # top and bot now count context in the hunk
            # adjust them if either one is short
            context = max(top, bot, 3)
            if bot < context:
                bot = max(0, fuzz - (context - bot))
            else:
                bot = min(fuzz, bot)
            if top < context:
                top = max(0, fuzz - (context - top))
            else:
                top = min(fuzz, top)

            return l[top:len(l)-bot]
        return l

    def old(self, fuzz=0, toponly=False):
        """Old-side lines (with control chars), optionally fuzz-trimmed."""
        return self.fuzzit(self.a, fuzz, toponly)

    def newctrl(self):
        """Lines of the hunk that survive into the new file, with
        their control characters still attached."""
        res = []
        for x in self.hunk:
            c = x[0]
            if c == ' ' or c == '+':
                res.append(x)
        return res

    def new(self, fuzz=0, toponly=False):
        """New-side lines (control chars stripped), optionally fuzz-trimmed."""
        return self.fuzzit(self.b, fuzz, toponly)
716 716
class binhunk:
    """A binary patch file.  Only understands literals so far.

    Wraps the gitpatch metadata entry for a binary diff; extract()
    decodes the base85/zlib 'literal' payload from the patch stream.
    """
    def __init__(self, gitpatch):
        self.gitpatch = gitpatch
        self.text = None                      # decoded payload, set by extract()
        self.hunk = ['GIT binary patch\n']    # raw lines, for reject output

    def createfile(self):
        # the git op tells us whether the target is a brand-new file
        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')

    def rmfile(self):
        return self.gitpatch.op == 'DELETE'

    def complete(self):
        return self.text is not None

    def new(self):
        # whole-file replacement: a single "line" holding the payload
        return [self.text]

    def extract(self, fp):
        """Read a 'literal <size>' base85 block from fp and decode it.

        Raises PatchError if no literal block is found or the inflated
        size does not match the advertised one."""
        line = fp.readline()
        self.hunk.append(line)
        while line and not line.startswith('literal '):
            line = fp.readline()
            self.hunk.append(line)
        if not line:
            raise PatchError(_('could not extract binary patch'))
        size = int(line[8:].rstrip())
        dec = []
        line = fp.readline()
        self.hunk.append(line)
        while len(line) > 1:
            # first char encodes the decoded length of this line:
            # 'A'-'Z' => 1-26, 'a'-'z' => 27-52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:-1])[:l])
            line = fp.readline()
            self.hunk.append(line)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            # bug fix: the format arguments must be a tuple; the original
            # applied % to len(text) alone (TypeError) and passed size as
            # a second exception argument
            raise PatchError(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        self.text = text
762 762
def parsefilename(str):
    """Extract the filename from a '--- '/'+++ ' diff header line.

    Drops the four-character prefix, then discards anything after the
    first tab or — failing that — the first space (typically a
    timestamp).  Trailing newlines are NOT stripped.
    """
    # --- filename \t|space stuff
    s = str[4:]
    for sep in ('\t', ' '):
        cut = s.find(sep)
        if cut >= 0:
            return s[:cut]
    return s
772 772
def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
    """Pick the on-disk filename a hunk applies to.

    Strips `strip` leading path components from both header names,
    prefers whichever file actually exists (or, for file creation, the
    non-/dev/null side), and raises PatchError when neither name is
    usable and the hunk does not create a file.
    """
    def pathstrip(path, count=1):
        # remove `count` leading components, collapsing repeated os.sep
        if count == 0:
            return path.rstrip()
        end = len(path)
        i = 0
        while count > 0:
            i = path.find(os.sep, i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
            while i < end - 1 and path[i] == os.sep:
                i += 1
            count -= 1
        return path[i:].rstrip()

    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    afile = pathstrip(afile_orig, strip)
    bfile = pathstrip(bfile_orig, strip)
    gooda = not nulla and os.path.exists(afile)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and os.path.exists(bfile)
    # when reversing, "creates the file" means the hunk's delete side
    if reverse:
        createfunc = hunk.rmfile
    else:
        createfunc = hunk.createfile
    if not (gooda or goodb or createfunc()):
        raise PatchError(_("unable to find %s or %s for patching") %
                         (afile, bfile))
    # prefer an existing file; on ties, prefer the a-side when it is a
    # prefix of the b-side name
    if gooda and goodb:
        fname = bfile
        if afile in bfile:
            fname = afile
    elif gooda:
        fname = afile
    elif not nullb:
        fname = bfile
        if afile in bfile:
            fname = afile
    elif not nulla:
        fname = afile
    return fname
819 819
class linereader:
    """Wrap a file object, allowing lines to be pushed back for rereading."""
    def __init__(self, fp):
        self.fp = fp     # underlying file object
        self.buf = []    # FIFO of pushed-back lines

    def push(self, line):
        """Queue `line` to be returned by a future readline()."""
        self.buf.append(line)

    def readline(self):
        """Return the oldest pushed-back line, or read from the file."""
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()
835 835
836 836 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
837 837 rejmerge=None, updatedir=None):
838 838 """reads a patch from fp and tries to apply it. The dict 'changed' is
839 839 filled in with all of the filenames changed by the patch. Returns 0
840 840 for a clean patch, -1 if any rejects were found and 1 if there was
841 841 any fuzz."""
842 842
843 843 def scangitpatch(fp, firstline, cwd=None):
844 844 '''git patches can modify a file, then copy that file to
845 845 a new file, but expect the source to be the unmodified form.
846 846 So we scan the patch looking for that case so we can do
847 847 the copies ahead of time.'''
848 848
849 849 pos = 0
850 850 try:
851 851 pos = fp.tell()
852 852 except IOError:
853 853 fp = cStringIO.StringIO(fp.read())
854 854
855 855 (dopatch, gitpatches) = readgitpatch(fp, firstline)
856 856 for gp in gitpatches:
857 857 if gp.copymod:
858 858 copyfile(gp.oldpath, gp.path, basedir=cwd)
859 859
860 860 fp.seek(pos)
861 861
862 862 return fp, dopatch, gitpatches
863 863
864 864 current_hunk = None
865 865 current_file = None
866 866 afile = ""
867 867 bfile = ""
868 868 state = None
869 869 hunknum = 0
870 870 rejects = 0
871 871
872 872 git = False
873 873 gitre = re.compile('diff --git (a/.*) (b/.*)')
874 874
875 875 # our states
876 876 BFILE = 1
877 877 err = 0
878 878 context = None
879 879 lr = linereader(fp)
880 880 dopatch = True
881 881 gitworkdone = False
882 882
883 883 while True:
884 884 newfile = False
885 885 x = lr.readline()
886 886 if not x:
887 887 break
888 888 if current_hunk:
889 889 if x.startswith('\ '):
890 890 current_hunk.fix_newline()
891 891 ret = current_file.apply(current_hunk, reverse)
892 892 if ret >= 0:
893 893 changed.setdefault(current_file.fname, (None, None))
894 894 if ret > 0:
895 895 err = 1
896 896 current_hunk = None
897 897 gitworkdone = False
898 898 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
899 899 ((context or context == None) and x.startswith('***************')))):
900 900 try:
901 901 if context == None and x.startswith('***************'):
902 902 context = True
903 903 current_hunk = hunk(x, hunknum + 1, lr, context)
904 904 except PatchError, err:
905 905 ui.debug(err)
906 906 current_hunk = None
907 907 continue
908 908 hunknum += 1
909 909 if not current_file:
910 910 if sourcefile:
911 911 current_file = patchfile(ui, sourcefile)
912 912 else:
913 913 current_file = selectfile(afile, bfile, current_hunk,
914 914 strip, reverse)
915 915 current_file = patchfile(ui, current_file)
916 916 elif state == BFILE and x.startswith('GIT binary patch'):
917 917 current_hunk = binhunk(changed[bfile[2:]][1])
918 918 if not current_file:
919 919 if sourcefile:
920 920 current_file = patchfile(ui, sourcefile)
921 921 else:
922 922 current_file = selectfile(afile, bfile, current_hunk,
923 923 strip, reverse)
924 924 current_file = patchfile(ui, current_file)
925 925 hunknum += 1
926 926 current_hunk.extract(fp)
927 927 elif x.startswith('diff --git'):
928 928 # check for git diff, scanning the whole patch file if needed
929 929 m = gitre.match(x)
930 930 if m:
931 931 afile, bfile = m.group(1, 2)
932 932 if not git:
933 933 git = True
934 934 fp, dopatch, gitpatches = scangitpatch(fp, x)
935 935 for gp in gitpatches:
936 936 changed[gp.path] = (gp.op, gp)
937 937 # else error?
938 938 # copy/rename + modify should modify target, not source
939 939 if changed.get(bfile[2:], (None, None))[0] in ('COPY',
940 940 'RENAME'):
941 941 afile = bfile
942 942 gitworkdone = True
943 943 newfile = True
944 944 elif x.startswith('---'):
945 945 # check for a unified diff
946 946 l2 = lr.readline()
947 947 if not l2.startswith('+++'):
948 948 lr.push(l2)
949 949 continue
950 950 newfile = True
951 951 context = False
952 952 afile = parsefilename(x)
953 953 bfile = parsefilename(l2)
954 954 elif x.startswith('***'):
955 955 # check for a context diff
956 956 l2 = lr.readline()
957 957 if not l2.startswith('---'):
958 958 lr.push(l2)
959 959 continue
960 960 l3 = lr.readline()
961 961 lr.push(l3)
962 962 if not l3.startswith("***************"):
963 963 lr.push(l2)
964 964 continue
965 965 newfile = True
966 966 context = True
967 967 afile = parsefilename(x)
968 968 bfile = parsefilename(l2)
969 969
970 970 if newfile:
971 971 if current_file:
972 972 current_file.close()
973 973 if rejmerge:
974 974 rejmerge(current_file)
975 975 rejects += len(current_file.rej)
976 976 state = BFILE
977 977 current_file = None
978 978 hunknum = 0
979 979 if current_hunk:
980 980 if current_hunk.complete():
981 981 ret = current_file.apply(current_hunk, reverse)
982 982 if ret >= 0:
983 983 changed.setdefault(current_file.fname, (None, None))
984 984 if ret > 0:
985 985 err = 1
986 986 else:
987 987 fname = current_file and current_file.fname or None
988 988 raise PatchError(_("malformed patch %s %s") % (fname,
989 989 current_hunk.desc))
990 990 if current_file:
991 991 current_file.close()
992 992 if rejmerge:
993 993 rejmerge(current_file)
994 994 rejects += len(current_file.rej)
995 995 if updatedir and git:
996 996 updatedir(gitpatches)
997 997 if rejects:
998 998 return -1
999 999 if hunknum == 0 and dopatch and not gitworkdone:
1000 1000 raise NoHunks
1001 1001 return err
1002 1002
def diffopts(ui, opts={}, untrusted=False):
    """Build an mdiff.diffopts object from command options, falling
    back to the [diff] configuration section for unset values."""
    def lookup(key, name=None):
        # a truthy command-line option wins; otherwise consult config
        v = opts.get(key)
        if not v:
            v = ui.configbool('diff', name or key, None, untrusted=untrusted)
        return v
    return mdiff.diffopts(
        text=opts.get('text'),
        git=lookup('git'),
        nodates=lookup('nodates'),
        showfunc=lookup('show_function', 'showfunc'),
        ignorews=lookup('ignore_all_space', 'ignorews'),
        ignorewsamount=lookup('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=lookup('ignore_blank_lines', 'ignoreblanklines'))
1015 1015
def updatedir(ui, repo, patches):
    '''Update dirstate after patch application according to metadata'''
    if not patches:
        return
    copies = []
    removes = {}
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        # translate repo-relative names to cwd-relative for addremove
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    # collect copy/rename sources and deletions from the git metadata
    for f in patches:
        ctype, gp = patches[f]
        if ctype == 'RENAME':
            copies.append((gp.oldpath, gp.path, gp.copymod))
            removes[gp.oldpath] = 1
        elif ctype == 'COPY':
            copies.append((gp.oldpath, gp.path, gp.copymod))
        elif ctype == 'DELETE':
            removes[gp.path] = 1
    for src, dst, after in copies:
        if not after:
            # the copy was not already done ahead of patching
            copyfile(src, dst, repo.root)
        repo.copy(src, dst)
    removes = removes.keys()
    if removes:
        removes.sort()
        repo.remove(removes, True)
    # propagate git mode bits (only the exec bit is tracked)
    for f in patches:
        ctype, gp = patches[f]
        if gp and gp.mode:
            x = gp.mode & 0100 != 0
            dst = os.path.join(repo.root, gp.path)
            # patch won't create empty files
            if ctype == 'ADD' and not os.path.exists(dst):
                repo.wwrite(gp.path, '', x and 'x' or '')
            else:
                util.set_exec(dst, x)
    cmdutil.addremove(repo, cfiles)
    # return the full list of touched files, removals included
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    files.sort()

    return files
1059 1059
def b85diff(fp, to, tn):
    '''print base85-encoded binary diff'''
    # NOTE: fp is accepted for signature parity but never used; the
    # encoded diff is returned as a string.
    def gitindex(text):
        # git blob id: sha1 over "blob <len>\0<data>"
        if not text:
            return '0' * 40
        s = sha.new('blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # first char encodes the line's decoded length:
        # 'A'-'Z' => 1-26, 'a'-'z' => 27-52
        n = len(line)
        if n <= 26:
            c = chr(ord('A') + n - 1)
        else:
            c = chr(n - 26 + ord('a') - 1)
        return '%c%s\n' % (c, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # split into 52-byte slices for encoding
        for i in xrange(0, len(text), csize):
            yield text[i:i + csize]

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    if tohash == tnhash:
        # identical content: nothing to emit
        return ""

    # TODO: deltas
    pieces = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
              (tohash, tnhash, len(tn))]
    for piece in chunk(zlib.compress(tn)):
        pieces.append(fmtline(piece))
    pieces.append('\n')
    return ''.join(pieces)
1097 1097
def diff(repo, node1=None, node2=None, files=None, match=util.always,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    Output is written to fp (defaults to repo.ui); `changes` may supply
    a precomputed status tuple; `opts` is an mdiff.diffopts and controls
    git-extended headers, whitespace handling, etc.'''

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # memoize changectx lookups done by renamed() below
    ccache = {}
    def getctx(r):
        if r not in ccache:
            ccache[r] = context.changectx(repo, r)
        return ccache[r]

    # share filelogs between filectx lookups of the same file
    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    # reading the data for node1 early allows it to play nicely
    # with repo.status and the revlog cache.
    ctx1 = context.changectx(repo, node1)
    # force manifest reading
    man1 = ctx1.manifest()
    date1 = util.datestr(ctx1.date())

    if not changes:
        changes = repo.status(node1, node2, files, match=match)[:5]
    modified, added, removed, deleted, unknown = changes

    if not modified and not added and not removed:
        return

    if node2:
        ctx2 = context.changectx(repo, node2)
        execf2 = ctx2.manifest().execf
    else:
        # diffing against the working directory: take exec bits from the
        # filesystem when possible, else from the parent manifest
        ctx2 = context.workingctx(repo)
        execf2 = util.execfunc(repo.root, None)
        if execf2 is None:
            execf2 = ctx2.parents()[0].manifest().copy().execf

    # returns False if there was no rename between ctx1 and ctx2
    # returns None if the file was created between ctx1 and ctx2
    # returns the (file, node) present in ctx1 that was renamed to f in ctx2
    def renamed(f):
        startrev = ctx1.rev()
        c = ctx2
        crev = c.rev()
        if crev is None:
            crev = repo.changelog.count()
        orig = f
        # walk first parents back from ctx2 to ctx1 following rename links
        while crev > startrev:
            if f in c.files():
                try:
                    src = getfilectx(f, c).renamed()
                except revlog.LookupError:
                    return None
                if src:
                    f = src[0]
            crev = c.parents()[0].rev()
            # try to reuse
            c = getctx(crev)
        if f not in man1:
            return None
        if f == orig:
            return False
        return f

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # map added file -> rename/copy source, for git headers
        copied = {}
        for f in added:
            src = renamed(f)
            if src:
                copied[f] = src
        srcs = [x[1] for x in copied.items()]

    all = modified + added + removed
    all.sort()
    gone = {}   # rename sources already emitted as 'rename from'

    for f in all:
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        if opts.git:
            def gitmode(x):
                return x and '100755' or '100644'
            def addmodehdr(header, omode, nmode):
                if omode != nmode:
                    header.append('old mode %s\n' % omode)
                    header.append('new mode %s\n' % nmode)

            a, b = f, f
            if f in added:
                mode = gitmode(execf2(f))
                if f in copied:
                    a = copied[f]
                    omode = gitmode(man1.execf(a))
                    addmodehdr(header, omode, mode)
                    # the first copy of a removed source is a rename
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                if f in srcs:
                    # removal already shown as the source of a rename
                    dodiff = False
                else:
                    mode = gitmode(man1.execf(f))
                    header.append('deleted file mode %s\n' % mode)
            else:
                omode = gitmode(man1.execf(f))
                nmode = gitmode(execf2(f))
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            # git headers carry no revision line
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(fp, to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     f, r, opts=opts)
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)
1256 1256
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    def write_patch(rev, seqno, dest):
        # Collect changeset metadata.
        ctx = repo.changectx(rev)
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        if switch_parent:
            # diff against the second parent instead of the first
            parents.reverse()
        prev = (parents and parents[0]) or nullid
        branch = ctx.branch()

        if not dest:
            dest = cmdutil.make_file(repo, template, node, total=total,
                                     seqno=seqno, revwidth=revwidth)
        if dest != sys.stdout and hasattr(dest, 'name'):
            repo.ui.note("%s\n" % dest.name)

        # Assemble the patch header, then emit it one line at a time.
        header = ["# HG changeset patch\n",
                  "# User %s\n" % ctx.user(),
                  "# Date %d %d\n" % ctx.date()]
        if branch and (branch != 'default'):
            header.append("# Branch %s\n" % branch)
        header.append("# Node ID %s\n" % hex(node))
        header.append("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            header.append("# Parent %s\n" % hex(parents[1]))
        for line in header:
            dest.write(line)
        dest.write(ctx.description().rstrip())
        dest.write("\n\n")

        diff(repo, prev, node, fp=dest, opts=opts)
        # Close files we opened ourselves; never close stdout or the ui.
        if dest not in (sys.stdout, repo.ui):
            dest.close()

    seqno = 0
    for rev in revs:
        seqno += 1
        write_patch(rev, seqno, fp)
1297 1297
def diffstat(patchlines):
    # Return a diffstat-style summary of patchlines, with the trailing
    # "N files changed ..." line moved to the front, or None when the
    # external 'diffstat' tool is not installed or exits non-zero.
    if not util.find_exe('diffstat'):
        return
    # diffstat's report is redirected into a temp file we read back below.
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    try:
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        try:
            # feed the patch text to diffstat's stdin
            for line in patchlines: print >> p.tochild, line
            p.tochild.close()
            # non-zero exit: no usable stats, return None
            if p.wait(): return
            fp = os.fdopen(fd, 'r')
            stat = []
            for line in fp: stat.append(line.lstrip())
            # promote the summary line ("N files changed ...") to the top
            last = stat.pop()
            stat.insert(0, last)
            stat = ''.join(stat)
            # an empty diff is treated as an error and propagates to the caller
            if stat.startswith('0 files'): raise ValueError
            return stat
        except: raise
        # NOTE(review): nested try blocks — except and finally could not
        # share one try statement before Python 2.5, hence this shape.
        finally:
            try: os.unlink(name)
            except: pass
General Comments 0
You need to be logged in to leave comments. Login now