##// END OF EJS Templates
Use try/finally pattern to cleanup locks and transactions
Matt Mackall -
r4915:97b734fb default
parent child Browse files
Show More
@@ -1,98 +1,98 b''
1 1 # fetch.py - pull and merge remote changes
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 from mercurial.node import *
10 10 from mercurial import commands, cmdutil, hg, node, util
11 11
12 12 def fetch(ui, repo, source='default', **opts):
13 13 '''Pull changes from a remote repository, merge new changes if needed.
14 14
15 15 This finds all changes from the repository at the specified path
16 16 or URL and adds them to the local repository.
17 17
18 18 If the pulled changes add a new head, the head is automatically
19 19 merged, and the result of the merge is committed. Otherwise, the
20 20 working directory is updated.'''
21 21
22 def postincoming(other, modheads):
22 def postincoming(other, modheads, lock, wlock):
23 23 if modheads == 0:
24 24 return 0
25 25 if modheads == 1:
26 26 return hg.clean(repo, repo.changelog.tip(), wlock=wlock)
27 27 newheads = repo.heads(parent)
28 28 newchildren = [n for n in repo.heads(parent) if n != parent]
29 29 newparent = parent
30 30 if newchildren:
31 31 newparent = newchildren[0]
32 32 hg.clean(repo, newparent, wlock=wlock)
33 33 newheads = [n for n in repo.heads() if n != newparent]
34 34 err = False
35 35 if newheads:
36 36 ui.status(_('merging with new head %d:%s\n') %
37 37 (repo.changelog.rev(newheads[0]), short(newheads[0])))
38 38 err = hg.merge(repo, newheads[0], remind=False, wlock=wlock)
39 39 if not err and len(newheads) > 1:
40 40 ui.status(_('not merging with %d other new heads '
41 41 '(use "hg heads" and "hg merge" to merge them)') %
42 42 (len(newheads) - 1))
43 43 if not err:
44 44 mod, add, rem = repo.status(wlock=wlock)[:3]
45 45 message = (cmdutil.logmessage(opts) or
46 46 (_('Automated merge with %s') % other.url()))
47 47 n = repo.commit(mod + add + rem, message,
48 48 opts['user'], opts['date'], lock=lock, wlock=wlock,
49 49 force_editor=opts.get('force_editor'))
50 50 ui.status(_('new changeset %d:%s merges remote changes '
51 51 'with local\n') % (repo.changelog.rev(n),
52 52 short(n)))
53 def pull():
53 def pull(lock, wlock):
54 54 cmdutil.setremoteconfig(ui, opts)
55 55
56 56 other = hg.repository(ui, ui.expandpath(source))
57 57 ui.status(_('pulling from %s\n') % ui.expandpath(source))
58 58 revs = None
59 59 if opts['rev'] and not other.local():
60 60 raise util.Abort(_("fetch -r doesn't work for remote repositories yet"))
61 61 elif opts['rev']:
62 62 revs = [other.lookup(rev) for rev in opts['rev']]
63 63 modheads = repo.pull(other, heads=revs, lock=lock)
64 return postincoming(other, modheads)
64 return postincoming(other, modheads, lock, wlock)
65 65
66 66 parent, p2 = repo.dirstate.parents()
67 67 if parent != repo.changelog.tip():
68 68 raise util.Abort(_('working dir not at tip '
69 69 '(use "hg update" to check out tip)'))
70 70 if p2 != nullid:
71 71 raise util.Abort(_('outstanding uncommitted merge'))
72 wlock = lock = None
73 try:
72 74 wlock = repo.wlock()
73 75 lock = repo.lock()
74 try:
75 76 mod, add, rem = repo.status(wlock=wlock)[:3]
76 77 if mod or add or rem:
77 78 raise util.Abort(_('outstanding uncommitted changes'))
78 79 if len(repo.heads()) > 1:
79 80 raise util.Abort(_('multiple heads in this repository '
80 81 '(use "hg heads" and "hg merge" to merge)'))
81 return pull()
82 return pull(lock, wlock)
82 83 finally:
83 lock.release()
84 wlock.release()
84 del lock, wlock
85 85
86 86 cmdtable = {
87 87 'fetch':
88 88 (fetch,
89 89 [('e', 'ssh', '', _('specify ssh command to use')),
90 90 ('m', 'message', '', _('use <text> as commit message')),
91 91 ('l', 'logfile', '', _('read the commit message from <file>')),
92 92 ('d', 'date', '', _('record datecode as commit date')),
93 93 ('u', 'user', '', _('record user as committer')),
94 94 ('r', 'rev', [], _('a specific revision you would like to pull')),
95 95 ('f', 'force-editor', None, _('edit commit message')),
96 96 ('', 'remotecmd', '', _('hg command to run on the remote side'))],
97 97 _('hg fetch [SOURCE]')),
98 98 }
@@ -1,2235 +1,2262 b''
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 34 from mercurial import repair
35 35 import os, sys, re, errno
36 36
37 37 commands.norepo += " qclone qversion"
38 38
39 39 # Patch names look like unix-file names.
40 40 # They must be joinable with queue directory and result in the patch path.
41 41 normname = util.normpath
42 42
43 43 class statusentry:
44 44 def __init__(self, rev, name=None):
45 45 if not name:
46 46 fields = rev.split(':', 1)
47 47 if len(fields) == 2:
48 48 self.rev, self.name = fields
49 49 else:
50 50 self.rev, self.name = None, None
51 51 else:
52 52 self.rev, self.name = rev, name
53 53
54 54 def __str__(self):
55 55 return self.rev + ':' + self.name
56 56
57 57 class queue:
58 58 def __init__(self, ui, path, patchdir=None):
59 59 self.basepath = path
60 60 self.path = patchdir or os.path.join(path, "patches")
61 61 self.opener = util.opener(self.path)
62 62 self.ui = ui
63 63 self.applied = []
64 64 self.full_series = []
65 65 self.applied_dirty = 0
66 66 self.series_dirty = 0
67 67 self.series_path = "series"
68 68 self.status_path = "status"
69 69 self.guards_path = "guards"
70 70 self.active_guards = None
71 71 self.guards_dirty = False
72 72 self._diffopts = None
73 73
74 74 if os.path.exists(self.join(self.series_path)):
75 75 self.full_series = self.opener(self.series_path).read().splitlines()
76 76 self.parse_series()
77 77
78 78 if os.path.exists(self.join(self.status_path)):
79 79 lines = self.opener(self.status_path).read().splitlines()
80 80 self.applied = [statusentry(l) for l in lines]
81 81
82 82 def diffopts(self):
83 83 if self._diffopts is None:
84 84 self._diffopts = patch.diffopts(self.ui)
85 85 return self._diffopts
86 86
87 87 def join(self, *p):
88 88 return os.path.join(self.path, *p)
89 89
90 90 def find_series(self, patch):
91 91 pre = re.compile("(\s*)([^#]+)")
92 92 index = 0
93 93 for l in self.full_series:
94 94 m = pre.match(l)
95 95 if m:
96 96 s = m.group(2)
97 97 s = s.rstrip()
98 98 if s == patch:
99 99 return index
100 100 index += 1
101 101 return None
102 102
103 103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104 104
105 105 def parse_series(self):
106 106 self.series = []
107 107 self.series_guards = []
108 108 for l in self.full_series:
109 109 h = l.find('#')
110 110 if h == -1:
111 111 patch = l
112 112 comment = ''
113 113 elif h == 0:
114 114 continue
115 115 else:
116 116 patch = l[:h]
117 117 comment = l[h:]
118 118 patch = patch.strip()
119 119 if patch:
120 120 if patch in self.series:
121 121 raise util.Abort(_('%s appears more than once in %s') %
122 122 (patch, self.join(self.series_path)))
123 123 self.series.append(patch)
124 124 self.series_guards.append(self.guard_re.findall(comment))
125 125
126 126 def check_guard(self, guard):
127 127 bad_chars = '# \t\r\n\f'
128 128 first = guard[0]
129 129 for c in '-+':
130 130 if first == c:
131 131 return (_('guard %r starts with invalid character: %r') %
132 132 (guard, c))
133 133 for c in bad_chars:
134 134 if c in guard:
135 135 return _('invalid character in guard %r: %r') % (guard, c)
136 136
137 137 def set_active(self, guards):
138 138 for guard in guards:
139 139 bad = self.check_guard(guard)
140 140 if bad:
141 141 raise util.Abort(bad)
142 142 guards = dict.fromkeys(guards).keys()
143 143 guards.sort()
144 144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 145 self.active_guards = guards
146 146 self.guards_dirty = True
147 147
148 148 def active(self):
149 149 if self.active_guards is None:
150 150 self.active_guards = []
151 151 try:
152 152 guards = self.opener(self.guards_path).read().split()
153 153 except IOError, err:
154 154 if err.errno != errno.ENOENT: raise
155 155 guards = []
156 156 for i, guard in enumerate(guards):
157 157 bad = self.check_guard(guard)
158 158 if bad:
159 159 self.ui.warn('%s:%d: %s\n' %
160 160 (self.join(self.guards_path), i + 1, bad))
161 161 else:
162 162 self.active_guards.append(guard)
163 163 return self.active_guards
164 164
165 165 def set_guards(self, idx, guards):
166 166 for g in guards:
167 167 if len(g) < 2:
168 168 raise util.Abort(_('guard %r too short') % g)
169 169 if g[0] not in '-+':
170 170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 171 bad = self.check_guard(g[1:])
172 172 if bad:
173 173 raise util.Abort(bad)
174 174 drop = self.guard_re.sub('', self.full_series[idx])
175 175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 176 self.parse_series()
177 177 self.series_dirty = True
178 178
179 179 def pushable(self, idx):
180 180 if isinstance(idx, str):
181 181 idx = self.series.index(idx)
182 182 patchguards = self.series_guards[idx]
183 183 if not patchguards:
184 184 return True, None
185 185 default = False
186 186 guards = self.active()
187 187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 188 if exactneg:
189 189 return False, exactneg[0]
190 190 pos = [g for g in patchguards if g[0] == '+']
191 191 exactpos = [g for g in pos if g[1:] in guards]
192 192 if pos:
193 193 if exactpos:
194 194 return True, exactpos[0]
195 195 return False, pos
196 196 return True, ''
197 197
198 198 def explain_pushable(self, idx, all_patches=False):
199 199 write = all_patches and self.ui.write or self.ui.warn
200 200 if all_patches or self.ui.verbose:
201 201 if isinstance(idx, str):
202 202 idx = self.series.index(idx)
203 203 pushable, why = self.pushable(idx)
204 204 if all_patches and pushable:
205 205 if why is None:
206 206 write(_('allowing %s - no guards in effect\n') %
207 207 self.series[idx])
208 208 else:
209 209 if not why:
210 210 write(_('allowing %s - no matching negative guards\n') %
211 211 self.series[idx])
212 212 else:
213 213 write(_('allowing %s - guarded by %r\n') %
214 214 (self.series[idx], why))
215 215 if not pushable:
216 216 if why:
217 217 write(_('skipping %s - guarded by %r\n') %
218 218 (self.series[idx], why))
219 219 else:
220 220 write(_('skipping %s - no matching guards\n') %
221 221 self.series[idx])
222 222
223 223 def save_dirty(self):
224 224 def write_list(items, path):
225 225 fp = self.opener(path, 'w')
226 226 for i in items:
227 227 print >> fp, i
228 228 fp.close()
229 229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232 232
233 233 def readheaders(self, patch):
234 234 def eatdiff(lines):
235 235 while lines:
236 236 l = lines[-1]
237 237 if (l.startswith("diff -") or
238 238 l.startswith("Index:") or
239 239 l.startswith("===========")):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243 def eatempty(lines):
244 244 while lines:
245 245 l = lines[-1]
246 246 if re.match('\s*$', l):
247 247 del lines[-1]
248 248 else:
249 249 break
250 250
251 251 pf = self.join(patch)
252 252 message = []
253 253 comments = []
254 254 user = None
255 255 date = None
256 256 format = None
257 257 subject = None
258 258 diffstart = 0
259 259
260 260 for line in file(pf):
261 261 line = line.rstrip()
262 262 if line.startswith('diff --git'):
263 263 diffstart = 2
264 264 break
265 265 if diffstart:
266 266 if line.startswith('+++ '):
267 267 diffstart = 2
268 268 break
269 269 if line.startswith("--- "):
270 270 diffstart = 1
271 271 continue
272 272 elif format == "hgpatch":
273 273 # parse values when importing the result of an hg export
274 274 if line.startswith("# User "):
275 275 user = line[7:]
276 276 elif line.startswith("# Date "):
277 277 date = line[7:]
278 278 elif not line.startswith("# ") and line:
279 279 message.append(line)
280 280 format = None
281 281 elif line == '# HG changeset patch':
282 282 format = "hgpatch"
283 283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 284 line.startswith("subject: "))):
285 285 subject = line[9:]
286 286 format = "tag"
287 287 elif (format != "tagdone" and (line.startswith("From: ") or
288 288 line.startswith("from: "))):
289 289 user = line[6:]
290 290 format = "tag"
291 291 elif format == "tag" and line == "":
292 292 # when looking for tags (subject: from: etc) they
293 293 # end once you find a blank line in the source
294 294 format = "tagdone"
295 295 elif message or line:
296 296 message.append(line)
297 297 comments.append(line)
298 298
299 299 eatdiff(message)
300 300 eatdiff(comments)
301 301 eatempty(message)
302 302 eatempty(comments)
303 303
304 304 # make sure message isn't empty
305 305 if format and format.startswith("tag") and subject:
306 306 message.insert(0, "")
307 307 message.insert(0, subject)
308 308 return (message, comments, user, date, diffstart > 1)
309 309
310 310 def removeundo(self, repo):
311 311 undo = repo.sjoin('undo')
312 312 if not os.path.exists(undo):
313 313 return
314 314 try:
315 315 os.unlink(undo)
316 316 except OSError, inst:
317 317 self.ui.warn('error removing undo: %s\n' % str(inst))
318 318
319 319 def printdiff(self, repo, node1, node2=None, files=None,
320 320 fp=None, changes=None, opts={}):
321 321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322 322
323 323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 324 fp=fp, changes=changes, opts=self.diffopts())
325 325
326 326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
327 327 # first try just applying the patch
328 328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 329 strict=True, merge=rev, wlock=wlock)
330 330
331 331 if err == 0:
332 332 return (err, n)
333 333
334 334 if n is None:
335 335 raise util.Abort(_("apply failed for patch %s") % patch)
336 336
337 337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 338
339 339 # apply failed, strip away that rev and merge.
340 340 hg.clean(repo, head, wlock=wlock)
341 341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
342 342
343 343 ctx = repo.changectx(rev)
344 344 ret = hg.merge(repo, rev, wlock=wlock)
345 345 if ret:
346 346 raise util.Abort(_("update returned %d") % ret)
347 347 n = repo.commit(None, ctx.description(), ctx.user(),
348 348 force=1, wlock=wlock)
349 349 if n == None:
350 350 raise util.Abort(_("repo commit failed"))
351 351 try:
352 352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 353 except:
354 354 raise util.Abort(_("unable to read %s") % patch)
355 355
356 356 patchf = self.opener(patch, "w")
357 357 if comments:
358 358 comments = "\n".join(comments) + '\n\n'
359 359 patchf.write(comments)
360 360 self.printdiff(repo, head, n, fp=patchf)
361 361 patchf.close()
362 362 self.removeundo(repo)
363 363 return (0, n)
364 364
365 365 def qparents(self, repo, rev=None):
366 366 if rev is None:
367 367 (p1, p2) = repo.dirstate.parents()
368 368 if p2 == revlog.nullid:
369 369 return p1
370 370 if len(self.applied) == 0:
371 371 return None
372 372 return revlog.bin(self.applied[-1].rev)
373 373 pp = repo.changelog.parents(rev)
374 374 if pp[1] != revlog.nullid:
375 375 arevs = [ x.rev for x in self.applied ]
376 376 p0 = revlog.hex(pp[0])
377 377 p1 = revlog.hex(pp[1])
378 378 if p0 in arevs:
379 379 return pp[0]
380 380 if p1 in arevs:
381 381 return pp[1]
382 382 return pp[0]
383 383
384 384 def mergepatch(self, repo, mergeq, series, wlock):
385 385 if len(self.applied) == 0:
386 386 # each of the patches merged in will have two parents. This
387 387 # can confuse the qrefresh, qdiff, and strip code because it
388 388 # needs to know which parent is actually in the patch queue.
389 389 # so, we insert a merge marker with only one parent. This way
390 390 # the first patch in the queue is never a merge patch
391 391 #
392 392 pname = ".hg.patches.merge.marker"
393 393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
394 394 wlock=wlock)
395 395 self.removeundo(repo)
396 396 self.applied.append(statusentry(revlog.hex(n), pname))
397 397 self.applied_dirty = 1
398 398
399 399 head = self.qparents(repo)
400 400
401 401 for patch in series:
402 402 patch = mergeq.lookup(patch, strict=True)
403 403 if not patch:
404 404 self.ui.warn("patch %s does not exist\n" % patch)
405 405 return (1, None)
406 406 pushable, reason = self.pushable(patch)
407 407 if not pushable:
408 408 self.explain_pushable(patch, all_patches=True)
409 409 continue
410 410 info = mergeq.isapplied(patch)
411 411 if not info:
412 412 self.ui.warn("patch %s is not applied\n" % patch)
413 413 return (1, None)
414 414 rev = revlog.bin(info[1])
415 415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
416 416 if head:
417 417 self.applied.append(statusentry(revlog.hex(head), patch))
418 418 self.applied_dirty = 1
419 419 if err:
420 420 return (err, head)
421 421 self.save_dirty()
422 422 return (0, head)
423 423
424 424 def patch(self, repo, patchfile):
425 425 '''Apply patchfile to the working directory.
426 426 patchfile: file name of patch'''
427 427 files = {}
428 428 try:
429 429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 430 files=files)
431 431 except Exception, inst:
432 432 self.ui.note(str(inst) + '\n')
433 433 if not self.ui.verbose:
434 434 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 435 return (False, files, False)
436 436
437 437 return (True, files, fuzz)
438 438
439 439 def apply(self, repo, series, list=False, update_status=True,
440 440 strict=False, patchdir=None, merge=None, wlock=None,
441 441 all_files={}):
442 lock = tr = None
443 try:
442 444 if not wlock:
443 445 wlock = repo.wlock()
444 446 lock = repo.lock()
445 447 tr = repo.transaction()
446 448 try:
447 449 ret = self._apply(tr, repo, series, list, update_status,
448 450 strict, patchdir, merge, wlock,
449 451 lock=lock, all_files=all_files)
450 452 tr.close()
451 453 self.save_dirty()
452 454 return ret
453 455 except:
454 456 try:
455 457 tr.abort()
456 458 finally:
457 459 repo.invalidate()
458 460 repo.dirstate.invalidate()
459 461 raise
462 finally:
463 del lock, wlock, tr
460 464
461 465 def _apply(self, tr, repo, series, list=False, update_status=True,
462 466 strict=False, patchdir=None, merge=None, wlock=None,
463 467 lock=None, all_files={}):
464 468 # TODO unify with commands.py
465 469 if not patchdir:
466 470 patchdir = self.path
467 471 err = 0
468 472 n = None
469 473 for patchname in series:
470 474 pushable, reason = self.pushable(patchname)
471 475 if not pushable:
472 476 self.explain_pushable(patchname, all_patches=True)
473 477 continue
474 478 self.ui.warn("applying %s\n" % patchname)
475 479 pf = os.path.join(patchdir, patchname)
476 480
477 481 try:
478 482 message, comments, user, date, patchfound = self.readheaders(patchname)
479 483 except:
480 484 self.ui.warn("Unable to read %s\n" % patchname)
481 485 err = 1
482 486 break
483 487
484 488 if not message:
485 489 message = "imported patch %s\n" % patchname
486 490 else:
487 491 if list:
488 492 message.append("\nimported patch %s" % patchname)
489 493 message = '\n'.join(message)
490 494
491 495 (patcherr, files, fuzz) = self.patch(repo, pf)
492 496 all_files.update(files)
493 497 patcherr = not patcherr
494 498
495 499 if merge and files:
496 500 # Mark as removed/merged and update dirstate parent info
497 501 removed = []
498 502 merged = []
499 503 for f in files:
500 504 if os.path.exists(repo.wjoin(f)):
501 505 merged.append(f)
502 506 else:
503 507 removed.append(f)
504 508 for f in removed:
505 509 repo.dirstate.remove(f)
506 510 for f in merged:
507 511 repo.dirstate.merge(f)
508 512 p1, p2 = repo.dirstate.parents()
509 513 repo.dirstate.setparents(p1, merge)
510 514 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
511 515 n = repo.commit(files, message, user, date, force=1, lock=lock,
512 516 wlock=wlock)
513 517
514 518 if n == None:
515 519 raise util.Abort(_("repo commit failed"))
516 520
517 521 if update_status:
518 522 self.applied.append(statusentry(revlog.hex(n), patchname))
519 523
520 524 if patcherr:
521 525 if not patchfound:
522 526 self.ui.warn("patch %s is empty\n" % patchname)
523 527 err = 0
524 528 else:
525 529 self.ui.warn("patch failed, rejects left in working dir\n")
526 530 err = 1
527 531 break
528 532
529 533 if fuzz and strict:
530 534 self.ui.warn("fuzz found when applying patch, stopping\n")
531 535 err = 1
532 536 break
533 537 self.removeundo(repo)
534 538 return (err, n)
535 539
536 540 def delete(self, repo, patches, opts):
537 541 if not patches and not opts.get('rev'):
538 542 raise util.Abort(_('qdelete requires at least one revision or '
539 543 'patch name'))
540 544
541 545 realpatches = []
542 546 for patch in patches:
543 547 patch = self.lookup(patch, strict=True)
544 548 info = self.isapplied(patch)
545 549 if info:
546 550 raise util.Abort(_("cannot delete applied patch %s") % patch)
547 551 if patch not in self.series:
548 552 raise util.Abort(_("patch %s not in series file") % patch)
549 553 realpatches.append(patch)
550 554
551 555 appliedbase = 0
552 556 if opts.get('rev'):
553 557 if not self.applied:
554 558 raise util.Abort(_('no patches applied'))
555 559 revs = cmdutil.revrange(repo, opts['rev'])
556 560 if len(revs) > 1 and revs[0] > revs[1]:
557 561 revs.reverse()
558 562 for rev in revs:
559 563 if appliedbase >= len(self.applied):
560 564 raise util.Abort(_("revision %d is not managed") % rev)
561 565
562 566 base = revlog.bin(self.applied[appliedbase].rev)
563 567 node = repo.changelog.node(rev)
564 568 if node != base:
565 569 raise util.Abort(_("cannot delete revision %d above "
566 570 "applied patches") % rev)
567 571 realpatches.append(self.applied[appliedbase].name)
568 572 appliedbase += 1
569 573
570 574 if not opts.get('keep'):
571 575 r = self.qrepo()
572 576 if r:
573 577 r.remove(realpatches, True)
574 578 else:
575 579 for p in realpatches:
576 580 os.unlink(self.join(p))
577 581
578 582 if appliedbase:
579 583 del self.applied[:appliedbase]
580 584 self.applied_dirty = 1
581 585 indices = [self.find_series(p) for p in realpatches]
582 586 indices.sort()
583 587 for i in indices[-1::-1]:
584 588 del self.full_series[i]
585 589 self.parse_series()
586 590 self.series_dirty = 1
587 591
588 592 def check_toppatch(self, repo):
589 593 if len(self.applied) > 0:
590 594 top = revlog.bin(self.applied[-1].rev)
591 595 pp = repo.dirstate.parents()
592 596 if top not in pp:
593 597 raise util.Abort(_("queue top not at same revision as working directory"))
594 598 return top
595 599 return None
596 600 def check_localchanges(self, repo, force=False, refresh=True):
597 601 m, a, r, d = repo.status()[:4]
598 602 if m or a or r or d:
599 603 if not force:
600 604 if refresh:
601 605 raise util.Abort(_("local changes found, refresh first"))
602 606 else:
603 607 raise util.Abort(_("local changes found"))
604 608 return m, a, r, d
605 609
606 610 def new(self, repo, patch, *pats, **opts):
607 611 msg = opts.get('msg')
608 612 force = opts.get('force')
609 613 if os.path.exists(self.join(patch)):
610 614 raise util.Abort(_('patch "%s" already exists') % patch)
611 615 if opts.get('include') or opts.get('exclude') or pats:
612 616 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
613 617 m, a, r, d = repo.status(files=fns, match=match)[:4]
614 618 else:
615 619 m, a, r, d = self.check_localchanges(repo, force)
616 620 commitfiles = m + a + r
617 621 self.check_toppatch(repo)
618 622 wlock = repo.wlock()
623 try:
619 624 insert = self.full_series_end()
620 625 if msg:
621 626 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
622 627 else:
623 628 n = repo.commit(commitfiles,
624 629 "[mq]: %s" % patch, force=True, wlock=wlock)
625 630 if n == None:
626 631 raise util.Abort(_("repo commit failed"))
627 632 self.full_series[insert:insert] = [patch]
628 633 self.applied.append(statusentry(revlog.hex(n), patch))
629 634 self.parse_series()
630 635 self.series_dirty = 1
631 636 self.applied_dirty = 1
632 637 p = self.opener(patch, "w")
633 638 if msg:
634 639 msg = msg + "\n"
635 640 p.write(msg)
636 641 p.close()
637 642 wlock = None
638 643 r = self.qrepo()
639 644 if r: r.add([patch])
640 645 if commitfiles:
641 646 self.refresh(repo, short=True)
642 647 self.removeundo(repo)
648 finally:
649 del wlock
643 650
644 651 def strip(self, repo, rev, update=True, backup="all", wlock=None):
652 lock = None
653 try:
645 654 if not wlock:
646 655 wlock = repo.wlock()
647 656 lock = repo.lock()
648 657
649 658 if update:
650 659 self.check_localchanges(repo, refresh=False)
651 660 urev = self.qparents(repo, rev)
652 661 hg.clean(repo, urev, wlock=wlock)
653 662 repo.dirstate.write()
654 663
655 664 self.removeundo(repo)
656 665 repair.strip(self.ui, repo, rev, backup)
666 finally:
667 del lock, wlock
657 668
658 669 def isapplied(self, patch):
659 670 """returns (index, rev, patch)"""
660 671 for i in xrange(len(self.applied)):
661 672 a = self.applied[i]
662 673 if a.name == patch:
663 674 return (i, a.rev, a.name)
664 675 return None
665 676
666 677 # if the exact patch name does not exist, we try a few
667 678 # variations. If strict is passed, we try only #1
668 679 #
669 680 # 1) a number to indicate an offset in the series file
670 681 # 2) a unique substring of the patch name was given
671 682 # 3) patchname[-+]num to indicate an offset in the series file
672 683 def lookup(self, patch, strict=False):
673 684 patch = patch and str(patch)
674 685
675 686 def partial_name(s):
676 687 if s in self.series:
677 688 return s
678 689 matches = [x for x in self.series if s in x]
679 690 if len(matches) > 1:
680 691 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
681 692 for m in matches:
682 693 self.ui.warn(' %s\n' % m)
683 694 return None
684 695 if matches:
685 696 return matches[0]
686 697 if len(self.series) > 0 and len(self.applied) > 0:
687 698 if s == 'qtip':
688 699 return self.series[self.series_end(True)-1]
689 700 if s == 'qbase':
690 701 return self.series[0]
691 702 return None
692 703 if patch == None:
693 704 return None
694 705
695 706 # we don't want to return a partial match until we make
696 707 # sure the file name passed in does not exist (checked below)
697 708 res = partial_name(patch)
698 709 if res and res == patch:
699 710 return res
700 711
701 712 if not os.path.isfile(self.join(patch)):
702 713 try:
703 714 sno = int(patch)
704 715 except(ValueError, OverflowError):
705 716 pass
706 717 else:
707 718 if sno < len(self.series):
708 719 return self.series[sno]
709 720 if not strict:
710 721 # return any partial match made above
711 722 if res:
712 723 return res
713 724 minus = patch.rfind('-')
714 725 if minus >= 0:
715 726 res = partial_name(patch[:minus])
716 727 if res:
717 728 i = self.series.index(res)
718 729 try:
719 730 off = int(patch[minus+1:] or 1)
720 731 except(ValueError, OverflowError):
721 732 pass
722 733 else:
723 734 if i - off >= 0:
724 735 return self.series[i - off]
725 736 plus = patch.rfind('+')
726 737 if plus >= 0:
727 738 res = partial_name(patch[:plus])
728 739 if res:
729 740 i = self.series.index(res)
730 741 try:
731 742 off = int(patch[plus+1:] or 1)
732 743 except(ValueError, OverflowError):
733 744 pass
734 745 else:
735 746 if i + off < len(self.series):
736 747 return self.series[i + off]
737 748 raise util.Abort(_("patch %s not in series") % patch)
738 749
739 750 def push(self, repo, patch=None, force=False, list=False,
740 751 mergeq=None, wlock=None):
741 752 if not wlock:
742 753 wlock = repo.wlock()
754 try:
743 755 patch = self.lookup(patch)
744 # Suppose our series file is: A B C and the current 'top' patch is B.
745 # qpush C should be performed (moving forward)
746 # qpush B is a NOP (no change)
747 # qpush A is an error (can't go backwards with qpush)
756 # Suppose our series file is: A B C and the current 'top'
757 # patch is B. qpush C should be performed (moving forward)
758 # qpush B is a NOP (no change) qpush A is an error (can't
759 # go backwards with qpush)
748 760 if patch:
749 761 info = self.isapplied(patch)
750 762 if info:
751 763 if info[0] < len(self.applied) - 1:
752 raise util.Abort(_("cannot push to a previous patch: %s") %
753 patch)
764 raise util.Abort(
765 _("cannot push to a previous patch: %s") % patch)
754 766 if info[0] < len(self.series) - 1:
755 self.ui.warn(_('qpush: %s is already at the top\n') % patch)
767 self.ui.warn(
768 _('qpush: %s is already at the top\n') % patch)
756 769 else:
757 770 self.ui.warn(_('all patches are currently applied\n'))
758 771 return
759 772
760 773 # Following the above example, starting at 'top' of B:
761 # qpush should be performed (pushes C), but a subsequent qpush without
762 # an argument is an error (nothing to apply). This allows a loop
763 # of "...while hg qpush..." to work as it detects an error when done
774 # qpush should be performed (pushes C), but a subsequent
775 # qpush without an argument is an error (nothing to
776 # apply). This allows a loop of "...while hg qpush..." to
777 # work as it detects an error when done
764 778 if self.series_end() == len(self.series):
765 779 self.ui.warn(_('patch series already fully applied\n'))
766 780 return 1
767 781 if not force:
768 782 self.check_localchanges(repo)
769 783
770 784 self.applied_dirty = 1;
771 785 start = self.series_end()
772 786 if start > 0:
773 787 self.check_toppatch(repo)
774 788 if not patch:
775 789 patch = self.series[start]
776 790 end = start + 1
777 791 else:
778 792 end = self.series.index(patch, start) + 1
779 793 s = self.series[start:end]
780 794 all_files = {}
781 795 try:
782 796 if mergeq:
783 797 ret = self.mergepatch(repo, mergeq, s, wlock)
784 798 else:
785 799 ret = self.apply(repo, s, list, wlock=wlock,
786 800 all_files=all_files)
787 801 except:
788 802 self.ui.warn(_('cleaning up working directory...'))
789 803 node = repo.dirstate.parents()[0]
790 804 hg.revert(repo, node, None, wlock)
791 805 unknown = repo.status(wlock=wlock)[4]
792 806 # only remove unknown files that we know we touched or
793 807 # created while patching
794 808 for f in unknown:
795 809 if f in all_files:
796 810 util.unlink(repo.wjoin(f))
797 811 self.ui.warn(_('done\n'))
798 812 raise
799 813 top = self.applied[-1].name
800 814 if ret[0]:
801 self.ui.write("Errors during apply, please fix and refresh %s\n" %
802 top)
815 self.ui.write(
816 "Errors during apply, please fix and refresh %s\n" % top)
803 817 else:
804 818 self.ui.write("Now at: %s\n" % top)
805 819 return ret[0]
820 finally:
821 del wlock
806 822
807 823 def pop(self, repo, patch=None, force=False, update=True, all=False,
808 824 wlock=None):
809 825 def getfile(f, rev):
810 826 t = repo.file(f).read(rev)
811 827 repo.wfile(f, "w").write(t)
812 828
813 829 if not wlock:
814 830 wlock = repo.wlock()
831 try:
815 832 if patch:
816 833 # index, rev, patch
817 834 info = self.isapplied(patch)
818 835 if not info:
819 836 patch = self.lookup(patch)
820 837 info = self.isapplied(patch)
821 838 if not info:
822 839 raise util.Abort(_("patch %s is not applied") % patch)
823 840
824 841 if len(self.applied) == 0:
825 842 # Allow qpop -a to work repeatedly,
826 843 # but not qpop without an argument
827 844 self.ui.warn(_("no patches applied\n"))
828 845 return not all
829 846
830 847 if not update:
831 848 parents = repo.dirstate.parents()
832 849 rr = [ revlog.bin(x.rev) for x in self.applied ]
833 850 for p in parents:
834 851 if p in rr:
835 852 self.ui.warn("qpop: forcing dirstate update\n")
836 853 update = True
837 854
838 855 if not force and update:
839 856 self.check_localchanges(repo)
840 857
841 858 self.applied_dirty = 1;
842 859 end = len(self.applied)
843 860 if not patch:
844 861 if all:
845 862 popi = 0
846 863 else:
847 864 popi = len(self.applied) - 1
848 865 else:
849 866 popi = info[0] + 1
850 867 if popi >= end:
851 868 self.ui.warn("qpop: %s is already at the top\n" % patch)
852 869 return
853 870 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
854 871
855 872 start = info[0]
856 873 rev = revlog.bin(info[1])
857 874
858 875 # we know there are no local changes, so we can make a simplified
859 876 # form of hg.update.
860 877 if update:
861 878 top = self.check_toppatch(repo)
862 879 qp = self.qparents(repo, rev)
863 880 changes = repo.changelog.read(qp)
864 881 mmap = repo.manifest.read(changes[0])
865 882 m, a, r, d, u = repo.status(qp, top)[:5]
866 883 if d:
867 884 raise util.Abort("deletions found between repo revs")
868 885 for f in m:
869 886 getfile(f, mmap[f])
870 887 for f in r:
871 888 getfile(f, mmap[f])
872 889 util.set_exec(repo.wjoin(f), mmap.execf(f))
873 890 for f in m + r:
874 891 repo.dirstate.normal(f)
875 892 for f in a:
876 893 try:
877 894 os.unlink(repo.wjoin(f))
878 895 except OSError, e:
879 896 if e.errno != errno.ENOENT:
880 897 raise
881 898 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
882 899 except: pass
883 900 repo.dirstate.forget(f)
884 901 repo.dirstate.setparents(qp, revlog.nullid)
885 902 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
886 903 del self.applied[start:end]
887 904 if len(self.applied):
888 905 self.ui.write("Now at: %s\n" % self.applied[-1].name)
889 906 else:
890 907 self.ui.write("Patch queue now empty\n")
908 finally:
909 del wlock
891 910
def diff(self, repo, pats, opts):
    """Print the diff of the topmost applied patch (no-op when the
    queue is empty)."""
    top = self.check_toppatch(repo)
    if not top:
        self.ui.write("No patches applied\n")
        return
    qp = self.qparents(repo, top)
    if opts.get('git'):
        # honour --git by switching the shared diff options
        self.diffopts().git = True
    self.printdiff(repo, qp, files=pats, opts=opts)
901 920
902 921 def refresh(self, repo, pats=None, **opts):
903 922 if len(self.applied) == 0:
904 923 self.ui.write("No patches applied\n")
905 924 return 1
906 925 wlock = repo.wlock()
926 try:
907 927 self.check_toppatch(repo)
908 928 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
909 929 top = revlog.bin(top)
910 930 cparents = repo.changelog.parents(top)
911 931 patchparent = self.qparents(repo, top)
912 932 message, comments, user, date, patchfound = self.readheaders(patchfn)
913 933
914 934 patchf = self.opener(patchfn, 'r+')
915 935
916 936 # if the patch was a git patch, refresh it as a git patch
917 937 for line in patchf:
918 938 if line.startswith('diff --git'):
919 939 self.diffopts().git = True
920 940 break
921 941 patchf.seek(0)
922 942 patchf.truncate()
923 943
924 944 msg = opts.get('msg', '').rstrip()
925 945 if msg:
926 946 if comments:
927 947 # Remove existing message.
928 948 ci = 0
929 949 subj = None
930 950 for mi in xrange(len(message)):
931 951 if comments[ci].lower().startswith('subject: '):
932 952 subj = comments[ci][9:]
933 953 while message[mi] != comments[ci] and message[mi] != subj:
934 954 ci += 1
935 955 del comments[ci]
936 956 comments.append(msg)
937 957 if comments:
938 958 comments = "\n".join(comments) + '\n\n'
939 959 patchf.write(comments)
940 960
941 961 if opts.get('git'):
942 962 self.diffopts().git = True
943 963 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
944 964 tip = repo.changelog.tip()
945 965 if top == tip:
946 966 # if the top of our patch queue is also the tip, there is an
947 967 # optimization here. We update the dirstate in place and strip
948 968 # off the tip commit. Then just commit the current directory
949 969 # tree. We can also send repo.commit the list of files
950 970 # changed to speed up the diff
951 971 #
952 972 # in short mode, we only diff the files included in the
953 973 # patch already
954 974 #
955 975 # this should really read:
956 976 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
957 977 # but we do it backwards to take advantage of manifest/chlog
958 978 # caching against the next repo.status call
959 979 #
960 980 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
961 981 changes = repo.changelog.read(tip)
962 982 man = repo.manifest.read(changes[0])
963 983 aaa = aa[:]
964 984 if opts.get('short'):
965 985 filelist = mm + aa + dd
966 986 match = dict.fromkeys(filelist).__contains__
967 987 else:
968 988 filelist = None
969 989 match = util.always
970 990 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
971 991
972 # we might end up with files that were added between tip and
973 # the dirstate parent, but then changed in the local dirstate.
974 # in this case, we want them to only show up in the added section
992 # we might end up with files that were added between
993 # tip and the dirstate parent, but then changed in the
994 # local dirstate. in this case, we want them to only
995 # show up in the added section
975 996 for x in m:
976 997 if x not in aa:
977 998 mm.append(x)
978 999 # we might end up with files added by the local dirstate that
979 1000 # were deleted by the patch. In this case, they should only
980 1001 # show up in the changed section.
981 1002 for x in a:
982 1003 if x in dd:
983 1004 del dd[dd.index(x)]
984 1005 mm.append(x)
985 1006 else:
986 1007 aa.append(x)
987 1008 # make sure any files deleted in the local dirstate
988 1009 # are not in the add or change column of the patch
989 1010 forget = []
990 1011 for x in d + r:
991 1012 if x in aa:
992 1013 del aa[aa.index(x)]
993 1014 forget.append(x)
994 1015 continue
995 1016 elif x in mm:
996 1017 del mm[mm.index(x)]
997 1018 dd.append(x)
998 1019
999 1020 m = util.unique(mm)
1000 1021 r = util.unique(dd)
1001 1022 a = util.unique(aa)
1002 1023 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1003 1024 filelist = util.unique(c[0] + c[1] + c[2])
1004 1025 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1005 1026 fp=patchf, changes=c, opts=self.diffopts())
1006 1027 patchf.close()
1007 1028
1008 1029 repo.dirstate.setparents(*cparents)
1009 1030 copies = {}
1010 1031 for dst in a:
1011 1032 src = repo.dirstate.copied(dst)
1012 1033 if src is None:
1013 1034 continue
1014 1035 copies.setdefault(src, []).append(dst)
1015 1036 repo.dirstate.add(dst)
1016 1037 # remember the copies between patchparent and tip
1017 1038 # this may be slow, so don't do it if we're not tracking copies
1018 1039 if self.diffopts().git:
1019 1040 for dst in aaa:
1020 1041 f = repo.file(dst)
1021 1042 src = f.renamed(man[dst])
1022 1043 if src:
1023 1044 copies[src[0]] = copies.get(dst, [])
1024 1045 if dst in a:
1025 1046 copies[src[0]].append(dst)
1026 1047 # we can't copy a file created by the patch itself
1027 1048 if dst in copies:
1028 1049 del copies[dst]
1029 1050 for src, dsts in copies.iteritems():
1030 1051 for dst in dsts:
1031 1052 repo.dirstate.copy(src, dst)
1032 1053 for f in r:
1033 1054 repo.dirstate.remove(f)
1034 # if the patch excludes a modified file, mark that file with mtime=0
1035 # so status can see it.
1055 # if the patch excludes a modified file, mark that
1056 # file with mtime=0 so status can see it.
1036 1057 mm = []
1037 1058 for i in xrange(len(m)-1, -1, -1):
1038 1059 if not matchfn(m[i]):
1039 1060 mm.append(m[i])
1040 1061 del m[i]
1041 1062 for f in m:
1042 1063 repo.dirstate.normal(f)
1043 1064 for f in mm:
1044 1065 repo.dirstate.normaldirty(f)
1045 1066 for f in forget:
1046 1067 repo.dirstate.forget(f)
1047 1068
1048 1069 if not msg:
1049 1070 if not message:
1050 1071 message = "[mq]: %s\n" % patchfn
1051 1072 else:
1052 1073 message = "\n".join(message)
1053 1074 else:
1054 1075 message = msg
1055 1076
1056 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1077 self.strip(repo, top, update=False,
1078 backup='strip', wlock=wlock)
1057 1079 n = repo.commit(filelist, message, changes[1], match=matchfn,
1058 1080 force=1, wlock=wlock)
1059 1081 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1060 1082 self.applied_dirty = 1
1061 1083 self.removeundo(repo)
1062 1084 else:
1063 1085 self.printdiff(repo, patchparent, fp=patchf)
1064 1086 patchf.close()
1065 1087 added = repo.status()[1]
1066 1088 for a in added:
1067 1089 f = repo.wjoin(a)
1068 1090 try:
1069 1091 os.unlink(f)
1070 1092 except OSError, e:
1071 1093 if e.errno != errno.ENOENT:
1072 1094 raise
1073 1095 try: os.removedirs(os.path.dirname(f))
1074 1096 except: pass
1075 1097 # forget the file copies in the dirstate
1076 1098 # push should readd the files later on
1077 1099 repo.dirstate.forget(a)
1078 1100 self.pop(repo, force=True, wlock=wlock)
1079 1101 self.push(repo, force=True, wlock=wlock)
1102 finally:
1103 del wlock
1080 1104
1081 1105 def init(self, repo, create=False):
1082 1106 if not create and os.path.isdir(self.path):
1083 1107 raise util.Abort(_("patch queue directory already exists"))
1084 1108 try:
1085 1109 os.mkdir(self.path)
1086 1110 except OSError, inst:
1087 1111 if inst.errno != errno.EEXIST or not create:
1088 1112 raise
1089 1113 if create:
1090 1114 return self.qrepo(create=True)
1091 1115
def unapplied(self, repo, patch=None):
    """Return (index, name) pairs of pushable, not-yet-applied patches,
    starting after *patch* (or after the current series position)."""
    if patch and patch not in self.series:
        raise util.Abort(_("patch %s is not in series file") % patch)
    if patch:
        start = self.series.index(patch) + 1
    else:
        start = self.series_end()
    result = []
    for i in range(start, len(self.series)):
        pushable, reason = self.pushable(i)
        if pushable:
            result.append((i, self.series[i]))
        # record/announce why a guarded patch is skipped
        self.explain_pushable(i)
    return result
1106 1130
def qseries(self, repo, missing=None, start=0, length=None, status=None,
            summary=False):
    """Print series entries with their A/U/G status, or (with missing)
    files in the patch dir that are not tracked by the series."""
    def displayname(patchname):
        # with summary, append the first line of the patch message
        if summary:
            msg = self.readheaders(patchname)[0]
            msg = msg and ': ' + msg[0] or ': '
        else:
            msg = ''
        return '%s%s' % (patchname, msg)

    applied = dict.fromkeys([p.name for p in self.applied])
    if length is None:
        length = len(self.series) - start
    if not missing:
        for i in range(start, start + length):
            patch = self.series[i]
            if patch in applied:
                stat = 'A'          # applied
            elif self.pushable(i)[0]:
                stat = 'U'          # unapplied but pushable
            else:
                stat = 'G'          # guarded
            pfx = ''
            if self.ui.verbose:
                pfx = '%d %s ' % (i, stat)
            elif status and status != stat:
                continue
            self.ui.write('%s%s\n' % (pfx, displayname(patch)))
    else:
        # walk the patch directory for files the series does not know
        msng_list = []
        for root, dirs, files in os.walk(self.path):
            d = root[len(self.path) + 1:]
            for f in files:
                fl = os.path.join(d, f)
                if (fl not in self.series and
                    fl not in (self.status_path, self.series_path,
                               self.guards_path)
                    and not fl.startswith('.')):
                    msng_list.append(fl)
        msng_list.sort()
        for x in msng_list:
            pfx = self.ui.verbose and ('D ') or ''
            self.ui.write("%s%s\n" % (pfx, displayname(x)))
1150 1174
def issaveline(self, l):
    """Return True when queue entry *l* is a qsave marker (recorded
    under the reserved patch name)."""
    return True if l.name == '.hg.patches.save.line' else None
1154 1178
def qrepo(self, create=False):
    """Return the versioned patch repository, or None when the patch
    directory is not a repository (and create was not requested)."""
    if not create and not os.path.isdir(self.join(".hg")):
        return None
    return hg.repository(self.ui, path=self.path, create=create)
1158 1182
def restore(self, repo, rev, delete=None, qupdate=None):
    """Restore queue state saved by qsave in changeset *rev*.

    Parses the saved series/status data out of the changeset
    description, reinstates it as the current queue state, optionally
    deletes the save changeset and updates the nested patch repository.
    Returns 1 on malformed save data or a missing patch repository.

    Fixes: the "queue directory updating" progress message used a bare
    `print`, bypassing the ui abstraction (ignoring --quiet and output
    redirection) used by every other message in this file; it now goes
    through self.ui.write().
    """
    c = repo.changelog.read(rev)
    desc = c[4].strip()
    lines = desc.splitlines()
    i = 0
    datastart = None
    series = []
    applied = []
    qpp = None
    for i in range(0, len(lines)):
        if lines[i] == 'Patch Data:':
            datastart = i + 1
        elif lines[i].startswith('Dirstate:'):
            l = lines[i].rstrip()
            l = l[10:].split(' ')
            qpp = [ hg.bin(x) for x in l ]
        elif datastart is not None:
            l = lines[i].rstrip()
            se = statusentry(l)
            file_ = se.name
            # entries with a revision are applied patches, the rest
            # are unapplied series entries
            if se.rev:
                applied.append(se)
            else:
                series.append(file_)
    if datastart is None:
        self.ui.warn("No saved patch data found\n")
        return 1
    self.ui.warn("restoring status: %s\n" % lines[0])
    self.full_series = series
    self.applied = applied
    self.parse_series()
    self.series_dirty = 1
    self.applied_dirty = 1
    heads = repo.changelog.heads()
    if delete:
        if rev not in heads:
            self.ui.warn("save entry has children, leaving it alone\n")
        else:
            self.ui.warn("removing save entry %s\n" % hg.short(rev))
            pp = repo.dirstate.parents()
            if rev in pp:
                update = True
            else:
                update = False
            self.strip(repo, rev, update=update, backup='strip')
    if qpp:
        self.ui.warn("saved queue repository parents: %s %s\n" %
                     (hg.short(qpp[0]), hg.short(qpp[1])))
        if qupdate:
            self.ui.write("queue directory updating\n")
            r = self.qrepo()
            if not r:
                self.ui.warn("Unable to load queue repository\n")
                return 1
            hg.clean(r, qpp[0])
1214 1238
def save(self, repo, msg=None):
    """Record the current queue state as a '.hg.patches.save.line'
    changeset so it can later be restored with qrestore.  Returns 1
    when there is nothing to save or the commit fails."""
    if len(self.applied) == 0:
        self.ui.warn("save: no patches applied, exiting\n")
        return 1
    if self.issaveline(self.applied[-1]):
        self.ui.warn("status is already saved\n")
        return 1

    # unapplied series entries are serialized with a ':' prefix
    ar = [ ':' + x for x in self.full_series ]
    if not msg:
        msg = "hg patches saved state"
    else:
        msg = "hg patches: " + msg.rstrip('\r\n')
    r = self.qrepo()
    if r:
        pp = r.dirstate.parents()
        msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
    msg += "\n\nPatch Data:\n"
    text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
               "\n".join(ar) + '\n' or "")
    n = repo.commit(None, text, user=None, force=1)
    if not n:
        self.ui.warn("repo commit failed\n")
        return 1
    self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
    self.applied_dirty = 1
    self.removeundo(repo)
1242 1266
def full_series_end(self):
    """Return the index in full_series just past the last applied
    patch, or 0 when nothing is applied."""
    if not self.applied:
        return 0
    end = self.find_series(self.applied[-1].name)
    if end is None:
        # applied patch no longer in the series file
        return len(self.full_series)
    return end + 1
1251 1275
def series_end(self, all_patches=False):
    """If all_patches is False, return the index of the next pushable patch
    in the series, or the series length. If all_patches is True, return the
    index of the first patch past the last applied one.
    """
    end = 0
    def nextpushable(start):
        # advance past guarded (unpushable) entries unless the caller
        # asked for the raw position
        if all_patches:
            return start
        i = start
        while i < len(self.series):
            p, reason = self.pushable(i)
            if p:
                break
            self.explain_pushable(i)
            i += 1
        return i
    if len(self.applied) > 0:
        p = self.applied[-1].name
        try:
            end = self.series.index(p)
        except ValueError:
            return 0
        return nextpushable(end + 1)
    return nextpushable(end)
1277 1301
def appliedname(self, index):
    """Return the display name of applied patch *index*; verbose mode
    prefixes the series position."""
    pname = self.applied[index].name
    if not self.ui.verbose:
        return pname
    return str(self.series.index(pname)) + " " + pname
1285 1309
def qimport(self, repo, files, patchname=None, rev=None, existing=None,
            force=None, git=False):
    """Import patch files (or, with rev, existing changesets) into the
    queue.

    Revisions must form a linear path to qbase (when patches are
    applied) or to a head.  File imports register the patch in the
    series; '-' reads the patch from stdin and requires a name.

    Fixes: the IOError abort message formatted `patchname`, which is
    still None when reading *filename* fails before a patch name was
    derived — it now reports the file that actually failed to read.
    """
    def checkseries(patchname):
        if patchname in self.series:
            raise util.Abort(_('patch %s is already in the series file')
                             % patchname)
    def checkfile(patchname):
        if not force and os.path.exists(self.join(patchname)):
            raise util.Abort(_('patch "%s" already exists')
                             % patchname)

    if rev:
        if files:
            raise util.Abort(_('option "-r" not valid when importing '
                               'files'))
        rev = cmdutil.revrange(repo, rev)
        # import highest revision first so insertion order is stable
        rev.sort(lambda x, y: cmp(y, x))
    if (len(files) > 1 or len(rev) > 1) and patchname:
        raise util.Abort(_('option "-n" not valid when importing multiple '
                           'patches'))
    i = 0
    added = []
    if rev:
        # If mq patches are applied, we can only import revisions
        # that form a linear path to qbase.
        # Otherwise, they should form a linear path to a head.
        heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
        if len(heads) > 1:
            raise util.Abort(_('revision %d is the root of more than one '
                               'branch') % rev[-1])
        if self.applied:
            base = revlog.hex(repo.changelog.node(rev[0]))
            if base in [n.rev for n in self.applied]:
                raise util.Abort(_('revision %d is already managed')
                                 % rev[0])
            if heads != [revlog.bin(self.applied[-1].rev)]:
                raise util.Abort(_('revision %d is not the parent of '
                                   'the queue') % rev[0])
            base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
            lastparent = repo.changelog.parentrevs(base)[0]
        else:
            if heads != [repo.changelog.node(rev[0])]:
                raise util.Abort(_('revision %d has unmanaged children')
                                 % rev[0])
            lastparent = None

        if git:
            self.diffopts().git = True

        for r in rev:
            p1, p2 = repo.changelog.parentrevs(r)
            n = repo.changelog.node(r)
            if p2 != revlog.nullrev:
                raise util.Abort(_('cannot import merge revision %d') % r)
            if lastparent and lastparent != r:
                raise util.Abort(_('revision %d is not the parent of %d')
                                 % (r, lastparent))
            lastparent = p1

            if not patchname:
                patchname = normname('%d.diff' % r)
            checkseries(patchname)
            checkfile(patchname)
            self.full_series.insert(0, patchname)

            patchf = self.opener(patchname, "w")
            patch.export(repo, [n], fp=patchf, opts=self.diffopts())
            patchf.close()

            se = statusentry(revlog.hex(n), patchname)
            self.applied.insert(0, se)

            added.append(patchname)
            patchname = None
        self.parse_series()
        self.applied_dirty = 1

    for filename in files:
        if existing:
            if filename == '-':
                raise util.Abort(_('-e is incompatible with import from -'))
            if not patchname:
                patchname = normname(filename)
            if not os.path.isfile(self.join(patchname)):
                raise util.Abort(_("patch %s does not exist") % patchname)
        else:
            try:
                if filename == '-':
                    if not patchname:
                        raise util.Abort(_('need --name to import a patch from -'))
                    text = sys.stdin.read()
                else:
                    text = file(filename).read()
            except IOError:
                # report the file we failed to read; patchname may
                # still be None at this point
                raise util.Abort(_("unable to read %s") % filename)
            if not patchname:
                patchname = normname(os.path.basename(filename))
            checkfile(patchname)
            patchf = self.opener(patchname, "w")
            patchf.write(text)
        checkseries(patchname)
        index = self.full_series_end() + i
        self.full_series[index:index] = [patchname]
        self.parse_series()
        self.ui.warn("adding %s to series file\n" % patchname)
        i += 1
        added.append(patchname)
        patchname = None
    self.series_dirty = 1
    qrepo = self.qrepo()
    if qrepo:
        qrepo.add(added)
1398 1422
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, unless they are arguments to
    the --rev parameter. At least one patch or revision is required.

    With --rev, mq will stop managing the named revisions (converting
    them to regular mercurial changesets). The patches must be applied
    and at the base of the stack. This option is useful when the patches
    have been applied upstream.

    With --keep, the patch files are preserved in the patch directory."""
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.save_dirty()
    return 0
1415 1439
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    q = repo.mq
    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        # show everything up to and including the named patch
        end = q.series.index(patch) + 1
    else:
        end = q.series_end(True)
    return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1426 1450
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    q = repo.mq
    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        # start listing just after the named patch
        start = q.series.index(patch) + 1
    else:
        start = q.series_end(True)
    q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1437 1461
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch will have the same name as its source file unless you
    give it a new one with --name.

    You can register an existing patch inside the patch directory
    with the --existing flag.

    With --force, an existing patch of the same name will be overwritten.

    An existing changeset may be placed under mq control with --rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With --git, patches imported with --rev will use the git diff
    format.
    """
    mq = repo.mq
    mq.qimport(repo, filename, patchname=opts['name'],
               existing=opts['existing'], force=opts['force'],
               rev=opts['rev'], git=opts['git'])
    mq.save_dirty()
    return 0
1460 1484
def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If -c is
    specified, qinit will create a separate nested repository
    for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one).
    You can use qcommit to commit changes to this queue repository."""
    q = repo.mq
    r = q.init(repo, create=opts['create_repo'])
    q.save_dirty()
    if r:
        # seed the nested repo with an ignore file and an empty series
        if not os.path.exists(r.wjoin('.hgignore')):
            fp = r.wopener('.hgignore', 'w')
            fp.write('syntax: glob\n')
            fp.write('status\n')
            fp.write('guards\n')
            fp.close()
        if not os.path.exists(r.wjoin('series')):
            r.wopener('series', 'w').close()
        r.add(['.hgignore', 'series'])
        commands.add(ui, r)
    return 0
1484 1508
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested mercurial repository, as
    would be created by qinit -c.
    '''
    cmdutil.setremoteconfig(ui, opts)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(ui, ui.expandpath(source))
    patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
    try:
        pr = hg.repository(ui, patchdir)
    except hg.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see qinit -c)'))
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            # clone only up to below qbase so the destination does not
            # contain the applied patches
            qbase = revlog.bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                heads = dict.fromkeys(sr.heads())
                for h in sr.heads(qbase):
                    del heads[h]
                destrev = heads.keys()
                destrev.append(sr.changelog.parents(qbase)[0])
    ui.note(_('cloning main repo\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repo\n'))
    spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
                        dr.url() + '/.hg/patches',
                        pull=opts['pull'],
                        update=not opts['noupdate'],
                        stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            ui.note(_('stripping applied patches from destination repo\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repo\n'))
            hg.update(dr, dr.changelog.tip())
1539 1563
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    qrepo = repo.mq.qrepo()
    if not qrepo:
        raise util.Abort('no queue repository')
    # delegate to the regular commit command, run against the nested repo
    commands.commit(qrepo.ui, qrepo, *pats, **opts)
1546 1570
def series(ui, repo, **opts):
    """print the entire series file"""
    repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0
1551 1575
def top(ui, repo, **opts):
    """print the name of the current patch"""
    q = repo.mq
    # position just past the last applied patch, 0 when none applied
    t = q.applied and q.series_end(True) or 0
    if not t:
        ui.write("No patches applied\n")
        return 1
    return q.qseries(repo, start=t-1, length=1, status='A',
                     summary=opts.get('summary'))
1562 1586
def next(ui, repo, **opts):
    """print the name of the next patch"""
    q = repo.mq
    end = q.series_end()
    if end == len(q.series):
        ui.write("All patches applied\n")
        return 1
    return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1571 1595
def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    q = repo.mq
    count = len(q.applied)
    if count == 1:
        ui.write("Only one patch applied\n")
        return 1
    if not count:
        ui.write("No patches applied\n")
        return 1
    return q.qseries(repo, start=count-2, length=1, status='A',
                     summary=opts.get('summary'))
1584 1608
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them. You may also use -I, -X, and/or a list of
    files after the patch name to add only changes to matching files
    to the new patch, leaving the rest as uncommitted modifications.

    -e, -m or -l set the patch header as well as the commit message.
    If none is specified, the patch header is empty and the
    commit message is '[mq]: PATCH'"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts['edit']:
        # let the user edit the message interactively
        message = ui.edit(message, ui.username())
    opts['msg'] = message
    q.new(repo, patch, *args, **opts)
    q.save_dirty()
    return 0
1606 1630
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will contain only
    the modifications that match those patterns; the remaining modifications
    will remain in the working directory.

    hg add/remove/copy/rename work as usual, though you might want to use
    git-style patches (--git or [diff] git=1) to track copies and renames.
    """
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        # pre-fill the editor with the current patch header
        patch = q.applied[-1].name
        (message, comment, user, date, hasdiff) = q.readheaders(patch)
        message = ui.edit('\n'.join(message), user or ui.username())
    ret = q.refresh(repo, pats, msg=message, **opts)
    q.save_dirty()
    return ret
1628 1652
def diff(ui, repo, *pats, **opts):
    """diff of the current patch"""
    repo.mq.diff(repo, pats, opts)
    return 0
1633 1657
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -k/--keep, the folded patch files will not
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('Skipping already folded patch %s') % p)
            # bug fix: actually skip it — previously the duplicate fell
            # through and was appended, so it would be folded twice
            continue
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            messages.append(q.readheaders(p)[0])
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        # concatenate the folded headers onto the current patch header
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()
1692 1716
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    q = repo.mq
    target = q.lookup(patch)
    # pop if the target is already applied, otherwise push up to it
    if q.isapplied(target):
        mover = q.pop
    else:
        mover = q.push
    ret = mover(repo, target, force=opts['force'])
    q.save_dirty()
    return ret
1703 1727
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    To set a negative guard "-foo" on topmost patch ("--" is needed so
    hg will not interpret "-foo" as an option):
    hg qguard -- -foo

    To set guards on another patch:
    hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        # print "patchname: guard1 guard2 ..." for series entry idx
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # No explicit patch name: the first argument is either absent or a
    # guard spec ('' slices give '', and '' in '-+' is True, so an empty
    # arg also lands here); default to the topmost applied patch.
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        # set guards (an empty list via --none drops them all)
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        # query mode: just show the named patch's guards
        status(q.series.index(q.lookup(patch)))
1751 1775
def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    q = repo.mq

    # default to the topmost applied patch when none is named
    if not patch:
        if not q.applied:
            ui.write('No patches applied\n')
            return 1
        patch = q.lookup('qtip')
    else:
        patch = q.lookup(patch)
    message = repo.mq.readheaders(patch)[0]

    ui.write('\n'.join(message) + '\n')
1766 1790
def lastsavename(path):
    """Find the most recent saved queue for ``path``.

    Saved queues are directories named "<path>.<N>" (see savename).
    Returns a (fullpath, N) tuple for the highest N found, or
    (None, None) if there is no saved queue.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # BUG FIX: the dot was previously an unescaped regex '.' (matching any
    # character) and 'base' was interpolated unescaped; escape both so only
    # literal "<base>.<digits>" names match.
    namere = re.compile("%s\\.([0-9]+)" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            # use identity test for None (idiomatic, and robust if an
            # index ever compares oddly to None)
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
1783 1807
def savename(path):
    """Return the next unused save name for path: "<path>.<N+1>"."""
    last, index = lastsavename(path)
    if last is None:
        index = 0
    return "%s.%d" % (path, index + 1)
1790 1814
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack"""
    q = repo.mq
    mergeq = None

    # --all means push everything: target the last patch in the series
    if opts['all']:
        if not q.series:
            ui.warn(_('no patches in series\n'))
            return 0
        patch = q.series[-1]
    if opts['merge']:
        # merge from a saved queue: either the named one or the newest save
        if opts['name']:
            savedpath = opts['name']
        else:
            savedpath, i = lastsavename(q.path)
        if not savedpath:
            ui.warn("no saved queues found, please use -n\n")
            return 1
        mergeq = queue(ui, repo.join(""), savedpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)
    return q.push(repo, patch, force=opts['force'], list=opts['list'],
                  mergeq=mergeq)
1814 1838
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack"""
    # -n/--name pops from an alternate (saved) queue; in that case the
    # working directory is not updated.
    altname = opts['name']
    if altname:
        q = queue(ui, repo.join(""), repo.join(altname))
        ui.warn('using patch queue: %s\n' % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
                all=opts['all'])
    q.save_dirty()
    return ret
1828 1852
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    # one-arg form: the single argument is the new name, target is qtip
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # renaming into a directory keeps the original basename
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    # rewrite the series entry, preserving any "#guard" annotations
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    # if the patch is applied, update the status entry to the new name
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
    q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        # mirror the rename in the versioned patch repository; release
        # the wlock even if copy/remove raises
        wlock = r.wlock()
        try:
            if r.dirstate[name] == 'r':
                r.undelete([name], wlock)
            r.copy(patch, name, wlock)
            r.remove([patch], False, wlock)
        finally:
            del wlock

    q.save_dirty()
1881 1908
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    q = repo.mq
    q.restore(repo, repo.lookup(rev), delete=opts['delete'],
              qupdate=opts['update'])
    q.save_dirty()
    return 0
1890 1917
def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        # BUG FIX: was a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit; only a missing/unremovable status
        # file should be ignored here.
        except OSError:
            pass
    return 0
1920 1947
def strip(ui, repo, rev, **opts):
    """strip a revision and all later revs on the same branch"""
    # pick the backup policy from the mutually exclusive flags
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'
    else:
        backup = 'all'
    node = repo.lookup(rev)
    # only update the working dir if it is not already at the null rev
    update = repo.dirstate.parents()[0] != revlog.nullid
    repo.mq.strip(repo, node, backup=backup, update=update)
    return 0
1932 1959
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if it
    has no guards or any positive guards match the currently selected guard,
    but will not be pushed if any negative guards match the current guard.
    For example:

        qguard foo.patch -stable    (negative guard)
        qguard bar.patch +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it
    has a positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are skipped
    and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last applied
    patch that is not guarded. Use --reapply (which implies --pop) to push
    back to the current patch afterwards, but skip guarded patches.

    Use -s/--series to print a list of all guards in the series file (no
    other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        # mode 1: change the active guard set; report how the counts of
        # pushable/guarded patches changed (unless we are about to pop)
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        # mode 2 (--series): count each guard's occurrences in the series
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # sort by guard name, ignoring the leading '+'/'-' sign
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # mode 3: just print the currently active guards
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the current top patch so --reapply can push back to it
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        # pop back to just before the first applied patch that is now guarded
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # persist queue state even if the push above fails
            q.save_dirty()
2034 2061
def reposetup(ui, repo):
    """Wrap the repository class so core commands respect applied mq
    patches (refuse commit/push over them, expose qtip/qbase/qparent
    tags, and keep the branch cache clear of patch revisions)."""
    class mqrepo(repo.__class__):
        def abort_if_wdir_patched(self, errmsg, force=False):
            # abort when the working dir parent is an applied mq patch
            if self.mq.applied and not force:
                parent = revlog.hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, *args, **opts):
            # 'force' may arrive positionally (6th arg) or as a keyword
            if len(args) >= 6:
                force = args[5]
            else:
                force = opts.get('force')
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(*args, **opts)

        def push(self, remote, force=False, revs=None):
            # refuse to publish mq patches unless forced or revs are explicit
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            # add one tag per applied patch, plus qtip/qbase/qparent
            mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                # real tags win over same-named patches
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

        def _branchtags(self):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags()

            self.branchcache = {} # avoid recursion in changectx
            cl = self.changelog
            partial, last, lrev = self._readbranchcache()

            qbase = cl.rev(revlog.bin(q.applied[0].rev))
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, cl.count())

            return partial

    if repo.local():
        # only local repos get the mq machinery attached
        repo.__class__ = mqrepo
        repo.mq = queue(ui, repo.join(""))
2109 2136
# option shared by every command that lists series entries
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

# command table consumed by the extension loader: maps command name to
# (function, options, usage synopsis); a leading '^' marks commands shown
# in short help, '|' separates aliases
cmdtable = {
    "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
    "qclone":
        (clone,
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None, _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('p', 'patches', '', _('location of source patch repo')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg qclone [OPTION]... SOURCE [DEST]')),
    "qcommit|qci":
        (commit,
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...')),
    "^qdiff":
        (diff,
         [('g', 'git', None, _('use git extended diff format')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg qdiff [-I] [-X] [-g] [FILE]...')),
    "qdelete|qremove|qrm":
        (delete,
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [], _('stop managing a revision'))],
         _('hg qdelete [-k] [-r REV]... [PATCH]...')),
    'qfold':
        (fold,
         [('e', 'edit', None, _('edit patch header')),
          ('k', 'keep', None, _('keep folded patch files')),
          ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
    'qgoto':
        (goto,
         [('f', 'force', None, _('overwrite any local changes'))],
         _('hg qgoto [OPTION]... PATCH')),
    'qguard':
        (guard,
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
    'qheader': (header, [], _('hg qheader [PATCH]')),
    "^qimport":
        (qimport,
         [('e', 'existing', None, 'import file in patch dir'),
          ('n', 'name', '', 'patch file name'),
          ('f', 'force', None, 'overwrite existing files'),
          ('r', 'rev', [], 'place existing revisions under mq control'),
          ('g', 'git', None, _('use git extended diff format'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
    "^qinit":
        (init,
         [('c', 'create-repo', None, 'create queue repository')],
         _('hg qinit [-c]')),
    "qnew":
        (new,
         [('e', 'edit', None, _('edit commit message')),
          ('f', 'force', None, _('import uncommitted changes into patch')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ] + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
    "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
    "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
    "^qpop":
        (pop,
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '', _('queue name to pop')),
          ('f', 'force', None, _('forget any local changes'))],
         _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
    "^qpush":
        (push,
         [('f', 'force', None, _('apply if the patch has rejects')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue')),
          ('n', 'name', '', _('merge queue name'))],
         _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
    "^qrefresh":
        (refresh,
         [('e', 'edit', None, _('edit commit message')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None, _('refresh only files already in the patch')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ] + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
    'qrename|qmv':
        (rename, [], _('hg qrename PATCH1 [PATCH2]')),
    "qrestore":
        (restore,
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working dir'))],
         _('hg qrestore [-d] [-u] REV')),
    "qsave":
        (save,
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '', _('copy directory name')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
    "qselect":
        (select,
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...')),
    "qseries":
        (series,
         [('m', 'missing', None, _('print patches not in series')),
          ] + seriesopts,
         _('hg qseries [-ms]')),
    "^strip":
        (strip,
         [('f', 'force', None, _('force multi-head removal')),
          ('b', 'backup', None, _('bundle unrelated changesets')),
          ('n', 'nobackup', None, _('no backups'))],
         _('hg strip [-f] [-b] [-n] REV')),
    "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
    "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
}
@@ -1,593 +1,600 b''
1 1 # Patch transplanting extension for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from mercurial.i18n import _
9 9 import os, tempfile
10 10 from mercurial import bundlerepo, changegroup, cmdutil, commands, hg, merge
11 11 from mercurial import patch, revlog, util
12 12
13 13 '''patch transplanting tool
14 14
15 15 This extension allows you to transplant patches from another branch.
16 16
17 17 Transplanted patches are recorded in .hg/transplant/transplants, as a map
18 18 from a changeset hash to its hash in the source repository.
19 19 '''
20 20
class transplantentry:
    """One transplant record: a local node paired with the node it came
    from in the source repository."""
    def __init__(self, lnode, rnode):
        self.lnode, self.rnode = lnode, rnode
25 25
class transplants:
    """Persistent map of transplanted changesets.

    Each entry pairs a local node with its source-repo node; the map is
    loaded from and written to ``transplantfile`` under ``path`` as
    colon-separated hex pairs, one per line.
    """
    def __init__(self, path=None, transplantfile=None, opener=None):
        self.path = path
        self.transplantfile = transplantfile
        self.opener = opener
        if not opener:
            self.opener = util.opener(self.path)
        self.transplants = []
        self.dirty = False
        self.read()

    def read(self):
        # load existing records, if the file is there
        abspath = os.path.join(self.path, self.transplantfile)
        if self.transplantfile and os.path.exists(abspath):
            data = self.opener(self.transplantfile).read()
            for line in data.splitlines():
                lnode, rnode = map(revlog.bin, line.split(':'))
                self.transplants.append(transplantentry(lnode, rnode))

    def write(self):
        # only touch disk when something changed and we have a file name
        if self.dirty and self.transplantfile:
            if not os.path.isdir(self.path):
                os.mkdir(self.path)
            fp = self.opener(self.transplantfile, 'w')
            for entry in self.transplants:
                pair = map(revlog.hex, (entry.lnode, entry.rnode))
                fp.write(pair[0] + ':' + pair[1] + '\n')
            fp.close()
            self.dirty = False

    def get(self, rnode):
        # all records whose source node matches rnode
        return [t for t in self.transplants if t.rnode == rnode]

    def set(self, lnode, rnode):
        self.transplants.append(transplantentry(lnode, rnode))
        self.dirty = True

    def remove(self, transplant):
        del self.transplants[self.transplants.index(transplant)]
        self.dirty = True
66 66
67 67 class transplanter:
68 68 def __init__(self, ui, repo):
69 69 self.ui = ui
70 70 self.path = repo.join('transplant')
71 71 self.opener = util.opener(self.path)
72 72 self.transplants = transplants(self.path, 'transplants', opener=self.opener)
73 73
74 74 def applied(self, repo, node, parent):
75 75 '''returns True if a node is already an ancestor of parent
76 76 or has already been transplanted'''
77 77 if hasnode(repo, node):
78 78 if node in repo.changelog.reachable(parent, stop=node):
79 79 return True
80 80 for t in self.transplants.get(node):
81 81 # it might have been stripped
82 82 if not hasnode(repo, t.lnode):
83 83 self.transplants.remove(t)
84 84 return False
85 85 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 86 return True
87 87 return False
88 88
    def apply(self, repo, source, revmap, merges, opts={}):
        '''apply the revisions in revmap one by one in revision order'''
        # NOTE: the mutable default opts={} is shared across calls; callers
        # here never mutate it, but treat it as read-only.
        revs = revmap.keys()
        revs.sort()

        p1, p2 = repo.dirstate.parents()
        pulls = []
        diffopts = patch.diffopts(self.ui, opts)
        diffopts.git = True

        lock = wlock = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            for rev in revs:
                node = revmap[rev]
                revstr = '%s:%s' % (rev, revlog.short(node))

                if self.applied(repo, node, p1):
                    self.ui.warn(_('skipping already applied revision %s\n') %
                                 revstr)
                    continue

                parents = source.changelog.parents(node)
                if not opts.get('filter'):
                    # If the changeset parent is the same as the wdir's parent,
                    # just pull it.
                    if parents[0] == p1:
                        pulls.append(node)
                        p1 = node
                        continue
                    if pulls:
                        # flush queued straight-line pulls before diverging
                        if source != repo:
                            repo.pull(source, heads=pulls, lock=lock)
                        merge.update(repo, pulls[-1], False, False, None,
                                     wlock=wlock)
                        p1, p2 = repo.dirstate.parents()
                        pulls = []

                domerge = False
                if node in merges:
                    # pulling all the merge revs at once would mean we couldn't
                    # transplant after the latest even if transplants before them
                    # fail.
                    domerge = True
                    if not hasnode(repo, node):
                        repo.pull(source, heads=[node], lock=lock)

                if parents[1] != revlog.nullid:
                    self.ui.note(_('skipping merge changeset %s:%s\n')
                                 % (rev, revlog.short(node)))
                    patchfile = None
                else:
                    # export the changeset as a git-style patch to a temp file
                    fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
                    fp = os.fdopen(fd, 'w')
                    patch.diff(source, parents[0], node, fp=fp, opts=diffopts)
                    fp.close()

                # remove from revmap first: on failure, saveseries() below
                # records only what is still left to do
                del revmap[rev]
                if patchfile or domerge:
                    try:
                        n = self.applyone(repo, node, source.changelog.read(node),
                                          patchfile, merge=domerge,
                                          log=opts.get('log'),
                                          filter=opts.get('filter'),
                                          lock=lock, wlock=wlock)
                        if n and domerge:
                            self.ui.status(_('%s merged at %s\n') % (revstr,
                                           revlog.short(n)))
                        elif n:
                            self.ui.status(_('%s transplanted to %s\n') % (revlog.short(node),
                                           revlog.short(n)))
                    finally:
                        if patchfile:
                            os.unlink(patchfile)
            if pulls:
                repo.pull(source, heads=pulls, lock=lock)
                merge.update(repo, pulls[-1], False, False, None, wlock=wlock)
        finally:
            # always persist the remaining work and the transplant map,
            # then release locks (del triggers lock release)
            self.saveseries(revmap, merges)
            self.transplants.write()
            del lock, wlock
169 171
    def filter(self, filter, changelog, patchfile):
        '''arbitrarily rewrite changeset before applying it'''

        self.ui.status('filtering %s\n' % patchfile)
        user, date, msg = (changelog[1], changelog[2], changelog[4])

        # write the changeset metadata to a header file the filter can edit
        fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
        fp = os.fdopen(fd, 'w')
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % user)
        fp.write("# Date %d %d\n" % date)
        fp.write(changelog[4])
        fp.close()

        try:
            # run the user-supplied filter on (headerfile, patchfile);
            # nonzero exit aborts via onerr
            util.system('%s %s %s' % (filter, util.shellquote(headerfile),
                                   util.shellquote(patchfile)),
                        environ={'HGUSER': changelog[1]},
                        onerr=util.Abort, errprefix=_('filter failed'))
            # re-read possibly edited metadata back from the header file
            user, date, msg = self.parselog(file(headerfile))[1:4]
        finally:
            os.unlink(headerfile)

        return (user, date, msg)
194 196
    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
                 filter=None, lock=None, wlock=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        # cl is the source changelog entry: (manifest, user, (time, tz),
        # files, message, ...)
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, cl, patchfile)

        if log:
            message += '\n(transplanted from %s)' % revlog.hex(node)

        self.ui.status(_('applying %s\n') % revlog.short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise util.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = {}
                try:
                    fuzz = patch.patch(patchfile, self.ui, cwd=repo.root,
                                       files=files)
                    if not files:
                        self.ui.warn(_('%s: empty changeset') % revlog.hex(node))
                        return None
                finally:
                    # record adds/removes even when the patch failed partway
                    files = patch.updatedir(self.ui, repo, files, wlock=wlock)
            except Exception, inst:
                # journal enough state that the user can fix the rejects and
                # resume with "hg transplant --continue"
                if filter:
                    os.unlink(patchfile)
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.parents()[0]
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(str(inst) + '\n')
                raise util.Abort(_('Fix up the merge and run hg transplant --continue'))
        else:
            files = None
        if merge:
            # fake a merge: make the source node the second parent
            p1, p2 = repo.dirstate.parents()
            repo.dirstate.setparents(p1, node)

        n = repo.commit(files, message, user, date, lock=lock, wlock=wlock,
                        extra=extra)
        if not merge:
            self.transplants.set(n, node)

        return n
246 248
247 249 def resume(self, repo, source, opts=None):
248 250 '''recover last transaction and apply remaining changesets'''
249 251 if os.path.exists(os.path.join(self.path, 'journal')):
250 252 n, node = self.recover(repo)
251 253 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
252 254 revlog.short(n)))
253 255 seriespath = os.path.join(self.path, 'series')
254 256 if not os.path.exists(seriespath):
255 257 self.transplants.write()
256 258 return
257 259 nodes, merges = self.readseries()
258 260 revmap = {}
259 261 for n in nodes:
260 262 revmap[source.changelog.rev(n)] = n
261 263 os.unlink(seriespath)
262 264
263 265 self.apply(repo, source, revmap, merges, opts)
264 266
    def recover(self, repo):
        '''commit working directory using journal metadata'''
        node, user, date, message, parents = self.readlog()
        merge = len(parents) == 2

        if not user or not date or not message or not parents[0]:
            raise util.Abort(_('transplant log file is corrupt'))

        extra = {'transplant_source': node}
        wlock = repo.wlock()
        try:
            # the working dir must still be where the transplant left it
            p1, p2 = repo.dirstate.parents()
            if p1 != parents[0]:
                raise util.Abort(
                    _('working dir not at transplant parent %s') %
                                 revlog.hex(parents[0]))
            if merge:
                repo.dirstate.setparents(p1, parents[1])
            n = repo.commit(None, message, user, date, wlock=wlock,
                            extra=extra)
            if not n:
                raise util.Abort(_('commit failed'))
            if not merge:
                self.transplants.set(n, node)
            # drop the journal only after the commit succeeded
            self.unlog()

            return n, node
        finally:
            # release the working-dir lock on every exit path
            del wlock
289 296
290 297 def readseries(self):
291 298 nodes = []
292 299 merges = []
293 300 cur = nodes
294 301 for line in self.opener('series').read().splitlines():
295 302 if line.startswith('# Merges'):
296 303 cur = merges
297 304 continue
298 305 cur.append(revlog.bin(line))
299 306
300 307 return (nodes, merges)
301 308
302 309 def saveseries(self, revmap, merges):
303 310 if not revmap:
304 311 return
305 312
306 313 if not os.path.isdir(self.path):
307 314 os.mkdir(self.path)
308 315 series = self.opener('series', 'w')
309 316 revs = revmap.keys()
310 317 revs.sort()
311 318 for rev in revs:
312 319 series.write(revlog.hex(revmap[rev]) + '\n')
313 320 if merges:
314 321 series.write('# Merges\n')
315 322 for m in merges:
316 323 series.write(revlog.hex(m) + '\n')
317 324 series.close()
318 325
319 326 def parselog(self, fp):
320 327 parents = []
321 328 message = []
322 329 node = revlog.nullid
323 330 inmsg = False
324 331 for line in fp.read().splitlines():
325 332 if inmsg:
326 333 message.append(line)
327 334 elif line.startswith('# User '):
328 335 user = line[7:]
329 336 elif line.startswith('# Date '):
330 337 date = line[7:]
331 338 elif line.startswith('# Node ID '):
332 339 node = revlog.bin(line[10:])
333 340 elif line.startswith('# Parent '):
334 341 parents.append(revlog.bin(line[9:]))
335 342 elif not line.startswith('#'):
336 343 inmsg = True
337 344 message.append(line)
338 345 return (node, user, date, '\n'.join(message), parents)
339 346
340 347 def log(self, user, date, message, p1, p2, merge=False):
341 348 '''journal changelog metadata for later recover'''
342 349
343 350 if not os.path.isdir(self.path):
344 351 os.mkdir(self.path)
345 352 fp = self.opener('journal', 'w')
346 353 fp.write('# User %s\n' % user)
347 354 fp.write('# Date %s\n' % date)
348 355 fp.write('# Node ID %s\n' % revlog.hex(p2))
349 356 fp.write('# Parent ' + revlog.hex(p1) + '\n')
350 357 if merge:
351 358 fp.write('# Parent ' + revlog.hex(p2) + '\n')
352 359 fp.write(message.rstrip() + '\n')
353 360 fp.close()
354 361
355 362 def readlog(self):
356 363 return self.parselog(self.opener('journal'))
357 364
358 365 def unlog(self):
359 366 '''remove changelog journal'''
360 367 absdst = os.path.join(self.path, 'journal')
361 368 if os.path.exists(absdst):
362 369 os.unlink(absdst)
363 370
364 371 def transplantfilter(self, repo, source, root):
365 372 def matchfn(node):
366 373 if self.applied(repo, node, root):
367 374 return False
368 375 if source.changelog.parents(node)[1] != revlog.nullid:
369 376 return False
370 377 extra = source.changelog.read(node)[5]
371 378 cnode = extra.get('transplant_source')
372 379 if cnode and self.applied(repo, cnode, root):
373 380 return False
374 381 return True
375 382
376 383 return matchfn
377 384
def hasnode(repo, node):
    '''Return True if repo's changelog knows node, False otherwise.'''
    try:
        # PEP 8: compare to None with 'is not', not '!='.
        return repo.changelog.rev(node) is not None
    except revlog.RevlogError:
        # Unknown node: rev() raises rather than returning a sentinel.
        return False
383 390
def browserevs(ui, repo, nodes, opts):
    '''interactively transplant changesets'''
    def browsehelp(ui):
        ui.write('y: transplant this changeset\n'
                 'n: skip this changeset\n'
                 'm: merge at this changeset\n'
                 'p: show patch\n'
                 'c: commit selected changesets\n'
                 'q: cancel transplant\n'
                 '?: show this help\n')

    displayer = cmdutil.show_changeset(ui, repo, opts)
    transplants = []
    merges = []
    for node in nodes:
        displayer.show(changenode=node)
        # Re-prompt until a terminal action (y/n/m/c/q) is chosen;
        # '?' and 'p' display information and loop again.
        choice = None
        while not choice:
            choice = ui.prompt(_('apply changeset? [ynmpcq?]:'))
            if choice == '?':
                browsehelp(ui)
                choice = None
            elif choice == 'p':
                parent = repo.changelog.parents(node)[0]
                patch.diff(repo, parent, node)
                choice = None
            elif choice not in ('y', 'n', 'm', 'c', 'q'):
                ui.write('no such option\n')
                choice = None
        if choice == 'y':
            transplants.append(node)
        elif choice == 'm':
            merges.append(node)
        elif choice == 'c':
            break
        elif choice == 'q':
            # Cancelled: return empty selections.
            transplants = ()
            merges = ()
            break
    return (transplants, merges)
424 431
def transplant(ui, repo, *revs, **opts):
    '''transplant changesets from another branch

    Selected changesets will be applied on top of the current working
    directory with the log of the original changeset. If --log is
    specified, log messages will have a comment appended of the form:

    (transplanted from CHANGESETHASH)

    You can rewrite the changelog message with the --filter option.
    Its argument will be invoked with the current changelog message
    as $1 and the patch as $2.

    If --source is specified, selects changesets from the named
    repository. If --branch is specified, selects changesets from the
    branch holding the named revision, up to that revision. If --all
    is specified, all changesets on the branch will be transplanted,
    otherwise you will be prompted to select the changesets you want.

    hg transplant --branch REVISION --all will rebase the selected branch
    (up to the named revision) onto your current working directory.

    You can optionally mark selected transplanted changesets as
    merge changesets. You will not be prompted to transplant any
    ancestors of a merged transplant, and you can merge descendants
    of them normally instead of transplanting them.

    If no merges or revisions are provided, hg transplant will start
    an interactive changeset browser.

    If a changeset application fails, you can fix the merge by hand and
    then resume where you left off by calling hg transplant --continue.
    '''
    def getremotechanges(repo, url):
        # Find changesets incoming from url; non-local sources are
        # wrapped in a temporary bundle repository so they can be read
        # without being pulled into repo yet.
        sourcerepo = ui.expandpath(url)
        source = hg.repository(ui, sourcerepo)
        incoming = repo.findincoming(source, force=True)
        if not incoming:
            return (source, None, None)

        bundle = None
        if not source.local():
            cg = source.changegroup(incoming, 'incoming')
            bundle = changegroup.writebundle(cg, None, 'HG10UN')
            source = bundlerepo.bundlerepository(ui, repo.root, bundle)

        return (source, incoming, bundle)

    def incwalk(repo, incoming, branches, match=util.always):
        # Walk incoming changesets, limited to the given branch heads.
        if not branches:
            branches = None
        for node in repo.changelog.nodesbetween(incoming, branches)[0]:
            if match(node):
                yield node

    def transplantwalk(repo, root, branches, match=util.always):
        # Walk local changesets between root's ancestors and branches.
        if not branches:
            branches = repo.heads()
        ancestors = []
        for branch in branches:
            ancestors.append(repo.changelog.ancestor(root, branch))
        for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
            if match(node):
                yield node

    def checkopts(opts, revs):
        # Reject mutually-exclusive option combinations up front.
        if opts.get('continue'):
            if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
                raise util.Abort(_('--continue is incompatible with '
                                   'branch, all or merge'))
            return
        if not (opts.get('source') or revs or
                opts.get('merge') or opts.get('branch')):
            raise util.Abort(_('no source URL, branch tag or revision '
                               'list provided'))
        if opts.get('all'):
            if not opts.get('branch'):
                raise util.Abort(_('--all requires a branch revision'))
            if revs:
                raise util.Abort(_('--all is incompatible with a '
                                   'revision list'))

    checkopts(opts, revs)

    # Fall back to hgrc defaults for --log and --filter.
    if not opts.get('log'):
        opts['log'] = ui.config('transplant', 'log')
    if not opts.get('filter'):
        opts['filter'] = ui.config('transplant', 'filter')

    tp = transplanter(ui, repo)

    p1, p2 = repo.dirstate.parents()
    if p1 == revlog.nullid:
        raise util.Abort(_('no revision checked out'))
    if not opts.get('continue'):
        if p2 != revlog.nullid:
            raise util.Abort(_('outstanding uncommitted merges'))
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            raise util.Abort(_('outstanding local changes'))

    bundle = None
    source = opts.get('source')
    if source:
        (source, incoming, bundle) = getremotechanges(repo, source)
    else:
        source = repo

    try:
        if opts.get('continue'):
            tp.resume(repo, source, opts)
            return

        tf = tp.transplantfilter(repo, source, p1)
        if opts.get('prune'):
            prune = [source.lookup(r)
                     for r in cmdutil.revrange(source, opts.get('prune'))]
            matchfn = lambda x: tf(x) and x not in prune
        else:
            matchfn = tf
        branches = map(source.lookup, opts.get('branch', ()))
        merges = map(source.lookup, opts.get('merge', ()))
        revmap = {}
        if revs:
            for r in cmdutil.revrange(source, revs):
                revmap[int(r)] = source.lookup(r)
        elif opts.get('all') or not merges:
            if source != repo:
                alltransplants = incwalk(source, incoming, branches,
                                         match=matchfn)
            else:
                alltransplants = transplantwalk(source, p1, branches,
                                                match=matchfn)
            if opts.get('all'):
                revs = alltransplants
            else:
                revs, newmerges = browserevs(ui, source, alltransplants, opts)
                merges.extend(newmerges)
        # Merges are transplanted like ordinary revisions; apply() tells
        # them apart via the merges list.
        for r in revs:
            revmap[source.changelog.rev(r)] = r
        for r in merges:
            revmap[source.changelog.rev(r)] = r

        tp.apply(repo, source, revmap, merges, opts)
    finally:
        if bundle:
            # Clean up the temporary bundle repository, if any.
            source.close()
            os.unlink(bundle)
580 587
# Command table: maps the command name to (implementation, option
# descriptions, synopsis) as expected by the extension loader.
cmdtable = {
    "transplant":
        (transplant,
         [('s', 'source', '', _('pull patches from REPOSITORY')),
          ('b', 'branch', [], _('pull patches from branch BRANCH')),
          ('a', 'all', None, _('pull all changesets up to BRANCH')),
          ('p', 'prune', [], _('skip over REV')),
          ('m', 'merge', [], _('merge at REV')),
          ('', 'log', None, _('append transplant info to log message')),
          ('c', 'continue', None,
           _('continue last transplant session after repair')),
          ('', 'filter', '', _('filter changesets through FILTER'))],
         _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] '
           '[-m REV] [REV]...')),
}
@@ -1,3164 +1,3180 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, urllib, shlex, stat
12 12 import ui, hg, util, revlog, bundlerepo, extensions
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import errno, version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 # Commands start here, listed alphabetically
18 18
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        # Pattern matches only count when the file is not yet tracked;
        # exact (explicitly named) files are always scheduled.
        if not exact and abs in repo.dirstate:
            continue
        # Always announce pattern matches; exact names only with -v.
        if not exact or ui.verbose:
            ui.status(_('adding %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)
41 41
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # --similarity is given as a percentage; addremove wants a ratio.
    ratio = float(opts.get('similarity') or 0)
    if ratio < 0 or ratio > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=ratio / 100.)
60 60
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    datefunc = util.cachefunc(lambda x: util.datestr(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # Each entry pairs an option name with a formatter for one column.
    fieldmap = [('user', lambda x: ui.shortuser(x[0].user())),
                ('number', lambda x: str(x[0].rev())),
                ('changeset', lambda x: short(x[0].node())),
                ('date', datefunc),
                ('follow', lambda x: x[0].path()),
               ]

    # Default to revision numbers when no column was requested.
    if not (opts['user'] or opts['changeset'] or opts['date']
            or opts['follow']):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if linenumber and not opts['changeset'] and not opts['number']:
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    formatters = [func for op, func in fieldmap if opts.get(op)]
    if linenumber:
        # Append ':<lineno>' to the last requested column.
        lastfunc = formatters[-1]
        formatters[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        columns = []

        # Build each column, right-aligned to its widest cell.
        for fmt in formatters:
            cells = [fmt(n) for n, dummy in lines]
            if cells:
                width = max(map(len, cells))
                columns.append(["%*s" % (width, cell) for cell in cells])

        if columns:
            for cols, line in zip(zip(*columns), lines):
                ui.write("%s: %s" % (" ".join(cols), line[1]))
120 120
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo.changectx(opts['rev'])
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    matchfn = cmdutil.matchpats(repo, [], opts)[1]
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # '-' means stream the archive to stdout; impossible for the
        # plain-files archiver.
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
163 163
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # Accept the revision either positionally or via -r.
    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    cmdutil.bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # Backing out a merge: the user must say which parent to keep.
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # Check out the target, then revert everything to its parent: the
    # working dir now holds the inverse of the change.
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)

    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
    commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)

    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
236 236
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.
    """

    if not label:
        # Query mode: just print the working directory's branch.
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return

    if not opts.get('force') and label in repo.branchtags():
        # Re-using the name of a parent's branch is always allowed.
        if label not in [p.branch() for p in repo.workingctx().parents()]:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
257 257
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains unmerged heads.
    """
    tags = repo.branchtags()
    heads = dict.fromkeys(repo.heads(), 1)
    # Sort branches with active (head) ones first, then by revision,
    # newest first.
    entries = [((n in heads), repo.changelog.rev(n), n, t)
               for t, n in tags.items()]
    entries.sort()
    entries.reverse()
    for ishead, r, n, t in entries:
        if active and not ishead:
            # Active heads sort first, so the first inactive entry
            # means there is nothing left to show.
            break
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            if ui.debugflag:
                hexfunc = hex
            else:
                hexfunc = short
            spaces = " " * (30 - util.locallen(t))
            if ishead:
                isinactive = ''
            else:
                isinactive = " (inactive)"
            ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n),
                                         isinactive))
286 286
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            # fixed typo in user-facing message ("specifiying")
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # Walk ancestors of the requested heads, stopping at anything
        # reachable from a --base node; o collects the outgoing roots.
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if not parents:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # No --base: compute outgoing changesets against the remote.
        cmdutil.setremoteconfig(ui, opts)
        dest, revs = cmdutil.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
346 346
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    # Exit status 1 unless at least one file matched.
    err = 1
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        out = cmdutil.make_file(repo, opts['output'], ctx.node(),
                                pathname=abs)
        out.write(ctx.filectx(abs).data())
        err = 0
    return err
370 370
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # Apply --ssh/--remotecmd overrides before contacting the source.
    cmdutil.setremoteconfig(ui, opts)
    hg.clone(ui, source, dest, pull=opts['pull'],
             stream=opts['uncompressed'], rev=opts['rev'],
             update=not opts['noupdate'])
416 416
417 417 def commit(ui, repo, *pats, **opts):
418 418 """commit the specified files or all outstanding changes
419 419
420 420 Commit changes to the given files into the repository.
421 421
422 422 If a list of files is omitted, all changes reported by "hg status"
423 423 will be committed.
424 424
425 425 If no commit message is specified, the editor configured in your hgrc
426 426 or in the EDITOR environment variable is started to enter a message.
427 427 """
428 428 message = cmdutil.logmessage(opts)
429 429
430 430 if opts['addremove']:
431 431 cmdutil.addremove(repo, pats, opts)
432 432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
433 433 if pats:
434 434 status = repo.status(files=fns, match=match)
435 435 modified, added, removed, deleted, unknown = status[:5]
436 436 files = modified + added + removed
437 437 slist = None
438 438 for f in fns:
439 439 if f == '.':
440 440 continue
441 441 if f not in files:
442 442 rf = repo.wjoin(f)
443 443 try:
444 444 mode = os.lstat(rf)[stat.ST_MODE]
445 445 except OSError:
446 446 raise util.Abort(_("file %s not found!") % rf)
447 447 if stat.S_ISDIR(mode):
448 448 name = f + '/'
449 449 if slist is None:
450 450 slist = list(files)
451 451 slist.sort()
452 452 i = bisect.bisect(slist, name)
453 453 if i >= len(slist) or not slist[i].startswith(name):
454 454 raise util.Abort(_("no match under directory %s!")
455 455 % rf)
456 456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
457 457 raise util.Abort(_("can't commit %s: "
458 458 "unsupported file type!") % rf)
459 459 elif f not in repo.dirstate:
460 460 raise util.Abort(_("file %s not tracked!") % rf)
461 461 else:
462 462 files = []
463 463 try:
464 464 repo.commit(files, message, opts['user'], opts['date'], match,
465 465 force_editor=opts.get('force_editor'))
466 466 except ValueError, inst:
467 467 raise util.Abort(str(inst))
468 468
469 469 def docopy(ui, repo, pats, opts, wlock):
470 470 # called with the repo lock held
471 471 #
472 472 # hgsep => pathname that uses "/" to separate directories
473 473 # ossep => pathname that uses os.sep to separate directories
474 474 cwd = repo.getcwd()
475 475 errors = 0
476 476 copied = []
477 477 targets = {}
478 478
479 479 # abs: hgsep
480 480 # rel: ossep
481 481 # return: hgsep
482 482 def okaytocopy(abs, rel, exact):
483 483 reasons = {'?': _('is not managed'),
484 484 'r': _('has been marked for remove')}
485 485 state = repo.dirstate[abs]
486 486 reason = reasons.get(state)
487 487 if reason:
488 488 if exact:
489 489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 490 else:
491 491 if state == 'a':
492 492 origsrc = repo.dirstate.copied(abs)
493 493 if origsrc is not None:
494 494 return origsrc
495 495 return abs
496 496
    # origsrc: hgsep
    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copy(origsrc, abssrc, relsrc, otarget, exact):
        # Copy one file to otarget, honouring --after, --force and
        # --dry-run, and record dirstate copy/add information.  Each
        # successful copy is appended to the enclosing 'copied' list.
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        prevsrc = targets.get(abstarget)
        src = repo.wjoin(abssrc)
        target = repo.wjoin(abstarget)
        # two sources mapped onto the same target in this invocation
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return
        if (not opts['after'] and os.path.exists(target) or
            opts['after'] and repo.dirstate[abstarget] in 'mn'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return
            if not opts['after'] and not opts.get('dry_run'):
                os.unlink(target)
        if opts['after']:
            # record-only mode: the target must already exist on disk
            if not os.path.exists(target):
                return
        else:
            targetdir = os.path.dirname(target) or '.'
            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                os.makedirs(targetdir)
            try:
                # if the target was scheduled for removal, resurrect it
                # first; roll that back if the actual copy then fails
                restore = repo.dirstate[abstarget] == 'r'
                if restore and not opts.get('dry_run'):
                    repo.undelete([abstarget], wlock)
                try:
                    if not opts.get('dry_run'):
                        util.copyfile(src, target)
                    restore = False
                finally:
                    if restore:
                        repo.remove([abstarget], wlock=wlock)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                # NOTE(review): assigning 'errors' here makes it local to
                # copy() in Python 2, so reaching this line raises
                # UnboundLocalError and docopy's error count is never
                # updated -- confirm; a mutable container would fix it.
                errors += 1
                return
        if ui.verbose or not exact:
            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
        targets[abstarget] = abssrc
        # record copy metadata unless the file is copied onto itself
        if abstarget != origsrc:
            if repo.dirstate[origsrc] == 'a':
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if abstarget not in repo.dirstate and not opts.get('dry_run'):
                    repo.add([abstarget], wlock)
            elif not opts.get('dry_run'):
                repo.copy(origsrc, abstarget, wlock)
        copied.append((abssrc, relsrc, exact))
560 560
    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        # Build the source->target mapping function for a normal copy.
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                # dest exists: the copied directory keeps its last
                # component, so strip only everything above it
                striplen = len(os.path.split(abspfx)[0])
            else:
                # dest becomes the directory itself: strip the full prefix
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            # single source onto a non-directory: dest is the literal name
            res = lambda p: dest
        return res
582 582
    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        # Like targetpathfn, but for --after: the files already live at
        # their destination, so infer the mapping from what exists there.
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest when
                    # their prefix is stripped at striplen; higher wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                # try both interpretations and keep whichever matches more
                # of the files actually present under dest
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                                 os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res
626 626
627 627
    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    # the last pattern is the destination; everything before it is a source
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
        raise util.Abort(_('with multiple sources, destination must be an '
                           'existing directory'))
    if opts['after']:
        tfn = targetpathafterfn
    else:
        tfn = targetpathfn
    # copylist: [(target-mapping function, [(origsrc, abssrc, relsrc, exact)])]
    copylist = []
    for pat in pats:
        srcs = []
        for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
                                                       globbed=True):
            origsrc = okaytocopy(abssrc, relsrc, exact)
            if origsrc:
                srcs.append((origsrc, abssrc, relsrc, exact))
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    for targetpath, srcs in copylist:
        for origsrc, abssrc, relsrc, exact in srcs:
            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)

    if errors:
        ui.warn(_('(consider using --after)\n'))
    # returns (error count, list of (abssrc, relsrc, exact) copied)
    return errors, copied
663 663
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # hold the working-dir lock for the whole operation; the try/finally
    # releases it (via deletion) even if docopy raises
    wlock = repo.wlock(False)
    try:
        errs, copied = docopy(ui, repo, pats, opts, wlock)
    finally:
        del wlock
    return errs
681 684
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # open the index file read-only, with path auditing disabled
    log = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
    anc = log.ancestor(log.lookup(rev1), log.lookup(rev2))
    ui.write("%d:%s\n" % (log.rev(anc), hex(anc)))
687 690
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # complete option names instead of command names
        tables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(ui, cmd)
            tables.append(entry[1])
        flags = []
        for table in tables:
            for opt in table:
                shortname, longname = opt[0], opt[1]
                if shortname:
                    flags.append('-%s' % shortname)
                flags.append('--%s' % longname)
        ui.write("%s\n" % "\n".join(flags))
        return

    # complete command names
    names = cmdutil.findpossible(ui, cmd).keys()
    names.sort()
    ui.write("%s\n" % "\n".join(names))
708 711
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    # default to the tip revision when none is given
    if rev == "":
        rev = repo.changelog.tip()
    ctx = repo.changectx(rev)
    files = ctx.manifest()
    # hold the working-dir lock while rewriting the dirstate; deleting the
    # lock object in 'finally' releases it even on error
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(rev, files)
    finally:
        del wlock
717 723
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo.changectx(parent1).manifest()
    m2 = repo.changectx(parent2).manifest()
    errors = 0
    # dirstate -> manifest consistency
    for f in repo.dirstate:
        state = repo.dirstate[f]
        missing1 = f not in m1
        if state in "nr" and missing1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and not missing1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and missing1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # manifest -> dirstate consistency
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
744 750
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    # at most one fully-qualified section.name selector may be given
    if values:
        dotted = [v for v in values if '.' in v]
        if len(dotted) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            # no selectors: dump every item
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # section selector: print name=value pairs
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # exact item selector: print the bare value
                ui.write(value, '\n')
770 776
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # the second parent defaults to the null revision
    if not rev2:
        rev2 = hex(nullid)

    # take the working-dir lock; deleting it in 'finally' releases it
    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        del wlock
786 792
787 793 def debugstate(ui, repo):
788 794 """show the contents of the current dirstate"""
789 795 dc = repo.dirstate._map
790 796 k = dc.keys()
791 797 k.sort()
792 798 for file_ in k:
793 799 if dc[file_][3] == -1:
794 800 # Pad or slice to locale representation
795 801 locale_len = len(time.strftime("%x %X", time.localtime(0)))
796 802 timestr = 'unset'
797 803 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
798 804 else:
799 805 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
800 806 ui.write("%c %3o %10d %s %s\n"
801 807 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
802 808 timestr, file_))
803 809 for f in repo.dirstate.copies():
804 810 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
805 811
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # map the ".d" data file name to its ".i" index companion
    indexname = file_[:-2] + ".i"
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), indexname)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
813 819
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the extra set of accepted date formats
    if opts["extended"]:
        when = util.parsedate(date, util.extendeddateformats)
    else:
        when = util.parsedate(date)
    ui.write("internal: %s %s\n" % when)
    ui.write("standard: %s\n" % util.datestr(when))
    if range:
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(when[0]))
825 831
def debugindex(ui, file_):
    """dump the contents of an index file"""
    # open the revlog read-only, with path auditing disabled
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    # header row for the fixed-width columns printed below
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))
837 843
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    # one edge per parent link; the null second parent is omitted
    for rev in xrange(rlog.count()):
        node = rlog.node(rev)
        p1, p2 = rlog.parents(node)
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
849 855
def debuginstall(ui):
    '''test Mercurial installation'''

    # write contents to a fresh temp file and return its path; callers
    # are responsible for unlinking it
    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # number of problems found; also the return value
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    ui.status(_("Checking patch...\n"))
    patcher = ui.config('ui', 'patch')
    # preference order: configured patcher, gpatch, patch
    patcher = ((patcher and util.find_exe(patcher)) or
               util.find_exe('gpatch') or
               util.find_exe('patch'))
    if not patcher:
        ui.write(_(" Can't find patch or gpatch in PATH\n"))
        ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
        problems += 1
    else:
        # actually attempt a patch here
        a = "1\n2\n3\n4\n"
        b = "1\n2\n3\ninsert\n4\n"
        fa = writetemp(a)
        d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
        fd = writetemp(d)

        files = {}
        try:
            patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
        except util.Abort, e:
            ui.write(_(" patch call failed:\n"))
            ui.write(" " + str(e) + "\n")
            problems += 1
        else:
            # verify the patch touched exactly our file and produced b
            if list(files) != [os.path.basename(fa)]:
                ui.write(_(" unexpected patch output!"))
                ui.write(_(" (you may have an incompatible version of patch)\n"))
                problems += 1
            a = file(fa).read()
            if a != b:
                ui.write(_(" patch test failed!"))
                ui.write(_(" (you may have an incompatible version of patch)\n"))
                problems += 1

        os.unlink(fa)
        os.unlink(fd)

    # merge helper
    ui.status(_("Checking merge helper...\n"))
    cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
           or "hgmerge")
    cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
    if not cmdpath:
        if cmd == 'hgmerge':
            ui.write(_(" No merge helper set and can't find default"
                       " hgmerge script in PATH\n"))
            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
        problems += 1
    else:
        # actually attempt a patch here
        fa = writetemp("1\n2\n3\n4\n")
        fl = writetemp("1\n2\n3\ninsert\n4\n")
        fr = writetemp("begin\n1\n2\n3\n4\n")
        r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
        if r:
            ui.write(_(" Got unexpected merge error %d!\n") % r)
            problems += 1
        # the local file should now contain the merged result
        m = file(fl).read()
        if m != "begin\n1\n2\n3\ninsert\n4\n":
            ui.write(_(" Got unexpected merge results!\n"))
            ui.write(_(" (your merge helper may have the"
                       " wrong argument order)\n"))
            ui.write(_(" Result: %r\n") % m)
            problems += 1
        os.unlink(fa)
        os.unlink(fl)
        os.unlink(fr)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = (os.environ.get("HGEDITOR") or
              ui.config("ui", "editor") or
              os.environ.get("EDITOR", "vi"))
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    # number of problems found (0 == clean install)
    return problems
998 1004
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    # resolve the revision to inspect (defaults to tip)
    ctx = repo.changectx(opts.get('rev', 'tip'))
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        m = ctx.filectx(abs).renamed()
        if m:
            ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
1010 1016
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # size the two path columns to the longest abs/rel path in the result
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
1022 1028
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # node2 is None when comparing against the working directory
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    patch.diff(repo, node1, node2, fns, match=matchfn,
               opts=patch.diffopts(ui, opts))
1050 1056
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    # expand revision ranges/sets into a concrete list of revisions
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1091 1097
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
        return None
    # field separator and line terminator; --print0 switches both to NUL
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # filelog cache, so each file's revlog is opened only once
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every regexp match
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    # one matched line; equality compares line text only, so that
    # difflib can detect added/removed matches between revisions
    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    # matches: rev -> fn -> [linestate]; copies: rev -> fn -> copy source
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('-'/'+', linestate) for match lines removed from a /
        # added in b, via SequenceMatcher opcodes
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev: fn -> most recently displayed revision for that file
    prev = {}
    def display(fn, rev, states, prevstates):
        # print matches for one file/revision; returns True if anything
        # was printed
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                # print each file/rev pair at most once, without the line
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs emits 'window'/'add'/'iter' events; collect matches
    # on 'add' and display them on 'iter'
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    # file not in this revision's manifest
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        # without --all, stop after the first match per file
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush any state left over from the last window
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    # shell-style exit code: 0 if something matched, 1 otherwise
    return (not found and 1) or 0
1255 1261
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    # -r limits heads to those reachable from the given starting revision
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        # visitedset avoids reporting the same branch twice when several
        # arguments resolve to it
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        # exit code 1: nothing to show
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1303 1309
1304 1310 def help_(ui, name=None, with_version=False):
1305 1311 """show help for a command, extension, or list of commands
1306 1312
1307 1313 With no arguments, print a list of commands and short help.
1308 1314
1309 1315 Given a command name, print help for that command.
1310 1316
1311 1317 Given an extension name, print help for that extension, and the
1312 1318 commands it provides."""
1313 1319 option_lists = []
1314 1320
1315 1321 def addglobalopts(aliases):
1316 1322 if ui.verbose:
1317 1323 option_lists.append((_("global options:"), globalopts))
1318 1324 if name == 'shortlist':
1319 1325 option_lists.append((_('use "hg help" for the full list '
1320 1326 'of commands'), ()))
1321 1327 else:
1322 1328 if name == 'shortlist':
1323 1329 msg = _('use "hg help" for the full list of commands '
1324 1330 'or "hg -v" for details')
1325 1331 elif aliases:
1326 1332 msg = _('use "hg -v help%s" to show aliases and '
1327 1333 'global options') % (name and " " + name or "")
1328 1334 else:
1329 1335 msg = _('use "hg -v help %s" to show global options') % name
1330 1336 option_lists.append((msg, ()))
1331 1337
1332 1338 def helpcmd(name):
1333 1339 if with_version:
1334 1340 version_(ui)
1335 1341 ui.write('\n')
1336 1342 aliases, i = cmdutil.findcmd(ui, name)
1337 1343 # synopsis
1338 1344 ui.write("%s\n\n" % i[2])
1339 1345
1340 1346 # description
1341 1347 doc = i[0].__doc__
1342 1348 if not doc:
1343 1349 doc = _("(No help text available)")
1344 1350 if ui.quiet:
1345 1351 doc = doc.splitlines(0)[0]
1346 1352 ui.write("%s\n" % doc.rstrip())
1347 1353
1348 1354 if not ui.quiet:
1349 1355 # aliases
1350 1356 if len(aliases) > 1:
1351 1357 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1352 1358
1353 1359 # options
1354 1360 if i[1]:
1355 1361 option_lists.append((_("options:\n"), i[1]))
1356 1362
1357 1363 addglobalopts(False)
1358 1364
1359 1365 def helplist(select=None):
1360 1366 h = {}
1361 1367 cmds = {}
1362 1368 for c, e in table.items():
1363 1369 f = c.split("|", 1)[0]
1364 1370 if select and not select(f):
1365 1371 continue
1366 1372 if name == "shortlist" and not f.startswith("^"):
1367 1373 continue
1368 1374 f = f.lstrip("^")
1369 1375 if not ui.debugflag and f.startswith("debug"):
1370 1376 continue
1371 1377 doc = e[0].__doc__
1372 1378 if not doc:
1373 1379 doc = _("(No help text available)")
1374 1380 h[f] = doc.splitlines(0)[0].rstrip()
1375 1381 cmds[f] = c.lstrip("^")
1376 1382
1377 1383 fns = h.keys()
1378 1384 fns.sort()
1379 1385 m = max(map(len, fns))
1380 1386 for f in fns:
1381 1387 if ui.verbose:
1382 1388 commands = cmds[f].replace("|",", ")
1383 1389 ui.write(" %s:\n %s\n"%(commands, h[f]))
1384 1390 else:
1385 1391 ui.write(' %-*s %s\n' % (m, f, h[f]))
1386 1392
1387 1393 if not ui.quiet:
1388 1394 addglobalopts(True)
1389 1395
1390 1396 def helptopic(name):
1391 1397 v = None
1392 1398 for i in help.helptable:
1393 1399 l = i.split('|')
1394 1400 if name in l:
1395 1401 v = i
1396 1402 header = l[-1]
1397 1403 if not v:
1398 1404 raise cmdutil.UnknownCommand(name)
1399 1405
1400 1406 # description
1401 1407 doc = help.helptable[v]
1402 1408 if not doc:
1403 1409 doc = _("(No help text available)")
1404 1410 if callable(doc):
1405 1411 doc = doc()
1406 1412
1407 1413 ui.write("%s\n" % header)
1408 1414 ui.write("%s\n" % doc.rstrip())
1409 1415
    def helpext(name):
        # Help for an extension: its module docstring followed by a short
        # list of the commands it defines (if any).
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        # first docstring line becomes the headline, the rest is printed
        # verbatim below it
        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = None
        if not ct:
            ui.status(_('no commands defined\n'))
            return

        ui.status(_('list of commands:\n\n'))
        # restrict the generic command lister to this extension's commands
        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(modcmds.has_key)
1434 1440
1435 1441 if name and name != 'shortlist':
1436 1442 i = None
1437 1443 for f in (helpcmd, helptopic, helpext):
1438 1444 try:
1439 1445 f(name)
1440 1446 i = None
1441 1447 break
1442 1448 except cmdutil.UnknownCommand, inst:
1443 1449 i = inst
1444 1450 if i:
1445 1451 raise i
1446 1452
1447 1453 else:
1448 1454 # program name
1449 1455 if ui.verbose or with_version:
1450 1456 version_(ui)
1451 1457 else:
1452 1458 ui.status(_("Mercurial Distributed SCM\n"))
1453 1459 ui.status('\n')
1454 1460
1455 1461 # list of commands
1456 1462 if name == "shortlist":
1457 1463 ui.status(_('basic commands:\n\n'))
1458 1464 else:
1459 1465 ui.status(_('list of commands:\n\n'))
1460 1466
1461 1467 helplist()
1462 1468
1463 1469 # list all option lists
1464 1470 opt_output = []
1465 1471 for title, options in option_lists:
1466 1472 opt_output.append(("\n%s" % title, None))
1467 1473 for shortopt, longopt, default, desc in options:
1468 1474 if "DEPRECATED" in desc and not ui.verbose: continue
1469 1475 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1470 1476 longopt and " --%s" % longopt),
1471 1477 "%s%s" % (desc,
1472 1478 default
1473 1479 and _(" (default: %s)") % default
1474 1480 or "")))
1475 1481
1476 1482 if opt_output:
1477 1483 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1478 1484 for first, second in opt_output:
1479 1485 if second:
1480 1486 ui.write(" %-*s %s\n" % (opts_len, first, second))
1481 1487 else:
1482 1488 ui.write("%s\n" % first)
1483 1489
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    # short hashes by default, full hex digests with --debug
    hexfunc = ui.debugflag and hex or short
    # no explicit field requested: print the default id summary
    default = not (num or id or branch or tags)
    output = []

    if source:
        # remote repository: only the changeset id can be queried
        source, revs = cmdutil.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # working directory: parent id(s), with a '+' suffix when the
        # working directory has local modifications
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        # explicit local revision
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # default summary also mentions a non-default branch name ...
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1549 1555
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    # --exact always requires a clean working dir; otherwise only
    # refuse when -f was not given
    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    # try/finally ensures both locks are released even on abort
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                data = patch.extract(ui, file(pf, 'rb'))

            # extract() writes the diff body to a temp file and parses
            # any metadata headers out of the message
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # precedence: -m/-l option > patch message > editor
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    # check out the recorded parent before applying
                    if p1 != wp[0].node():
                        hg.clean(repo, p1, wlock=wlock)
                        repo.dirstate.setparents(p1, p2)
                elif p2:
                    # patch carries a second parent: adopt it as a merge
                    # if the first parent matches; ignore unknown nodes
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # record adds/removes even when patching failed partway
                    files = patch.updatedir(ui, repo, files, wlock=wlock)
                n = repo.commit(files, message, user, date, wlock=wlock,
                                lock=lock)
                if opts.get('exact'):
                    if hex(n) != nodeid:
                        # resulting changeset id differs: undo the commit
                        repo.rollback(wlock=wlock, lock=lock)
                        raise util.Abort(_('patch is damaged' +
                                           ' or loses information'))
            finally:
                os.unlink(tmpname)
    finally:
        del wlock, lock
1648 1660
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    if revs:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # nothing to show: drop any stale bundle file left from a
        # previous run.  Only try the unlink when a bundle path was
        # actually given, and ignore just a missing/unremovable file
        # instead of swallowing every exception with a bare except.
        if opts["bundle"]:
            try:
                os.unlink(opts["bundle"])
            except OSError:
                pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        # always close the (possibly remote or bundle) repo and remove
        # any temporary bundle file we created
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1715 1727
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # honour --ssh/--remotecmd for ssh:// destinations
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1730 1742
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # NUL-terminate entries for "xargs -0" when --print0 is given
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'

    # no --rev means "what the dirstate tracks now"
    rev = opts['rev']
    node = None
    if rev:
        node = repo.lookup(rev)

    exitcode = 1
    walk = cmdutil.walk(repo, pats, opts, node=node,
                        badmatch=util.always, default='relglob')
    for src, abs, rel, exact in walk:
        if src == 'b':
            continue
        # without a revision, skip files not under dirstate control
        if not node and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)
        exitcode = 0

    return exitcode
1771 1783
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoized changeset accessor shared by the filters below
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    # highest revision the rename cache needs to cover
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}   # fn -> {changerev: rename info}
    ncache = {}   # fn -> {filenode: rename info}
    dcache = []   # one-entry cache: [manifest node, its readdelta]
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        mr = repo.manifest.rev(man)
        # non-linear manifest history: do a full manifest lookup
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        # linear history: the cheaper manifest delta suffices
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # walkchangerevs yields ('add', rev, fns) when a revision enters the
    # window and ('iter', rev, None) when it should be emitted
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            # --date filter
            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            # --keyword filter: every keyword must match user, comment
            # or file list (case-insensitively)
            if opts['keyword']:
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1893 1905
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """
    mf = repo.changectx(rev).manifest()
    names = mf.keys()
    names.sort()

    for name in names:
        # --debug: prefix each entry with its file revision hash
        if ui.debugflag:
            ui.write("%40s " % hex(mf[name]))
        # -v: prefix each entry with its permission bits
        if ui.verbose:
            if mf.execf(name):
                perm = "755"
            else:
                perm = "644"
            ui.write("%3s " % perm)
        ui.write("%s\n" % name)
1918 1930
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """
    # --rev and a positional revision are mutually exclusive
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # no revision given: pick the "other" head, if unambiguous
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # merge with whichever of the two heads is not our parent
        if parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1954 1966
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    roots = repo.findoutgoing(other, force=opts['force'])
    if not roots:
        ui.status(_("no changes found\n"))
        return 1

    nodes = repo.changelog.nodesbetween(roots, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in nodes:
        # skip merges when --no-merges was given
        nparents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(nparents) == 2:
            continue
        displayer.show(changenode=n)
1985 1997
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if file_:
        # a file argument must resolve to exactly one explicit name
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        ctx = repo.filectx(files[0], changeid=rev)
    else:
        if rev:
            ctx = repo.changectx(rev)
        else:
            ctx = repo.workingctx()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for parent in ctx.parents():
        pnode = parent.node()
        if pnode != nullid:
            displayer.show(changenode=pnode)
2011 2023
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # no name given: list every configured path
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
        return

    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
2031 2043
def postincoming(ui, repo, modheads, optupdate, wasempty):
    # Decide what to do with the working directory after changesets
    # arrived: update it when that is unambiguous, otherwise tell the
    # user what to run next.
    if modheads == 0:
        return
    if optupdate:
        if wasempty:
            # first changesets ever pulled: check out the default branch
            return hg.update(repo, repo.lookup('default'))
        if modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2046 2058
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    # the URL may carry trailing #revisions; merge them with --rev
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    if revs:
        # pulling specific revisions needs server-side name resolution
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)

    # remember emptiness before the pull so postincoming can decide
    # whether to check out the default branch
    wasempty = repo.changelog.count() == 0
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], wasempty)
2103 2115
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    # the URL may carry trailing #revisions; merge them with --rev
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    if revs:
        revs = [repo.lookup(name) for name in revs]
    result = repo.push(other, opts['force'], revs=revs)
    # push() returns 0 on success; turn that into a truthy exit status
    return result == 0
2144 2156
2145 2157 def rawcommit(ui, repo, *pats, **opts):
2146 2158 """raw commit interface (DEPRECATED)
2147 2159
2148 2160 (DEPRECATED)
2149 2161 Lowlevel commit, for use in helper scripts.
2150 2162
2151 2163 This command is not intended to be used by normal users, as it is
2152 2164 primarily useful for importing from other SCMs.
2153 2165
2154 2166 This command is now deprecated and will be removed in a future
2155 2167 release, please use debugsetparents and commit instead.
2156 2168 """
2157 2169
2158 2170 ui.warn(_("(the rawcommit command is deprecated)\n"))
2159 2171
2160 2172 message = cmdutil.logmessage(opts)
2161 2173
2162 2174 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2163 2175 if opts['files']:
2164 2176 files += open(opts['files']).read().splitlines()
2165 2177
2166 2178 parents = [repo.lookup(p) for p in opts['parent']]
2167 2179
2168 2180 try:
2169 2181 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2170 2182 except ValueError, inst:
2171 2183 raise util.Abort(str(inst))
2172 2184
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # verify the repository only when recovery actually did something
    recovered = repo.recover()
    if not recovered:
        return 1
    return hg.verify(repo)
2184 2196
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # first five status fields: modified, added, removed, deleted, unknown
    # (converted to dicts for O(1) membership tests)
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    # renamed from remove/forget: the old names shadowed this function
    # and an unused `exact`/`names` pair has been dropped
    to_remove, to_forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                to_forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs not in repo.dirstate:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            # --after without explicit names only marks files that are
            # already gone from the working directory
            continue
        elif abs in removed:
            continue
        if reason:
            # only complain about explicitly named files
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            to_remove.append(abs)
    repo.forget(to_forget)
    repo.remove(to_remove, unlink=opts['force'] or not opts['after'])
2235 2247
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(False)
    try:
        # docopy (defined elsewhere in this file) records the copies and
        # returns an error count plus the list of copied files
        errs, copied = docopy(ui, repo, pats, opts, wlock)
        names = []
        for abs, rel, exact in copied:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            names.append(abs)
        if not opts.get('dry_run'):
            # second half of the rename: schedule the sources for removal
            repo.remove(names, True, wlock=wlock)
        return errs
    finally:
        # dropping the reference releases the working-dir lock
        del wlock
2260 2275
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    # --date and --rev are mutually exclusive; a date is resolved to a rev
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        # reverting to the working dir's parent: target and parent
        # manifests are the same object
        pmf = mf
    else:
        # parent manifest is loaded lazily below, only if actually needed
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # abs path -> (rel path, named-exactly-by-user flag)
    names = {}
    # abs path -> True for files that exist only in the target revision
    target_only = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.
        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                                 badmatch=mf.has_key):
            names[abs] = (rel, exact)
            if src == 'b':
                target_only[abs] = True

        # walk target manifest.

        def badmatch(path):
            # suppress "not found" noise for names (or directories of
            # names) already collected in the dirstate walk
            if path in names:
                return True
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return True
            return False

        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                                 badmatch=badmatch):
            if abs in names or src == 'b':
                continue
            names[abs] = (rel, exact)
            target_only[abs] = True

        # membership dicts for modified/added/removed/deleted/unknown,
        # restricted to the files collected above
        changes = repo.status(match=names.has_key, wlock=wlock)[:5]
        modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

        # each action is (list of files to act on, status message template)
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], _('removing %s\n'))
        forget = ([], _('forgetting %s\n'))
        undelete = ([], _('undeleting %s\n'))
        update = {}

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, forget, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            (unknown, add, None, True, False),
            (target_only, add, None, False, False),
            )

        entries = names.items()
        entries.sort()

        for abs, (rel, exact) in entries:
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # queue 'abs' for the chosen action, optionally saving a
                # .orig backup of the current working-dir contents first
                xlist[0].append(abs)
                update[abs] = 1
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    ui.status(xlist[1] % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                else:
                    if exact: ui.warn(_('file not managed: %s\n') % rel)
                break
            else:
                # for/else: reached only when no dispatch entry matched,
                # i.e. the file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo.changectx(parent).manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if pmf[abs] != mfentry:
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            # forget before the revert, add/undelete/remove after, so the
            # dirstate matches the reverted working-dir contents
            for f in forget[0]:
                repo.dirstate.forget(f)
            r = hg.revert(repo, node, update.has_key, wlock)
            for f in add[0]:
                repo.dirstate.add(f)
            for f in undelete[0]:
                repo.dirstate.normal(f)
            for f in remove[0]:
                repo.dirstate.remove(f)
            return r
    finally:
        # dropping the reference releases the working-dir lock
        del wlock
2429 2445
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, which
    may lose subsequent dirstate changes.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work (including locking) happens inside the repo object
    repo.rollback()
2460 2476
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # emit the repository root followed by a newline
    ui.write("%s\n" % repo.root)
2467 2483
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: act as the remote end of an ssh connection instead of HTTP
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # push web options into the (parent) ui config so hgweb picks them up
    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if repo.ui != parentui:
                repo.ui.setconfig("web", o, str(opts[o]))

    # with no repo, a webdir_conf is required to know what to serve
    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    class service:
        def init(self):
            # bind the socket; failures surface as a user-level abort
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            # announce the listening address (port elided when it is 80)
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/\n') %
                          (self.httpd.addr, self.httpd.port))
            else:
                ui.status(_('listening at http://%s/\n') % self.httpd.addr)

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles daemonizing and invoking init/run
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2519 2535
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
    = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # paths are printed relative to cwd only when patterns were given
    cwd = (pats and repo.getcwd()) or ''
    # ignored/clean lists are only computed when requested (or with -A)
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=all or opts['ignored'],
                               list_clean=all or opts['clean'])]

    # (option name, status letter, file list) in display order
    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    # show only the explicitly requested categories, or the default set
    # when no category option was given
    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % repo.pathto(f, cwd))
            # append the copy source on its own line when requested
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2582 2598
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revision, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    # these names have built-in meanings and may never be tags
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        # legacy positional-revision form; still accepted but discouraged
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        if not name in repo.tags():
            raise util.Abort(_('tag %s does not exist') % name)
        # removal is recorded as tagging the null revision
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    # resolve the revision (default: working dir parent / tip) to a node
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2631 2647
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    taglist = repo.tagslist()
    taglist.reverse()
    # full hashes in debug mode, short hashes otherwise
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    for tagname, tagnode in taglist:
        hn = hexfunc(tagnode)
        try:
            r = "%5d:%s" % (repo.changelog.rev(tagnode), hn)
        except revlog.LookupError:
            # node not present in the changelog; show '?' for the rev
            r = " ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % tagname)
        else:
            # pad the name so the rev:hash column lines up
            pad = " " * (30 - util.locallen(tagname))
            ui.write("%s%s %s\n" % (tagname, pad, r))
2654 2670
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # tip revision number: changelog length offset by nullrev
    tiprev = nullrev + repo.changelog.count()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(tiprev)
2661 2677
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames
    # remember whether the repo started out empty; postincoming (defined
    # elsewhere in this file) uses this when deciding how to update
    wasempty = repo.changelog.count() == 0
    # fix: dropped the dead 'result = None' local, which was never read
    for fname in fnames:
        # local paths are opened directly; anything else through urllib
        if os.path.exists(fname):
            f = open(fname, "rb")
        else:
            f = urllib.urlopen(fname)
        gen = changegroup.readbundle(f, fname)
        modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)

    # NOTE(review): only the head count from the last bundle is passed on;
    # preserved as-is since callers may depend on this behavior
    return postincoming(ui, repo, modheads, opts['update'], wasempty)
2680 2696
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    # 'node' is the positional form, 'rev' the -r form; both at once is
    # ambiguous and rejected
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # resolve the date spec to a revision
        rev = cmdutil.finddate(ui, repo, date)

    # --clean discards local changes; otherwise do a regular update
    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2712 2728
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # delegate entirely to hg.verify; its return value is the exit code
    return hg.verify(repo)
2724 2740
def version_(ui):
    """output version and copyright information"""
    # version line always shown; copyright only at normal verbosity
    ver = version.get_version()
    ui.write(_("Mercurial Distributed SCM (version %s)\n") % ver)
    ui.status(_(
        "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2735 2751
# Command options and aliases are listed here, alphabetically

# options accepted by every command; each entry is
# (short flag, long flag, default value, help text)
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# shared option fragments, concatenated into individual commands'
# option lists in the 'table' below

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]
2776 2792
2777 2793 table = {
2778 2794 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2779 2795 "addremove":
2780 2796 (addremove,
2781 2797 [('s', 'similarity', '',
2782 2798 _('guess renamed files by similarity (0<=s<=100)')),
2783 2799 ] + walkopts + dryrunopts,
2784 2800 _('hg addremove [OPTION]... [FILE]...')),
2785 2801 "^annotate":
2786 2802 (annotate,
2787 2803 [('r', 'rev', '', _('annotate the specified revision')),
2788 2804 ('f', 'follow', None, _('follow file copies and renames')),
2789 2805 ('a', 'text', None, _('treat all files as text')),
2790 2806 ('u', 'user', None, _('list the author')),
2791 2807 ('d', 'date', None, _('list the date')),
2792 2808 ('n', 'number', None, _('list the revision number (default)')),
2793 2809 ('c', 'changeset', None, _('list the changeset')),
2794 2810 ('l', 'line-number', None,
2795 2811 _('show line number at the first appearance'))
2796 2812 ] + walkopts,
2797 2813 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2798 2814 "archive":
2799 2815 (archive,
2800 2816 [('', 'no-decode', None, _('do not pass files through decoders')),
2801 2817 ('p', 'prefix', '', _('directory prefix for files in archive')),
2802 2818 ('r', 'rev', '', _('revision to distribute')),
2803 2819 ('t', 'type', '', _('type of distribution to create')),
2804 2820 ] + walkopts,
2805 2821 _('hg archive [OPTION]... DEST')),
2806 2822 "backout":
2807 2823 (backout,
2808 2824 [('', 'merge', None,
2809 2825 _('merge with old dirstate parent after backout')),
2810 2826 ('d', 'date', '', _('record datecode as commit date')),
2811 2827 ('', 'parent', '', _('parent to choose when backing out merge')),
2812 2828 ('u', 'user', '', _('record user as committer')),
2813 2829 ('r', 'rev', '', _('revision to backout')),
2814 2830 ] + walkopts + commitopts,
2815 2831 _('hg backout [OPTION]... [-r] REV')),
2816 2832 "branch":
2817 2833 (branch,
2818 2834 [('f', 'force', None,
2819 2835 _('set branch name even if it shadows an existing branch'))],
2820 2836 _('hg branch [NAME]')),
2821 2837 "branches":
2822 2838 (branches,
2823 2839 [('a', 'active', False,
2824 2840 _('show only branches that have unmerged heads'))],
2825 2841 _('hg branches [-a]')),
2826 2842 "bundle":
2827 2843 (bundle,
2828 2844 [('f', 'force', None,
2829 2845 _('run even when remote repository is unrelated')),
2830 2846 ('r', 'rev', [],
2831 2847 _('a changeset you would like to bundle')),
2832 2848 ('', 'base', [],
2833 2849 _('a base changeset to specify instead of a destination')),
2834 2850 ] + remoteopts,
2835 2851 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2836 2852 "cat":
2837 2853 (cat,
2838 2854 [('o', 'output', '', _('print output to file with formatted name')),
2839 2855 ('r', 'rev', '', _('print the given revision')),
2840 2856 ] + walkopts,
2841 2857 _('hg cat [OPTION]... FILE...')),
2842 2858 "^clone":
2843 2859 (clone,
2844 2860 [('U', 'noupdate', None, _('do not update the new working directory')),
2845 2861 ('r', 'rev', [],
2846 2862 _('a changeset you would like to have after cloning')),
2847 2863 ('', 'pull', None, _('use pull protocol to copy metadata')),
2848 2864 ('', 'uncompressed', None,
2849 2865 _('use uncompressed transfer (fast over LAN)')),
2850 2866 ] + remoteopts,
2851 2867 _('hg clone [OPTION]... SOURCE [DEST]')),
2852 2868 "^commit|ci":
2853 2869 (commit,
2854 2870 [('A', 'addremove', None,
2855 2871 _('mark new/missing files as added/removed before committing')),
2856 2872 ('d', 'date', '', _('record datecode as commit date')),
2857 2873 ('u', 'user', '', _('record user as commiter')),
2858 2874 ] + walkopts + commitopts,
2859 2875 _('hg commit [OPTION]... [FILE]...')),
2860 2876 "copy|cp":
2861 2877 (copy,
2862 2878 [('A', 'after', None, _('record a copy that has already occurred')),
2863 2879 ('f', 'force', None,
2864 2880 _('forcibly copy over an existing managed file')),
2865 2881 ] + walkopts + dryrunopts,
2866 2882 _('hg copy [OPTION]... [SOURCE]... DEST')),
2867 2883 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2868 2884 "debugcomplete":
2869 2885 (debugcomplete,
2870 2886 [('o', 'options', None, _('show the command options'))],
2871 2887 _('debugcomplete [-o] CMD')),
2872 2888 "debuginstall": (debuginstall, [], _('debuginstall')),
2873 2889 "debugrebuildstate":
2874 2890 (debugrebuildstate,
2875 2891 [('r', 'rev', '', _('revision to rebuild to'))],
2876 2892 _('debugrebuildstate [-r REV] [REV]')),
2877 2893 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2878 2894 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2879 2895 "debugstate": (debugstate, [], _('debugstate')),
2880 2896 "debugdate":
2881 2897 (debugdate,
2882 2898 [('e', 'extended', None, _('try extended date formats'))],
2883 2899 _('debugdate [-e] DATE [RANGE]')),
2884 2900 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2885 2901 "debugindex": (debugindex, [], _('debugindex FILE')),
2886 2902 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2887 2903 "debugrename":
2888 2904 (debugrename,
2889 2905 [('r', 'rev', '', _('revision to debug'))],
2890 2906 _('debugrename [-r REV] FILE')),
2891 2907 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2892 2908 "^diff":
2893 2909 (diff,
2894 2910 [('r', 'rev', [], _('revision')),
2895 2911 ('a', 'text', None, _('treat all files as text')),
2896 2912 ('p', 'show-function', None,
2897 2913 _('show which function each change is in')),
2898 2914 ('g', 'git', None, _('use git extended diff format')),
2899 2915 ('', 'nodates', None, _("don't include dates in diff headers")),
2900 2916 ('w', 'ignore-all-space', None,
2901 2917 _('ignore white space when comparing lines')),
2902 2918 ('b', 'ignore-space-change', None,
2903 2919 _('ignore changes in the amount of white space')),
2904 2920 ('B', 'ignore-blank-lines', None,
2905 2921 _('ignore changes whose lines are all blank')),
2906 2922 ] + walkopts,
2907 2923 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2908 2924 "^export":
2909 2925 (export,
2910 2926 [('o', 'output', '', _('print output to file with formatted name')),
2911 2927 ('a', 'text', None, _('treat all files as text')),
2912 2928 ('g', 'git', None, _('use git extended diff format')),
2913 2929 ('', 'nodates', None, _("don't include dates in diff headers")),
2914 2930 ('', 'switch-parent', None, _('diff against the second parent'))],
2915 2931 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2916 2932 "grep":
2917 2933 (grep,
2918 2934 [('0', 'print0', None, _('end fields with NUL')),
2919 2935 ('', 'all', None, _('print all revisions that match')),
2920 2936 ('f', 'follow', None,
2921 2937 _('follow changeset history, or file history across copies and renames')),
2922 2938 ('i', 'ignore-case', None, _('ignore case when matching')),
2923 2939 ('l', 'files-with-matches', None,
2924 2940 _('print only filenames and revs that match')),
2925 2941 ('n', 'line-number', None, _('print matching line numbers')),
2926 2942 ('r', 'rev', [], _('search in given revision range')),
2927 2943 ('u', 'user', None, _('print user who committed change')),
2928 2944 ] + walkopts,
2929 2945 _('hg grep [OPTION]... PATTERN [FILE]...')),
2930 2946 "heads":
2931 2947 (heads,
2932 2948 [('', 'style', '', _('display using template map file')),
2933 2949 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2934 2950 ('', 'template', '', _('display with template'))],
2935 2951 _('hg heads [-r REV] [REV]...')),
2936 2952 "help": (help_, [], _('hg help [COMMAND]')),
2937 2953 "identify|id":
2938 2954 (identify,
2939 2955 [('r', 'rev', '', _('identify the specified rev')),
2940 2956 ('n', 'num', None, _('show local revision number')),
2941 2957 ('i', 'id', None, _('show global revision id')),
2942 2958 ('b', 'branch', None, _('show branch')),
2943 2959 ('t', 'tags', None, _('show tags'))],
2944 2960 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2945 2961 "import|patch":
2946 2962 (import_,
2947 2963 [('p', 'strip', 1,
2948 2964 _('directory strip option for patch. This has the same\n'
2949 2965 'meaning as the corresponding patch option')),
2950 2966 ('b', 'base', '', _('base path')),
2951 2967 ('f', 'force', None,
2952 2968 _('skip check for outstanding uncommitted changes')),
2953 2969 ('', 'exact', None,
2954 2970 _('apply patch to the nodes from which it was generated')),
2955 2971 ('', 'import-branch', None,
2956 2972 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2957 2973 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2958 2974 "incoming|in": (incoming,
2959 2975 [('M', 'no-merges', None, _('do not show merges')),
2960 2976 ('f', 'force', None,
2961 2977 _('run even when remote repository is unrelated')),
2962 2978 ('', 'style', '', _('display using template map file')),
2963 2979 ('n', 'newest-first', None, _('show newest record first')),
2964 2980 ('', 'bundle', '', _('file to store the bundles into')),
2965 2981 ('p', 'patch', None, _('show patch')),
2966 2982 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2967 2983 ('', 'template', '', _('display with template')),
2968 2984 ] + remoteopts,
2969 2985 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2970 2986 ' [--bundle FILENAME] [SOURCE]')),
2971 2987 "^init":
2972 2988 (init,
2973 2989 remoteopts,
2974 2990 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2975 2991 "locate":
2976 2992 (locate,
2977 2993 [('r', 'rev', '', _('search the repository as it stood at rev')),
2978 2994 ('0', 'print0', None,
2979 2995 _('end filenames with NUL, for use with xargs')),
2980 2996 ('f', 'fullpath', None,
2981 2997 _('print complete paths from the filesystem root')),
2982 2998 ] + walkopts,
2983 2999 _('hg locate [OPTION]... [PATTERN]...')),
2984 3000 "^log|history":
2985 3001 (log,
2986 3002 [('f', 'follow', None,
2987 3003 _('follow changeset history, or file history across copies and renames')),
2988 3004 ('', 'follow-first', None,
2989 3005 _('only follow the first parent of merge changesets')),
2990 3006 ('d', 'date', '', _('show revs matching date spec')),
2991 3007 ('C', 'copies', None, _('show copied files')),
2992 3008 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2993 3009 ('l', 'limit', '', _('limit number of changes displayed')),
2994 3010 ('r', 'rev', [], _('show the specified revision or range')),
2995 3011 ('', 'removed', None, _('include revs where files were removed')),
2996 3012 ('M', 'no-merges', None, _('do not show merges')),
2997 3013 ('', 'style', '', _('display using template map file')),
2998 3014 ('m', 'only-merges', None, _('show only merges')),
2999 3015 ('p', 'patch', None, _('show patch')),
3000 3016 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3001 3017 ('', 'template', '', _('display with template')),
3002 3018 ] + walkopts,
3003 3019 _('hg log [OPTION]... [FILE]')),
3004 3020 "manifest": (manifest, [], _('hg manifest [REV]')),
3005 3021 "^merge":
3006 3022 (merge,
3007 3023 [('f', 'force', None, _('force a merge with outstanding changes')),
3008 3024 ('r', 'rev', '', _('revision to merge')),
3009 3025 ],
3010 3026 _('hg merge [-f] [[-r] REV]')),
3011 3027 "outgoing|out": (outgoing,
3012 3028 [('M', 'no-merges', None, _('do not show merges')),
3013 3029 ('f', 'force', None,
3014 3030 _('run even when remote repository is unrelated')),
3015 3031 ('p', 'patch', None, _('show patch')),
3016 3032 ('', 'style', '', _('display using template map file')),
3017 3033 ('r', 'rev', [], _('a specific revision you would like to push')),
3018 3034 ('n', 'newest-first', None, _('show newest record first')),
3019 3035 ('', 'template', '', _('display with template')),
3020 3036 ] + remoteopts,
3021 3037 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3022 3038 "^parents":
3023 3039 (parents,
3024 3040 [('r', 'rev', '', _('show parents from the specified rev')),
3025 3041 ('', 'style', '', _('display using template map file')),
3026 3042 ('', 'template', '', _('display with template'))],
3027 3043 _('hg parents [-r REV] [FILE]')),
3028 3044 "paths": (paths, [], _('hg paths [NAME]')),
3029 3045 "^pull":
3030 3046 (pull,
3031 3047 [('u', 'update', None,
3032 3048 _('update to new tip if changesets were pulled')),
3033 3049 ('f', 'force', None,
3034 3050 _('run even when remote repository is unrelated')),
3035 3051 ('r', 'rev', [],
3036 3052 _('a specific revision up to which you would like to pull')),
3037 3053 ] + remoteopts,
3038 3054 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3039 3055 "^push":
3040 3056 (push,
3041 3057 [('f', 'force', None, _('force push')),
3042 3058 ('r', 'rev', [], _('a specific revision you would like to push')),
3043 3059 ] + remoteopts,
3044 3060 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3045 3061 "debugrawcommit|rawcommit":
3046 3062 (rawcommit,
3047 3063 [('p', 'parent', [], _('parent')),
3048 3064 ('d', 'date', '', _('date code')),
3049 3065 ('u', 'user', '', _('user')),
3050 3066 ('F', 'files', '', _('file list'))
3051 3067 ] + commitopts,
3052 3068 _('hg debugrawcommit [OPTION]... [FILE]...')),
3053 3069 "recover": (recover, [], _('hg recover')),
3054 3070 "^remove|rm":
3055 3071 (remove,
3056 3072 [('A', 'after', None, _('record remove that has already occurred')),
3057 3073 ('f', 'force', None, _('remove file even if modified')),
3058 3074 ] + walkopts,
3059 3075 _('hg remove [OPTION]... FILE...')),
3060 3076 "rename|mv":
3061 3077 (rename,
3062 3078 [('A', 'after', None, _('record a rename that has already occurred')),
3063 3079 ('f', 'force', None,
3064 3080 _('forcibly copy over an existing managed file')),
3065 3081 ] + walkopts + dryrunopts,
3066 3082 _('hg rename [OPTION]... SOURCE... DEST')),
3067 3083 "^revert":
3068 3084 (revert,
3069 3085 [('a', 'all', None, _('revert all changes when no arguments given')),
3070 3086 ('d', 'date', '', _('tipmost revision matching date')),
3071 3087 ('r', 'rev', '', _('revision to revert to')),
3072 3088 ('', 'no-backup', None, _('do not save backup copies of files')),
3073 3089 ] + walkopts + dryrunopts,
3074 3090 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3075 3091 "rollback": (rollback, [], _('hg rollback')),
3076 3092 "root": (root, [], _('hg root')),
3077 3093 "showconfig|debugconfig":
3078 3094 (showconfig,
3079 3095 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3080 3096 _('showconfig [-u] [NAME]...')),
3081 3097 "^serve":
3082 3098 (serve,
3083 3099 [('A', 'accesslog', '', _('name of access log file to write to')),
3084 3100 ('d', 'daemon', None, _('run server in background')),
3085 3101 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3086 3102 ('E', 'errorlog', '', _('name of error log file to write to')),
3087 3103 ('p', 'port', 0, _('port to use (default: 8000)')),
3088 3104 ('a', 'address', '', _('address to use')),
3089 3105 ('n', 'name', '',
3090 3106 _('name to show in web pages (default: working dir)')),
3091 3107 ('', 'webdir-conf', '', _('name of the webdir config file'
3092 3108 ' (serve more than one repo)')),
3093 3109 ('', 'pid-file', '', _('name of file to write process ID to')),
3094 3110 ('', 'stdio', None, _('for remote clients')),
3095 3111 ('t', 'templates', '', _('web templates to use')),
3096 3112 ('', 'style', '', _('template style to use')),
3097 3113 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3098 3114 ('', 'certificate', '', _('SSL certificate file'))],
3099 3115 _('hg serve [OPTION]...')),
3100 3116 "^status|st":
3101 3117 (status,
3102 3118 [('A', 'all', None, _('show status of all files')),
3103 3119 ('m', 'modified', None, _('show only modified files')),
3104 3120 ('a', 'added', None, _('show only added files')),
3105 3121 ('r', 'removed', None, _('show only removed files')),
3106 3122 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3107 3123 ('c', 'clean', None, _('show only files without changes')),
3108 3124 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3109 3125 ('i', 'ignored', None, _('show only ignored files')),
3110 3126 ('n', 'no-status', None, _('hide status prefix')),
3111 3127 ('C', 'copies', None, _('show source of copied files')),
3112 3128 ('0', 'print0', None,
3113 3129 _('end filenames with NUL, for use with xargs')),
3114 3130 ('', 'rev', [], _('show difference from revision')),
3115 3131 ] + walkopts,
3116 3132 _('hg status [OPTION]... [FILE]...')),
3117 3133 "tag":
3118 3134 (tag,
3119 3135 [('f', 'force', None, _('replace existing tag')),
3120 3136 ('l', 'local', None, _('make the tag local')),
3121 3137 ('m', 'message', '', _('message for tag commit log entry')),
3122 3138 ('d', 'date', '', _('record datecode as commit date')),
3123 3139 ('u', 'user', '', _('record user as commiter')),
3124 3140 ('r', 'rev', '', _('revision to tag')),
3125 3141 ('', 'remove', None, _('remove a tag'))],
3126 3142 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3127 3143 "tags": (tags, [], _('hg tags')),
3128 3144 "tip":
3129 3145 (tip,
3130 3146 [('', 'style', '', _('display using template map file')),
3131 3147 ('p', 'patch', None, _('show patch')),
3132 3148 ('', 'template', '', _('display with template'))],
3133 3149 _('hg tip [-p]')),
3134 3150 "unbundle":
3135 3151 (unbundle,
3136 3152 [('u', 'update', None,
3137 3153 _('update to new tip if changesets were unbundled'))],
3138 3154 _('hg unbundle [-u] FILE...')),
3139 3155 "^update|up|checkout|co":
3140 3156 (update,
3141 3157 [('C', 'clean', None, _('overwrite locally modified files')),
3142 3158 ('d', 'date', '', _('tipmost revision matching date')),
3143 3159 ('r', 'rev', '', _('revision'))],
3144 3160 _('hg update [-C] [-d DATE] [[-r] REV]')),
3145 3161 "verify": (verify, [], _('hg verify')),
3146 3162 "version": (version_, [], _('hg version')),
3147 3163 }
3148 3164
3149 3165 extensions.commandtable = table
3150 3166
3151 3167 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3152 3168 " debugindex debugindexdot debugdate debuginstall")
3153 3169 optionalrepo = ("paths serve showconfig")
3154 3170
3155 3171 def dispatch(args, argv0=None):
3156 3172 try:
3157 3173 u = ui.ui(traceback='--traceback' in args)
3158 3174 except util.Abort, inst:
3159 3175 sys.stderr.write(_("abort: %s\n") % inst)
3160 3176 return -1
3161 3177 return cmdutil.runcatch(u, args, argv0=argv0)
3162 3178
3163 3179 def run():
3164 3180 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
@@ -1,285 +1,281 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from node import *
10 10 from repo import *
11 11 from i18n import _
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 13 import errno, lock, os, shutil, util, cmdutil, extensions
14 14 import merge as _merge
15 15 import verify as _verify
16 16
17 17 def _local(path):
18 18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 19 bundlerepo or localrepo)
20 20
21 21 schemes = {
22 22 'bundle': bundlerepo,
23 23 'file': _local,
24 24 'http': httprepo,
25 25 'https': httprepo,
26 26 'ssh': sshrepo,
27 27 'static-http': statichttprepo,
28 28 }
29 29
30 30 def _lookup(path):
31 31 scheme = 'file'
32 32 if path:
33 33 c = path.find(':')
34 34 if c > 0:
35 35 scheme = path[:c]
36 36 thing = schemes.get(scheme) or schemes['file']
37 37 try:
38 38 return thing(path)
39 39 except TypeError:
40 40 return thing
41 41
42 42 def islocal(repo):
43 43 '''return true if repo or path is local'''
44 44 if isinstance(repo, str):
45 45 try:
46 46 return _lookup(repo).islocal(repo)
47 47 except AttributeError:
48 48 return False
49 49 return repo.local()
50 50
51 51 def repository(ui, path='', create=False):
52 52 """return a repository object for the specified path"""
53 53 repo = _lookup(path).instance(ui, path, create)
54 54 ui = getattr(repo, "ui", ui)
55 55 for hook in extensions.setuphooks:
56 56 hook(ui, repo)
57 57 return repo
58 58
59 59 def defaultdest(source):
60 60 '''return default destination of clone if none is given'''
61 61 return os.path.basename(os.path.normpath(source))
62 62
63 63 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
64 64 stream=False):
65 65 """Make a copy of an existing repository.
66 66
67 67 Create a copy of an existing repository in a new directory. The
68 68 source and destination are URLs, as passed to the repository
69 69 function. Returns a pair of repository objects, the source and
70 70 newly created destination.
71 71
72 72 The location of the source is added to the new repository's
73 73 .hg/hgrc file, as the default to be used for future pulls and
74 74 pushes.
75 75
76 76 If an exception is raised, the partly cloned/updated destination
77 77 repository will be deleted.
78 78
79 79 Arguments:
80 80
81 81 source: repository object or URL
82 82
83 83 dest: URL of destination repository to create (defaults to base
84 84 name of source repository)
85 85
86 86 pull: always pull from source repository, even in local case
87 87
88 88 stream: stream raw data uncompressed from repository (fast over
89 89 LAN, slow over WAN)
90 90
91 91 rev: revision to clone up to (implies pull=True)
92 92
93 93 update: update working directory after clone completes, if
94 94 destination is local repository
95 95 """
96 96
97 97 origsource = source
98 98 source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
99 99
100 100 if isinstance(source, str):
101 101 src_repo = repository(ui, source)
102 102 else:
103 103 src_repo = source
104 104 source = src_repo.url()
105 105
106 106 if dest is None:
107 107 dest = defaultdest(source)
108 108 ui.status(_("destination directory: %s\n") % dest)
109 109
110 110 def localpath(path):
111 111 if path.startswith('file://'):
112 112 return path[7:]
113 113 if path.startswith('file:'):
114 114 return path[5:]
115 115 return path
116 116
117 117 dest = localpath(dest)
118 118 source = localpath(source)
119 119
120 120 if os.path.exists(dest):
121 121 raise util.Abort(_("destination '%s' already exists") % dest)
122 122
123 123 class DirCleanup(object):
124 124 def __init__(self, dir_):
125 125 self.rmtree = shutil.rmtree
126 126 self.dir_ = dir_
127 127 def close(self):
128 128 self.dir_ = None
129 129 def __del__(self):
130 130 if self.dir_:
131 131 self.rmtree(self.dir_, True)
132 132
133 dir_cleanup = None
133 src_lock = dest_lock = dir_cleanup = None
134 try:
134 135 if islocal(dest):
135 136 dir_cleanup = DirCleanup(dest)
136 137
137 138 abspath = origsource
138 139 copy = False
139 140 if src_repo.local() and islocal(dest):
140 141 abspath = os.path.abspath(origsource)
141 142 copy = not pull and not rev
142 143
143 src_lock, dest_lock = None, None
144 144 if copy:
145 145 try:
146 146 # we use a lock here because if we race with commit, we
147 147 # can end up with extra data in the cloned revlogs that's
148 148 # not pointed to by changesets, thus causing verify to
149 149 # fail
150 150 src_lock = src_repo.lock()
151 151 except lock.LockException:
152 152 copy = False
153 153
154 154 if copy:
155 155 def force_copy(src, dst):
156 156 try:
157 157 util.copyfiles(src, dst)
158 158 except OSError, inst:
159 159 if inst.errno != errno.ENOENT:
160 160 raise
161 161
162 162 src_store = os.path.realpath(src_repo.spath)
163 163 if not os.path.exists(dest):
164 164 os.mkdir(dest)
165 165 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
166 166 os.mkdir(dest_path)
167 167 if src_repo.spath != src_repo.path:
168 168 dest_store = os.path.join(dest_path, "store")
169 169 os.mkdir(dest_store)
170 170 else:
171 171 dest_store = dest_path
172 172 # copy the requires file
173 173 force_copy(src_repo.join("requires"),
174 174 os.path.join(dest_path, "requires"))
175 175 # we lock here to avoid premature writing to the target
176 176 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
177 177
178 178 files = ("data",
179 179 "00manifest.d", "00manifest.i",
180 180 "00changelog.d", "00changelog.i")
181 181 for f in files:
182 182 src = os.path.join(src_store, f)
183 183 dst = os.path.join(dest_store, f)
184 184 force_copy(src, dst)
185 185
186 186 # we need to re-init the repo after manually copying the data
187 187 # into it
188 188 dest_repo = repository(ui, dest)
189 189
190 190 else:
191 191 dest_repo = repository(ui, dest, create=True)
192 192
193 193 revs = None
194 194 if rev:
195 195 if 'lookup' not in src_repo.capabilities:
196 196 raise util.Abort(_("src repository does not support revision "
197 197 "lookup and so doesn't support clone by "
198 198 "revision"))
199 199 revs = [src_repo.lookup(r) for r in rev]
200 200
201 201 if dest_repo.local():
202 202 dest_repo.clone(src_repo, heads=revs, stream=stream)
203 203 elif src_repo.local():
204 204 src_repo.push(dest_repo, revs=revs)
205 205 else:
206 206 raise util.Abort(_("clone from remote to remote not supported"))
207 207
208 if src_lock:
209 src_lock.release()
210
211 208 if dest_repo.local():
212 209 fp = dest_repo.opener("hgrc", "w", text=True)
213 210 fp.write("[paths]\n")
214 211 fp.write("default = %s\n" % abspath)
215 212 fp.close()
216 213
217 if dest_lock:
218 dest_lock.release()
219
220 214 if update:
221 215 try:
222 216 checkout = dest_repo.lookup("default")
223 217 except:
224 218 checkout = dest_repo.changelog.tip()
225 219 _update(dest_repo, checkout)
226 220 if dir_cleanup:
227 221 dir_cleanup.close()
228 222
229 223 return src_repo, dest_repo
224 finally:
225 del src_lock, dest_lock, dir_cleanup
230 226
231 227 def _showstats(repo, stats):
232 228 stats = ((stats[0], _("updated")),
233 229 (stats[1], _("merged")),
234 230 (stats[2], _("removed")),
235 231 (stats[3], _("unresolved")))
236 232 note = ", ".join([_("%d files %s") % s for s in stats])
237 233 repo.ui.status("%s\n" % note)
238 234
239 235 def _update(repo, node): return update(repo, node)
240 236
241 237 def update(repo, node):
242 238 """update the working directory to node, merging linear changes"""
243 239 pl = repo.parents()
244 240 stats = _merge.update(repo, node, False, False, None, None)
245 241 _showstats(repo, stats)
246 242 if stats[3]:
247 243 repo.ui.status(_("There are unresolved merges with"
248 244 " locally modified files.\n"))
249 245 if stats[1]:
250 246 repo.ui.status(_("You can finish the partial merge using:\n"))
251 247 else:
252 248 repo.ui.status(_("You can redo the full merge using:\n"))
253 249 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
254 250 repo.ui.status(_(" hg update %s\n hg update %s\n")
255 251 % (pl[0].rev(), repo.changectx(node).rev()))
256 252 return stats[3]
257 253
258 254 def clean(repo, node, wlock=None, show_stats=True):
259 255 """forcibly switch the working directory to node, clobbering changes"""
260 256 stats = _merge.update(repo, node, False, True, None, wlock)
261 257 if show_stats: _showstats(repo, stats)
262 258 return stats[3]
263 259
264 260 def merge(repo, node, force=None, remind=True, wlock=None):
265 261 """branch merge with node, resolving changes"""
266 262 stats = _merge.update(repo, node, True, force, False, wlock)
267 263 _showstats(repo, stats)
268 264 if stats[3]:
269 265 pl = repo.parents()
270 266 repo.ui.status(_("There are unresolved merges,"
271 267 " you can redo the full merge using:\n"
272 268 " hg update -C %s\n"
273 269 " hg merge %s\n")
274 270 % (pl[0].rev(), pl[1].rev()))
275 271 elif remind:
276 272 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
277 273 return stats[3]
278 274
279 275 def revert(repo, node, choose, wlock):
280 276 """revert changes to revision in node without updating dirstate"""
281 277 return _merge.update(repo, node, False, True, choose, wlock)[3]
282 278
283 279 def verify(repo):
284 280 """verify the consistency of a repository"""
285 281 return _verify.verify(repo)
@@ -1,1188 +1,1188 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
10 10 import tempfile, urllib, bz2
11 11 from mercurial.node import *
12 12 from mercurial.i18n import gettext as _
13 13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 14 from mercurial import revlog, templater
15 15 from common import get_mtime, staticfile, style_map, paritygen
16 16
17 17 def _up(p):
18 18 if p[0] != "/":
19 19 p = "/" + p
20 20 if p[-1] == "/":
21 21 p = p[:-1]
22 22 up = os.path.dirname(p)
23 23 if up == "/":
24 24 return "/"
25 25 return up + "/"
26 26
27 27 def revnavgen(pos, pagelen, limit, nodefunc):
28 28 def seq(factor, limit=None):
29 29 if limit:
30 30 yield limit
31 31 if limit >= 20 and limit <= 40:
32 32 yield 50
33 33 else:
34 34 yield 1 * factor
35 35 yield 3 * factor
36 36 for f in seq(factor * 10):
37 37 yield f
38 38
39 39 def nav(**map):
40 40 l = []
41 41 last = 0
42 42 for f in seq(1, pagelen):
43 43 if f < pagelen or f <= last:
44 44 continue
45 45 if f > limit:
46 46 break
47 47 last = f
48 48 if pos + f < limit:
49 49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
50 50 if pos - f >= 0:
51 51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
52 52
53 53 try:
54 54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
55 55
56 56 for label, node in l:
57 57 yield {"label": label, "node": node}
58 58
59 59 yield {"label": "tip", "node": "tip"}
60 60 except hg.RepoError:
61 61 pass
62 62
63 63 return nav
64 64
65 65 class hgweb(object):
66 66 def __init__(self, repo, name=None):
67 67 if isinstance(repo, str):
68 68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
69 69 else:
70 70 self.repo = repo
71 71
72 72 self.mtime = -1
73 73 self.reponame = name
74 74 self.archives = 'zip', 'gz', 'bz2'
75 75 self.stripecount = 1
76 76 # a repo owner may set web.templates in .hg/hgrc to get any file
77 77 # readable by the user running the CGI script
78 78 self.templatepath = self.config("web", "templates",
79 79 templater.templatepath(),
80 80 untrusted=False)
81 81
82 82 # The CGI scripts are often run by a user different from the repo owner.
83 83 # Trust the settings from the .hg/hgrc files by default.
84 84 def config(self, section, name, default=None, untrusted=True):
85 85 return self.repo.ui.config(section, name, default,
86 86 untrusted=untrusted)
87 87
88 88 def configbool(self, section, name, default=False, untrusted=True):
89 89 return self.repo.ui.configbool(section, name, default,
90 90 untrusted=untrusted)
91 91
92 92 def configlist(self, section, name, default=None, untrusted=True):
93 93 return self.repo.ui.configlist(section, name, default,
94 94 untrusted=untrusted)
95 95
96 96 def refresh(self):
97 97 mtime = get_mtime(self.repo.root)
98 98 if mtime != self.mtime:
99 99 self.mtime = mtime
100 100 self.repo = hg.repository(self.repo.ui, self.repo.root)
101 101 self.maxchanges = int(self.config("web", "maxchanges", 10))
102 102 self.stripecount = int(self.config("web", "stripes", 1))
103 103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
104 104 self.maxfiles = int(self.config("web", "maxfiles", 10))
105 105 self.allowpull = self.configbool("web", "allowpull", True)
106 106 self.encoding = self.config("web", "encoding", util._encoding)
107 107
108 108 def archivelist(self, nodeid):
109 109 allowed = self.configlist("web", "allow_archive")
110 110 for i, spec in self.archive_specs.iteritems():
111 111 if i in allowed or self.configbool("web", "allow" + i):
112 112 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
113 113
114 114 def listfilediffs(self, files, changeset):
115 115 for f in files[:self.maxfiles]:
116 116 yield self.t("filedifflink", node=hex(changeset), file=f)
117 117 if len(files) > self.maxfiles:
118 118 yield self.t("fileellipses")
119 119
120 120 def siblings(self, siblings=[], hiderev=None, **args):
121 121 siblings = [s for s in siblings if s.node() != nullid]
122 122 if len(siblings) == 1 and siblings[0].rev() == hiderev:
123 123 return
124 124 for s in siblings:
125 125 d = {'node': hex(s.node()), 'rev': s.rev()}
126 126 if hasattr(s, 'path'):
127 127 d['file'] = s.path()
128 128 d.update(args)
129 129 yield d
130 130
131 131 def renamelink(self, fl, node):
132 132 r = fl.renamed(node)
133 133 if r:
134 134 return [dict(file=r[0], node=hex(r[1]))]
135 135 return []
136 136
137 137 def nodetagsdict(self, node):
138 138 return [{"name": i} for i in self.repo.nodetags(node)]
139 139
140 140 def nodebranchdict(self, ctx):
141 141 branches = []
142 142 branch = ctx.branch()
143 143 if self.repo.branchtags()[branch] == ctx.node():
144 144 branches.append({"name": branch})
145 145 return branches
146 146
147 147 def showtag(self, t1, node=nullid, **args):
148 148 for t in self.repo.nodetags(node):
149 149 yield self.t(t1, tag=t, **args)
150 150
151 151 def diff(self, node1, node2, files):
152 152 def filterfiles(filters, files):
153 153 l = [x for x in files if x in filters]
154 154
155 155 for t in filters:
156 156 if t and t[-1] != os.sep:
157 157 t += os.sep
158 158 l += [x for x in files if x.startswith(t)]
159 159 return l
160 160
161 161 parity = paritygen(self.stripecount)
162 162 def diffblock(diff, f, fn):
163 163 yield self.t("diffblock",
164 164 lines=prettyprintlines(diff),
165 165 parity=parity.next(),
166 166 file=f,
167 167 filenode=hex(fn or nullid))
168 168
169 169 def prettyprintlines(diff):
170 170 for l in diff.splitlines(1):
171 171 if l.startswith('+'):
172 172 yield self.t("difflineplus", line=l)
173 173 elif l.startswith('-'):
174 174 yield self.t("difflineminus", line=l)
175 175 elif l.startswith('@'):
176 176 yield self.t("difflineat", line=l)
177 177 else:
178 178 yield self.t("diffline", line=l)
179 179
180 180 r = self.repo
181 181 c1 = r.changectx(node1)
182 182 c2 = r.changectx(node2)
183 183 date1 = util.datestr(c1.date())
184 184 date2 = util.datestr(c2.date())
185 185
186 186 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
187 187 if files:
188 188 modified, added, removed = map(lambda x: filterfiles(files, x),
189 189 (modified, added, removed))
190 190
191 191 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
192 192 for f in modified:
193 193 to = c1.filectx(f).data()
194 194 tn = c2.filectx(f).data()
195 195 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
196 196 opts=diffopts), f, tn)
197 197 for f in added:
198 198 to = None
199 199 tn = c2.filectx(f).data()
200 200 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
201 201 opts=diffopts), f, tn)
202 202 for f in removed:
203 203 to = c1.filectx(f).data()
204 204 tn = None
205 205 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
206 206 opts=diffopts), f, tn)
207 207
208 208 def changelog(self, ctx, shortlog=False):
209 209 def changelist(**map):
210 210 cl = self.repo.changelog
211 211 l = [] # build a list in forward order for efficiency
212 212 for i in xrange(start, end):
213 213 ctx = self.repo.changectx(i)
214 214 n = ctx.node()
215 215
216 216 l.insert(0, {"parity": parity.next(),
217 217 "author": ctx.user(),
218 218 "parent": self.siblings(ctx.parents(), i - 1),
219 219 "child": self.siblings(ctx.children(), i + 1),
220 220 "changelogtag": self.showtag("changelogtag",n),
221 221 "desc": ctx.description(),
222 222 "date": ctx.date(),
223 223 "files": self.listfilediffs(ctx.files(), n),
224 224 "rev": i,
225 225 "node": hex(n),
226 226 "tags": self.nodetagsdict(n),
227 227 "branches": self.nodebranchdict(ctx)})
228 228
229 229 for e in l:
230 230 yield e
231 231
232 232 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
233 233 cl = self.repo.changelog
234 234 count = cl.count()
235 235 pos = ctx.rev()
236 236 start = max(0, pos - maxchanges + 1)
237 237 end = min(count, start + maxchanges)
238 238 pos = end - 1
239 239 parity = paritygen(self.stripecount, offset=start-end)
240 240
241 241 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
242 242
243 243 yield self.t(shortlog and 'shortlog' or 'changelog',
244 244 changenav=changenav,
245 245 node=hex(cl.tip()),
246 246 rev=pos, changesets=count, entries=changelist,
247 247 archives=self.archivelist("tip"))
248 248
249 249 def search(self, query):
250 250
251 251 def changelist(**map):
252 252 cl = self.repo.changelog
253 253 count = 0
254 254 qw = query.lower().split()
255 255
256 256 def revgen():
257 257 for i in xrange(cl.count() - 1, 0, -100):
258 258 l = []
259 259 for j in xrange(max(0, i - 100), i):
260 260 ctx = self.repo.changectx(j)
261 261 l.append(ctx)
262 262 l.reverse()
263 263 for e in l:
264 264 yield e
265 265
266 266 for ctx in revgen():
267 267 miss = 0
268 268 for q in qw:
269 269 if not (q in ctx.user().lower() or
270 270 q in ctx.description().lower() or
271 271 q in " ".join(ctx.files()).lower()):
272 272 miss = 1
273 273 break
274 274 if miss:
275 275 continue
276 276
277 277 count += 1
278 278 n = ctx.node()
279 279
280 280 yield self.t('searchentry',
281 281 parity=parity.next(),
282 282 author=ctx.user(),
283 283 parent=self.siblings(ctx.parents()),
284 284 child=self.siblings(ctx.children()),
285 285 changelogtag=self.showtag("changelogtag",n),
286 286 desc=ctx.description(),
287 287 date=ctx.date(),
288 288 files=self.listfilediffs(ctx.files(), n),
289 289 rev=ctx.rev(),
290 290 node=hex(n),
291 291 tags=self.nodetagsdict(n),
292 292 branches=self.nodebranchdict(ctx))
293 293
294 294 if count >= self.maxchanges:
295 295 break
296 296
297 297 cl = self.repo.changelog
298 298 parity = paritygen(self.stripecount)
299 299
300 300 yield self.t('search',
301 301 query=query,
302 302 node=hex(cl.tip()),
303 303 entries=changelist,
304 304 archives=self.archivelist("tip"))
305 305
306 306 def changeset(self, ctx):
307 307 n = ctx.node()
308 308 parents = ctx.parents()
309 309 p1 = parents[0].node()
310 310
311 311 files = []
312 312 parity = paritygen(self.stripecount)
313 313 for f in ctx.files():
314 314 files.append(self.t("filenodelink",
315 315 node=hex(n), file=f,
316 316 parity=parity.next()))
317 317
318 318 def diff(**map):
319 319 yield self.diff(p1, n, None)
320 320
321 321 yield self.t('changeset',
322 322 diff=diff,
323 323 rev=ctx.rev(),
324 324 node=hex(n),
325 325 parent=self.siblings(parents),
326 326 child=self.siblings(ctx.children()),
327 327 changesettag=self.showtag("changesettag",n),
328 328 author=ctx.user(),
329 329 desc=ctx.description(),
330 330 date=ctx.date(),
331 331 files=files,
332 332 archives=self.archivelist(hex(n)),
333 333 tags=self.nodetagsdict(n),
334 334 branches=self.nodebranchdict(ctx))
335 335
336 336 def filelog(self, fctx):
337 337 f = fctx.path()
338 338 fl = fctx.filelog()
339 339 count = fl.count()
340 340 pagelen = self.maxshortchanges
341 341 pos = fctx.filerev()
342 342 start = max(0, pos - pagelen + 1)
343 343 end = min(count, start + pagelen)
344 344 pos = end - 1
345 345 parity = paritygen(self.stripecount, offset=start-end)
346 346
347 347 def entries(**map):
348 348 l = []
349 349
350 350 for i in xrange(start, end):
351 351 ctx = fctx.filectx(i)
352 352 n = fl.node(i)
353 353
354 354 l.insert(0, {"parity": parity.next(),
355 355 "filerev": i,
356 356 "file": f,
357 357 "node": hex(ctx.node()),
358 358 "author": ctx.user(),
359 359 "date": ctx.date(),
360 360 "rename": self.renamelink(fl, n),
361 361 "parent": self.siblings(fctx.parents()),
362 362 "child": self.siblings(fctx.children()),
363 363 "desc": ctx.description()})
364 364
365 365 for e in l:
366 366 yield e
367 367
368 368 nodefunc = lambda x: fctx.filectx(fileid=x)
369 369 nav = revnavgen(pos, pagelen, count, nodefunc)
370 370 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
371 371 entries=entries)
372 372
373 373 def filerevision(self, fctx):
374 374 f = fctx.path()
375 375 text = fctx.data()
376 376 fl = fctx.filelog()
377 377 n = fctx.filenode()
378 378 parity = paritygen(self.stripecount)
379 379
380 380 mt = mimetypes.guess_type(f)[0]
381 381 rawtext = text
382 382 if util.binary(text):
383 383 mt = mt or 'application/octet-stream'
384 384 text = "(binary:%s)" % mt
385 385 mt = mt or 'text/plain'
386 386
387 387 def lines():
388 388 for l, t in enumerate(text.splitlines(1)):
389 389 yield {"line": t,
390 390 "linenumber": "% 6d" % (l + 1),
391 391 "parity": parity.next()}
392 392
393 393 yield self.t("filerevision",
394 394 file=f,
395 395 path=_up(f),
396 396 text=lines(),
397 397 raw=rawtext,
398 398 mimetype=mt,
399 399 rev=fctx.rev(),
400 400 node=hex(fctx.node()),
401 401 author=fctx.user(),
402 402 date=fctx.date(),
403 403 desc=fctx.description(),
404 404 parent=self.siblings(fctx.parents()),
405 405 child=self.siblings(fctx.children()),
406 406 rename=self.renamelink(fl, n),
407 407 permissions=fctx.manifest().flags(f))
408 408
409 409 def fileannotate(self, fctx):
410 410 f = fctx.path()
411 411 n = fctx.filenode()
412 412 fl = fctx.filelog()
413 413 parity = paritygen(self.stripecount)
414 414
415 415 def annotate(**map):
416 416 last = None
417 417 for f, l in fctx.annotate(follow=True):
418 418 fnode = f.filenode()
419 419 name = self.repo.ui.shortuser(f.user())
420 420
421 421 if last != fnode:
422 422 last = fnode
423 423
424 424 yield {"parity": parity.next(),
425 425 "node": hex(f.node()),
426 426 "rev": f.rev(),
427 427 "author": name,
428 428 "file": f.path(),
429 429 "line": l}
430 430
431 431 yield self.t("fileannotate",
432 432 file=f,
433 433 annotate=annotate,
434 434 path=_up(f),
435 435 rev=fctx.rev(),
436 436 node=hex(fctx.node()),
437 437 author=fctx.user(),
438 438 date=fctx.date(),
439 439 desc=fctx.description(),
440 440 rename=self.renamelink(fl, n),
441 441 parent=self.siblings(fctx.parents()),
442 442 child=self.siblings(fctx.children()),
443 443 permissions=fctx.manifest().flags(f))
444 444
445 445 def manifest(self, ctx, path):
446 446 mf = ctx.manifest()
447 447 node = ctx.node()
448 448
449 449 files = {}
450 450 parity = paritygen(self.stripecount)
451 451
452 452 if path and path[-1] != "/":
453 453 path += "/"
454 454 l = len(path)
455 455 abspath = "/" + path
456 456
457 457 for f, n in mf.items():
458 458 if f[:l] != path:
459 459 continue
460 460 remain = f[l:]
461 461 if "/" in remain:
462 462 short = remain[:remain.index("/") + 1] # bleah
463 463 files[short] = (f, None)
464 464 else:
465 465 short = os.path.basename(remain)
466 466 files[short] = (f, n)
467 467
468 468 def filelist(**map):
469 469 fl = files.keys()
470 470 fl.sort()
471 471 for f in fl:
472 472 full, fnode = files[f]
473 473 if not fnode:
474 474 continue
475 475
476 476 yield {"file": full,
477 477 "parity": parity.next(),
478 478 "basename": f,
479 479 "size": ctx.filectx(full).size(),
480 480 "permissions": mf.flags(full)}
481 481
482 482 def dirlist(**map):
483 483 fl = files.keys()
484 484 fl.sort()
485 485 for f in fl:
486 486 full, fnode = files[f]
487 487 if fnode:
488 488 continue
489 489
490 490 yield {"parity": parity.next(),
491 491 "path": os.path.join(abspath, f),
492 492 "basename": f[:-1]}
493 493
494 494 yield self.t("manifest",
495 495 rev=ctx.rev(),
496 496 node=hex(node),
497 497 path=abspath,
498 498 up=_up(abspath),
499 499 upparity=parity.next(),
500 500 fentries=filelist,
501 501 dentries=dirlist,
502 502 archives=self.archivelist(hex(node)),
503 503 tags=self.nodetagsdict(node),
504 504 branches=self.nodebranchdict(ctx))
505 505
506 506 def tags(self):
507 507 i = self.repo.tagslist()
508 508 i.reverse()
509 509 parity = paritygen(self.stripecount)
510 510
511 511 def entries(notip=False, **map):
512 512 for k, n in i:
513 513 if notip and k == "tip":
514 514 continue
515 515 yield {"parity": parity.next(),
516 516 "tag": k,
517 517 "date": self.repo.changectx(n).date(),
518 518 "node": hex(n)}
519 519
520 520 yield self.t("tags",
521 521 node=hex(self.repo.changelog.tip()),
522 522 entries=lambda **x: entries(False, **x),
523 523 entriesnotip=lambda **x: entries(True, **x))
524 524
525 525 def summary(self):
526 526 i = self.repo.tagslist()
527 527 i.reverse()
528 528
529 529 def tagentries(**map):
530 530 parity = paritygen(self.stripecount)
531 531 count = 0
532 532 for k, n in i:
533 533 if k == "tip": # skip tip
534 534 continue;
535 535
536 536 count += 1
537 537 if count > 10: # limit to 10 tags
538 538 break;
539 539
540 540 yield self.t("tagentry",
541 541 parity=parity.next(),
542 542 tag=k,
543 543 node=hex(n),
544 544 date=self.repo.changectx(n).date())
545 545
546 546
547 547 def branches(**map):
548 548 parity = paritygen(self.stripecount)
549 549
550 550 b = self.repo.branchtags()
551 551 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
552 552 l.sort()
553 553
554 554 for r,n,t in l:
555 555 ctx = self.repo.changectx(n)
556 556
557 557 yield {'parity': parity.next(),
558 558 'branch': t,
559 559 'node': hex(n),
560 560 'date': ctx.date()}
561 561
562 562 def changelist(**map):
563 563 parity = paritygen(self.stripecount, offset=start-end)
564 564 l = [] # build a list in forward order for efficiency
565 565 for i in xrange(start, end):
566 566 ctx = self.repo.changectx(i)
567 567 n = ctx.node()
568 568 hn = hex(n)
569 569
570 570 l.insert(0, self.t(
571 571 'shortlogentry',
572 572 parity=parity.next(),
573 573 author=ctx.user(),
574 574 desc=ctx.description(),
575 575 date=ctx.date(),
576 576 rev=i,
577 577 node=hn,
578 578 tags=self.nodetagsdict(n),
579 579 branches=self.nodebranchdict(ctx)))
580 580
581 581 yield l
582 582
583 583 cl = self.repo.changelog
584 584 count = cl.count()
585 585 start = max(0, count - self.maxchanges)
586 586 end = min(count, start + self.maxchanges)
587 587
588 588 yield self.t("summary",
589 589 desc=self.config("web", "description", "unknown"),
590 590 owner=(self.config("ui", "username") or # preferred
591 591 self.config("web", "contact") or # deprecated
592 592 self.config("web", "author", "unknown")), # also
593 593 lastchange=cl.read(cl.tip())[2],
594 594 tags=tagentries,
595 595 branches=branches,
596 596 shortlog=changelist,
597 597 node=hex(cl.tip()),
598 598 archives=self.archivelist("tip"))
599 599
600 600 def filediff(self, fctx):
601 601 n = fctx.node()
602 602 path = fctx.path()
603 603 parents = fctx.parents()
604 604 p1 = parents and parents[0].node() or nullid
605 605
606 606 def diff(**map):
607 607 yield self.diff(p1, n, [path])
608 608
609 609 yield self.t("filediff",
610 610 file=path,
611 611 node=hex(n),
612 612 rev=fctx.rev(),
613 613 parent=self.siblings(parents),
614 614 child=self.siblings(fctx.children()),
615 615 diff=diff)
616 616
617 617 archive_specs = {
618 618 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
619 619 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
620 620 'zip': ('application/zip', 'zip', '.zip', None),
621 621 }
622 622
623 623 def archive(self, req, key, type_):
624 624 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
625 625 cnode = self.repo.lookup(key)
626 626 arch_version = key
627 627 if cnode == key or key == 'tip':
628 628 arch_version = short(cnode)
629 629 name = "%s-%s" % (reponame, arch_version)
630 630 mimetype, artype, extension, encoding = self.archive_specs[type_]
631 631 headers = [('Content-type', mimetype),
632 632 ('Content-disposition', 'attachment; filename=%s%s' %
633 633 (name, extension))]
634 634 if encoding:
635 635 headers.append(('Content-encoding', encoding))
636 636 req.header(headers)
637 637 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
638 638
639 639 # add tags to things
640 640 # tags -> list of changesets corresponding to tags
641 641 # find tag, changeset, file
642 642
643 643 def cleanpath(self, path):
644 644 path = path.lstrip('/')
645 645 return util.canonpath(self.repo.root, '', path)
646 646
647 647 def run(self):
648 648 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
649 649 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
650 650 import mercurial.hgweb.wsgicgi as wsgicgi
651 651 from request import wsgiapplication
652 652 def make_web_app():
653 653 return self
654 654 wsgicgi.launch(wsgiapplication(make_web_app))
655 655
656 656 def run_wsgi(self, req):
657 657 def header(**map):
658 658 header_file = cStringIO.StringIO(
659 659 ''.join(self.t("header", encoding=self.encoding, **map)))
660 660 msg = mimetools.Message(header_file, 0)
661 661 req.header(msg.items())
662 662 yield header_file.read()
663 663
664 664 def rawfileheader(**map):
665 665 req.header([('Content-type', map['mimetype']),
666 666 ('Content-disposition', 'filename=%s' % map['file']),
667 667 ('Content-length', str(len(map['raw'])))])
668 668 yield ''
669 669
670 670 def footer(**map):
671 671 yield self.t("footer", **map)
672 672
673 673 def motd(**map):
674 674 yield self.config("web", "motd", "")
675 675
676 676 def expand_form(form):
677 677 shortcuts = {
678 678 'cl': [('cmd', ['changelog']), ('rev', None)],
679 679 'sl': [('cmd', ['shortlog']), ('rev', None)],
680 680 'cs': [('cmd', ['changeset']), ('node', None)],
681 681 'f': [('cmd', ['file']), ('filenode', None)],
682 682 'fl': [('cmd', ['filelog']), ('filenode', None)],
683 683 'fd': [('cmd', ['filediff']), ('node', None)],
684 684 'fa': [('cmd', ['annotate']), ('filenode', None)],
685 685 'mf': [('cmd', ['manifest']), ('manifest', None)],
686 686 'ca': [('cmd', ['archive']), ('node', None)],
687 687 'tags': [('cmd', ['tags'])],
688 688 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
689 689 'static': [('cmd', ['static']), ('file', None)]
690 690 }
691 691
692 692 for k in shortcuts.iterkeys():
693 693 if form.has_key(k):
694 694 for name, value in shortcuts[k]:
695 695 if value is None:
696 696 value = form[k]
697 697 form[name] = value
698 698 del form[k]
699 699
700 700 def rewrite_request(req):
701 701 '''translate new web interface to traditional format'''
702 702
703 703 def spliturl(req):
704 704 def firstitem(query):
705 705 return query.split('&', 1)[0].split(';', 1)[0]
706 706
707 707 def normurl(url):
708 708 inner = '/'.join([x for x in url.split('/') if x])
709 709 tl = len(url) > 1 and url.endswith('/') and '/' or ''
710 710
711 711 return '%s%s%s' % (url.startswith('/') and '/' or '',
712 712 inner, tl)
713 713
714 714 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
715 715 pi = normurl(req.env.get('PATH_INFO', ''))
716 716 if pi:
717 717 # strip leading /
718 718 pi = pi[1:]
719 719 if pi:
720 720 root = root[:root.rfind(pi)]
721 721 if req.env.has_key('REPO_NAME'):
722 722 rn = req.env['REPO_NAME'] + '/'
723 723 root += rn
724 724 query = pi[len(rn):]
725 725 else:
726 726 query = pi
727 727 else:
728 728 root += '?'
729 729 query = firstitem(req.env['QUERY_STRING'])
730 730
731 731 return (root, query)
732 732
733 733 req.url, query = spliturl(req)
734 734
735 735 if req.form.has_key('cmd'):
736 736 # old style
737 737 return
738 738
739 739 args = query.split('/', 2)
740 740 if not args or not args[0]:
741 741 return
742 742
743 743 cmd = args.pop(0)
744 744 style = cmd.rfind('-')
745 745 if style != -1:
746 746 req.form['style'] = [cmd[:style]]
747 747 cmd = cmd[style+1:]
748 748 # avoid accepting e.g. style parameter as command
749 749 if hasattr(self, 'do_' + cmd):
750 750 req.form['cmd'] = [cmd]
751 751
752 752 if args and args[0]:
753 753 node = args.pop(0)
754 754 req.form['node'] = [node]
755 755 if args:
756 756 req.form['file'] = args
757 757
758 758 if cmd == 'static':
759 759 req.form['file'] = req.form['node']
760 760 elif cmd == 'archive':
761 761 fn = req.form['node'][0]
762 762 for type_, spec in self.archive_specs.iteritems():
763 763 ext = spec[2]
764 764 if fn.endswith(ext):
765 765 req.form['node'] = [fn[:-len(ext)]]
766 766 req.form['type'] = [type_]
767 767
768 768 def sessionvars(**map):
769 769 fields = []
770 770 if req.form.has_key('style'):
771 771 style = req.form['style'][0]
772 772 if style != self.config('web', 'style', ''):
773 773 fields.append(('style', style))
774 774
775 775 separator = req.url[-1] == '?' and ';' or '?'
776 776 for name, value in fields:
777 777 yield dict(name=name, value=value, separator=separator)
778 778 separator = ';'
779 779
780 780 self.refresh()
781 781
782 782 expand_form(req.form)
783 783 rewrite_request(req)
784 784
785 785 style = self.config("web", "style", "")
786 786 if req.form.has_key('style'):
787 787 style = req.form['style'][0]
788 788 mapfile = style_map(self.templatepath, style)
789 789
790 790 proto = req.env.get('wsgi.url_scheme')
791 791 if proto == 'https':
792 792 proto = 'https'
793 793 default_port = "443"
794 794 else:
795 795 proto = 'http'
796 796 default_port = "80"
797 797
798 798 port = req.env["SERVER_PORT"]
799 799 port = port != default_port and (":" + port) or ""
800 800 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
801 801 staticurl = self.config("web", "staticurl") or req.url + 'static/'
802 802 if not staticurl.endswith('/'):
803 803 staticurl += '/'
804 804
805 805 if not self.reponame:
806 806 self.reponame = (self.config("web", "name")
807 807 or req.env.get('REPO_NAME')
808 808 or req.url.strip('/') or self.repo.root)
809 809
810 810 self.t = templater.templater(mapfile, templater.common_filters,
811 811 defaults={"url": req.url,
812 812 "staticurl": staticurl,
813 813 "urlbase": urlbase,
814 814 "repo": self.reponame,
815 815 "header": header,
816 816 "footer": footer,
817 817 "motd": motd,
818 818 "rawfileheader": rawfileheader,
819 819 "sessionvars": sessionvars
820 820 })
821 821
822 822 try:
823 823 if not req.form.has_key('cmd'):
824 824 req.form['cmd'] = [self.t.cache['default']]
825 825
826 826 cmd = req.form['cmd'][0]
827 827
828 828 method = getattr(self, 'do_' + cmd, None)
829 829 if method:
830 830 try:
831 831 method(req)
832 832 except (hg.RepoError, revlog.RevlogError), inst:
833 833 req.write(self.t("error", error=str(inst)))
834 834 else:
835 835 req.write(self.t("error", error='No such method: ' + cmd))
836 836 finally:
837 837 self.t = None
838 838
839 839 def changectx(self, req):
840 840 if req.form.has_key('node'):
841 841 changeid = req.form['node'][0]
842 842 elif req.form.has_key('manifest'):
843 843 changeid = req.form['manifest'][0]
844 844 else:
845 845 changeid = self.repo.changelog.count() - 1
846 846
847 847 try:
848 848 ctx = self.repo.changectx(changeid)
849 849 except hg.RepoError:
850 850 man = self.repo.manifest
851 851 mn = man.lookup(changeid)
852 852 ctx = self.repo.changectx(man.linkrev(mn))
853 853
854 854 return ctx
855 855
856 856 def filectx(self, req):
857 857 path = self.cleanpath(req.form['file'][0])
858 858 if req.form.has_key('node'):
859 859 changeid = req.form['node'][0]
860 860 else:
861 861 changeid = req.form['filenode'][0]
862 862 try:
863 863 ctx = self.repo.changectx(changeid)
864 864 fctx = ctx.filectx(path)
865 865 except hg.RepoError:
866 866 fctx = self.repo.filectx(path, fileid=changeid)
867 867
868 868 return fctx
869 869
870 870 def do_log(self, req):
871 871 if req.form.has_key('file') and req.form['file'][0]:
872 872 self.do_filelog(req)
873 873 else:
874 874 self.do_changelog(req)
875 875
876 876 def do_rev(self, req):
877 877 self.do_changeset(req)
878 878
879 879 def do_file(self, req):
880 880 path = self.cleanpath(req.form.get('file', [''])[0])
881 881 if path:
882 882 try:
883 883 req.write(self.filerevision(self.filectx(req)))
884 884 return
885 885 except revlog.LookupError:
886 886 pass
887 887
888 888 req.write(self.manifest(self.changectx(req), path))
889 889
890 890 def do_diff(self, req):
891 891 self.do_filediff(req)
892 892
893 893 def do_changelog(self, req, shortlog = False):
894 894 if req.form.has_key('node'):
895 895 ctx = self.changectx(req)
896 896 else:
897 897 if req.form.has_key('rev'):
898 898 hi = req.form['rev'][0]
899 899 else:
900 900 hi = self.repo.changelog.count() - 1
901 901 try:
902 902 ctx = self.repo.changectx(hi)
903 903 except hg.RepoError:
904 904 req.write(self.search(hi)) # XXX redirect to 404 page?
905 905 return
906 906
907 907 req.write(self.changelog(ctx, shortlog = shortlog))
908 908
909 909 def do_shortlog(self, req):
910 910 self.do_changelog(req, shortlog = True)
911 911
912 912 def do_changeset(self, req):
913 913 req.write(self.changeset(self.changectx(req)))
914 914
915 915 def do_manifest(self, req):
916 916 req.write(self.manifest(self.changectx(req),
917 917 self.cleanpath(req.form['path'][0])))
918 918
919 919 def do_tags(self, req):
920 920 req.write(self.tags())
921 921
922 922 def do_summary(self, req):
923 923 req.write(self.summary())
924 924
925 925 def do_filediff(self, req):
926 926 req.write(self.filediff(self.filectx(req)))
927 927
928 928 def do_annotate(self, req):
929 929 req.write(self.fileannotate(self.filectx(req)))
930 930
931 931 def do_filelog(self, req):
932 932 req.write(self.filelog(self.filectx(req)))
933 933
934 934 def do_lookup(self, req):
935 935 try:
936 936 r = hex(self.repo.lookup(req.form['key'][0]))
937 937 success = 1
938 938 except Exception,inst:
939 939 r = str(inst)
940 940 success = 0
941 941 resp = "%s %s\n" % (success, r)
942 942 req.httphdr("application/mercurial-0.1", length=len(resp))
943 943 req.write(resp)
944 944
945 945 def do_heads(self, req):
946 946 resp = " ".join(map(hex, self.repo.heads())) + "\n"
947 947 req.httphdr("application/mercurial-0.1", length=len(resp))
948 948 req.write(resp)
949 949
950 950 def do_branches(self, req):
951 951 nodes = []
952 952 if req.form.has_key('nodes'):
953 953 nodes = map(bin, req.form['nodes'][0].split(" "))
954 954 resp = cStringIO.StringIO()
955 955 for b in self.repo.branches(nodes):
956 956 resp.write(" ".join(map(hex, b)) + "\n")
957 957 resp = resp.getvalue()
958 958 req.httphdr("application/mercurial-0.1", length=len(resp))
959 959 req.write(resp)
960 960
961 961 def do_between(self, req):
962 962 if req.form.has_key('pairs'):
963 963 pairs = [map(bin, p.split("-"))
964 964 for p in req.form['pairs'][0].split(" ")]
965 965 resp = cStringIO.StringIO()
966 966 for b in self.repo.between(pairs):
967 967 resp.write(" ".join(map(hex, b)) + "\n")
968 968 resp = resp.getvalue()
969 969 req.httphdr("application/mercurial-0.1", length=len(resp))
970 970 req.write(resp)
971 971
972 972 def do_changegroup(self, req):
973 973 req.httphdr("application/mercurial-0.1")
974 974 nodes = []
975 975 if not self.allowpull:
976 976 return
977 977
978 978 if req.form.has_key('roots'):
979 979 nodes = map(bin, req.form['roots'][0].split(" "))
980 980
981 981 z = zlib.compressobj()
982 982 f = self.repo.changegroup(nodes, 'serve')
983 983 while 1:
984 984 chunk = f.read(4096)
985 985 if not chunk:
986 986 break
987 987 req.write(z.compress(chunk))
988 988
989 989 req.write(z.flush())
990 990
991 991 def do_changegroupsubset(self, req):
992 992 req.httphdr("application/mercurial-0.1")
993 993 bases = []
994 994 heads = []
995 995 if not self.allowpull:
996 996 return
997 997
998 998 if req.form.has_key('bases'):
999 999 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
1000 1000 if req.form.has_key('heads'):
1001 1001 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
1002 1002
1003 1003 z = zlib.compressobj()
1004 1004 f = self.repo.changegroupsubset(bases, heads, 'serve')
1005 1005 while 1:
1006 1006 chunk = f.read(4096)
1007 1007 if not chunk:
1008 1008 break
1009 1009 req.write(z.compress(chunk))
1010 1010
1011 1011 req.write(z.flush())
1012 1012
1013 1013 def do_archive(self, req):
1014 1014 type_ = req.form['type'][0]
1015 1015 allowed = self.configlist("web", "allow_archive")
1016 1016 if (type_ in self.archives and (type_ in allowed or
1017 1017 self.configbool("web", "allow" + type_, False))):
1018 1018 self.archive(req, req.form['node'][0], type_)
1019 1019 return
1020 1020
1021 1021 req.write(self.t("error"))
1022 1022
1023 1023 def do_static(self, req):
1024 1024 fname = req.form['file'][0]
1025 1025 # a repo owner may set web.static in .hg/hgrc to get any file
1026 1026 # readable by the user running the CGI script
1027 1027 static = self.config("web", "static",
1028 1028 os.path.join(self.templatepath, "static"),
1029 1029 untrusted=False)
1030 1030 req.write(staticfile(static, fname, req)
1031 1031 or self.t("error", error="%r not found" % fname))
1032 1032
1033 1033 def do_capabilities(self, req):
1034 1034 caps = ['lookup', 'changegroupsubset']
1035 1035 if self.configbool('server', 'uncompressed'):
1036 1036 caps.append('stream=%d' % self.repo.changelog.version)
1037 1037 # XXX: make configurable and/or share code with do_unbundle:
1038 1038 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1039 1039 if unbundleversions:
1040 1040 caps.append('unbundle=%s' % ','.join(unbundleversions))
1041 1041 resp = ' '.join(caps)
1042 1042 req.httphdr("application/mercurial-0.1", length=len(resp))
1043 1043 req.write(resp)
1044 1044
1045 1045 def check_perm(self, req, op, default):
1046 1046 '''check permission for operation based on user auth.
1047 1047 return true if op allowed, else false.
1048 1048 default is policy to use if no config given.'''
1049 1049
1050 1050 user = req.env.get('REMOTE_USER')
1051 1051
1052 1052 deny = self.configlist('web', 'deny_' + op)
1053 1053 if deny and (not user or deny == ['*'] or user in deny):
1054 1054 return False
1055 1055
1056 1056 allow = self.configlist('web', 'allow_' + op)
1057 1057 return (allow and (allow == ['*'] or user in allow)) or default
1058 1058
1059 1059 def do_unbundle(self, req):
1060 1060 def bail(response, headers={}):
1061 1061 length = int(req.env['CONTENT_LENGTH'])
1062 1062 for s in util.filechunkiter(req, limit=length):
1063 1063 # drain incoming bundle, else client will not see
1064 1064 # response when run outside cgi script
1065 1065 pass
1066 1066 req.httphdr("application/mercurial-0.1", headers=headers)
1067 1067 req.write('0\n')
1068 1068 req.write(response)
1069 1069
1070 1070 # require ssl by default, auth info cannot be sniffed and
1071 1071 # replayed
1072 1072 ssl_req = self.configbool('web', 'push_ssl', True)
1073 1073 if ssl_req:
1074 1074 if req.env.get('wsgi.url_scheme') != 'https':
1075 1075 bail(_('ssl required\n'))
1076 1076 return
1077 1077 proto = 'https'
1078 1078 else:
1079 1079 proto = 'http'
1080 1080
1081 1081 # do not allow push unless explicitly allowed
1082 1082 if not self.check_perm(req, 'push', False):
1083 1083 bail(_('push not authorized\n'),
1084 1084 headers={'status': '401 Unauthorized'})
1085 1085 return
1086 1086
1087 1087 their_heads = req.form['heads'][0].split(' ')
1088 1088
1089 1089 def check_heads():
1090 1090 heads = map(hex, self.repo.heads())
1091 1091 return their_heads == [hex('force')] or their_heads == heads
1092 1092
1093 1093 # fail early if possible
1094 1094 if not check_heads():
1095 1095 bail(_('unsynced changes\n'))
1096 1096 return
1097 1097
1098 1098 req.httphdr("application/mercurial-0.1")
1099 1099
1100 1100 # do not lock repo until all changegroup data is
1101 1101 # streamed. save to temporary file.
1102 1102
1103 1103 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1104 1104 fp = os.fdopen(fd, 'wb+')
1105 1105 try:
1106 1106 length = int(req.env['CONTENT_LENGTH'])
1107 1107 for s in util.filechunkiter(req, limit=length):
1108 1108 fp.write(s)
1109 1109
1110 1110 try:
1111 1111 lock = self.repo.lock()
1112 1112 try:
1113 1113 if not check_heads():
1114 1114 req.write('0\n')
1115 1115 req.write(_('unsynced changes\n'))
1116 1116 return
1117 1117
1118 1118 fp.seek(0)
1119 1119 header = fp.read(6)
1120 1120 if not header.startswith("HG"):
1121 1121 # old client with uncompressed bundle
1122 1122 def generator(f):
1123 1123 yield header
1124 1124 for chunk in f:
1125 1125 yield chunk
1126 1126 elif not header.startswith("HG10"):
1127 1127 req.write("0\n")
1128 1128 req.write(_("unknown bundle version\n"))
1129 1129 return
1130 1130 elif header == "HG10GZ":
1131 1131 def generator(f):
1132 1132 zd = zlib.decompressobj()
1133 1133 for chunk in f:
1134 1134 yield zd.decompress(chunk)
1135 1135 elif header == "HG10BZ":
1136 1136 def generator(f):
1137 1137 zd = bz2.BZ2Decompressor()
1138 1138 zd.decompress("BZ")
1139 1139 for chunk in f:
1140 1140 yield zd.decompress(chunk)
1141 1141 elif header == "HG10UN":
1142 1142 def generator(f):
1143 1143 for chunk in f:
1144 1144 yield chunk
1145 1145 else:
1146 1146 req.write("0\n")
1147 1147 req.write(_("unknown bundle compression type\n"))
1148 1148 return
1149 1149 gen = generator(util.filechunkiter(fp, 4096))
1150 1150
1151 1151 # send addchangegroup output to client
1152 1152
1153 1153 old_stdout = sys.stdout
1154 1154 sys.stdout = cStringIO.StringIO()
1155 1155
1156 1156 try:
1157 1157 url = 'remote:%s:%s' % (proto,
1158 1158 req.env.get('REMOTE_HOST', ''))
1159 1159 try:
1160 1160 ret = self.repo.addchangegroup(
1161 1161 util.chunkbuffer(gen), 'serve', url)
1162 1162 except util.Abort, inst:
1163 1163 sys.stdout.write("abort: %s\n" % inst)
1164 1164 ret = 0
1165 1165 finally:
1166 1166 val = sys.stdout.getvalue()
1167 1167 sys.stdout = old_stdout
1168 1168 req.write('%d\n' % ret)
1169 1169 req.write(val)
1170 1170 finally:
1171 lock.release()
1171 del lock
1172 1172 except (OSError, IOError), inst:
1173 1173 req.write('0\n')
1174 1174 filename = getattr(inst, 'filename', '')
1175 1175 # Don't send our filesystem layout to the client
1176 1176 if filename.startswith(self.repo.root):
1177 1177 filename = filename[len(self.repo.root)+1:]
1178 1178 else:
1179 1179 filename = ''
1180 1180 error = getattr(inst, 'strerror', 'Unknown error')
1181 1181 req.write('%s: %s\n' % (error, filename))
1182 1182 finally:
1183 1183 fp.close()
1184 1184 os.unlink(tempname)
1185 1185
1186 1186 def do_stream_out(self, req):
1187 1187 req.httphdr("application/mercurial-0.1")
1188 1188 streamclone.stream_out(self.repo, req, untrusted=True)
@@ -1,1950 +1,1976 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util, extensions, hook
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = ('lookup', 'changegroupsubset')
17 17 supported = ('revlogv1', 'store')
18 18
19 19 def __del__(self):
20 20 self.transhandle = None
21 21 def __init__(self, parentui, path=None, create=0):
22 22 repo.repository.__init__(self)
23 23 self.path = path
24 24 self.root = os.path.realpath(path)
25 25 self.path = os.path.join(self.root, ".hg")
26 26 self.origroot = path
27 27 self.opener = util.opener(self.path)
28 28 self.wopener = util.opener(self.root)
29 29
30 30 if not os.path.isdir(self.path):
31 31 if create:
32 32 if not os.path.exists(path):
33 33 os.mkdir(path)
34 34 os.mkdir(self.path)
35 35 requirements = ["revlogv1"]
36 36 if parentui.configbool('format', 'usestore', True):
37 37 os.mkdir(os.path.join(self.path, "store"))
38 38 requirements.append("store")
39 39 # create an invalid changelog
40 40 self.opener("00changelog.i", "a").write(
41 41 '\0\0\0\2' # represents revlogv2
42 42 ' dummy changelog to prevent using the old repo layout'
43 43 )
44 44 reqfile = self.opener("requires", "w")
45 45 for r in requirements:
46 46 reqfile.write("%s\n" % r)
47 47 reqfile.close()
48 48 else:
49 49 raise repo.RepoError(_("repository %s not found") % path)
50 50 elif create:
51 51 raise repo.RepoError(_("repository %s already exists") % path)
52 52 else:
53 53 # find requirements
54 54 try:
55 55 requirements = self.opener("requires").read().splitlines()
56 56 except IOError, inst:
57 57 if inst.errno != errno.ENOENT:
58 58 raise
59 59 requirements = []
60 60 # check them
61 61 for r in requirements:
62 62 if r not in self.supported:
63 63 raise repo.RepoError(_("requirement '%s' not supported") % r)
64 64
65 65 # setup store
66 66 if "store" in requirements:
67 67 self.encodefn = util.encodefilename
68 68 self.decodefn = util.decodefilename
69 69 self.spath = os.path.join(self.path, "store")
70 70 else:
71 71 self.encodefn = lambda x: x
72 72 self.decodefn = lambda x: x
73 73 self.spath = self.path
74 74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
75 75
76 76 self.ui = ui.ui(parentui=parentui)
77 77 try:
78 78 self.ui.readconfig(self.join("hgrc"), self.root)
79 79 extensions.loadall(self.ui)
80 80 except IOError:
81 81 pass
82 82
83 83 self.tagscache = None
84 84 self.branchcache = None
85 85 self.nodetagscache = None
86 86 self.filterpats = {}
87 87 self.transhandle = None
88 88
89 89 def __getattr__(self, name):
90 90 if name == 'changelog':
91 91 self.changelog = changelog.changelog(self.sopener)
92 92 self.sopener.defversion = self.changelog.version
93 93 return self.changelog
94 94 if name == 'manifest':
95 95 self.changelog
96 96 self.manifest = manifest.manifest(self.sopener)
97 97 return self.manifest
98 98 if name == 'dirstate':
99 99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
100 100 return self.dirstate
101 101 else:
102 102 raise AttributeError, name
103 103
104 104 def url(self):
105 105 return 'file:' + self.root
106 106
107 107 def hook(self, name, throw=False, **args):
108 108 return hook.hook(self.ui, self, name, throw, **args)
109 109
110 110 tag_disallowed = ':\r\n'
111 111
112 112 def _tag(self, name, node, message, local, user, date, parent=None,
113 113 extra={}):
114 114 use_dirstate = parent is None
115 115
116 116 for c in self.tag_disallowed:
117 117 if c in name:
118 118 raise util.Abort(_('%r cannot be used in a tag name') % c)
119 119
120 120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
121 121
122 122 def writetag(fp, name, munge, prevtags):
123 123 if prevtags and prevtags[-1] != '\n':
124 124 fp.write('\n')
125 125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
126 126 fp.close()
127 127 self.hook('tag', node=hex(node), tag=name, local=local)
128 128
129 129 prevtags = ''
130 130 if local:
131 131 try:
132 132 fp = self.opener('localtags', 'r+')
133 133 except IOError, err:
134 134 fp = self.opener('localtags', 'a')
135 135 else:
136 136 prevtags = fp.read()
137 137
138 138 # local tags are stored in the current charset
139 139 writetag(fp, name, None, prevtags)
140 140 return
141 141
142 142 if use_dirstate:
143 143 try:
144 144 fp = self.wfile('.hgtags', 'rb+')
145 145 except IOError, err:
146 146 fp = self.wfile('.hgtags', 'ab')
147 147 else:
148 148 prevtags = fp.read()
149 149 else:
150 150 try:
151 151 prevtags = self.filectx('.hgtags', parent).data()
152 152 except revlog.LookupError:
153 153 pass
154 154 fp = self.wfile('.hgtags', 'wb')
155 155
156 156 # committed tags are stored in UTF-8
157 157 writetag(fp, name, util.fromlocal, prevtags)
158 158
159 159 if use_dirstate and '.hgtags' not in self.dirstate:
160 160 self.add(['.hgtags'])
161 161
162 162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
163 163 extra=extra)
164 164
165 165 self.hook('tag', node=hex(node), tag=name, local=local)
166 166
167 167 return tagnode
168 168
169 169 def tag(self, name, node, message, local, user, date):
170 170 '''tag a revision with a symbolic name.
171 171
172 172 if local is True, the tag is stored in a per-repository file.
173 173 otherwise, it is stored in the .hgtags file, and a new
174 174 changeset is committed with the change.
175 175
176 176 keyword arguments:
177 177
178 178 local: whether to store tag in non-version-controlled file
179 179 (default False)
180 180
181 181 message: commit message to use if committing
182 182
183 183 user: name of user to use if committing
184 184
185 185 date: date tuple to use if committing'''
186 186
187 187 for x in self.status()[:5]:
188 188 if '.hgtags' in x:
189 189 raise util.Abort(_('working copy of .hgtags is changed '
190 190 '(please commit .hgtags manually)'))
191 191
192 192
193 193 self._tag(name, node, message, local, user, date)
194 194
195 195 def tags(self):
196 196 '''return a mapping of tag to node'''
197 197 if self.tagscache:
198 198 return self.tagscache
199 199
200 200 globaltags = {}
201 201
202 202 def readtags(lines, fn):
203 203 filetags = {}
204 204 count = 0
205 205
206 206 def warn(msg):
207 207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
208 208
209 209 for l in lines:
210 210 count += 1
211 211 if not l:
212 212 continue
213 213 s = l.split(" ", 1)
214 214 if len(s) != 2:
215 215 warn(_("cannot parse entry"))
216 216 continue
217 217 node, key = s
218 218 key = util.tolocal(key.strip()) # stored in UTF-8
219 219 try:
220 220 bin_n = bin(node)
221 221 except TypeError:
222 222 warn(_("node '%s' is not well formed") % node)
223 223 continue
224 224 if bin_n not in self.changelog.nodemap:
225 225 warn(_("tag '%s' refers to unknown node") % key)
226 226 continue
227 227
228 228 h = []
229 229 if key in filetags:
230 230 n, h = filetags[key]
231 231 h.append(n)
232 232 filetags[key] = (bin_n, h)
233 233
234 234 for k, nh in filetags.items():
235 235 if k not in globaltags:
236 236 globaltags[k] = nh
237 237 continue
238 238 # we prefer the global tag if:
239 239 # it supersedes us OR
240 240 # mutual supersedes and it has a higher rank
241 241 # otherwise we win because we're tip-most
242 242 an, ah = nh
243 243 bn, bh = globaltags[k]
244 244 if (bn != an and an in bh and
245 245 (bn not in ah or len(bh) > len(ah))):
246 246 an = bn
247 247 ah.extend([n for n in bh if n not in ah])
248 248 globaltags[k] = an, ah
249 249
250 250 # read the tags file from each head, ending with the tip
251 251 f = None
252 252 for rev, node, fnode in self._hgtagsnodes():
253 253 f = (f and f.filectx(fnode) or
254 254 self.filectx('.hgtags', fileid=fnode))
255 255 readtags(f.data().splitlines(), f)
256 256
257 257 try:
258 258 data = util.fromlocal(self.opener("localtags").read())
259 259 # localtags are stored in the local character set
260 260 # while the internal tag table is stored in UTF-8
261 261 readtags(data.splitlines(), "localtags")
262 262 except IOError:
263 263 pass
264 264
265 265 self.tagscache = {}
266 266 for k,nh in globaltags.items():
267 267 n = nh[0]
268 268 if n != nullid:
269 269 self.tagscache[k] = n
270 270 self.tagscache['tip'] = self.changelog.tip()
271 271
272 272 return self.tagscache
273 273
    def _hgtagsnodes(self):
        '''Return a list of (rev, node, fnode) for each head that has a
        .hgtags file, ordered so the tip comes last.  When several heads
        share the same .hgtags filenode, only the last occurrence is
        kept.'''
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # duplicate file content: drop the earlier entry
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
291 291
292 292 def tagslist(self):
293 293 '''return a list of tags ordered by revision'''
294 294 l = []
295 295 for t, n in self.tags().items():
296 296 try:
297 297 r = self.changelog.rev(n)
298 298 except:
299 299 r = -2 # sort to the beginning of the list if unknown
300 300 l.append((r, t, n))
301 301 l.sort()
302 302 return [(t, n) for r, t, n in l]
303 303
304 304 def nodetags(self, node):
305 305 '''return the tags associated with a node'''
306 306 if not self.nodetagscache:
307 307 self.nodetagscache = {}
308 308 for t, n in self.tags().items():
309 309 self.nodetagscache.setdefault(n, []).append(t)
310 310 return self.nodetagscache.get(node, [])
311 311
    def _branchtags(self):
        # Return the branch name -> tipmost node map, refreshing and
        # rewriting the on-disk cache when it lags behind the changelog.
        partial, last, lrev = self._readbranchcache()

        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            # cache is stale: fold in the missing revisions and persist
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
321 321
    def branchtags(self):
        '''return a dict mapping branch name (in the local charset) to
        the tipmost node on that branch'''
        if self.branchcache is not None:
            return self.branchcache

        self.branchcache = {} # avoid recursion in changectx
        partial = self._branchtags()

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        return self.branchcache
334 334
    def _readbranchcache(self):
        '''Read the on-disk branch cache.  Returns (partial, last, lrev)
        where partial maps branch name -> node and (last, lrev) are the
        tip node/rev the cache was valid for.  Any problem results in an
        empty cache, which will be rebuilt.'''
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            # no readable cache: start from scratch
            return {}, nullid, nullrev

        try:
            # first line: "<hex tip node> <tip rev>"
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('Invalid branch cache: unknown tip')
            # remaining lines: "<hex node> <branch name>"
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # corrupt cache: mention it in debug mode, then rebuild
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
362 362
    def _writebranchcache(self, branches, tip, tiprev):
        # Persist the branch cache atomically; write failures are
        # ignored since the cache can always be rebuilt from the
        # changelog.
        try:
            f = self.opener("branch.cache", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass
372 372
373 373 def _updatebranchcache(self, partial, start, end):
374 374 for r in xrange(start, end):
375 375 c = self.changectx(r)
376 376 b = c.branch()
377 377 partial[b] = c.node()
378 378
    def lookup(self, key):
        '''Resolve key ('.', 'null', rev number, full/partial hex node,
        tag or branch name) to a binary node.  Raises repo.RepoError if
        nothing matches.'''
        if key == '.':
            # '.' means the first parent of the working directory
            key, second = self.dirstate.parents()
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
            if second != nullid:
                self.ui.warn(_("warning: working directory has two parents, "
                               "tag '.' uses the first\n"))
        elif key == 'null':
            return nullid
        # exact match: rev number or full hex node
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        # last resort: unambiguous hex node prefix
        n = self.changelog._partialmatch(key)
        if n:
            return n
        raise repo.RepoError(_("unknown revision '%s'") % key)
400 400
    def dev(self):
        # device number of the repository directory
        return os.lstat(self.path).st_dev
403 403
    def local(self):
        # this is a local (not remote/proxy) repository
        return True
406 406
    def join(self, f):
        '''return the path of f inside the .hg directory'''
        return os.path.join(self.path, f)
409 409
    def sjoin(self, f):
        '''return the path of f inside the store, filename-encoded'''
        f = self.encodefn(f)
        return os.path.join(self.spath, f)
413 413
    def wjoin(self, f):
        '''return the path of f inside the working directory'''
        return os.path.join(self.root, f)
416 416
    def file(self, f):
        '''return the filelog for tracked file f (a leading '/' is
        stripped)'''
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)
421 421
    def changectx(self, changeid=None):
        '''return a changectx for changeid (default: working dir parent)'''
        return context.changectx(self, changeid)
424 424
    def workingctx(self):
        '''return a context for the working directory'''
        return context.workingctx(self)
427 427
    def parents(self, changeid=None):
        '''
        get list of changectxs for parents of changeid or working directory
        '''
        if changeid is None:
            pl = self.dirstate.parents()
        else:
            n = self.changelog.lookup(changeid)
            pl = self.changelog.parents(n)
        # a nullid second parent means there is only one real parent
        if pl[1] == nullid:
            return [self.changectx(pl[0])]
        return [self.changectx(pl[0]), self.changectx(pl[1])]
440 440
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
445 445
    def getcwd(self):
        # current directory, relative to the repository root
        return self.dirstate.getcwd()
448 448
    def pathto(self, f, cwd=None):
        # repo-relative path f rendered relative to cwd
        return self.dirstate.pathto(f, cwd)
451 451
    def wfile(self, f, mode='r'):
        '''open file f from the working directory'''
        return self.wopener(f, mode)
454 454
    def _link(self, f):
        # is working-directory file f a symlink?
        return os.path.islink(self.wjoin(f))
457 457
    def _filter(self, filter, filename, data):
        # Run data through the first command from the given config
        # section ("encode"/"decode") whose pattern matches filename.
        if filter not in self.filterpats:
            # compile the pattern list once per filter section
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                l.append((mf, cmd))
            self.filterpats[filter] = l

        for mf, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = util.filter(data, cmd)
                break

        return data
473 473
    def wread(self, filename):
        '''read filename from the working directory (symlink target for
        links), applying the encode filters'''
        if self._link(filename):
            data = os.readlink(self.wjoin(filename))
        else:
            data = self.wopener(filename, 'r').read()
        return self._filter("encode", filename, data)
480 480
    def wwrite(self, filename, data, flags):
        '''write data to the working directory, applying the decode
        filters and honoring the symlink ("l") and exec ("x") flags'''
        data = self._filter("decode", filename, data)
        if "l" in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                if self._link(filename):
                    # replace an existing symlink with a regular file
                    os.unlink(self.wjoin(filename))
            except OSError:
                pass
            self.wopener(filename, 'w').write(data)
            util.set_exec(self.wjoin(filename), "x" in flags)
493 493
    def wwritedata(self, filename, data):
        '''return data passed through the decode filters, without
        writing anything'''
        return self._filter("decode", filename, data)
496 496
    def transaction(self):
        # Return a new transaction; nest inside the currently running
        # one if there is any.
        tr = self.transhandle
        if tr != None and tr.running():
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""  # no dirstate yet (fresh repository)
        self.opener("journal.dirstate", "w").write(ds)

        # on close, the journal files become the undo files
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames))
        self.transhandle = tr
        return tr
516 516
    def recover(self):
        '''Roll back an interrupted transaction, if any.  Returns True
        when a journal was found and rolled back, False otherwise.'''
        l = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"))
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            # dropping the last reference releases the lock
            del l
527 530
    def rollback(self, wlock=None, lock=None):
        '''undo the last transaction, restoring the saved dirstate'''
        try:
            if not wlock:
                wlock = self.wlock()
            if not lock:
                lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            # dropping the references releases any locks we acquired
            del wlock, lock
541 547
    def invalidate(self):
        # Drop the cached changelog/manifest and the tag caches so they
        # are reloaded from disk on next access.
        for a in "changelog manifest".split():
            if hasattr(self, a):
                self.__delattr__(a)
        self.tagscache = None
        self.nodetagscache = None
548 554
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        # Acquire lockname.  If wait is set and the lock is held, warn
        # about the holder and retry with a configurable timeout.
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
563 569
    def lock(self, wait=True):
        '''acquire the store lock; invalidates caches on acquisition'''
        return self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                          _('repository %s') % self.origroot)
567 573
    def wlock(self, wait=True):
        '''acquire the working directory lock; flushes the dirstate on
        release and invalidates it on acquisition'''
        return self._lock(self.join("wlock"), wait, self.dirstate.write,
                          self.dirstate.invalidate,
                          _('working directory of %s') % self.origroot)
572 578
    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the new filenode, or the parent filenode when the file
        is unchanged.  Appends fn to changelist when a new file revision
        is actually stored.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            else: # directory rename
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t):
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, transaction, linkrev, fp1, fp2)
632 638
    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
        '''commit with explicitly supplied parents, bypassing the
        dirstate-based file selection; defaults to the dirstate parents
        when p1 is None'''
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, wlock=wlock, extra=extra)
638 644
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False, p1=None, p2=None, extra={}):
        '''commit changed files to the repository; returns the new
        changeset node, or None when there is nothing to commit or the
        commit message ends up empty'''
        tr = None
        try:
            commit = []
            remove = []
            changed = []
            use_dirstate = (p1 is None) # not rawcommit
            extra = extra.copy()

            # decide which files to commit and which to remove
            if use_dirstate:
                if files:
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            commit.append(f)
                        elif s == 'r':
                            remove.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    changes = self.status(match=match)[:5]
                    modified, added, removed, deleted, unknown = changes
                    commit = modified + added
                    remove = removed
            else:
                commit = files

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True
            else:
                p1, p2 = p1, p2 or nullid
                update_dirstate = (self.dirstate.parents()[0] == p1)

            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                branchname = self.workingctx().branch()
                try:
                    # branch names must be valid UTF-8
                    branchname = branchname.decode('UTF-8').encode('UTF-8')
                except UnicodeDecodeError:
                    raise util.Abort(_('branch name not in UTF-8!'))
            else:
                branchname = ""

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            if not wlock:
                wlock = self.wlock()
            if not lock:
                lock = self.lock()
            tr = self.transaction()

            # check in files
            new = {}
            linkrev = self.changelog.count()
            commit.sort()
            is_exec = util.execfunc(self.root, m1.execf)
            is_link = util.linkfunc(self.root, m1.linkf)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
                    new_exec = is_exec(f)
                    new_link = is_link(f)
                    if not changed or changed[-1] != f:
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        old_exec = m1.execf(f)
                        old_link = m1.linkf(f)
                        if old_exec != new_exec or old_link != new_link:
                            changed.append(f)
                    m1.set(f, new_exec, new_link)
                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # rawcommit: a missing file becomes a removal
                        remove.append(f)

            # update manifest
            m1.update(new)
            remove.sort()
            removed = []

            for f in remove:
                if f in m1:
                    del m1[f]
                    removed.append(f)
                elif f in m2:
                    removed.append(f)
            mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
                                   (new, removed))

            # add changeset
            new = new.keys()
            new.sort()

            user = user or self.ui.username()
            if not text or force_editor:
                # build the template shown in the user's editor
                edittext = []
                if text:
                    edittext.append(text)
                edittext.append("")
                edittext.append("HG: user: %s" % user)
                if p2 != nullid:
                    edittext.append("HG: branch merge")
                if branchname:
                    edittext.append("HG: branch %s" % util.tolocal(branchname))
                edittext.extend(["HG: changed %s" % f for f in changed])
                edittext.extend(["HG: removed %s" % f for f in removed])
                if not changed and not remove:
                    edittext.append("HG: no files changed")
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

            # strip trailing whitespace and leading blank lines
            lines = [line.rstrip() for line in text.rstrip().splitlines()]
            while lines and not lines[0]:
                del lines[0]
            if not lines:
                return None
            text = '\n'.join(lines)
            if branchname:
                extra["branch"] = branchname
            n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
                                   user, date, extra)
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2)
            tr.close()

            if self.branchcache and "branch" in extra:
                self.branchcache[util.tolocal(extra["branch"])] = n

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
                if use_dirstate:
                    for f in new:
                        self.dirstate.normal(f)
                    for f in removed:
                        self.dirstate.forget(f)

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            # releases locks and aborts an unclosed transaction
            del lock, wlock, tr
800 811
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''

        if node:
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # whatever is left in fdict was not found in the manifest
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n')
                                 % (self.pathto(fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
842 853
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns (modified, added, removed, deleted, unknown, ignored,
        clean), each a sorted list of filenames.
        """

        def fcmp(fn, getnode):
            # does the working copy of fn differ from its stored revision?
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        def mfmatches(node):
            # manifest of node restricted to files accepted by match
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    fixup = []
                    # do a full compare of any files that might have changed
                    ctx = self.changectx()
                    for f in lookup:
                        if f not in ctx or ctx[f].cmp(self.wread(f)):
                            modified.append(f)
                        else:
                            fixup.append(f)
                            if list_clean:
                                clean.append(f)

                    # update dirstate for files that are actually clean
                    if fixup:
                        fixlock = wlock
                        try:
                            if not fixlock:
                                try:
                                    # best effort: skip the fixup if the
                                    # wlock can't be taken without waiting
                                    fixlock = self.wlock(False)
                                except lock.LockException:
                                    pass
                            if fixlock:
                                for f in fixup:
                                    self.dirstate.normal(f)
                        finally:
                            del fixlock
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if mf1.has_key(fn):
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] != "" or fcmp(fn, getnode)))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # anything left in mf1 is missing from node2: removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
955 966
    def add(self, list, wlock=None):
        '''schedule the files in list for addition at the next commit'''
        try:
            if not wlock:
                wlock = self.wlock()
            for f in list:
                p = self.wjoin(f)
                try:
                    st = os.lstat(p)
                except:
                    self.ui.warn(_("%s does not exist!\n") % f)
                    continue
                if st.st_size > 10000000:
                    self.ui.warn(_("%s: files over 10MB may cause memory and"
                                   " performance problems\n"
                                   "(use 'hg revert %s' to unadd the file)\n")
                                   % (f, f))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    self.ui.warn(_("%s not added: only files and symlinks "
                                   "supported currently\n") % f)
                elif self.dirstate[f] in 'an':
                    self.ui.warn(_("%s already tracked!\n") % f)
                else:
                    self.dirstate.add(f)
        finally:
            # dropping the reference releases the lock if we took it
            del wlock
978 992
    def forget(self, list, wlock=None):
        '''cancel a scheduled addition for the files in list'''
        try:
            if not wlock:
                wlock = self.wlock()
            for f in list:
                if self.dirstate[f] != 'a':
                    self.ui.warn(_("%s not added!\n") % f)
                else:
                    self.dirstate.forget(f)
        finally:
            # dropping the reference releases the lock if we took it
            del wlock
987 1004
    def remove(self, list, unlink=False, wlock=None):
        '''schedule the files in list for removal at the next commit;
        with unlink, also delete them from the working directory first'''
        try:
            if unlink:
                for f in list:
                    try:
                        util.unlink(self.wjoin(f))
                    except OSError, inst:
                        # a file already gone is fine
                        if inst.errno != errno.ENOENT:
                            raise
            if not wlock:
                wlock = self.wlock()
            for f in list:
                if unlink and os.path.exists(self.wjoin(f)):
                    self.ui.warn(_("%s still exists!\n") % f)
                elif self.dirstate[f] == 'a':
                    # added but never committed: just forget it
                    self.dirstate.forget(f)
                elif f not in self.dirstate:
                    self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    self.dirstate.remove(f)
        finally:
            # dropping the reference releases the lock if we took it
            del wlock
1007 1027
    def undelete(self, list, wlock=None):
        '''restore removed files from the first dirstate parent'''
        try:
            p = self.dirstate.parents()[0]
            mn = self.changelog.read(p)[0]
            m = self.manifest.read(mn)
            if not wlock:
                wlock = self.wlock()
            for f in list:
                if self.dirstate[f] != 'r':
                    self.ui.warn("%s not removed!\n" % f)
                else:
                    t = self.file(f).read(m[f])
                    self.wwrite(f, t, m.flags(f))
                    self.dirstate.normal(f)
        finally:
            # dropping the reference releases the lock if we took it
            del wlock
1021 1044
    def copy(self, source, dest, wlock=None):
        '''record dest as a copy of source in the dirstate; dest must
        already exist in the working directory'''
        try:
            p = self.wjoin(dest)
            if not (os.path.exists(p) or os.path.islink(p)):
                self.ui.warn(_("%s does not exist!\n") % dest)
            elif not (os.path.isfile(p) or os.path.islink(p)):
                self.ui.warn(_("copy failed: %s is not a file or a "
                               "symbolic link\n") % dest)
            else:
                if not wlock:
                    wlock = self.wlock()
                if dest not in self.dirstate:
                    self.dirstate.add(dest)
                self.dirstate.copy(source, dest)
        finally:
            # dropping the reference releases the lock if we took it
            del wlock
1035 1061
1036 1062 def heads(self, start=None):
1037 1063 heads = self.changelog.heads(start)
1038 1064 # sort the output in rev descending order
1039 1065 heads = [(-self.changelog.rev(h), h) for h in heads]
1040 1066 heads.sort()
1041 1067 return [n for (r, n) in heads]
1042 1068
    def branchheads(self, branch, start=None):
        '''return the heads of the given named branch, optionally
        limited to descendants of start'''
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head. So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev. There may be a more
        # efficient way to do this as part of the previous algorithm.

        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self.changectx(rev).branch() == branch:
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads
1088 1114
    def branches(self, nodes):
        # For each given node (default: the tip), follow first parents
        # until a merge or a root is reached, and return a
        # (head, tail, p1, p2) tuple describing that linear segment.
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while 1:
                p = self.changelog.parents(n)
                # stop at a merge (two real parents) or at a root
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b
1102 1128
1103 1129 def between(self, pairs):
1104 1130 r = []
1105 1131
1106 1132 for top, bottom in pairs:
1107 1133 n, l, i = top, [], 0
1108 1134 f = 1
1109 1135
1110 1136 while n != bottom:
1111 1137 p = self.changelog.parents(n)[0]
1112 1138 if i == f:
1113 1139 l.append(n)
1114 1140 f = f * 2
1115 1141 n = p
1116 1142 i += 1
1117 1143
1118 1144 r.append(l)
1119 1145
1120 1146 return r
1121 1147
1122 1148 def findincoming(self, remote, base=None, heads=None, force=False):
1123 1149 """Return list of roots of the subsets of missing nodes from remote
1124 1150
1125 1151 If base dict is specified, assume that these nodes and their parents
1126 1152 exist on the remote side and that no child of a node of base exists
1127 1153 in both remote and self.
1128 1154 Furthermore base will be updated to include the nodes that exists
1129 1155 in self and remote but no children exists in self and remote.
1130 1156 If a list of heads is specified, return only nodes which are heads
1131 1157 or ancestors of these heads.
1132 1158
1133 1159 All the ancestors of base are in self and in remote.
1134 1160 All the descendants of the list returned are missing in self.
1135 1161 (and so we know that the rest of the nodes are missing in remote, see
1136 1162 outgoing)
1137 1163 """
1138 1164 m = self.changelog.nodemap
1139 1165 search = []
1140 1166 fetch = {}
1141 1167 seen = {}
1142 1168 seenbranch = {}
1143 1169 if base == None:
1144 1170 base = {}
1145 1171
1146 1172 if not heads:
1147 1173 heads = remote.heads()
1148 1174
1149 1175 if self.changelog.tip() == nullid:
1150 1176 base[nullid] = 1
1151 1177 if heads != [nullid]:
1152 1178 return [nullid]
1153 1179 return []
1154 1180
1155 1181 # assume we're closer to the tip than the root
1156 1182 # and start by examining the heads
1157 1183 self.ui.status(_("searching for changes\n"))
1158 1184
1159 1185 unknown = []
1160 1186 for h in heads:
1161 1187 if h not in m:
1162 1188 unknown.append(h)
1163 1189 else:
1164 1190 base[h] = 1
1165 1191
1166 1192 if not unknown:
1167 1193 return []
1168 1194
1169 1195 req = dict.fromkeys(unknown)
1170 1196 reqcnt = 0
1171 1197
1172 1198 # search through remote branches
1173 1199 # a 'branch' here is a linear segment of history, with four parts:
1174 1200 # head, root, first parent, second parent
1175 1201 # (a branch always has two parents (or none) by definition)
1176 1202 unknown = remote.branches(unknown)
1177 1203 while unknown:
1178 1204 r = []
1179 1205 while unknown:
1180 1206 n = unknown.pop(0)
1181 1207 if n[0] in seen:
1182 1208 continue
1183 1209
1184 1210 self.ui.debug(_("examining %s:%s\n")
1185 1211 % (short(n[0]), short(n[1])))
1186 1212 if n[0] == nullid: # found the end of the branch
1187 1213 pass
1188 1214 elif n in seenbranch:
1189 1215 self.ui.debug(_("branch already found\n"))
1190 1216 continue
1191 1217 elif n[1] and n[1] in m: # do we know the base?
1192 1218 self.ui.debug(_("found incomplete branch %s:%s\n")
1193 1219 % (short(n[0]), short(n[1])))
1194 1220 search.append(n) # schedule branch range for scanning
1195 1221 seenbranch[n] = 1
1196 1222 else:
1197 1223 if n[1] not in seen and n[1] not in fetch:
1198 1224 if n[2] in m and n[3] in m:
1199 1225 self.ui.debug(_("found new changeset %s\n") %
1200 1226 short(n[1]))
1201 1227 fetch[n[1]] = 1 # earliest unknown
1202 1228 for p in n[2:4]:
1203 1229 if p in m:
1204 1230 base[p] = 1 # latest known
1205 1231
1206 1232 for p in n[2:4]:
1207 1233 if p not in req and p not in m:
1208 1234 r.append(p)
1209 1235 req[p] = 1
1210 1236 seen[n[0]] = 1
1211 1237
1212 1238 if r:
1213 1239 reqcnt += 1
1214 1240 self.ui.debug(_("request %d: %s\n") %
1215 1241 (reqcnt, " ".join(map(short, r))))
1216 1242 for p in xrange(0, len(r), 10):
1217 1243 for b in remote.branches(r[p:p+10]):
1218 1244 self.ui.debug(_("received %s:%s\n") %
1219 1245 (short(b[0]), short(b[1])))
1220 1246 unknown.append(b)
1221 1247
1222 1248 # do binary search on the branches we found
1223 1249 while search:
1224 1250 n = search.pop(0)
1225 1251 reqcnt += 1
1226 1252 l = remote.between([(n[0], n[1])])[0]
1227 1253 l.append(n[1])
1228 1254 p = n[0]
1229 1255 f = 1
1230 1256 for i in l:
1231 1257 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1232 1258 if i in m:
1233 1259 if f <= 2:
1234 1260 self.ui.debug(_("found new branch changeset %s\n") %
1235 1261 short(p))
1236 1262 fetch[p] = 1
1237 1263 base[i] = 1
1238 1264 else:
1239 1265 self.ui.debug(_("narrowed branch search to %s:%s\n")
1240 1266 % (short(p), short(i)))
1241 1267 search.append((p, i))
1242 1268 break
1243 1269 p, f = i, f * 2
1244 1270
1245 1271 # sanity check our fetch list
1246 1272 for f in fetch.keys():
1247 1273 if f in m:
1248 1274 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1249 1275
1250 1276 if base.keys() == [nullid]:
1251 1277 if force:
1252 1278 self.ui.warn(_("warning: repository is unrelated\n"))
1253 1279 else:
1254 1280 raise util.Abort(_("repository is unrelated"))
1255 1281
1256 1282 self.ui.debug(_("found new changesets starting at ") +
1257 1283 " ".join([short(f) for f in fetch]) + "\n")
1258 1284
1259 1285 self.ui.debug(_("%d total queries\n") % reqcnt)
1260 1286
1261 1287 return fetch.keys()
1262 1288
1263 1289 def findoutgoing(self, remote, base=None, heads=None, force=False):
1264 1290 """Return list of nodes that are roots of subsets not in remote
1265 1291
1266 1292 If base dict is specified, assume that these nodes and their parents
1267 1293 exist on the remote side.
1268 1294 If a list of heads is specified, return only nodes which are heads
1269 1295 or ancestors of these heads, and return a second element which
1270 1296 contains all remote heads which get new children.
1271 1297 """
1272 1298 if base == None:
1273 1299 base = {}
1274 1300 self.findincoming(remote, base, heads, force=force)
1275 1301
1276 1302 self.ui.debug(_("common changesets up to ")
1277 1303 + " ".join(map(short, base.keys())) + "\n")
1278 1304
1279 1305 remain = dict.fromkeys(self.changelog.nodemap)
1280 1306
1281 1307 # prune everything remote has from the tree
1282 1308 del remain[nullid]
1283 1309 remove = base.keys()
1284 1310 while remove:
1285 1311 n = remove.pop(0)
1286 1312 if n in remain:
1287 1313 del remain[n]
1288 1314 for p in self.changelog.parents(n):
1289 1315 remove.append(p)
1290 1316
1291 1317 # find every node whose parents have been pruned
1292 1318 subset = []
1293 1319 # find every remote head that will get new children
1294 1320 updated_heads = {}
1295 1321 for n in remain:
1296 1322 p1, p2 = self.changelog.parents(n)
1297 1323 if p1 not in remain and p2 not in remain:
1298 1324 subset.append(n)
1299 1325 if heads:
1300 1326 if p1 in heads:
1301 1327 updated_heads[p1] = True
1302 1328 if p2 in heads:
1303 1329 updated_heads[p2] = True
1304 1330
1305 1331 # this is the set of all roots we have to push
1306 1332 if heads:
1307 1333 return subset, updated_heads.keys()
1308 1334 else:
1309 1335 return subset
1310 1336
1311 1337 def pull(self, remote, heads=None, force=False, lock=None):
1312 mylock = False
1338 try:
1313 1339 if not lock:
1314 1340 lock = self.lock()
1315 mylock = True
1316
1317 try:
1318 1341 fetch = self.findincoming(remote, force=force)
1319 1342 if fetch == [nullid]:
1320 1343 self.ui.status(_("requesting all changes\n"))
1321 1344
1322 1345 if not fetch:
1323 1346 self.ui.status(_("no changes found\n"))
1324 1347 return 0
1325 1348
1326 1349 if heads is None:
1327 1350 cg = remote.changegroup(fetch, 'pull')
1328 1351 else:
1329 1352 if 'changegroupsubset' not in remote.capabilities:
1330 1353 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1331 1354 cg = remote.changegroupsubset(fetch, heads, 'pull')
1332 1355 return self.addchangegroup(cg, 'pull', remote.url())
1333 1356 finally:
1334 if mylock:
1335 lock.release()
1357 del lock
1336 1358
1337 1359 def push(self, remote, force=False, revs=None):
1338 1360 # there are two ways to push to remote repo:
1339 1361 #
1340 1362 # addchangegroup assumes local user can lock remote
1341 1363 # repo (local filesystem, old ssh servers).
1342 1364 #
1343 1365 # unbundle assumes local user cannot lock remote repo (new ssh
1344 1366 # servers, http servers).
1345 1367
1346 1368 if remote.capable('unbundle'):
1347 1369 return self.push_unbundle(remote, force, revs)
1348 1370 return self.push_addchangegroup(remote, force, revs)
1349 1371
1350 1372 def prepush(self, remote, force, revs):
1351 1373 base = {}
1352 1374 remote_heads = remote.heads()
1353 1375 inc = self.findincoming(remote, base, remote_heads, force=force)
1354 1376
1355 1377 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1356 1378 if revs is not None:
1357 1379 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1358 1380 else:
1359 1381 bases, heads = update, self.changelog.heads()
1360 1382
1361 1383 if not bases:
1362 1384 self.ui.status(_("no changes found\n"))
1363 1385 return None, 1
1364 1386 elif not force:
1365 1387 # check if we're creating new remote heads
1366 1388 # to be a remote head after push, node must be either
1367 1389 # - unknown locally
1368 1390 # - a local outgoing head descended from update
1369 1391 # - a remote head that's known locally and not
1370 1392 # ancestral to an outgoing head
1371 1393
1372 1394 warn = 0
1373 1395
1374 1396 if remote_heads == [nullid]:
1375 1397 warn = 0
1376 1398 elif not revs and len(heads) > len(remote_heads):
1377 1399 warn = 1
1378 1400 else:
1379 1401 newheads = list(heads)
1380 1402 for r in remote_heads:
1381 1403 if r in self.changelog.nodemap:
1382 1404 desc = self.changelog.heads(r, heads)
1383 1405 l = [h for h in heads if h in desc]
1384 1406 if not l:
1385 1407 newheads.append(r)
1386 1408 else:
1387 1409 newheads.append(r)
1388 1410 if len(newheads) > len(remote_heads):
1389 1411 warn = 1
1390 1412
1391 1413 if warn:
1392 1414 self.ui.warn(_("abort: push creates new remote branches!\n"))
1393 1415 self.ui.status(_("(did you forget to merge?"
1394 1416 " use push -f to force)\n"))
1395 1417 return None, 1
1396 1418 elif inc:
1397 1419 self.ui.warn(_("note: unsynced remote changes!\n"))
1398 1420
1399 1421
1400 1422 if revs is None:
1401 1423 cg = self.changegroup(update, 'push')
1402 1424 else:
1403 1425 cg = self.changegroupsubset(update, revs, 'push')
1404 1426 return cg, remote_heads
1405 1427
1406 1428 def push_addchangegroup(self, remote, force, revs):
1407 1429 lock = remote.lock()
1408
1430 try:
1409 1431 ret = self.prepush(remote, force, revs)
1410 1432 if ret[0] is not None:
1411 1433 cg, remote_heads = ret
1412 1434 return remote.addchangegroup(cg, 'push', self.url())
1413 1435 return ret[1]
1436 finally:
1437 del lock
1414 1438
1415 1439 def push_unbundle(self, remote, force, revs):
1416 1440 # local repo finds heads on server, finds out what revs it
1417 1441 # must push. once revs transferred, if server finds it has
1418 1442 # different heads (someone else won commit/push race), server
1419 1443 # aborts.
1420 1444
1421 1445 ret = self.prepush(remote, force, revs)
1422 1446 if ret[0] is not None:
1423 1447 cg, remote_heads = ret
1424 1448 if force: remote_heads = ['force']
1425 1449 return remote.unbundle(cg, remote_heads, 'push')
1426 1450 return ret[1]
1427 1451
1428 1452 def changegroupinfo(self, nodes):
1429 1453 self.ui.note(_("%d changesets found\n") % len(nodes))
1430 1454 if self.ui.debugflag:
1431 1455 self.ui.debug(_("List of changesets:\n"))
1432 1456 for node in nodes:
1433 1457 self.ui.debug("%s\n" % hex(node))
1434 1458
1435 1459 def changegroupsubset(self, bases, heads, source):
1436 1460 """This function generates a changegroup consisting of all the nodes
1437 1461 that are descendants of any of the bases, and ancestors of any of
1438 1462 the heads.
1439 1463
1440 1464 It is fairly complex as determining which filenodes and which
1441 1465 manifest nodes need to be included for the changeset to be complete
1442 1466 is non-trivial.
1443 1467
1444 1468 Another wrinkle is doing the reverse, figuring out which changeset in
1445 1469 the changegroup a particular filenode or manifestnode belongs to."""
1446 1470
1447 1471 self.hook('preoutgoing', throw=True, source=source)
1448 1472
1449 1473 # Set up some initial variables
1450 1474 # Make it easy to refer to self.changelog
1451 1475 cl = self.changelog
1452 1476 # msng is short for missing - compute the list of changesets in this
1453 1477 # changegroup.
1454 1478 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1455 1479 self.changegroupinfo(msng_cl_lst)
1456 1480 # Some bases may turn out to be superfluous, and some heads may be
1457 1481 # too. nodesbetween will return the minimal set of bases and heads
1458 1482 # necessary to re-create the changegroup.
1459 1483
1460 1484 # Known heads are the list of heads that it is assumed the recipient
1461 1485 # of this changegroup will know about.
1462 1486 knownheads = {}
1463 1487 # We assume that all parents of bases are known heads.
1464 1488 for n in bases:
1465 1489 for p in cl.parents(n):
1466 1490 if p != nullid:
1467 1491 knownheads[p] = 1
1468 1492 knownheads = knownheads.keys()
1469 1493 if knownheads:
1470 1494 # Now that we know what heads are known, we can compute which
1471 1495 # changesets are known. The recipient must know about all
1472 1496 # changesets required to reach the known heads from the null
1473 1497 # changeset.
1474 1498 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1475 1499 junk = None
1476 1500 # Transform the list into an ersatz set.
1477 1501 has_cl_set = dict.fromkeys(has_cl_set)
1478 1502 else:
1479 1503 # If there were no known heads, the recipient cannot be assumed to
1480 1504 # know about any changesets.
1481 1505 has_cl_set = {}
1482 1506
1483 1507 # Make it easy to refer to self.manifest
1484 1508 mnfst = self.manifest
1485 1509 # We don't know which manifests are missing yet
1486 1510 msng_mnfst_set = {}
1487 1511 # Nor do we know which filenodes are missing.
1488 1512 msng_filenode_set = {}
1489 1513
1490 1514 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1491 1515 junk = None
1492 1516
1493 1517 # A changeset always belongs to itself, so the changenode lookup
1494 1518 # function for a changenode is identity.
1495 1519 def identity(x):
1496 1520 return x
1497 1521
1498 1522 # A function generating function. Sets up an environment for the
1499 1523 # inner function.
1500 1524 def cmp_by_rev_func(revlog):
1501 1525 # Compare two nodes by their revision number in the environment's
1502 1526 # revision history. Since the revision number both represents the
1503 1527 # most efficient order to read the nodes in, and represents a
1504 1528 # topological sorting of the nodes, this function is often useful.
1505 1529 def cmp_by_rev(a, b):
1506 1530 return cmp(revlog.rev(a), revlog.rev(b))
1507 1531 return cmp_by_rev
1508 1532
1509 1533 # If we determine that a particular file or manifest node must be a
1510 1534 # node that the recipient of the changegroup will already have, we can
1511 1535 # also assume the recipient will have all the parents. This function
1512 1536 # prunes them from the set of missing nodes.
1513 1537 def prune_parents(revlog, hasset, msngset):
1514 1538 haslst = hasset.keys()
1515 1539 haslst.sort(cmp_by_rev_func(revlog))
1516 1540 for node in haslst:
1517 1541 parentlst = [p for p in revlog.parents(node) if p != nullid]
1518 1542 while parentlst:
1519 1543 n = parentlst.pop()
1520 1544 if n not in hasset:
1521 1545 hasset[n] = 1
1522 1546 p = [p for p in revlog.parents(n) if p != nullid]
1523 1547 parentlst.extend(p)
1524 1548 for n in hasset:
1525 1549 msngset.pop(n, None)
1526 1550
1527 1551 # This is a function generating function used to set up an environment
1528 1552 # for the inner function to execute in.
1529 1553 def manifest_and_file_collector(changedfileset):
1530 1554 # This is an information gathering function that gathers
1531 1555 # information from each changeset node that goes out as part of
1532 1556 # the changegroup. The information gathered is a list of which
1533 1557 # manifest nodes are potentially required (the recipient may
1534 1558 # already have them) and total list of all files which were
1535 1559 # changed in any changeset in the changegroup.
1536 1560 #
1537 1561 # We also remember the first changenode we saw any manifest
1538 1562 # referenced by so we can later determine which changenode 'owns'
1539 1563 # the manifest.
1540 1564 def collect_manifests_and_files(clnode):
1541 1565 c = cl.read(clnode)
1542 1566 for f in c[3]:
1543 1567 # This is to make sure we only have one instance of each
1544 1568 # filename string for each filename.
1545 1569 changedfileset.setdefault(f, f)
1546 1570 msng_mnfst_set.setdefault(c[0], clnode)
1547 1571 return collect_manifests_and_files
1548 1572
1549 1573 # Figure out which manifest nodes (of the ones we think might be part
1550 1574 # of the changegroup) the recipient must know about and remove them
1551 1575 # from the changegroup.
1552 1576 def prune_manifests():
1553 1577 has_mnfst_set = {}
1554 1578 for n in msng_mnfst_set:
1555 1579 # If a 'missing' manifest thinks it belongs to a changenode
1556 1580 # the recipient is assumed to have, obviously the recipient
1557 1581 # must have that manifest.
1558 1582 linknode = cl.node(mnfst.linkrev(n))
1559 1583 if linknode in has_cl_set:
1560 1584 has_mnfst_set[n] = 1
1561 1585 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1562 1586
1563 1587 # Use the information collected in collect_manifests_and_files to say
1564 1588 # which changenode any manifestnode belongs to.
1565 1589 def lookup_manifest_link(mnfstnode):
1566 1590 return msng_mnfst_set[mnfstnode]
1567 1591
1568 1592 # A function generating function that sets up the initial environment
1569 1593 # the inner function.
1570 1594 def filenode_collector(changedfiles):
1571 1595 next_rev = [0]
1572 1596 # This gathers information from each manifestnode included in the
1573 1597 # changegroup about which filenodes the manifest node references
1574 1598 # so we can include those in the changegroup too.
1575 1599 #
1576 1600 # It also remembers which changenode each filenode belongs to. It
1577 1601 # does this by assuming that a filenode belongs to the changenode
1578 1602 # the first manifest that references it belongs to.
1579 1603 def collect_msng_filenodes(mnfstnode):
1580 1604 r = mnfst.rev(mnfstnode)
1581 1605 if r == next_rev[0]:
1582 1606 # If the last rev we looked at was the one just previous,
1583 1607 # we only need to see a diff.
1584 1608 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1585 1609 # For each line in the delta
1586 1610 for dline in delta.splitlines():
1587 1611 # get the filename and filenode for that line
1588 1612 f, fnode = dline.split('\0')
1589 1613 fnode = bin(fnode[:40])
1590 1614 f = changedfiles.get(f, None)
1591 1615 # And if the file is in the list of files we care
1592 1616 # about.
1593 1617 if f is not None:
1594 1618 # Get the changenode this manifest belongs to
1595 1619 clnode = msng_mnfst_set[mnfstnode]
1596 1620 # Create the set of filenodes for the file if
1597 1621 # there isn't one already.
1598 1622 ndset = msng_filenode_set.setdefault(f, {})
1599 1623 # And set the filenode's changelog node to the
1600 1624 # manifest's if it hasn't been set already.
1601 1625 ndset.setdefault(fnode, clnode)
1602 1626 else:
1603 1627 # Otherwise we need a full manifest.
1604 1628 m = mnfst.read(mnfstnode)
1605 1629 # For every file we care about.
1606 1630 for f in changedfiles:
1607 1631 fnode = m.get(f, None)
1608 1632 # If it's in the manifest
1609 1633 if fnode is not None:
1610 1634 # See comments above.
1611 1635 clnode = msng_mnfst_set[mnfstnode]
1612 1636 ndset = msng_filenode_set.setdefault(f, {})
1613 1637 ndset.setdefault(fnode, clnode)
1614 1638 # Remember the revision we hope to see next.
1615 1639 next_rev[0] = r + 1
1616 1640 return collect_msng_filenodes
1617 1641
1618 1642 # We have a list of filenodes we think we need for a file, let's remove
1619 1643 # all those we know the recipient must have.
1620 1644 def prune_filenodes(f, filerevlog):
1621 1645 msngset = msng_filenode_set[f]
1622 1646 hasset = {}
1623 1647 # If a 'missing' filenode thinks it belongs to a changenode we
1624 1648 # assume the recipient must have, then the recipient must have
1625 1649 # that filenode.
1626 1650 for n in msngset:
1627 1651 clnode = cl.node(filerevlog.linkrev(n))
1628 1652 if clnode in has_cl_set:
1629 1653 hasset[n] = 1
1630 1654 prune_parents(filerevlog, hasset, msngset)
1631 1655
1632 1656 # A function generator function that sets up a context for the
1633 1657 # inner function.
1634 1658 def lookup_filenode_link_func(fname):
1635 1659 msngset = msng_filenode_set[fname]
1636 1660 # Lookup the changenode the filenode belongs to.
1637 1661 def lookup_filenode_link(fnode):
1638 1662 return msngset[fnode]
1639 1663 return lookup_filenode_link
1640 1664
1641 1665 # Now that we have all these utility functions to help out and
1642 1666 # logically divide up the task, generate the group.
1643 1667 def gengroup():
1644 1668 # The set of changed files starts empty.
1645 1669 changedfiles = {}
1646 1670 # Create a changenode group generator that will call our functions
1647 1671 # back to lookup the owning changenode and collect information.
1648 1672 group = cl.group(msng_cl_lst, identity,
1649 1673 manifest_and_file_collector(changedfiles))
1650 1674 for chnk in group:
1651 1675 yield chnk
1652 1676
1653 1677 # The list of manifests has been collected by the generator
1654 1678 # calling our functions back.
1655 1679 prune_manifests()
1656 1680 msng_mnfst_lst = msng_mnfst_set.keys()
1657 1681 # Sort the manifestnodes by revision number.
1658 1682 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1659 1683 # Create a generator for the manifestnodes that calls our lookup
1660 1684 # and data collection functions back.
1661 1685 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1662 1686 filenode_collector(changedfiles))
1663 1687 for chnk in group:
1664 1688 yield chnk
1665 1689
1666 1690 # These are no longer needed, dereference and toss the memory for
1667 1691 # them.
1668 1692 msng_mnfst_lst = None
1669 1693 msng_mnfst_set.clear()
1670 1694
1671 1695 changedfiles = changedfiles.keys()
1672 1696 changedfiles.sort()
1673 1697 # Go through all our files in order sorted by name.
1674 1698 for fname in changedfiles:
1675 1699 filerevlog = self.file(fname)
1676 1700 # Toss out the filenodes that the recipient isn't really
1677 1701 # missing.
1678 1702 if msng_filenode_set.has_key(fname):
1679 1703 prune_filenodes(fname, filerevlog)
1680 1704 msng_filenode_lst = msng_filenode_set[fname].keys()
1681 1705 else:
1682 1706 msng_filenode_lst = []
1683 1707 # If any filenodes are left, generate the group for them,
1684 1708 # otherwise don't bother.
1685 1709 if len(msng_filenode_lst) > 0:
1686 1710 yield changegroup.genchunk(fname)
1687 1711 # Sort the filenodes by their revision #
1688 1712 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1689 1713 # Create a group generator and only pass in a changenode
1690 1714 # lookup function as we need to collect no information
1691 1715 # from filenodes.
1692 1716 group = filerevlog.group(msng_filenode_lst,
1693 1717 lookup_filenode_link_func(fname))
1694 1718 for chnk in group:
1695 1719 yield chnk
1696 1720 if msng_filenode_set.has_key(fname):
1697 1721 # Don't need this anymore, toss it to free memory.
1698 1722 del msng_filenode_set[fname]
1699 1723 # Signal that no more groups are left.
1700 1724 yield changegroup.closechunk()
1701 1725
1702 1726 if msng_cl_lst:
1703 1727 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1704 1728
1705 1729 return util.chunkbuffer(gengroup())
1706 1730
1707 1731 def changegroup(self, basenodes, source):
1708 1732 """Generate a changegroup of all nodes that we have that a recipient
1709 1733 doesn't.
1710 1734
1711 1735 This is much easier than the previous function as we can assume that
1712 1736 the recipient has any changenode we aren't sending them."""
1713 1737
1714 1738 self.hook('preoutgoing', throw=True, source=source)
1715 1739
1716 1740 cl = self.changelog
1717 1741 nodes = cl.nodesbetween(basenodes, None)[0]
1718 1742 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1719 1743 self.changegroupinfo(nodes)
1720 1744
1721 1745 def identity(x):
1722 1746 return x
1723 1747
1724 1748 def gennodelst(revlog):
1725 1749 for r in xrange(0, revlog.count()):
1726 1750 n = revlog.node(r)
1727 1751 if revlog.linkrev(n) in revset:
1728 1752 yield n
1729 1753
1730 1754 def changed_file_collector(changedfileset):
1731 1755 def collect_changed_files(clnode):
1732 1756 c = cl.read(clnode)
1733 1757 for fname in c[3]:
1734 1758 changedfileset[fname] = 1
1735 1759 return collect_changed_files
1736 1760
1737 1761 def lookuprevlink_func(revlog):
1738 1762 def lookuprevlink(n):
1739 1763 return cl.node(revlog.linkrev(n))
1740 1764 return lookuprevlink
1741 1765
1742 1766 def gengroup():
1743 1767 # construct a list of all changed files
1744 1768 changedfiles = {}
1745 1769
1746 1770 for chnk in cl.group(nodes, identity,
1747 1771 changed_file_collector(changedfiles)):
1748 1772 yield chnk
1749 1773 changedfiles = changedfiles.keys()
1750 1774 changedfiles.sort()
1751 1775
1752 1776 mnfst = self.manifest
1753 1777 nodeiter = gennodelst(mnfst)
1754 1778 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1755 1779 yield chnk
1756 1780
1757 1781 for fname in changedfiles:
1758 1782 filerevlog = self.file(fname)
1759 1783 nodeiter = gennodelst(filerevlog)
1760 1784 nodeiter = list(nodeiter)
1761 1785 if nodeiter:
1762 1786 yield changegroup.genchunk(fname)
1763 1787 lookup = lookuprevlink_func(filerevlog)
1764 1788 for chnk in filerevlog.group(nodeiter, lookup):
1765 1789 yield chnk
1766 1790
1767 1791 yield changegroup.closechunk()
1768 1792
1769 1793 if nodes:
1770 1794 self.hook('outgoing', node=hex(nodes[0]), source=source)
1771 1795
1772 1796 return util.chunkbuffer(gengroup())
1773 1797
1774 1798 def addchangegroup(self, source, srctype, url):
1775 1799 """add changegroup to repo.
1776 1800
1777 1801 return values:
1778 1802 - nothing changed or no source: 0
1779 1803 - more heads than before: 1+added heads (2..n)
1780 1804 - less heads than before: -1-removed heads (-2..-n)
1781 1805 - number of heads stays the same: 1
1782 1806 """
1783 1807 def csmap(x):
1784 1808 self.ui.debug(_("add changeset %s\n") % short(x))
1785 1809 return cl.count()
1786 1810
1787 1811 def revmap(x):
1788 1812 return cl.rev(x)
1789 1813
1790 1814 if not source:
1791 1815 return 0
1792 1816
1793 1817 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1794 1818
1795 1819 changesets = files = revisions = 0
1796 1820
1797 tr = self.transaction()
1798
1799 1821 # write changelog data to temp files so concurrent readers will not see
1800 1822 # inconsistent view
1801 1823 cl = self.changelog
1802 1824 cl.delayupdate()
1803 1825 oldheads = len(cl.heads())
1804 1826
1827 tr = self.transaction()
1828 try:
1805 1829 # pull off the changeset group
1806 1830 self.ui.status(_("adding changesets\n"))
1807 1831 cor = cl.count() - 1
1808 1832 chunkiter = changegroup.chunkiter(source)
1809 1833 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1810 1834 raise util.Abort(_("received changelog group is empty"))
1811 1835 cnr = cl.count() - 1
1812 1836 changesets = cnr - cor
1813 1837
1814 1838 # pull off the manifest group
1815 1839 self.ui.status(_("adding manifests\n"))
1816 1840 chunkiter = changegroup.chunkiter(source)
1817 1841 # no need to check for empty manifest group here:
1818 1842 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1819 1843 # no new manifest will be created and the manifest group will
1820 1844 # be empty during the pull
1821 1845 self.manifest.addgroup(chunkiter, revmap, tr)
1822 1846
1823 1847 # process the files
1824 1848 self.ui.status(_("adding file changes\n"))
1825 1849 while 1:
1826 1850 f = changegroup.getchunk(source)
1827 1851 if not f:
1828 1852 break
1829 1853 self.ui.debug(_("adding %s revisions\n") % f)
1830 1854 fl = self.file(f)
1831 1855 o = fl.count()
1832 1856 chunkiter = changegroup.chunkiter(source)
1833 1857 if fl.addgroup(chunkiter, revmap, tr) is None:
1834 1858 raise util.Abort(_("received file revlog group is empty"))
1835 1859 revisions += fl.count() - o
1836 1860 files += 1
1837 1861
1838 1862 # make changelog see real files again
1839 1863 cl.finalize(tr)
1840 1864
1841 1865 newheads = len(self.changelog.heads())
1842 1866 heads = ""
1843 1867 if oldheads and newheads != oldheads:
1844 1868 heads = _(" (%+d heads)") % (newheads - oldheads)
1845 1869
1846 1870 self.ui.status(_("added %d changesets"
1847 1871 " with %d changes to %d files%s\n")
1848 1872 % (changesets, revisions, files, heads))
1849 1873
1850 1874 if changesets > 0:
1851 1875 self.hook('pretxnchangegroup', throw=True,
1852 1876 node=hex(self.changelog.node(cor+1)), source=srctype,
1853 1877 url=url)
1854 1878
1855 1879 tr.close()
1880 finally:
1881 del tr
1856 1882
1857 1883 if changesets > 0:
1858 1884 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1859 1885 source=srctype, url=url)
1860 1886
1861 1887 for i in xrange(cor + 1, cnr + 1):
1862 1888 self.hook("incoming", node=hex(self.changelog.node(i)),
1863 1889 source=srctype, url=url)
1864 1890
1865 1891 # never return 0 here:
1866 1892 if newheads < oldheads:
1867 1893 return newheads - oldheads - 1
1868 1894 else:
1869 1895 return newheads - oldheads + 1
1870 1896
1871 1897
1872 1898 def stream_in(self, remote):
1873 1899 fp = remote.stream_out()
1874 1900 l = fp.readline()
1875 1901 try:
1876 1902 resp = int(l)
1877 1903 except ValueError:
1878 1904 raise util.UnexpectedOutput(
1879 1905 _('Unexpected response from remote server:'), l)
1880 1906 if resp == 1:
1881 1907 raise util.Abort(_('operation forbidden by server'))
1882 1908 elif resp == 2:
1883 1909 raise util.Abort(_('locking the remote repository failed'))
1884 1910 elif resp != 0:
1885 1911 raise util.Abort(_('the server sent an unknown error code'))
1886 1912 self.ui.status(_('streaming all changes\n'))
1887 1913 l = fp.readline()
1888 1914 try:
1889 1915 total_files, total_bytes = map(int, l.split(' ', 1))
1890 1916 except ValueError, TypeError:
1891 1917 raise util.UnexpectedOutput(
1892 1918 _('Unexpected response from remote server:'), l)
1893 1919 self.ui.status(_('%d files to transfer, %s of data\n') %
1894 1920 (total_files, util.bytecount(total_bytes)))
1895 1921 start = time.time()
1896 1922 for i in xrange(total_files):
1897 1923 # XXX doesn't support '\n' or '\r' in filenames
1898 1924 l = fp.readline()
1899 1925 try:
1900 1926 name, size = l.split('\0', 1)
1901 1927 size = int(size)
1902 1928 except ValueError, TypeError:
1903 1929 raise util.UnexpectedOutput(
1904 1930 _('Unexpected response from remote server:'), l)
1905 1931 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1906 1932 ofp = self.sopener(name, 'w')
1907 1933 for chunk in util.filechunkiter(fp, limit=size):
1908 1934 ofp.write(chunk)
1909 1935 ofp.close()
1910 1936 elapsed = time.time() - start
1911 1937 if elapsed <= 0:
1912 1938 elapsed = 0.001
1913 1939 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1914 1940 (util.bytecount(total_bytes), elapsed,
1915 1941 util.bytecount(total_bytes / elapsed)))
1916 1942 self.invalidate()
1917 1943 return len(self.heads()) + 1
1918 1944
1919 1945 def clone(self, remote, heads=[], stream=False):
1920 1946 '''clone remote repository.
1921 1947
1922 1948 keyword arguments:
1923 1949 heads: list of revs to clone (forces use of pull)
1924 1950 stream: use streaming clone if possible'''
1925 1951
1926 1952 # now, all clients that can request uncompressed clones can
1927 1953 # read repo formats supported by all servers that can serve
1928 1954 # them.
1929 1955
1930 1956 # if revlog format changes, client will have to check version
1931 1957 # and format flags on "stream" capability, and use
1932 1958 # uncompressed only if compatible.
1933 1959
1934 1960 if stream and not heads and remote.capable('stream'):
1935 1961 return self.stream_in(remote)
1936 1962 return self.pull(remote, heads)
1937 1963
1938 1964 # used to avoid circular references so destructors work
1939 1965 def aftertrans(files):
1940 1966 renamefiles = [tuple(t) for t in files]
1941 1967 def a():
1942 1968 for src, dest in renamefiles:
1943 1969 util.rename(src, dest)
1944 1970 return a
1945 1971
1946 1972 def instance(ui, path, create):
1947 1973 return localrepository(ui, util.drop_scheme('file', path), create)
1948 1974
1949 1975 def islocal(path):
1950 1976 return True
@@ -1,570 +1,572 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import errno, util, os, tempfile, context
11 11
def filemerge(repo, fw, fo, wctx, mctx):
    """perform a 3-way merge in the working directory

    fw = filename in the working directory
    fo = filename in other parent
    wctx, mctx = working and merge changecontexts

    Returns None if the two versions are identical (nothing to do),
    otherwise the exit status of the external merge tool (non-zero
    means the merge failed and the file is unresolved).
    """

    def temp(prefix, ctx):
        # write ctx's data to a uniquely-named temp file and return its
        # path; the caller is responsible for unlinking it
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    fcm = wctx.filectx(fw)
    fco = mctx.filectx(fo)

    if not fco.cmp(fcm.data()): # files identical?
        return None

    fca = fcm.ancestor(fco)
    if not fca:
        # no common ancestor: merge against the null revision
        fca = repo.filectx(fw, fileid=nullrev)
    a = repo.wjoin(fw)
    b = temp("base", fca)
    c = temp("other", fco)

    # make sure the temporary base/other files are removed even if the
    # merge tool invocation (or a ui call) raises
    try:
        if fw != fo:
            repo.ui.status(_("merging %s and %s\n") % (fw, fo))
        else:
            repo.ui.status(_("merging %s\n") % fw)

        repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))

        cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
               or "hgmerge")
        r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
                        environ={'HG_FILE': fw,
                                 'HG_MY_NODE': str(wctx.parents()[0]),
                                 'HG_OTHER_NODE': str(mctx)})
        if r:
            repo.ui.warn(_("merging %s failed!\n") % fw)

        return r
    finally:
        os.unlink(b)
        os.unlink(c)
61 61
def checkunknown(wctx, mctx):
    "check for collisions between unknown files and files in mctx"
    remote_man = mctx.manifest()
    for fname in wctx.unknown():
        if fname not in remote_man:
            continue
        # only abort when contents actually differ
        if mctx.filectx(fname).cmp(wctx.filectx(fname).data()):
            raise util.Abort(_("untracked local file '%s' differs"
                               " from remote version") % fname)
70 70
def checkcollision(mctx):
    "check for case folding collisions in the destination context"
    seen = {}
    for name in mctx.manifest():
        key = name.lower()
        if key in seen:
            raise util.Abort(_("case-folding collision between %s and %s")
                             % (name, seen[key]))
        seen[key] = name
80 80
def forgetremoved(wctx, mctx):
    """
    Forget removed files

    If we're jumping between revisions (as opposed to merging), and if
    neither the working directory nor the target rev has the file,
    then we need to remove it from the dirstate, to prevent the
    dirstate from listing the file when it is no longer in the
    manifest.

    Returns a list of (filename, "f") forget actions.
    """

    man = mctx.manifest()
    return [(name, "f")
            for name in wctx.deleted() + wctx.removed()
            if name not in man]
99 99
def findcopies(repo, m1, m2, ma, limit):
    """
    Find moves and copies between m1 and m2 back to limit linkrev

    m1, m2 = local and remote manifests; ma = ancestor manifest.
    Returns (copy, diverge): copy maps dst -> src for detected
    copies/renames, diverge maps an original name to the list of names
    it diverged into on both sides.
    """

    def nonoverlap(d1, d2, d3):
        "Return list of elements in d1 not in d2 or d3"
        l = [d for d in d1 if d not in d3 and d not in d2]
        l.sort()
        return l

    def dirname(f):
        # directory part of a repo path ("" for top-level files)
        s = f.rfind("/")
        if s == -1:
            return ""
        return f[:s]

    def dirs(files):
        # set (as a dict) of every ancestor directory of the given files
        d = {}
        for f in files:
            f = dirname(f)
            while f not in d:
                d[f] = True
                f = dirname(f)
        return d

    wctx = repo.workingctx()

    def makectx(f, n):
        # a 20-byte n is a real filelog node; anything else means the
        # working-directory version
        if len(n) == 20:
            return repo.filectx(f, fileid=n)
        return wctx.filectx(f)
    ctx = util.cachefunc(makectx)

    def findold(fctx):
        "find files that path was copied from, back to linkrev limit"
        old = {}
        seen = {}
        orig = fctx.path()
        visit = [fctx]
        while visit:
            fc = visit.pop()
            s = str(fc)
            if s in seen:
                continue
            seen[s] = 1
            if fc.path() != orig and fc.path() not in old:
                old[fc.path()] = 1
            if fc.rev() < limit:
                # don't walk past the common-ancestor linkrev
                continue
            visit += fc.parents()

        old = old.keys()
        old.sort()
        return old

    copy = {}
    fullcopy = {}
    diverge = {}

    def checkcopies(c, man, aman):
        '''check possible copies for filectx c'''
        for of in findold(c):
            fullcopy[c.path()] = of # remember for dir rename detection
            if of not in man: # original file not in other manifest?
                if of in ma:
                    # present in the ancestor: renamed on both sides
                    diverge.setdefault(of, []).append(c.path())
                continue
            # if the original file is unchanged on the other branch,
            # no merge needed
            if man[of] == aman.get(of):
                continue
            c2 = ctx(of, man[of])
            ca = c.ancestor(c2)
            if not ca: # unrelated?
                continue
            # named changed on only one side?
            if ca.path() == c.path() or ca.path() == c2.path():
                if c == ca or c2 == ca: # no merge needed, ignore copy
                    continue
                copy[c.path()] = of

    if not repo.ui.configbool("merge", "followcopies", True):
        return {}, {}

    # avoid silly behavior for update from empty dir
    if not m1 or not m2 or not ma:
        return {}, {}

    u1 = nonoverlap(m1, m2, ma)
    u2 = nonoverlap(m2, m1, ma)

    for f in u1:
        checkcopies(ctx(f, m1[f]), m2, ma)

    for f in u2:
        checkcopies(ctx(f, m2[f]), m1, ma)

    # NOTE(review): this d2 mapping is built but immediately shadowed by
    # "d1, d2 = dirs(m1), dirs(m2)" below and never returned or read --
    # it appears to be dead code; confirm before removing.
    d2 = {}
    for of, fl in diverge.items():
        for f in fl:
            fo = list(fl)
            fo.remove(f)
            d2[f] = (of, fo)

    if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
        return copy, diverge

    # generate a directory move map
    d1, d2 = dirs(m1), dirs(m2)
    invalid = {}
    dirmove = {}

    # examine each file copy for a potential directory move, which is
    # when all the files in a directory are moved to a new directory
    for dst, src in fullcopy.items():
        dsrc, ddst = dirname(src), dirname(dst)
        if dsrc in invalid:
            # already seen to be uninteresting
            continue
        elif dsrc in d1 and ddst in d1:
            # directory wasn't entirely moved locally
            invalid[dsrc] = True
        elif dsrc in d2 and ddst in d2:
            # directory wasn't entirely moved remotely
            invalid[dsrc] = True
        elif dsrc in dirmove and dirmove[dsrc] != ddst:
            # files from the same directory moved to two different places
            invalid[dsrc] = True
        else:
            # looks good so far
            dirmove[dsrc + "/"] = ddst + "/"

    for i in invalid:
        if i in dirmove:
            del dirmove[i]

    del d1, d2, invalid

    if not dirmove:
        return copy, diverge

    # check unaccounted nonoverlapping files against directory moves
    for f in u1 + u2:
        if f not in fullcopy:
            for d in dirmove:
                if f.startswith(d):
                    # new file added in a directory that was moved, move it
                    copy[f] = dirmove[d] + f[len(d):]
                    break

    return copy, diverge
252 252
def manifestmerge(repo, p1, p2, pa, overwrite, partial):
    """
    Merge p1 and p2 with ancestor ma and generate merge action list

    overwrite = whether we clobber working files
    partial = function to filter file lists

    Returns a list of action tuples (f, type, ...) where type is one of:
    "r" remove, "m" merge, "g" get from remote, "d" directory rename,
    "dr" divergent renames, "e" update exec/link flags.
    """

    repo.ui.note(_("resolving manifests\n"))
    repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
    repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))

    m1 = p1.manifest()
    m2 = p2.manifest()
    ma = pa.manifest()
    # going backwards: the target is itself the ancestor of the working
    # directory parent
    backwards = (pa == p2)
    action = []
    copy = {}
    diverge = {}

    def fmerge(f, f2=None, fa=None):
        """merge flags"""
        if not f2:
            f2 = f
            fa = f
        # keep a flag bit if either side changed it relative to ancestor
        a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
        if ((a^b) | (a^c)) ^ a:
            return 'x'
        a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
        if ((a^b) | (a^c)) ^ a:
            return 'l'
        return ''

    def act(msg, m, f, *args):
        # record one action and trace it for --debug
        repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
        action.append((f, m) + args)

    if not (backwards or overwrite):
        copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())

    for of, fl in diverge.items():
        act("divergent renames", "dr", of, fl)

    # set of copy sources; their copy destinations are handled instead
    copied = dict.fromkeys(copy.values())

    # Compare manifests
    for f, n in m1.iteritems():
        if partial and not partial(f):
            continue
        if f in m2:
            # are files different?
            if n != m2[f]:
                a = ma.get(f, nullid)
                # are both different from the ancestor?
                if not overwrite and n != a and m2[f] != a:
                    act("versions differ", "m", f, f, f, fmerge(f), False)
                # are we clobbering?
                # is remote's version newer?
                # or are we going back in time and clean?
                elif overwrite or m2[f] != a or (backwards and not n[20:]):
                    act("remote is newer", "g", f, m2.flags(f))
                # local is newer, not overwrite, check mode bits
                elif fmerge(f) != m1.flags(f):
                    act("update permissions", "e", f, m2.flags(f))
            # contents same, check mode bits
            elif m1.flags(f) != m2.flags(f):
                if overwrite or fmerge(f) != m1.flags(f):
                    act("update permissions", "e", f, m2.flags(f))
        elif f in copied:
            continue
        elif f in copy:
            f2 = copy[f]
            if f2 not in m2: # directory rename
                act("remote renamed directory to " + f2, "d",
                    f, None, f2, m1.flags(f))
            elif f2 in m1: # case 2 A,B/B/B
                act("local copied to " + f2, "m",
                    f, f2, f, fmerge(f, f2, f2), False)
            else: # case 4,21 A/B/B
                act("local moved to " + f2, "m",
                    f, f2, f, fmerge(f, f2, f2), False)
        elif f in ma:
            # file existed in ancestor but remote deleted it
            if n != ma[f] and not overwrite:
                if repo.ui.prompt(
                    (_(" local changed %s which remote deleted\n") % f) +
                    _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
                    act("prompt delete", "r", f)
            else:
                act("other deleted", "r", f)
        else:
            # file is created on branch or in working directory
            if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
                act("remote deleted", "r", f)

    # now handle files only present on the remote side
    for f, n in m2.iteritems():
        if partial and not partial(f):
            continue
        if f in m1:
            continue
        if f in copied:
            continue
        if f in copy:
            f2 = copy[f]
            if f2 not in m1: # directory rename
                act("local renamed directory to " + f2, "d",
                    None, f, f2, m2.flags(f))
            elif f2 in m2: # rename case 1, A/A,B/A
                act("remote copied to " + f, "m",
                    f2, f, f, fmerge(f2, f, f2), False)
            else: # case 3,20 A/B/A
                act("remote moved to " + f, "m",
                    f2, f, f, fmerge(f2, f, f2), True)
        elif f in ma:
            if overwrite or backwards:
                act("recreating", "g", f, m2.flags(f))
            elif n != ma[f]:
                if repo.ui.prompt(
                    (_("remote changed %s which local deleted\n") % f) +
                    _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
                    act("prompt recreating", "g", f, m2.flags(f))
        else:
            act("remote created", "g", f, m2.flags(f))

    return action
377 377
def applyupdates(repo, action, wctx, mctx):
    """apply the merge action list to the working directory

    action entries are the tuples built by manifestmerge/forgetremoved.
    Returns a (updated, merged, removed, unresolved) counter tuple.
    """

    updated, merged, removed, unresolved = 0, 0, 0, 0
    # sorting gives deterministic processing order by filename
    action.sort()
    for a in action:
        f, m = a[:2]
        if f and f[0] == "/":
            # skip pseudo-entries (e.g. flagged paths); not real files
            continue
        if m == "r": # remove
            repo.ui.note(_("removing %s\n") % f)
            util.audit_path(f)
            try:
                util.unlink(repo.wjoin(f))
            except OSError, inst:
                # a missing file is fine (already gone); anything else
                # is reported but does not abort the update
                if inst.errno != errno.ENOENT:
                    repo.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
            removed += 1
        elif m == "m": # merge
            f2, fd, flags, move = a[2:]
            r = filemerge(repo, f, f2, wctx, mctx)
            if r > 0:
                unresolved += 1
            else:
                # r is None when the files were identical (no tool run)
                if r is None:
                    updated += 1
                else:
                    merged += 1
                if f != fd:
                    # rename/copy: materialize the result under the
                    # destination name
                    repo.ui.debug(_("copying %s to %s\n") % (f, fd))
                    repo.wwrite(fd, repo.wread(f), flags)
                    if move:
                        repo.ui.debug(_("removing %s\n") % f)
                        os.unlink(repo.wjoin(f))
                util.set_exec(repo.wjoin(fd), "x" in flags)
        elif m == "g": # get
            flags = a[2]
            repo.ui.note(_("getting %s\n") % f)
            t = mctx.filectx(f).data()
            repo.wwrite(f, t, flags)
            updated += 1
        elif m == "d": # directory rename
            f2, fd, flags = a[2:]
            # f set: local file moved into renamed dir; f2 set: remote
            # file fetched into renamed dir
            if f:
                repo.ui.note(_("moving %s to %s\n") % (f, fd))
                t = wctx.filectx(f).data()
                repo.wwrite(fd, t, flags)
                util.unlink(repo.wjoin(f))
            if f2:
                repo.ui.note(_("getting %s to %s\n") % (f2, fd))
                t = mctx.filectx(f2).data()
                repo.wwrite(fd, t, flags)
            updated += 1
        elif m == "dr": # divergent renames
            fl = a[2]
            repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
            for nf in fl:
                repo.ui.warn(" %s\n" % nf)
        elif m == "e": # exec
            flags = a[2]
            util.set_exec(repo.wjoin(f), flags)

    return updated, merged, removed, unresolved
442 442
def recordupdates(repo, action, branchmerge):
    """record merge actions to the dirstate

    Mirrors applyupdates: for every action that touched the working
    directory, update the dirstate so status/commit see the right
    state.  branchmerge selects merge semantics vs. plain update.
    """

    for a in action:
        f, m = a[:2]
        if m == "r": # remove
            if branchmerge:
                repo.dirstate.remove(f)
            else:
                repo.dirstate.forget(f)
        elif m == "f": # forget
            repo.dirstate.forget(f)
        elif m == "g": # get
            if branchmerge:
                # file came from the other branch; dirty until commit
                repo.dirstate.normaldirty(f)
            else:
                repo.dirstate.normal(f)
        elif m == "m": # merge
            f2, fd, flag, move = a[2:]
            if branchmerge:
                # We've done a branch merge, mark this file as merged
                # so that we properly record the merger later
                repo.dirstate.merge(fd)
                if f != f2: # copy/rename
                    if move:
                        repo.dirstate.remove(f)
                    if f != fd:
                        repo.dirstate.copy(f, fd)
                    else:
                        repo.dirstate.copy(f2, fd)
            else:
                # We've update-merged a locally modified file, so
                # we set the dirstate to emulate a normal checkout
                # of that file some time in the past. Thus our
                # merge will appear as a normal local file
                # modification.
                repo.dirstate.normaldirty(fd)
                if move:
                    repo.dirstate.forget(f)
        elif m == "d": # directory rename
            f2, fd, flag = a[2:]
            if not f2 and f not in repo.dirstate:
                # untracked file moved
                continue
            if branchmerge:
                repo.dirstate.add(fd)
                if f:
                    repo.dirstate.remove(f)
                    repo.dirstate.copy(f, fd)
                if f2:
                    repo.dirstate.copy(f2, fd)
            else:
                repo.dirstate.normal(fd)
                if f:
                    repo.dirstate.forget(f)
498 498
def update(repo, node, branchmerge, force, partial, wlock):
    """
    Perform a merge between the working directory and the given node

    branchmerge = whether to merge between branches
    force = whether to force branch merging or file overwriting
    partial = a function to filter file lists (dirstate not updated)
    wlock = working dir lock, if already held

    Returns the (updated, merged, removed, unresolved) stats tuple from
    applyupdates.  The working-dir lock is released (via del in the
    finally clause) even on error.
    """

    try:
        if not wlock:
            wlock = repo.wlock()

        wc = repo.workingctx()
        if node is None:
            # tip of current branch
            try:
                node = repo.branchtags()[wc.branch()]
            except KeyError:
                raise util.Abort(_("branch %s not found") % wc.branch())
        overwrite = force and not branchmerge
        forcemerge = force and branchmerge
        pl = wc.parents()
        p1, p2 = pl[0], repo.changectx(node)
        pa = p1.ancestor(p2)
        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
        fastforward = False

        ### check phase
        if not overwrite and len(pl) > 1:
            raise util.Abort(_("outstanding uncommitted merges"))
        if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
            if branchmerge:
                # merging across named branches is allowed even when
                # linear (fast-forward); same-branch linear merge is not
                if p1.branch() != p2.branch() and pa != p2:
                    fastforward = True
                else:
                    raise util.Abort(_("there is nothing to merge, just use "
                                       "'hg update' or look at 'hg heads'"))
        elif not (overwrite or branchmerge):
            raise util.Abort(_("update spans branches, use 'hg merge' "
                               "or 'hg update -C' to lose changes"))
        if branchmerge and not forcemerge:
            if wc.files():
                raise util.Abort(_("outstanding uncommitted changes"))

        ### calculate phase
        action = []
        if not force:
            checkunknown(wc, p2)
        if not util.checkfolding(repo.path):
            # case-insensitive filesystem: guard against collisions
            checkcollision(p2)
        if not branchmerge:
            action += forgetremoved(wc, p2)
        action += manifestmerge(repo, wc, p2, pa, overwrite, partial)

        ### apply phase
        if not branchmerge: # just jump to the new rev
            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
        if not partial:
            repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)

        stats = applyupdates(repo, action, wc, p2)

        if not partial:
            recordupdates(repo, action, branchmerge)
            repo.dirstate.setparents(fp1, fp2)
            if not branchmerge and not fastforward:
                repo.dirstate.setbranch(p2.branch())
            repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])

        return stats
    finally:
        # dropping the last reference releases the lock
        del wlock
@@ -1,95 +1,98 b''
1 1 # streamclone.py - streaming clone server support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import os, stat, util, lock
10 10
11 11 # if server supports streaming clone, it advertises "stream"
12 12 # capability with value that is version+flags of repo it is serving.
13 13 # client only streams if it can read that repo format.
14 14
def walkrepo(root):
    '''iterate over metadata files in repository.
    walk in natural (sorted) order.
    yields 2-tuples: name of .d or .i file, size of file.'''

    prefix_len = len(root) + len(os.sep)

    def scan(path, recurse):
        # yield (relative name, size) for every .d/.i regular file
        for entry in sorted(os.listdir(path)):
            full = os.path.join(path, entry)
            st = os.lstat(full)
            if stat.S_ISDIR(st.st_mode):
                if recurse:
                    for item in scan(full, True):
                        yield item
            elif stat.S_ISREG(st.st_mode) and len(entry) >= 2:
                if entry[-2:] in ('.d', '.i'):
                    yield full[prefix_len:], st.st_size

    # write file data first
    for item in scan(os.path.join(root, 'data'), True):
        yield item
    # write manifest before changelog
    meta = list(scan(root, False))
    meta.sort()
    meta.reverse()
    for item in meta:
        yield item
46 46
47 47 # stream file format is simple.
48 48 #
49 49 # server writes out line that says how many files, how many total
50 50 # bytes. separator is ascii space, byte counts are strings.
51 51 #
52 52 # then for each file:
53 53 #
54 54 # server writes out line that says file name, how many bytes in
55 55 # file. separator is ascii nul, byte count is string.
56 56 #
57 57 # server writes out raw file data.
58 58
59 59 def stream_out(repo, fileobj, untrusted=False):
60 60 '''stream out all metadata files in repository.
61 61 writes to file-like object, must support write() and optional flush().'''
62 62
63 63 if not repo.ui.configbool('server', 'uncompressed', untrusted=untrusted):
64 64 fileobj.write('1\n')
65 65 return
66 66
67 67 # get consistent snapshot of repo. lock during scan so lock not
68 68 # needed while we stream, and commits can happen.
69 lock = None
70 try:
69 71 try:
70 72 repolock = repo.lock()
71 73 except (lock.LockHeld, lock.LockUnavailable), inst:
72 74 repo.ui.warn('locking the repository failed: %s\n' % (inst,))
73 75 fileobj.write('2\n')
74 76 return
75 77
76 78 fileobj.write('0\n')
77 79 repo.ui.debug('scanning\n')
78 80 entries = []
79 81 total_bytes = 0
80 82 for name, size in walkrepo(repo.spath):
81 83 name = repo.decodefn(util.pconvert(name))
82 84 entries.append((name, size))
83 85 total_bytes += size
84 repolock.release()
86 finally:
87 del repolock
85 88
86 89 repo.ui.debug('%d files, %d bytes to transfer\n' %
87 90 (len(entries), total_bytes))
88 91 fileobj.write('%d %d\n' % (len(entries), total_bytes))
89 92 for name, size in entries:
90 93 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
91 94 fileobj.write('%s\0%d\n' % (name, size))
92 95 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
93 96 fileobj.write(chunk)
94 97 flush = getattr(fileobj, 'flush', None)
95 98 if flush: flush()
@@ -1,208 +1,213 b''
1 1 # verify.py - repository integrity checking for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import revlog, mdiff
11 11
def verify(repo):
    """Verify repository integrity while holding the repo lock.

    The lock is dropped (via del in the finally clause) even if
    _verify raises.  Returns _verify's result (1 on integrity errors,
    None otherwise).
    """
    lock = repo.lock()
    try:
        return _verify(repo)
    finally:
        del lock
18
19 def _verify(repo):
13 20 filelinkrevs = {}
14 21 filenodes = {}
15 22 changesets = revisions = files = 0
16 23 errors = [0]
17 24 warnings = [0]
18 25 neededmanifests = {}
19 26
20 lock = repo.lock()
21
22 27 def err(msg):
23 28 repo.ui.warn(msg + "\n")
24 29 errors[0] += 1
25 30
26 31 def warn(msg):
27 32 repo.ui.warn(msg + "\n")
28 33 warnings[0] += 1
29 34
30 35 def checksize(obj, name):
31 36 d = obj.checksize()
32 37 if d[0]:
33 38 err(_("%s data length off by %d bytes") % (name, d[0]))
34 39 if d[1]:
35 40 err(_("%s index contains %d extra bytes") % (name, d[1]))
36 41
37 42 def checkversion(obj, name):
38 43 if obj.version != revlog.REVLOGV0:
39 44 if not revlogv1:
40 45 warn(_("warning: `%s' uses revlog format 1") % name)
41 46 elif revlogv1:
42 47 warn(_("warning: `%s' uses revlog format 0") % name)
43 48
44 49 revlogv1 = repo.changelog.version != revlog.REVLOGV0
45 50 if repo.ui.verbose or not revlogv1:
46 51 repo.ui.status(_("repository uses revlog format %d\n") %
47 52 (revlogv1 and 1 or 0))
48 53
49 54 seen = {}
50 55 repo.ui.status(_("checking changesets\n"))
51 56 checksize(repo.changelog, "changelog")
52 57
53 58 for i in xrange(repo.changelog.count()):
54 59 changesets += 1
55 60 n = repo.changelog.node(i)
56 61 l = repo.changelog.linkrev(n)
57 62 if l != i:
58 63 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
59 64 if n in seen:
60 65 err(_("duplicate changeset at revision %d") % i)
61 66 seen[n] = 1
62 67
63 68 for p in repo.changelog.parents(n):
64 69 if p not in repo.changelog.nodemap:
65 70 err(_("changeset %s has unknown parent %s") %
66 71 (short(n), short(p)))
67 72 try:
68 73 changes = repo.changelog.read(n)
69 74 except KeyboardInterrupt:
70 75 repo.ui.warn(_("interrupted"))
71 76 raise
72 77 except Exception, inst:
73 78 err(_("unpacking changeset %s: %s") % (short(n), inst))
74 79 continue
75 80
76 81 neededmanifests[changes[0]] = n
77 82
78 83 for f in changes[3]:
79 84 filelinkrevs.setdefault(f, []).append(i)
80 85
81 86 seen = {}
82 87 repo.ui.status(_("checking manifests\n"))
83 88 checkversion(repo.manifest, "manifest")
84 89 checksize(repo.manifest, "manifest")
85 90
86 91 for i in xrange(repo.manifest.count()):
87 92 n = repo.manifest.node(i)
88 93 l = repo.manifest.linkrev(n)
89 94
90 95 if l < 0 or l >= repo.changelog.count():
91 96 err(_("bad manifest link (%d) at revision %d") % (l, i))
92 97
93 98 if n in neededmanifests:
94 99 del neededmanifests[n]
95 100
96 101 if n in seen:
97 102 err(_("duplicate manifest at revision %d") % i)
98 103
99 104 seen[n] = 1
100 105
101 106 for p in repo.manifest.parents(n):
102 107 if p not in repo.manifest.nodemap:
103 108 err(_("manifest %s has unknown parent %s") %
104 109 (short(n), short(p)))
105 110
106 111 try:
107 112 for f, fn in repo.manifest.readdelta(n).iteritems():
108 113 filenodes.setdefault(f, {})[fn] = 1
109 114 except KeyboardInterrupt:
110 115 repo.ui.warn(_("interrupted"))
111 116 raise
112 117 except Exception, inst:
113 118 err(_("reading delta for manifest %s: %s") % (short(n), inst))
114 119 continue
115 120
116 121 repo.ui.status(_("crosschecking files in changesets and manifests\n"))
117 122
118 123 for m, c in neededmanifests.items():
119 124 err(_("Changeset %s refers to unknown manifest %s") %
120 125 (short(m), short(c)))
121 126 del neededmanifests
122 127
123 128 for f in filenodes:
124 129 if f not in filelinkrevs:
125 130 err(_("file %s in manifest but not in changesets") % f)
126 131
127 132 for f in filelinkrevs:
128 133 if f not in filenodes:
129 134 err(_("file %s in changeset but not in manifest") % f)
130 135
131 136 repo.ui.status(_("checking files\n"))
132 137 ff = filenodes.keys()
133 138 ff.sort()
134 139 for f in ff:
135 140 if f == "/dev/null":
136 141 continue
137 142 files += 1
138 143 if not f:
139 144 err(_("file without name in manifest %s") % short(n))
140 145 continue
141 146 fl = repo.file(f)
142 147 checkversion(fl, f)
143 148 checksize(fl, f)
144 149
145 150 nodes = {nullid: 1}
146 151 seen = {}
147 152 for i in xrange(fl.count()):
148 153 revisions += 1
149 154 n = fl.node(i)
150 155
151 156 if n in seen:
152 157 err(_("%s: duplicate revision %d") % (f, i))
153 158 if n not in filenodes[f]:
154 159 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
155 160 else:
156 161 del filenodes[f][n]
157 162
158 163 flr = fl.linkrev(n)
159 164 if flr not in filelinkrevs.get(f, []):
160 165 err(_("%s:%s points to unexpected changeset %d")
161 166 % (f, short(n), flr))
162 167 else:
163 168 filelinkrevs[f].remove(flr)
164 169
165 170 # verify contents
166 171 try:
167 172 t = fl.read(n)
168 173 except KeyboardInterrupt:
169 174 repo.ui.warn(_("interrupted"))
170 175 raise
171 176 except Exception, inst:
172 177 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
173 178
174 179 # verify parents
175 180 (p1, p2) = fl.parents(n)
176 181 if p1 not in nodes:
177 182 err(_("file %s:%s unknown parent 1 %s") %
178 183 (f, short(n), short(p1)))
179 184 if p2 not in nodes:
180 185 err(_("file %s:%s unknown parent 2 %s") %
181 186 (f, short(n), short(p1)))
182 187 nodes[n] = 1
183 188
184 189 # check renames
185 190 try:
186 191 rp = fl.renamed(n)
187 192 if rp:
188 193 fl2 = repo.file(rp[0])
189 194 rev = fl2.rev(rp[1])
190 195 except KeyboardInterrupt:
191 196 repo.ui.warn(_("interrupted"))
192 197 raise
193 198 except Exception, inst:
194 199 err(_("checking rename on file %s %s: %s") % (f, short(n), inst))
195 200
196 201 # cross-check
197 202 for node in filenodes[f]:
198 203 err(_("node %s in manifests not in %s") % (hex(node), f))
199 204
200 205 repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
201 206 (files, changesets, revisions))
202 207
203 208 if warnings[0]:
204 209 repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
205 210 if errors[0]:
206 211 repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
207 212 return 1
208 213
General Comments 0
You need to be logged in to leave comments. Login now