export: fixed silent output file overwriting...
Ronny Pfannschmidt
r7319:eae1767c default
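The functional change in this revision is the small make_file() hunk in cmdutil.py below: append mode ('a') is now treated as writable alongside 'w', presumably so that export can append to an already existing output file instead of silently overwriting it (the export-side caller is not part of this excerpt, so that linkage is an assumption). A minimal standalone sketch of the new check follows; open_output is a hypothetical helper name, and the repository/filename-expansion parts of the real make_file() are omitted.

import sys

def open_output(pat, mode='wb'):
    # Sketch of make_file()'s mode handling after this change: append
    # mode now counts as writable, so an existing output file can be
    # opened for appending rather than silently truncated.
    writable = 'w' in mode or 'a' in mode

    if not pat or pat == '-':
        # No pattern or '-': writable modes map to stdout, others to stdin.
        return sys.stdout if writable else sys.stdin
    if hasattr(pat, 'write') and writable:
        return pat              # already an open, writable file-like object
    if hasattr(pat, 'read') and 'r' in mode:
        return pat              # already an open, readable file-like object
    return open(pat, mode)      # plain filename: open it with the given mode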
@@ -1,1190 +1,1193 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, bisect, stat
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 12 import match as _match
13 13
14 14 revrangesep = ':'
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18 class AmbiguousCommand(Exception):
19 19 """Exception raised if command shortcut matches more than one command."""
20 20
21 21 def findpossible(cmd, table, strict=False):
22 22 """
23 23 Return cmd -> (aliases, command table entry)
24 24 for each matching command.
25 25 Return debug commands (or their aliases) only if no normal command matches.
26 26 """
27 27 choice = {}
28 28 debugchoice = {}
29 29 for e in table.keys():
30 30 aliases = e.lstrip("^").split("|")
31 31 found = None
32 32 if cmd in aliases:
33 33 found = cmd
34 34 elif not strict:
35 35 for a in aliases:
36 36 if a.startswith(cmd):
37 37 found = a
38 38 break
39 39 if found is not None:
40 40 if aliases[0].startswith("debug") or found.startswith("debug"):
41 41 debugchoice[found] = (aliases, table[e])
42 42 else:
43 43 choice[found] = (aliases, table[e])
44 44
45 45 if not choice and debugchoice:
46 46 choice = debugchoice
47 47
48 48 return choice
49 49
50 50 def findcmd(cmd, table, strict=True):
51 51 """Return (aliases, command table entry) for command string."""
52 52 choice = findpossible(cmd, table, strict)
53 53
54 54 if cmd in choice:
55 55 return choice[cmd]
56 56
57 57 if len(choice) > 1:
58 58 clist = choice.keys()
59 59 clist.sort()
60 60 raise AmbiguousCommand(cmd, clist)
61 61
62 62 if choice:
63 63 return choice.values()[0]
64 64
65 65 raise UnknownCommand(cmd)
66 66
67 67 def bail_if_changed(repo):
68 68 if repo.dirstate.parents()[1] != nullid:
69 69 raise util.Abort(_('outstanding uncommitted merge'))
70 70 modified, added, removed, deleted = repo.status()[:4]
71 71 if modified or added or removed or deleted:
72 72 raise util.Abort(_("outstanding uncommitted changes"))
73 73
74 74 def logmessage(opts):
75 75 """ get the log message according to -m and -l option """
76 76 message = opts['message']
77 77 logfile = opts['logfile']
78 78
79 79 if message and logfile:
80 80 raise util.Abort(_('options --message and --logfile are mutually '
81 81 'exclusive'))
82 82 if not message and logfile:
83 83 try:
84 84 if logfile == '-':
85 85 message = sys.stdin.read()
86 86 else:
87 87 message = open(logfile).read()
88 88 except IOError, inst:
89 89 raise util.Abort(_("can't read commit message '%s': %s") %
90 90 (logfile, inst.strerror))
91 91 return message
92 92
93 93 def loglimit(opts):
94 94 """get the log limit according to option -l/--limit"""
95 95 limit = opts.get('limit')
96 96 if limit:
97 97 try:
98 98 limit = int(limit)
99 99 except ValueError:
100 100 raise util.Abort(_('limit must be a positive integer'))
101 101 if limit <= 0: raise util.Abort(_('limit must be positive'))
102 102 else:
103 103 limit = sys.maxint
104 104 return limit
105 105
106 106 def setremoteconfig(ui, opts):
107 107 "copy remote options to ui tree"
108 108 if opts.get('ssh'):
109 109 ui.setconfig("ui", "ssh", opts['ssh'])
110 110 if opts.get('remotecmd'):
111 111 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
112 112
113 113 def revpair(repo, revs):
114 114 '''return pair of nodes, given list of revisions. second item can
115 115 be None, meaning use working dir.'''
116 116
117 117 def revfix(repo, val, defval):
118 118 if not val and val != 0 and defval is not None:
119 119 val = defval
120 120 return repo.lookup(val)
121 121
122 122 if not revs:
123 123 return repo.dirstate.parents()[0], None
124 124 end = None
125 125 if len(revs) == 1:
126 126 if revrangesep in revs[0]:
127 127 start, end = revs[0].split(revrangesep, 1)
128 128 start = revfix(repo, start, 0)
129 129 end = revfix(repo, end, len(repo) - 1)
130 130 else:
131 131 start = revfix(repo, revs[0], None)
132 132 elif len(revs) == 2:
133 133 if revrangesep in revs[0] or revrangesep in revs[1]:
134 134 raise util.Abort(_('too many revisions specified'))
135 135 start = revfix(repo, revs[0], None)
136 136 end = revfix(repo, revs[1], None)
137 137 else:
138 138 raise util.Abort(_('too many revisions specified'))
139 139 return start, end
140 140
141 141 def revrange(repo, revs):
142 142 """Yield revision as strings from a list of revision specifications."""
143 143
144 144 def revfix(repo, val, defval):
145 145 if not val and val != 0 and defval is not None:
146 146 return defval
147 147 return repo.changelog.rev(repo.lookup(val))
148 148
149 149 seen, l = {}, []
150 150 for spec in revs:
151 151 if revrangesep in spec:
152 152 start, end = spec.split(revrangesep, 1)
153 153 start = revfix(repo, start, 0)
154 154 end = revfix(repo, end, len(repo) - 1)
155 155 step = start > end and -1 or 1
156 156 for rev in xrange(start, end+step, step):
157 157 if rev in seen:
158 158 continue
159 159 seen[rev] = 1
160 160 l.append(rev)
161 161 else:
162 162 rev = revfix(repo, spec, None)
163 163 if rev in seen:
164 164 continue
165 165 seen[rev] = 1
166 166 l.append(rev)
167 167
168 168 return l
169 169
170 170 def make_filename(repo, pat, node,
171 171 total=None, seqno=None, revwidth=None, pathname=None):
172 172 node_expander = {
173 173 'H': lambda: hex(node),
174 174 'R': lambda: str(repo.changelog.rev(node)),
175 175 'h': lambda: short(node),
176 176 }
177 177 expander = {
178 178 '%': lambda: '%',
179 179 'b': lambda: os.path.basename(repo.root),
180 180 }
181 181
182 182 try:
183 183 if node:
184 184 expander.update(node_expander)
185 185 if node:
186 186 expander['r'] = (lambda:
187 187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
188 188 if total is not None:
189 189 expander['N'] = lambda: str(total)
190 190 if seqno is not None:
191 191 expander['n'] = lambda: str(seqno)
192 192 if total is not None and seqno is not None:
193 193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
194 194 if pathname is not None:
195 195 expander['s'] = lambda: os.path.basename(pathname)
196 196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
197 197 expander['p'] = lambda: pathname
198 198
199 199 newname = []
200 200 patlen = len(pat)
201 201 i = 0
202 202 while i < patlen:
203 203 c = pat[i]
204 204 if c == '%':
205 205 i += 1
206 206 c = pat[i]
207 207 c = expander[c]()
208 208 newname.append(c)
209 209 i += 1
210 210 return ''.join(newname)
211 211 except KeyError, inst:
212 212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
213 213 inst.args[0])
214 214
215 215 def make_file(repo, pat, node=None,
216 216 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
217
218 writable = 'w' in mode or 'a' in mode
219
217 220 if not pat or pat == '-':
218 return 'w' in mode and sys.stdout or sys.stdin
219 if hasattr(pat, 'write') and 'w' in mode:
221 return writable and sys.stdout or sys.stdin
222 if hasattr(pat, 'write') and writable:
220 223 return pat
221 224 if hasattr(pat, 'read') and 'r' in mode:
222 225 return pat
223 226 return open(make_filename(repo, pat, node, total, seqno, revwidth,
224 227 pathname),
225 228 mode)
226 229
227 230 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
228 231 if not globbed and default == 'relpath':
229 232 pats = util.expand_glob(pats or [])
230 233 m = _match.match(repo.root, repo.getcwd(), pats,
231 234 opts.get('include'), opts.get('exclude'), default)
232 235 def badfn(f, msg):
233 236 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
234 237 return False
235 238 m.bad = badfn
236 239 return m
237 240
238 241 def matchall(repo):
239 242 return _match.always(repo.root, repo.getcwd())
240 243
241 244 def matchfiles(repo, files):
242 245 return _match.exact(repo.root, repo.getcwd(), files)
243 246
244 247 def findrenames(repo, added=None, removed=None, threshold=0.5):
245 248 '''find renamed files -- yields (before, after, score) tuples'''
246 249 if added is None or removed is None:
247 250 added, removed = repo.status()[1:3]
248 251 ctx = repo['.']
249 252 for a in added:
250 253 aa = repo.wread(a)
251 254 bestname, bestscore = None, threshold
252 255 for r in removed:
253 256 rr = ctx.filectx(r).data()
254 257
255 258 # bdiff.blocks() returns blocks of matching lines
256 259 # count the number of bytes in each
257 260 equal = 0
258 261 alines = mdiff.splitnewlines(aa)
259 262 matches = bdiff.blocks(aa, rr)
260 263 for x1,x2,y1,y2 in matches:
261 264 for line in alines[x1:x2]:
262 265 equal += len(line)
263 266
264 267 lengths = len(aa) + len(rr)
265 268 if lengths:
266 269 myscore = equal*2.0 / lengths
267 270 if myscore >= bestscore:
268 271 bestname, bestscore = r, myscore
269 272 if bestname:
270 273 yield bestname, a, bestscore
271 274
272 275 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
273 276 if dry_run is None:
274 277 dry_run = opts.get('dry_run')
275 278 if similarity is None:
276 279 similarity = float(opts.get('similarity') or 0)
277 280 add, remove = [], []
278 281 mapping = {}
279 282 audit_path = util.path_auditor(repo.root)
280 283 m = match(repo, pats, opts)
281 284 for abs in repo.walk(m):
282 285 target = repo.wjoin(abs)
283 286 good = True
284 287 try:
285 288 audit_path(abs)
286 289 except:
287 290 good = False
288 291 rel = m.rel(abs)
289 292 exact = m.exact(abs)
290 293 if good and abs not in repo.dirstate:
291 294 add.append(abs)
292 295 mapping[abs] = rel, m.exact(abs)
293 296 if repo.ui.verbose or not exact:
294 297 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
295 298 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
296 299 or (os.path.isdir(target) and not os.path.islink(target))):
297 300 remove.append(abs)
298 301 mapping[abs] = rel, exact
299 302 if repo.ui.verbose or not exact:
300 303 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
301 304 if not dry_run:
302 305 repo.remove(remove)
303 306 repo.add(add)
304 307 if similarity > 0:
305 308 for old, new, score in findrenames(repo, add, remove, similarity):
306 309 oldrel, oldexact = mapping[old]
307 310 newrel, newexact = mapping[new]
308 311 if repo.ui.verbose or not oldexact or not newexact:
309 312 repo.ui.status(_('recording removal of %s as rename to %s '
310 313 '(%d%% similar)\n') %
311 314 (oldrel, newrel, score * 100))
312 315 if not dry_run:
313 316 repo.copy(old, new)
314 317
315 318 def copy(ui, repo, pats, opts, rename=False):
316 319 # called with the repo lock held
317 320 #
318 321 # hgsep => pathname that uses "/" to separate directories
319 322 # ossep => pathname that uses os.sep to separate directories
320 323 cwd = repo.getcwd()
321 324 targets = {}
322 325 after = opts.get("after")
323 326 dryrun = opts.get("dry_run")
324 327
325 328 def walkpat(pat):
326 329 srcs = []
327 330 m = match(repo, [pat], opts, globbed=True)
328 331 for abs in repo.walk(m):
329 332 state = repo.dirstate[abs]
330 333 rel = m.rel(abs)
331 334 exact = m.exact(abs)
332 335 if state in '?r':
333 336 if exact and state == '?':
334 337 ui.warn(_('%s: not copying - file is not managed\n') % rel)
335 338 if exact and state == 'r':
336 339 ui.warn(_('%s: not copying - file has been marked for'
337 340 ' remove\n') % rel)
338 341 continue
339 342 # abs: hgsep
340 343 # rel: ossep
341 344 srcs.append((abs, rel, exact))
342 345 return srcs
343 346
344 347 # abssrc: hgsep
345 348 # relsrc: ossep
346 349 # otarget: ossep
347 350 def copyfile(abssrc, relsrc, otarget, exact):
348 351 abstarget = util.canonpath(repo.root, cwd, otarget)
349 352 reltarget = repo.pathto(abstarget, cwd)
350 353 target = repo.wjoin(abstarget)
351 354 src = repo.wjoin(abssrc)
352 355 state = repo.dirstate[abstarget]
353 356
354 357 # check for collisions
355 358 prevsrc = targets.get(abstarget)
356 359 if prevsrc is not None:
357 360 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
358 361 (reltarget, repo.pathto(abssrc, cwd),
359 362 repo.pathto(prevsrc, cwd)))
360 363 return
361 364
362 365 # check for overwrites
363 366 exists = os.path.exists(target)
364 367 if (not after and exists or after and state in 'mn'):
365 368 if not opts['force']:
366 369 ui.warn(_('%s: not overwriting - file exists\n') %
367 370 reltarget)
368 371 return
369 372
370 373 if after:
371 374 if not exists:
372 375 return
373 376 elif not dryrun:
374 377 try:
375 378 if exists:
376 379 os.unlink(target)
377 380 targetdir = os.path.dirname(target) or '.'
378 381 if not os.path.isdir(targetdir):
379 382 os.makedirs(targetdir)
380 383 util.copyfile(src, target)
381 384 except IOError, inst:
382 385 if inst.errno == errno.ENOENT:
383 386 ui.warn(_('%s: deleted in working copy\n') % relsrc)
384 387 else:
385 388 ui.warn(_('%s: cannot copy - %s\n') %
386 389 (relsrc, inst.strerror))
387 390 return True # report a failure
388 391
389 392 if ui.verbose or not exact:
390 393 action = rename and "moving" or "copying"
391 394 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
392 395
393 396 targets[abstarget] = abssrc
394 397
395 398 # fix up dirstate
396 399 origsrc = repo.dirstate.copied(abssrc) or abssrc
397 400 if abstarget == origsrc: # copying back a copy?
398 401 if state not in 'mn' and not dryrun:
399 402 repo.dirstate.normallookup(abstarget)
400 403 else:
401 404 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
402 405 if not ui.quiet:
403 406 ui.warn(_("%s has not been committed yet, so no copy "
404 407 "data will be stored for %s.\n")
405 408 % (repo.pathto(origsrc, cwd), reltarget))
406 409 if repo.dirstate[abstarget] in '?r' and not dryrun:
407 410 repo.add([abstarget])
408 411 elif not dryrun:
409 412 repo.copy(origsrc, abstarget)
410 413
411 414 if rename and not dryrun:
412 415 repo.remove([abssrc], not after)
413 416
414 417 # pat: ossep
415 418 # dest ossep
416 419 # srcs: list of (hgsep, hgsep, ossep, bool)
417 420 # return: function that takes hgsep and returns ossep
418 421 def targetpathfn(pat, dest, srcs):
419 422 if os.path.isdir(pat):
420 423 abspfx = util.canonpath(repo.root, cwd, pat)
421 424 abspfx = util.localpath(abspfx)
422 425 if destdirexists:
423 426 striplen = len(os.path.split(abspfx)[0])
424 427 else:
425 428 striplen = len(abspfx)
426 429 if striplen:
427 430 striplen += len(os.sep)
428 431 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
429 432 elif destdirexists:
430 433 res = lambda p: os.path.join(dest,
431 434 os.path.basename(util.localpath(p)))
432 435 else:
433 436 res = lambda p: dest
434 437 return res
435 438
436 439 # pat: ossep
437 440 # dest ossep
438 441 # srcs: list of (hgsep, hgsep, ossep, bool)
439 442 # return: function that takes hgsep and returns ossep
440 443 def targetpathafterfn(pat, dest, srcs):
441 444 if util.patkind(pat, None)[0]:
442 445 # a mercurial pattern
443 446 res = lambda p: os.path.join(dest,
444 447 os.path.basename(util.localpath(p)))
445 448 else:
446 449 abspfx = util.canonpath(repo.root, cwd, pat)
447 450 if len(abspfx) < len(srcs[0][0]):
448 451 # A directory. Either the target path contains the last
449 452 # component of the source path or it does not.
450 453 def evalpath(striplen):
451 454 score = 0
452 455 for s in srcs:
453 456 t = os.path.join(dest, util.localpath(s[0])[striplen:])
454 457 if os.path.exists(t):
455 458 score += 1
456 459 return score
457 460
458 461 abspfx = util.localpath(abspfx)
459 462 striplen = len(abspfx)
460 463 if striplen:
461 464 striplen += len(os.sep)
462 465 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
463 466 score = evalpath(striplen)
464 467 striplen1 = len(os.path.split(abspfx)[0])
465 468 if striplen1:
466 469 striplen1 += len(os.sep)
467 470 if evalpath(striplen1) > score:
468 471 striplen = striplen1
469 472 res = lambda p: os.path.join(dest,
470 473 util.localpath(p)[striplen:])
471 474 else:
472 475 # a file
473 476 if destdirexists:
474 477 res = lambda p: os.path.join(dest,
475 478 os.path.basename(util.localpath(p)))
476 479 else:
477 480 res = lambda p: dest
478 481 return res
479 482
480 483
481 484 pats = util.expand_glob(pats)
482 485 if not pats:
483 486 raise util.Abort(_('no source or destination specified'))
484 487 if len(pats) == 1:
485 488 raise util.Abort(_('no destination specified'))
486 489 dest = pats.pop()
487 490 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
488 491 if not destdirexists:
489 492 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
490 493 raise util.Abort(_('with multiple sources, destination must be an '
491 494 'existing directory'))
492 495 if util.endswithsep(dest):
493 496 raise util.Abort(_('destination %s is not a directory') % dest)
494 497
495 498 tfn = targetpathfn
496 499 if after:
497 500 tfn = targetpathafterfn
498 501 copylist = []
499 502 for pat in pats:
500 503 srcs = walkpat(pat)
501 504 if not srcs:
502 505 continue
503 506 copylist.append((tfn(pat, dest, srcs), srcs))
504 507 if not copylist:
505 508 raise util.Abort(_('no files to copy'))
506 509
507 510 errors = 0
508 511 for targetpath, srcs in copylist:
509 512 for abssrc, relsrc, exact in srcs:
510 513 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
511 514 errors += 1
512 515
513 516 if errors:
514 517 ui.warn(_('(consider using --after)\n'))
515 518
516 519 return errors
517 520
518 521 def service(opts, parentfn=None, initfn=None, runfn=None):
519 522 '''Run a command as a service.'''
520 523
521 524 if opts['daemon'] and not opts['daemon_pipefds']:
522 525 rfd, wfd = os.pipe()
523 526 args = sys.argv[:]
524 527 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
525 528 # Don't pass --cwd to the child process, because we've already
526 529 # changed directory.
527 530 for i in xrange(1,len(args)):
528 531 if args[i].startswith('--cwd='):
529 532 del args[i]
530 533 break
531 534 elif args[i].startswith('--cwd'):
532 535 del args[i:i+2]
533 536 break
534 537 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
535 538 args[0], args)
536 539 os.close(wfd)
537 540 os.read(rfd, 1)
538 541 if parentfn:
539 542 return parentfn(pid)
540 543 else:
541 544 os._exit(0)
542 545
543 546 if initfn:
544 547 initfn()
545 548
546 549 if opts['pid_file']:
547 550 fp = open(opts['pid_file'], 'w')
548 551 fp.write(str(os.getpid()) + '\n')
549 552 fp.close()
550 553
551 554 if opts['daemon_pipefds']:
552 555 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
553 556 os.close(rfd)
554 557 try:
555 558 os.setsid()
556 559 except AttributeError:
557 560 pass
558 561 os.write(wfd, 'y')
559 562 os.close(wfd)
560 563 sys.stdout.flush()
561 564 sys.stderr.flush()
562 565 fd = os.open(util.nulldev, os.O_RDWR)
563 566 if fd != 0: os.dup2(fd, 0)
564 567 if fd != 1: os.dup2(fd, 1)
565 568 if fd != 2: os.dup2(fd, 2)
566 569 if fd not in (0, 1, 2): os.close(fd)
567 570
568 571 if runfn:
569 572 return runfn()
570 573
571 574 class changeset_printer(object):
572 575 '''show changeset information when templating not requested.'''
573 576
574 577 def __init__(self, ui, repo, patch, buffered):
575 578 self.ui = ui
576 579 self.repo = repo
577 580 self.buffered = buffered
578 581 self.patch = patch
579 582 self.header = {}
580 583 self.hunk = {}
581 584 self.lastheader = None
582 585
583 586 def flush(self, rev):
584 587 if rev in self.header:
585 588 h = self.header[rev]
586 589 if h != self.lastheader:
587 590 self.lastheader = h
588 591 self.ui.write(h)
589 592 del self.header[rev]
590 593 if rev in self.hunk:
591 594 self.ui.write(self.hunk[rev])
592 595 del self.hunk[rev]
593 596 return 1
594 597 return 0
595 598
596 599 def show(self, rev=0, changenode=None, copies=(), **props):
597 600 if self.buffered:
598 601 self.ui.pushbuffer()
599 602 self._show(rev, changenode, copies, props)
600 603 self.hunk[rev] = self.ui.popbuffer()
601 604 else:
602 605 self._show(rev, changenode, copies, props)
603 606
604 607 def _show(self, rev, changenode, copies, props):
605 608 '''show a single changeset or file revision'''
606 609 log = self.repo.changelog
607 610 if changenode is None:
608 611 changenode = log.node(rev)
609 612 elif not rev:
610 613 rev = log.rev(changenode)
611 614
612 615 if self.ui.quiet:
613 616 self.ui.write("%d:%s\n" % (rev, short(changenode)))
614 617 return
615 618
616 619 changes = log.read(changenode)
617 620 date = util.datestr(changes[2])
618 621 extra = changes[5]
619 622 branch = extra.get("branch")
620 623
621 624 hexfunc = self.ui.debugflag and hex or short
622 625
623 626 parents = [(p, hexfunc(log.node(p)))
624 627 for p in self._meaningful_parentrevs(log, rev)]
625 628
626 629 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
627 630
628 631 # don't show the default branch name
629 632 if branch != 'default':
630 633 branch = util.tolocal(branch)
631 634 self.ui.write(_("branch: %s\n") % branch)
632 635 for tag in self.repo.nodetags(changenode):
633 636 self.ui.write(_("tag: %s\n") % tag)
634 637 for parent in parents:
635 638 self.ui.write(_("parent: %d:%s\n") % parent)
636 639
637 640 if self.ui.debugflag:
638 641 self.ui.write(_("manifest: %d:%s\n") %
639 642 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
640 643 self.ui.write(_("user: %s\n") % changes[1])
641 644 self.ui.write(_("date: %s\n") % date)
642 645
643 646 if self.ui.debugflag:
644 647 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
645 648 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
646 649 files):
647 650 if value:
648 651 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
649 652 elif changes[3] and self.ui.verbose:
650 653 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
651 654 if copies and self.ui.verbose:
652 655 copies = ['%s (%s)' % c for c in copies]
653 656 self.ui.write(_("copies: %s\n") % ' '.join(copies))
654 657
655 658 if extra and self.ui.debugflag:
656 659 for key, value in util.sort(extra.items()):
657 660 self.ui.write(_("extra: %s=%s\n")
658 661 % (key, value.encode('string_escape')))
659 662
660 663 description = changes[4].strip()
661 664 if description:
662 665 if self.ui.verbose:
663 666 self.ui.write(_("description:\n"))
664 667 self.ui.write(description)
665 668 self.ui.write("\n\n")
666 669 else:
667 670 self.ui.write(_("summary: %s\n") %
668 671 description.splitlines()[0])
669 672 self.ui.write("\n")
670 673
671 674 self.showpatch(changenode)
672 675
673 676 def showpatch(self, node):
674 677 if self.patch:
675 678 prev = self.repo.changelog.parents(node)[0]
676 679 chunks = patch.diff(self.repo, prev, node, match=self.patch,
677 680 opts=patch.diffopts(self.ui))
678 681 for chunk in chunks:
679 682 self.ui.write(chunk)
680 683 self.ui.write("\n")
681 684
682 685 def _meaningful_parentrevs(self, log, rev):
683 686 """Return list of meaningful (or all if debug) parentrevs for rev.
684 687
685 688 For merges (two non-nullrev revisions) both parents are meaningful.
686 689 Otherwise the first parent revision is considered meaningful if it
687 690 is not the preceding revision.
688 691 """
689 692 parents = log.parentrevs(rev)
690 693 if not self.ui.debugflag and parents[1] == nullrev:
691 694 if parents[0] >= rev - 1:
692 695 parents = []
693 696 else:
694 697 parents = [parents[0]]
695 698 return parents
696 699
697 700
698 701 class changeset_templater(changeset_printer):
699 702 '''format changeset information.'''
700 703
701 704 def __init__(self, ui, repo, patch, mapfile, buffered):
702 705 changeset_printer.__init__(self, ui, repo, patch, buffered)
703 706 filters = templatefilters.filters.copy()
704 707 filters['formatnode'] = (ui.debugflag and (lambda x: x)
705 708 or (lambda x: x[:12]))
706 709 self.t = templater.templater(mapfile, filters,
707 710 cache={
708 711 'parent': '{rev}:{node|formatnode} ',
709 712 'manifest': '{rev}:{node|formatnode}',
710 713 'filecopy': '{name} ({source})'})
711 714
712 715 def use_template(self, t):
713 716 '''set template string to use'''
714 717 self.t.cache['changeset'] = t
715 718
716 719 def _show(self, rev, changenode, copies, props):
717 720 '''show a single changeset or file revision'''
718 721 log = self.repo.changelog
719 722 if changenode is None:
720 723 changenode = log.node(rev)
721 724 elif not rev:
722 725 rev = log.rev(changenode)
723 726
724 727 changes = log.read(changenode)
725 728
726 729 def showlist(name, values, plural=None, **args):
727 730 '''expand set of values.
728 731 name is name of key in template map.
729 732 values is list of strings or dicts.
730 733 plural is plural of name, if not simply name + 's'.
731 734
732 735 expansion works like this, given name 'foo'.
733 736
734 737 if values is empty, expand 'no_foos'.
735 738
736 739 if 'foo' not in template map, return values as a string,
737 740 joined by space.
738 741
739 742 expand 'start_foos'.
740 743
741 744 for each value, expand 'foo'. if 'last_foo' in template
742 745 map, expand it instead of 'foo' for last key.
743 746
744 747 expand 'end_foos'.
745 748 '''
746 749 if plural: names = plural
747 750 else: names = name + 's'
748 751 if not values:
749 752 noname = 'no_' + names
750 753 if noname in self.t:
751 754 yield self.t(noname, **args)
752 755 return
753 756 if name not in self.t:
754 757 if isinstance(values[0], str):
755 758 yield ' '.join(values)
756 759 else:
757 760 for v in values:
758 761 yield dict(v, **args)
759 762 return
760 763 startname = 'start_' + names
761 764 if startname in self.t:
762 765 yield self.t(startname, **args)
763 766 vargs = args.copy()
764 767 def one(v, tag=name):
765 768 try:
766 769 vargs.update(v)
767 770 except (AttributeError, ValueError):
768 771 try:
769 772 for a, b in v:
770 773 vargs[a] = b
771 774 except ValueError:
772 775 vargs[name] = v
773 776 return self.t(tag, **vargs)
774 777 lastname = 'last_' + name
775 778 if lastname in self.t:
776 779 last = values.pop()
777 780 else:
778 781 last = None
779 782 for v in values:
780 783 yield one(v)
781 784 if last is not None:
782 785 yield one(last, tag=lastname)
783 786 endname = 'end_' + names
784 787 if endname in self.t:
785 788 yield self.t(endname, **args)
786 789
787 790 def showbranches(**args):
788 791 branch = changes[5].get("branch")
789 792 if branch != 'default':
790 793 branch = util.tolocal(branch)
791 794 return showlist('branch', [branch], plural='branches', **args)
792 795
793 796 def showparents(**args):
794 797 parents = [[('rev', p), ('node', hex(log.node(p)))]
795 798 for p in self._meaningful_parentrevs(log, rev)]
796 799 return showlist('parent', parents, **args)
797 800
798 801 def showtags(**args):
799 802 return showlist('tag', self.repo.nodetags(changenode), **args)
800 803
801 804 def showextras(**args):
802 805 for key, value in util.sort(changes[5].items()):
803 806 args = args.copy()
804 807 args.update(dict(key=key, value=value))
805 808 yield self.t('extra', **args)
806 809
807 810 def showcopies(**args):
808 811 c = [{'name': x[0], 'source': x[1]} for x in copies]
809 812 return showlist('file_copy', c, plural='file_copies', **args)
810 813
811 814 files = []
812 815 def getfiles():
813 816 if not files:
814 817 files[:] = self.repo.status(
815 818 log.parents(changenode)[0], changenode)[:3]
816 819 return files
817 820 def showfiles(**args):
818 821 return showlist('file', changes[3], **args)
819 822 def showmods(**args):
820 823 return showlist('file_mod', getfiles()[0], **args)
821 824 def showadds(**args):
822 825 return showlist('file_add', getfiles()[1], **args)
823 826 def showdels(**args):
824 827 return showlist('file_del', getfiles()[2], **args)
825 828 def showmanifest(**args):
826 829 args = args.copy()
827 830 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
828 831 node=hex(changes[0])))
829 832 return self.t('manifest', **args)
830 833
831 834 defprops = {
832 835 'author': changes[1],
833 836 'branches': showbranches,
834 837 'date': changes[2],
835 838 'desc': changes[4].strip(),
836 839 'file_adds': showadds,
837 840 'file_dels': showdels,
838 841 'file_mods': showmods,
839 842 'files': showfiles,
840 843 'file_copies': showcopies,
841 844 'manifest': showmanifest,
842 845 'node': hex(changenode),
843 846 'parents': showparents,
844 847 'rev': rev,
845 848 'tags': showtags,
846 849 'extras': showextras,
847 850 }
848 851 props = props.copy()
849 852 props.update(defprops)
850 853
851 854 try:
852 855 if self.ui.debugflag and 'header_debug' in self.t:
853 856 key = 'header_debug'
854 857 elif self.ui.quiet and 'header_quiet' in self.t:
855 858 key = 'header_quiet'
856 859 elif self.ui.verbose and 'header_verbose' in self.t:
857 860 key = 'header_verbose'
858 861 elif 'header' in self.t:
859 862 key = 'header'
860 863 else:
861 864 key = ''
862 865 if key:
863 866 h = templater.stringify(self.t(key, **props))
864 867 if self.buffered:
865 868 self.header[rev] = h
866 869 else:
867 870 self.ui.write(h)
868 871 if self.ui.debugflag and 'changeset_debug' in self.t:
869 872 key = 'changeset_debug'
870 873 elif self.ui.quiet and 'changeset_quiet' in self.t:
871 874 key = 'changeset_quiet'
872 875 elif self.ui.verbose and 'changeset_verbose' in self.t:
873 876 key = 'changeset_verbose'
874 877 else:
875 878 key = 'changeset'
876 879 self.ui.write(templater.stringify(self.t(key, **props)))
877 880 self.showpatch(changenode)
878 881 except KeyError, inst:
879 882 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
880 883 inst.args[0]))
881 884 except SyntaxError, inst:
882 885 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
883 886
884 887 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
885 888 """show one changeset using template or regular display.
886 889
887 890 Display format will be the first non-empty hit of:
888 891 1. option 'template'
889 892 2. option 'style'
890 893 3. [ui] setting 'logtemplate'
891 894 4. [ui] setting 'style'
892 895 If all of these values are either the unset or the empty string,
893 896 regular display via changeset_printer() is done.
894 897 """
895 898 # options
896 899 patch = False
897 900 if opts.get('patch'):
898 901 patch = matchfn or matchall(repo)
899 902
900 903 tmpl = opts.get('template')
901 904 mapfile = None
902 905 if tmpl:
903 906 tmpl = templater.parsestring(tmpl, quoted=False)
904 907 else:
905 908 mapfile = opts.get('style')
906 909 # ui settings
907 910 if not mapfile:
908 911 tmpl = ui.config('ui', 'logtemplate')
909 912 if tmpl:
910 913 tmpl = templater.parsestring(tmpl)
911 914 else:
912 915 mapfile = ui.config('ui', 'style')
913 916
914 917 if tmpl or mapfile:
915 918 if mapfile:
916 919 if not os.path.split(mapfile)[0]:
917 920 mapname = (templater.templatepath('map-cmdline.' + mapfile)
918 921 or templater.templatepath(mapfile))
919 922 if mapname: mapfile = mapname
920 923 try:
921 924 t = changeset_templater(ui, repo, patch, mapfile, buffered)
922 925 except SyntaxError, inst:
923 926 raise util.Abort(inst.args[0])
924 927 if tmpl: t.use_template(tmpl)
925 928 return t
926 929 return changeset_printer(ui, repo, patch, buffered)
927 930
928 931 def finddate(ui, repo, date):
929 932 """Find the tipmost changeset that matches the given date spec"""
930 933 df = util.matchdate(date)
931 934 get = util.cachefunc(lambda r: repo[r].changeset())
932 935 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
933 936 results = {}
934 937 for st, rev, fns in changeiter:
935 938 if st == 'add':
936 939 d = get(rev)[2]
937 940 if df(d[0]):
938 941 results[rev] = d
939 942 elif st == 'iter':
940 943 if rev in results:
941 944 ui.status(_("Found revision %s from %s\n") %
942 945 (rev, util.datestr(results[rev])))
943 946 return str(rev)
944 947
945 948 raise util.Abort(_("revision matching date not found"))
946 949
947 950 def walkchangerevs(ui, repo, pats, change, opts):
948 951 '''Iterate over files and the revs they changed in.
949 952
950 953 Callers most commonly need to iterate backwards over the history
951 954 it is interested in. Doing so has awful (quadratic-looking)
952 955 performance, so we use iterators in a "windowed" way.
953 956
954 957 We walk a window of revisions in the desired order. Within the
955 958 window, we first walk forwards to gather data, then in the desired
956 959 order (usually backwards) to display it.
957 960
958 961 This function returns an (iterator, matchfn) tuple. The iterator
959 962 yields 3-tuples. They will be of one of the following forms:
960 963
961 964 "window", incrementing, lastrev: stepping through a window,
962 965 positive if walking forwards through revs, last rev in the
963 966 sequence iterated over - use to reset state for the current window
964 967
965 968 "add", rev, fns: out-of-order traversal of the given file names
966 969 fns, which changed during revision rev - use to gather data for
967 970 possible display
968 971
969 972 "iter", rev, None: in-order traversal of the revs earlier iterated
970 973 over with "add" - use to display data'''
971 974
972 975 def increasing_windows(start, end, windowsize=8, sizelimit=512):
973 976 if start < end:
974 977 while start < end:
975 978 yield start, min(windowsize, end-start)
976 979 start += windowsize
977 980 if windowsize < sizelimit:
978 981 windowsize *= 2
979 982 else:
980 983 while start > end:
981 984 yield start, min(windowsize, start-end-1)
982 985 start -= windowsize
983 986 if windowsize < sizelimit:
984 987 windowsize *= 2
985 988
986 989 m = match(repo, pats, opts)
987 990 follow = opts.get('follow') or opts.get('follow_first')
988 991
989 992 if not len(repo):
990 993 return [], m
991 994
992 995 if follow:
993 996 defrange = '%s:0' % repo['.'].rev()
994 997 else:
995 998 defrange = '-1:0'
996 999 revs = revrange(repo, opts['rev'] or [defrange])
997 1000 wanted = {}
998 1001 slowpath = m.anypats() or opts.get('removed')
999 1002 fncache = {}
1000 1003
1001 1004 if not slowpath and not m.files():
1002 1005 # No files, no patterns. Display all revs.
1003 1006 wanted = dict.fromkeys(revs)
1004 1007 copies = []
1005 1008 if not slowpath:
1006 1009 # Only files, no patterns. Check the history of each file.
1007 1010 def filerevgen(filelog, node):
1008 1011 cl_count = len(repo)
1009 1012 if node is None:
1010 1013 last = len(filelog) - 1
1011 1014 else:
1012 1015 last = filelog.rev(node)
1013 1016 for i, window in increasing_windows(last, nullrev):
1014 1017 revs = []
1015 1018 for j in xrange(i - window, i + 1):
1016 1019 n = filelog.node(j)
1017 1020 revs.append((filelog.linkrev(n),
1018 1021 follow and filelog.renamed(n)))
1019 1022 revs.reverse()
1020 1023 for rev in revs:
1021 1024 # only yield rev for which we have the changelog, it can
1022 1025 # happen while doing "hg log" during a pull or commit
1023 1026 if rev[0] < cl_count:
1024 1027 yield rev
1025 1028 def iterfiles():
1026 1029 for filename in m.files():
1027 1030 yield filename, None
1028 1031 for filename_node in copies:
1029 1032 yield filename_node
1030 1033 minrev, maxrev = min(revs), max(revs)
1031 1034 for file_, node in iterfiles():
1032 1035 filelog = repo.file(file_)
1033 1036 if not len(filelog):
1034 1037 if node is None:
1035 1038 # A zero count may be a directory or deleted file, so
1036 1039 # try to find matching entries on the slow path.
1037 1040 slowpath = True
1038 1041 break
1039 1042 else:
1040 1043 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1041 1044 % (file_, short(node)))
1042 1045 continue
1043 1046 for rev, copied in filerevgen(filelog, node):
1044 1047 if rev <= maxrev:
1045 1048 if rev < minrev:
1046 1049 break
1047 1050 fncache.setdefault(rev, [])
1048 1051 fncache[rev].append(file_)
1049 1052 wanted[rev] = 1
1050 1053 if follow and copied:
1051 1054 copies.append(copied)
1052 1055 if slowpath:
1053 1056 if follow:
1054 1057 raise util.Abort(_('can only follow copies/renames for explicit '
1055 1058 'file names'))
1056 1059
1057 1060 # The slow path checks files modified in every changeset.
1058 1061 def changerevgen():
1059 1062 for i, window in increasing_windows(len(repo) - 1, nullrev):
1060 1063 for j in xrange(i - window, i + 1):
1061 1064 yield j, change(j)[3]
1062 1065
1063 1066 for rev, changefiles in changerevgen():
1064 1067 matches = filter(m, changefiles)
1065 1068 if matches:
1066 1069 fncache[rev] = matches
1067 1070 wanted[rev] = 1
1068 1071
1069 1072 class followfilter:
1070 1073 def __init__(self, onlyfirst=False):
1071 1074 self.startrev = nullrev
1072 1075 self.roots = []
1073 1076 self.onlyfirst = onlyfirst
1074 1077
1075 1078 def match(self, rev):
1076 1079 def realparents(rev):
1077 1080 if self.onlyfirst:
1078 1081 return repo.changelog.parentrevs(rev)[0:1]
1079 1082 else:
1080 1083 return filter(lambda x: x != nullrev,
1081 1084 repo.changelog.parentrevs(rev))
1082 1085
1083 1086 if self.startrev == nullrev:
1084 1087 self.startrev = rev
1085 1088 return True
1086 1089
1087 1090 if rev > self.startrev:
1088 1091 # forward: all descendants
1089 1092 if not self.roots:
1090 1093 self.roots.append(self.startrev)
1091 1094 for parent in realparents(rev):
1092 1095 if parent in self.roots:
1093 1096 self.roots.append(rev)
1094 1097 return True
1095 1098 else:
1096 1099 # backwards: all parents
1097 1100 if not self.roots:
1098 1101 self.roots.extend(realparents(self.startrev))
1099 1102 if rev in self.roots:
1100 1103 self.roots.remove(rev)
1101 1104 self.roots.extend(realparents(rev))
1102 1105 return True
1103 1106
1104 1107 return False
1105 1108
1106 1109 # it might be worthwhile to do this in the iterator if the rev range
1107 1110 # is descending and the prune args are all within that range
1108 1111 for rev in opts.get('prune', ()):
1109 1112 rev = repo.changelog.rev(repo.lookup(rev))
1110 1113 ff = followfilter()
1111 1114 stop = min(revs[0], revs[-1])
1112 1115 for x in xrange(rev, stop-1, -1):
1113 1116 if ff.match(x) and x in wanted:
1114 1117 del wanted[x]
1115 1118
1116 1119 def iterate():
1117 1120 if follow and not m.files():
1118 1121 ff = followfilter(onlyfirst=opts.get('follow_first'))
1119 1122 def want(rev):
1120 1123 if ff.match(rev) and rev in wanted:
1121 1124 return True
1122 1125 return False
1123 1126 else:
1124 1127 def want(rev):
1125 1128 return rev in wanted
1126 1129
1127 1130 for i, window in increasing_windows(0, len(revs)):
1128 1131 yield 'window', revs[0] < revs[-1], revs[-1]
1129 1132 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1130 1133 for rev in util.sort(list(nrevs)):
1131 1134 fns = fncache.get(rev)
1132 1135 if not fns:
1133 1136 def fns_generator():
1134 1137 for f in change(rev)[3]:
1135 1138 if m(f):
1136 1139 yield f
1137 1140 fns = fns_generator()
1138 1141 yield 'add', rev, fns
1139 1142 for rev in nrevs:
1140 1143 yield 'iter', rev, None
1141 1144 return iterate(), m
1142 1145
1143 1146 def commit(ui, repo, commitfunc, pats, opts):
1144 1147 '''commit the specified files or all outstanding changes'''
1145 1148 date = opts.get('date')
1146 1149 if date:
1147 1150 opts['date'] = util.parsedate(date)
1148 1151 message = logmessage(opts)
1149 1152
1150 1153 # extract addremove carefully -- this function can be called from a command
1151 1154 # that doesn't support addremove
1152 1155 if opts.get('addremove'):
1153 1156 addremove(repo, pats, opts)
1154 1157
1155 1158 m = match(repo, pats, opts)
1156 1159 if pats:
1157 1160 modified, added, removed = repo.status(match=m)[:3]
1158 1161 files = util.sort(modified + added + removed)
1159 1162
1160 1163 def is_dir(f):
1161 1164 name = f + '/'
1162 1165 i = bisect.bisect(files, name)
1163 1166 return i < len(files) and files[i].startswith(name)
1164 1167
1165 1168 for f in m.files():
1166 1169 if f == '.':
1167 1170 continue
1168 1171 if f not in files:
1169 1172 rf = repo.wjoin(f)
1170 1173 rel = repo.pathto(f)
1171 1174 try:
1172 1175 mode = os.lstat(rf)[stat.ST_MODE]
1173 1176 except OSError:
1174 1177 if is_dir(f): # deleted directory ?
1175 1178 continue
1176 1179 raise util.Abort(_("file %s not found!") % rel)
1177 1180 if stat.S_ISDIR(mode):
1178 1181 if not is_dir(f):
1179 1182 raise util.Abort(_("no match under directory %s!")
1180 1183 % rel)
1181 1184 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1182 1185 raise util.Abort(_("can't commit %s: "
1183 1186 "unsupported file type!") % rel)
1184 1187 elif f not in repo.dirstate:
1185 1188 raise util.Abort(_("file %s not tracked!") % rel)
1186 1189 m = matchfiles(repo, files)
1187 1190 try:
1188 1191 return commitfunc(ui, repo, message, m, opts)
1189 1192 except ValueError, inst:
1190 1193 raise util.Abort(str(inst))
@@ -1,1330 +1,1331 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import hex, nullid, short
11 11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
12 12 import cStringIO, email.Parser, os, re, errno
13 13 import sys, tempfile, zlib
14 14
15 15 gitre = re.compile('diff --git a/(.*) b/(.*)')
16 16
17 17 class PatchError(Exception):
18 18 pass
19 19
20 20 class NoHunks(PatchError):
21 21 pass
22 22
23 23 # helper functions
24 24
25 25 def copyfile(src, dst, basedir=None):
26 26 if not basedir:
27 27 basedir = os.getcwd()
28 28
29 29 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
30 30 if os.path.exists(absdst):
31 31 raise util.Abort(_("cannot create %s: destination already exists") %
32 32 dst)
33 33
34 34 if not os.path.isdir(basedir):
35 35 os.makedirs(basedir)
36 36
37 37 util.copyfile(abssrc, absdst)
38 38
39 39 # public functions
40 40
41 41 def extract(ui, fileobj):
42 42 '''extract patch from data read from fileobj.
43 43
44 44 patch can be a normal patch or contained in an email message.
45 45
46 46 return tuple (filename, message, user, date, node, p1, p2).
47 47 Any item in the returned tuple can be None. If filename is None,
48 48 fileobj did not contain a patch. Caller must unlink filename when done.'''
49 49
50 50 # attempt to detect the start of a patch
51 51 # (this heuristic is borrowed from quilt)
52 52 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
53 53 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
54 54 '(---|\*\*\*)[ \t])', re.MULTILINE)
55 55
56 56 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
57 57 tmpfp = os.fdopen(fd, 'w')
58 58 try:
59 59 msg = email.Parser.Parser().parse(fileobj)
60 60
61 61 subject = msg['Subject']
62 62 user = msg['From']
63 63 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
64 64 # should try to parse msg['Date']
65 65 date = None
66 66 nodeid = None
67 67 branch = None
68 68 parents = []
69 69
70 70 if subject:
71 71 if subject.startswith('[PATCH'):
72 72 pend = subject.find(']')
73 73 if pend >= 0:
74 74 subject = subject[pend+1:].lstrip()
75 75 subject = subject.replace('\n\t', ' ')
76 76 ui.debug('Subject: %s\n' % subject)
77 77 if user:
78 78 ui.debug('From: %s\n' % user)
79 79 diffs_seen = 0
80 80 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
81 81 message = ''
82 82 for part in msg.walk():
83 83 content_type = part.get_content_type()
84 84 ui.debug('Content-Type: %s\n' % content_type)
85 85 if content_type not in ok_types:
86 86 continue
87 87 payload = part.get_payload(decode=True)
88 88 m = diffre.search(payload)
89 89 if m:
90 90 hgpatch = False
91 91 ignoretext = False
92 92
93 93 ui.debug(_('found patch at byte %d\n') % m.start(0))
94 94 diffs_seen += 1
95 95 cfp = cStringIO.StringIO()
96 96 for line in payload[:m.start(0)].splitlines():
97 97 if line.startswith('# HG changeset patch'):
98 98 ui.debug(_('patch generated by hg export\n'))
99 99 hgpatch = True
100 100 # drop earlier commit message content
101 101 cfp.seek(0)
102 102 cfp.truncate()
103 103 subject = None
104 104 elif hgpatch:
105 105 if line.startswith('# User '):
106 106 user = line[7:]
107 107 ui.debug('From: %s\n' % user)
108 108 elif line.startswith("# Date "):
109 109 date = line[7:]
110 110 elif line.startswith("# Branch "):
111 111 branch = line[9:]
112 112 elif line.startswith("# Node ID "):
113 113 nodeid = line[10:]
114 114 elif line.startswith("# Parent "):
115 115 parents.append(line[10:])
116 116 elif line == '---' and gitsendmail:
117 117 ignoretext = True
118 118 if not line.startswith('# ') and not ignoretext:
119 119 cfp.write(line)
120 120 cfp.write('\n')
121 121 message = cfp.getvalue()
122 122 if tmpfp:
123 123 tmpfp.write(payload)
124 124 if not payload.endswith('\n'):
125 125 tmpfp.write('\n')
126 126 elif not diffs_seen and message and content_type == 'text/plain':
127 127 message += '\n' + payload
128 128 except:
129 129 tmpfp.close()
130 130 os.unlink(tmpname)
131 131 raise
132 132
133 133 if subject and not message.startswith(subject):
134 134 message = '%s\n%s' % (subject, message)
135 135 tmpfp.close()
136 136 if not diffs_seen:
137 137 os.unlink(tmpname)
138 138 return None, message, user, date, branch, None, None, None
139 139 p1 = parents and parents.pop(0) or None
140 140 p2 = parents and parents.pop(0) or None
141 141 return tmpname, message, user, date, branch, nodeid, p1, p2
142 142
143 143 GP_PATCH = 1 << 0 # we have to run patch
144 144 GP_FILTER = 1 << 1 # there's some copy/rename operation
145 145 GP_BINARY = 1 << 2 # there's a binary patch
146 146
147 147 class patchmeta:
148 148 """Patched file metadata
149 149
150 150 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
151 151 or COPY. 'path' is patched file path. 'oldpath' is set to the
152 152 origin file when 'op' is either COPY or RENAME, None otherwise. If
153 153 file mode is changed, 'mode' is a tuple (islink, isexec) where
154 154 'islink' is True if the file is a symlink and 'isexec' is True if
155 155 the file is executable. Otherwise, 'mode' is None.
156 156 """
157 157 def __init__(self, path):
158 158 self.path = path
159 159 self.oldpath = None
160 160 self.mode = None
161 161 self.op = 'MODIFY'
162 162 self.lineno = 0
163 163 self.binary = False
164 164
165 165 def setmode(self, mode):
166 166 islink = mode & 020000
167 167 isexec = mode & 0100
168 168 self.mode = (islink, isexec)
169 169
170 170 def readgitpatch(lr):
171 171 """extract git-style metadata about patches from <patchname>"""
172 172
173 173 # Filter patch for git information
174 174 gp = None
175 175 gitpatches = []
176 176 # Can have a git patch with only metadata, causing patch to complain
177 177 dopatch = 0
178 178
179 179 lineno = 0
180 180 for line in lr:
181 181 lineno += 1
182 182 if line.startswith('diff --git'):
183 183 m = gitre.match(line)
184 184 if m:
185 185 if gp:
186 186 gitpatches.append(gp)
187 187 src, dst = m.group(1, 2)
188 188 gp = patchmeta(dst)
189 189 gp.lineno = lineno
190 190 elif gp:
191 191 if line.startswith('--- '):
192 192 if gp.op in ('COPY', 'RENAME'):
193 193 dopatch |= GP_FILTER
194 194 gitpatches.append(gp)
195 195 gp = None
196 196 dopatch |= GP_PATCH
197 197 continue
198 198 if line.startswith('rename from '):
199 199 gp.op = 'RENAME'
200 200 gp.oldpath = line[12:].rstrip()
201 201 elif line.startswith('rename to '):
202 202 gp.path = line[10:].rstrip()
203 203 elif line.startswith('copy from '):
204 204 gp.op = 'COPY'
205 205 gp.oldpath = line[10:].rstrip()
206 206 elif line.startswith('copy to '):
207 207 gp.path = line[8:].rstrip()
208 208 elif line.startswith('deleted file'):
209 209 gp.op = 'DELETE'
210 210 elif line.startswith('new file mode '):
211 211 gp.op = 'ADD'
212 212 gp.setmode(int(line.rstrip()[-6:], 8))
213 213 elif line.startswith('new mode '):
214 214 gp.setmode(int(line.rstrip()[-6:], 8))
215 215 elif line.startswith('GIT binary patch'):
216 216 dopatch |= GP_BINARY
217 217 gp.binary = True
218 218 if gp:
219 219 gitpatches.append(gp)
220 220
221 221 if not gitpatches:
222 222 dopatch = GP_PATCH
223 223
224 224 return (dopatch, gitpatches)
225 225
226 226 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
227 227 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
228 228 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
229 229
230 230 class patchfile:
231 231 def __init__(self, ui, fname, missing=False):
232 232 self.fname = fname
233 233 self.ui = ui
234 234 self.lines = []
235 235 self.exists = False
236 236 self.missing = missing
237 237 if not missing:
238 238 try:
239 239 fp = file(fname, 'rb')
240 240 self.lines = fp.readlines()
241 241 self.exists = True
242 242 except IOError:
243 243 pass
244 244 else:
245 245 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
246 246
247 247 if not self.exists:
248 248 dirname = os.path.dirname(fname)
249 249 if dirname and not os.path.isdir(dirname):
250 250 os.makedirs(dirname)
251 251
252 252 self.hash = {}
253 253 self.dirty = 0
254 254 self.offset = 0
255 255 self.rej = []
256 256 self.fileprinted = False
257 257 self.printfile(False)
258 258 self.hunks = 0
259 259
260 260 def printfile(self, warn):
261 261 if self.fileprinted:
262 262 return
263 263 if warn or self.ui.verbose:
264 264 self.fileprinted = True
265 265 s = _("patching file %s\n") % self.fname
266 266 if warn:
267 267 self.ui.warn(s)
268 268 else:
269 269 self.ui.note(s)
270 270
271 271
272 272 def findlines(self, l, linenum):
273 273 # looks through the hash and finds candidate lines. The
274 274 # result is a list of line numbers sorted based on distance
275 275 # from linenum
276 276 def sorter(a, b):
277 277 vala = abs(a - linenum)
278 278 valb = abs(b - linenum)
279 279 return cmp(vala, valb)
280 280
281 281 try:
282 282 cand = self.hash[l]
283 283 except:
284 284 return []
285 285
286 286 if len(cand) > 1:
287 287 # resort our list of potentials forward then back.
288 288 cand.sort(sorter)
289 289 return cand
290 290
291 291 def hashlines(self):
292 292 self.hash = {}
293 293 for x in xrange(len(self.lines)):
294 294 s = self.lines[x]
295 295 self.hash.setdefault(s, []).append(x)
296 296
297 297 def write_rej(self):
298 298 # our rejects are a little different from patch(1). This always
299 299 # creates rejects in the same form as the original patch. A file
300 300 # header is inserted so that you can run the reject through patch again
301 301 # without having to type the filename.
302 302
303 303 if not self.rej:
304 304 return
305 305
306 306 fname = self.fname + ".rej"
307 307 self.ui.warn(
308 308 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
309 309 (len(self.rej), self.hunks, fname))
310 310 try: os.unlink(fname)
311 311 except:
312 312 pass
313 313 fp = file(fname, 'wb')
314 314 base = os.path.basename(self.fname)
315 315 fp.write("--- %s\n+++ %s\n" % (base, base))
316 316 for x in self.rej:
317 317 for l in x.hunk:
318 318 fp.write(l)
319 319 if l[-1] != '\n':
320 320 fp.write("\n\ No newline at end of file\n")
321 321
322 322 def write(self, dest=None):
323 323 if self.dirty:
324 324 if not dest:
325 325 dest = self.fname
326 326 st = None
327 327 try:
328 328 st = os.lstat(dest)
329 329 except OSError, inst:
330 330 if inst.errno != errno.ENOENT:
331 331 raise
332 332 if st and st.st_nlink > 1:
333 333 os.unlink(dest)
334 334 fp = file(dest, 'wb')
335 335 if st and st.st_nlink > 1:
336 336 os.chmod(dest, st.st_mode)
337 337 fp.writelines(self.lines)
338 338 fp.close()
339 339
340 340 def close(self):
341 341 self.write()
342 342 self.write_rej()
343 343
344 344 def apply(self, h, reverse):
345 345 if not h.complete():
346 346 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
347 347 (h.number, h.desc, len(h.a), h.lena, len(h.b),
348 348 h.lenb))
349 349
350 350 self.hunks += 1
351 351 if reverse:
352 352 h.reverse()
353 353
354 354 if self.missing:
355 355 self.rej.append(h)
356 356 return -1
357 357
358 358 if self.exists and h.createfile():
359 359 self.ui.warn(_("file %s already exists\n") % self.fname)
360 360 self.rej.append(h)
361 361 return -1
362 362
363 363 if isinstance(h, binhunk):
364 364 if h.rmfile():
365 365 os.unlink(self.fname)
366 366 else:
367 367 self.lines[:] = h.new()
368 368 self.offset += len(h.new())
369 369 self.dirty = 1
370 370 return 0
371 371
372 372 # fast case first, no offsets, no fuzz
373 373 old = h.old()
374 374 # patch starts counting at 1 unless we are adding the file
375 375 if h.starta == 0:
376 376 start = 0
377 377 else:
378 378 start = h.starta + self.offset - 1
379 379 orig_start = start
380 380 if diffhelpers.testhunk(old, self.lines, start) == 0:
381 381 if h.rmfile():
382 382 os.unlink(self.fname)
383 383 else:
384 384 self.lines[start : start + h.lena] = h.new()
385 385 self.offset += h.lenb - h.lena
386 386 self.dirty = 1
387 387 return 0
388 388
389 389 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
390 390 self.hashlines()
391 391 if h.hunk[-1][0] != ' ':
392 392 # if the hunk tried to put something at the bottom of the file
393 393 # override the start line and use eof here
394 394 search_start = len(self.lines)
395 395 else:
396 396 search_start = orig_start
397 397
398 398 for fuzzlen in xrange(3):
399 399 for toponly in [ True, False ]:
400 400 old = h.old(fuzzlen, toponly)
401 401
402 402 cand = self.findlines(old[0][1:], search_start)
403 403 for l in cand:
404 404 if diffhelpers.testhunk(old, self.lines, l) == 0:
405 405 newlines = h.new(fuzzlen, toponly)
406 406 self.lines[l : l + len(old)] = newlines
407 407 self.offset += len(newlines) - len(old)
408 408 self.dirty = 1
409 409 if fuzzlen:
410 410 fuzzstr = "with fuzz %d " % fuzzlen
411 411 f = self.ui.warn
412 412 self.printfile(True)
413 413 else:
414 414 fuzzstr = ""
415 415 f = self.ui.note
416 416 offset = l - orig_start - fuzzlen
417 417 if offset == 1:
418 418 linestr = "line"
419 419 else:
420 420 linestr = "lines"
421 421 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
422 422 (h.number, l+1, fuzzstr, offset, linestr))
423 423 return fuzzlen
424 424 self.printfile(True)
425 425 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
426 426 self.rej.append(h)
427 427 return -1
428 428
429 429 class hunk:
430 430 def __init__(self, desc, num, lr, context, create=False, remove=False):
431 431 self.number = num
432 432 self.desc = desc
433 433 self.hunk = [ desc ]
434 434 self.a = []
435 435 self.b = []
436 436 if context:
437 437 self.read_context_hunk(lr)
438 438 else:
439 439 self.read_unified_hunk(lr)
440 440 self.create = create
441 441 self.remove = remove and not create
442 442
443 443 def read_unified_hunk(self, lr):
444 444 m = unidesc.match(self.desc)
445 445 if not m:
446 446 raise PatchError(_("bad hunk #%d") % self.number)
447 447 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
448 448 if self.lena == None:
449 449 self.lena = 1
450 450 else:
451 451 self.lena = int(self.lena)
452 452 if self.lenb == None:
453 453 self.lenb = 1
454 454 else:
455 455 self.lenb = int(self.lenb)
456 456 self.starta = int(self.starta)
457 457 self.startb = int(self.startb)
458 458 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
459 459 # if we hit eof before finishing out the hunk, the last line will
460 460 # be zero length. Lets try to fix it up.
461 461 while len(self.hunk[-1]) == 0:
462 462 del self.hunk[-1]
463 463 del self.a[-1]
464 464 del self.b[-1]
465 465 self.lena -= 1
466 466 self.lenb -= 1
467 467
468 468 def read_context_hunk(self, lr):
469 469 self.desc = lr.readline()
470 470 m = contextdesc.match(self.desc)
471 471 if not m:
472 472 raise PatchError(_("bad hunk #%d") % self.number)
473 473 foo, self.starta, foo2, aend, foo3 = m.groups()
474 474 self.starta = int(self.starta)
475 475 if aend == None:
476 476 aend = self.starta
477 477 self.lena = int(aend) - self.starta
478 478 if self.starta:
479 479 self.lena += 1
480 480 for x in xrange(self.lena):
481 481 l = lr.readline()
482 482 if l.startswith('---'):
483 483 lr.push(l)
484 484 break
485 485 s = l[2:]
486 486 if l.startswith('- ') or l.startswith('! '):
487 487 u = '-' + s
488 488 elif l.startswith(' '):
489 489 u = ' ' + s
490 490 else:
491 491 raise PatchError(_("bad hunk #%d old text line %d") %
492 492 (self.number, x))
493 493 self.a.append(u)
494 494 self.hunk.append(u)
495 495
496 496 l = lr.readline()
497 497 if l.startswith('\ '):
498 498 s = self.a[-1][:-1]
499 499 self.a[-1] = s
500 500 self.hunk[-1] = s
501 501 l = lr.readline()
502 502 m = contextdesc.match(l)
503 503 if not m:
504 504 raise PatchError(_("bad hunk #%d") % self.number)
505 505 foo, self.startb, foo2, bend, foo3 = m.groups()
506 506 self.startb = int(self.startb)
507 507 if bend is None:
508 508 bend = self.startb
509 509 self.lenb = int(bend) - self.startb
510 510 if self.startb:
511 511 self.lenb += 1
512 512 hunki = 1
513 513 for x in xrange(self.lenb):
514 514 l = lr.readline()
515 515 if l.startswith('\ '):
516 516 s = self.b[-1][:-1]
517 517 self.b[-1] = s
518 518 self.hunk[hunki-1] = s
519 519 continue
520 520 if not l:
521 521 lr.push(l)
522 522 break
523 523 s = l[2:]
524 524 if l.startswith('+ ') or l.startswith('! '):
525 525 u = '+' + s
526 526 elif l.startswith(' '):
527 527 u = ' ' + s
528 528 elif len(self.b) == 0:
529 529 # this can happen when the hunk does not add any lines
530 530 lr.push(l)
531 531 break
532 532 else:
533 533 raise PatchError(_("bad hunk #%d old text line %d") %
534 534 (self.number, x))
535 535 self.b.append(s)
536 536 while True:
537 537 if hunki >= len(self.hunk):
538 538 h = ""
539 539 else:
540 540 h = self.hunk[hunki]
541 541 hunki += 1
542 542 if h == u:
543 543 break
544 544 elif h.startswith('-'):
545 545 continue
546 546 else:
547 547 self.hunk.insert(hunki-1, u)
548 548 break
549 549
550 550 if not self.a:
551 551 # this happens when lines were only added to the hunk
552 552 for x in self.hunk:
553 553 if x.startswith('-') or x.startswith(' '):
554 554 self.a.append(x)
555 555 if not self.b:
556 556 # this happens when lines were only deleted from the hunk
557 557 for x in self.hunk:
558 558 if x.startswith('+') or x.startswith(' '):
559 559 self.b.append(x[1:])
560 560 # @@ -start,len +start,len @@
561 561 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
562 562 self.startb, self.lenb)
563 563 self.hunk[0] = self.desc
564 564
565 565 def reverse(self):
566 566 self.create, self.remove = self.remove, self.create
567 567 origlena = self.lena
568 568 origstarta = self.starta
569 569 self.lena = self.lenb
570 570 self.starta = self.startb
571 571 self.lenb = origlena
572 572 self.startb = origstarta
573 573 self.a = []
574 574 self.b = []
575 575 # self.hunk[0] is the @@ description
576 576 for x in xrange(1, len(self.hunk)):
577 577 o = self.hunk[x]
578 578 if o.startswith('-'):
579 579 n = '+' + o[1:]
580 580 self.b.append(o[1:])
581 581 elif o.startswith('+'):
582 582 n = '-' + o[1:]
583 583 self.a.append(n)
584 584 else:
585 585 n = o
586 586 self.b.append(o[1:])
587 587 self.a.append(o)
588 588 self.hunk[x] = n
589 589
590 590 def fix_newline(self):
591 591 diffhelpers.fix_newline(self.hunk, self.a, self.b)
592 592
593 593 def complete(self):
594 594 return len(self.a) == self.lena and len(self.b) == self.lenb
595 595
596 596 def createfile(self):
597 597 return self.starta == 0 and self.lena == 0 and self.create
598 598
599 599 def rmfile(self):
600 600 return self.startb == 0 and self.lenb == 0 and self.remove
601 601
602 602 def fuzzit(self, l, fuzz, toponly):
603 603 # this removes context lines from the top and bottom of list 'l'. It
604 604 # checks the hunk to make sure only context lines are removed, and then
605 605 # returns a new shortened list of lines.
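# Illustrative example, not part of the original source: with the usual
# three lines of leading and trailing context in self.hunk and fuzz=2,
# two context lines are trimmed from each end of 'l', so the hunk is
# matched against the file with only one context line left on each side.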
606 606 fuzz = min(fuzz, len(l)-1)
607 607 if fuzz:
608 608 top = 0
609 609 bot = 0
610 610 hlen = len(self.hunk)
611 611 for x in xrange(hlen-1):
612 612 # the hunk starts with the @@ line, so use x+1
613 613 if self.hunk[x+1][0] == ' ':
614 614 top += 1
615 615 else:
616 616 break
617 617 if not toponly:
618 618 for x in xrange(hlen-1):
619 619 if self.hunk[hlen-bot-1][0] == ' ':
620 620 bot += 1
621 621 else:
622 622 break
623 623
624 624 # top and bot now count context in the hunk
625 625 # adjust them if either one is short
626 626 context = max(top, bot, 3)
627 627 if bot < context:
628 628 bot = max(0, fuzz - (context - bot))
629 629 else:
630 630 bot = min(fuzz, bot)
631 631 if top < context:
632 632 top = max(0, fuzz - (context - top))
633 633 else:
634 634 top = min(fuzz, top)
635 635
636 636 return l[top:len(l)-bot]
637 637 return l
638 638
639 639 def old(self, fuzz=0, toponly=False):
640 640 return self.fuzzit(self.a, fuzz, toponly)
641 641
642 642 def newctrl(self):
643 643 res = []
644 644 for x in self.hunk:
645 645 c = x[0]
646 646 if c == ' ' or c == '+':
647 647 res.append(x)
648 648 return res
649 649
650 650 def new(self, fuzz=0, toponly=False):
651 651 return self.fuzzit(self.b, fuzz, toponly)
652 652
653 653 class binhunk:
654 654 'A binary patch file. Only understands literals so far.'
655 655 def __init__(self, gitpatch):
656 656 self.gitpatch = gitpatch
657 657 self.text = None
658 658 self.hunk = ['GIT binary patch\n']
659 659
660 660 def createfile(self):
661 661 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
662 662
663 663 def rmfile(self):
664 664 return self.gitpatch.op == 'DELETE'
665 665
666 666 def complete(self):
667 667 return self.text is not None
668 668
669 669 def new(self):
670 670 return [self.text]
671 671
672 672 def extract(self, lr):
673 673 line = lr.readline()
674 674 self.hunk.append(line)
675 675 while line and not line.startswith('literal '):
676 676 line = lr.readline()
677 677 self.hunk.append(line)
678 678 if not line:
679 679 raise PatchError(_('could not extract binary patch'))
680 680 size = int(line[8:].rstrip())
681 681 dec = []
682 682 line = lr.readline()
683 683 self.hunk.append(line)
684 684 while len(line) > 1:
685 685 l = line[0]
686 686 if l <= 'Z' and l >= 'A':
687 687 l = ord(l) - ord('A') + 1
688 688 else:
689 689 l = ord(l) - ord('a') + 27
690 690 dec.append(base85.b85decode(line[1:-1])[:l])
691 691 line = lr.readline()
692 692 self.hunk.append(line)
693 693 text = zlib.decompress(''.join(dec))
694 694 if len(text) != size:
695 695 raise PatchError(_('binary patch is %d bytes, not %d') %
696 696 (len(text), size))
697 697 self.text = text
698 698
699 699 def parsefilename(str):
700 700 # --- filename \t|space stuff
701 701 s = str[4:].rstrip('\r\n')
702 702 i = s.find('\t')
703 703 if i < 0:
704 704 i = s.find(' ')
705 705 if i < 0:
706 706 return s
707 707 return s[:i]
708 708
709 709 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
710 710 def pathstrip(path, count=1):
711 711 pathlen = len(path)
712 712 i = 0
713 713 if count == 0:
714 714 return '', path.rstrip()
715 715 while count > 0:
716 716 i = path.find('/', i)
717 717 if i == -1:
718 718 raise PatchError(_("unable to strip away %d dirs from %s") %
719 719 (count, path))
720 720 i += 1
721 721 # consume '//' in the path
722 722 while i < pathlen - 1 and path[i] == '/':
723 723 i += 1
724 724 count -= 1
725 725 return path[:i].lstrip(), path[i:].rstrip()
726 726
727 727 nulla = afile_orig == "/dev/null"
728 728 nullb = bfile_orig == "/dev/null"
729 729 abase, afile = pathstrip(afile_orig, strip)
730 730 gooda = not nulla and os.path.exists(afile)
731 731 bbase, bfile = pathstrip(bfile_orig, strip)
732 732 if afile == bfile:
733 733 goodb = gooda
734 734 else:
735 735 goodb = not nullb and os.path.exists(bfile)
736 736 createfunc = hunk.createfile
737 737 if reverse:
738 738 createfunc = hunk.rmfile
739 739 missing = not goodb and not gooda and not createfunc()
740 740 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
741 741 # diff is between a file and its backup. In this case, the original
742 742 # file should be patched (see original mpatch code).
743 743 isbackup = (abase == bbase and bfile.startswith(afile))
744 744 fname = None
745 745 if not missing:
746 746 if gooda and goodb:
747 747 fname = isbackup and afile or bfile
748 748 elif gooda:
749 749 fname = afile
750 750
751 751 if not fname:
752 752 if not nullb:
753 753 fname = isbackup and afile or bfile
754 754 elif not nulla:
755 755 fname = afile
756 756 else:
757 757 raise PatchError(_("undefined source and destination files"))
758 758
759 759 return fname, missing
760 760
761 761 class linereader:
762 762 # simple class to allow pushing lines back into the input stream
763 763 def __init__(self, fp):
764 764 self.fp = fp
765 765 self.buf = []
766 766
767 767 def push(self, line):
768 768 if line is not None:
769 769 self.buf.append(line)
770 770
771 771 def readline(self):
772 772 if self.buf:
773 773 l = self.buf[0]
774 774 del self.buf[0]
775 775 return l
776 776 return self.fp.readline()
777 777
778 778 def __iter__(self):
779 779 while 1:
780 780 l = self.readline()
781 781 if not l:
782 782 break
783 783 yield l
784 784
785 785 def scangitpatch(lr, firstline):
786 786 """
787 787 Git patches can emit:
788 788 - rename a to b
789 789 - change b
790 790 - copy a to c
791 791 - change c
792 792
793 793 We cannot apply this sequence as-is: the renamed 'a' could not be
794 794 found, because it would already have been renamed. And we cannot copy
795 795 from 'b' instead because 'b' would have been changed already. So
796 796 we scan the git patch for copy and rename commands so we can
797 797 perform the copies ahead of time.
798 798 """
799 799 pos = 0
800 800 try:
801 801 pos = lr.fp.tell()
802 802 fp = lr.fp
803 803 except IOError:
804 804 fp = cStringIO.StringIO(lr.fp.read())
805 805 gitlr = linereader(fp)
806 806 gitlr.push(firstline)
807 807 (dopatch, gitpatches) = readgitpatch(gitlr)
808 808 fp.seek(pos)
809 809 return dopatch, gitpatches
810 810
811 811 def iterhunks(ui, fp, sourcefile=None):
812 812 """Read a patch and yield the following events:
813 813 - ("file", afile, bfile, firsthunk): select a new target file.
814 814 - ("hunk", hunk): a new hunk is ready to be applied, follows a
815 815 "file" event.
816 816 - ("git", gitchanges): current diff is in git format, gitchanges
817 817 maps filenames to gitpatch records. Unique event.
818 818 """
819 819 changed = {}
820 820 current_hunk = None
821 821 afile = ""
822 822 bfile = ""
823 823 state = None
824 824 hunknum = 0
825 825 emitfile = False
826 826 git = False
827 827
828 828 # our states
829 829 BFILE = 1
830 830 context = None
831 831 lr = linereader(fp)
832 832 dopatch = True
833 833 # gitworkdone is True if a git operation (copy, rename, ...) was
834 834 # performed already for the current file. Useful when the file
835 835 # section may have no hunk.
836 836 gitworkdone = False
837 837
838 838 while True:
839 839 newfile = False
840 840 x = lr.readline()
841 841 if not x:
842 842 break
843 843 if current_hunk:
844 844 if x.startswith('\ '):
845 845 current_hunk.fix_newline()
846 846 yield 'hunk', current_hunk
847 847 current_hunk = None
848 848 gitworkdone = False
849 849 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
850 850 ((context or context is None) and x.startswith('***************')))):
851 851 try:
852 852 if context is None and x.startswith('***************'):
853 853 context = True
854 854 gpatch = changed.get(bfile)
855 855 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
856 856 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
857 857 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
858 858 except PatchError, err:
859 859 ui.debug(err)
860 860 current_hunk = None
861 861 continue
862 862 hunknum += 1
863 863 if emitfile:
864 864 emitfile = False
865 865 yield 'file', (afile, bfile, current_hunk)
866 866 elif state == BFILE and x.startswith('GIT binary patch'):
867 867 current_hunk = binhunk(changed[bfile])
868 868 hunknum += 1
869 869 if emitfile:
870 870 emitfile = False
871 871 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
872 872 current_hunk.extract(lr)
873 873 elif x.startswith('diff --git'):
874 874 # check for git diff, scanning the whole patch file if needed
875 875 m = gitre.match(x)
876 876 if m:
877 877 afile, bfile = m.group(1, 2)
878 878 if not git:
879 879 git = True
880 880 dopatch, gitpatches = scangitpatch(lr, x)
881 881 yield 'git', gitpatches
882 882 for gp in gitpatches:
883 883 changed[gp.path] = gp
884 884 # else error?
885 885 # copy/rename + modify should modify target, not source
886 886 gp = changed.get(bfile)
887 887 if gp and gp.op in ('COPY', 'DELETE', 'RENAME'):
888 888 afile = bfile
889 889 gitworkdone = True
890 890 newfile = True
891 891 elif x.startswith('---'):
892 892 # check for a unified diff
893 893 l2 = lr.readline()
894 894 if not l2.startswith('+++'):
895 895 lr.push(l2)
896 896 continue
897 897 newfile = True
898 898 context = False
899 899 afile = parsefilename(x)
900 900 bfile = parsefilename(l2)
901 901 elif x.startswith('***'):
902 902 # check for a context diff
903 903 l2 = lr.readline()
904 904 if not l2.startswith('---'):
905 905 lr.push(l2)
906 906 continue
907 907 l3 = lr.readline()
908 908 lr.push(l3)
909 909 if not l3.startswith("***************"):
910 910 lr.push(l2)
911 911 continue
912 912 newfile = True
913 913 context = True
914 914 afile = parsefilename(x)
915 915 bfile = parsefilename(l2)
916 916
917 917 if newfile:
918 918 emitfile = True
919 919 state = BFILE
920 920 hunknum = 0
921 921 if current_hunk:
922 922 if current_hunk.complete():
923 923 yield 'hunk', current_hunk
924 924 else:
925 925 raise PatchError(_("malformed patch %s %s") % (afile,
926 926 current_hunk.desc))
927 927
928 928 if hunknum == 0 and dopatch and not gitworkdone:
929 929 raise NoHunks
930 930
931 931 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False):
932 932 """reads a patch from fp and tries to apply it. The dict 'changed' is
933 933 filled in with all of the filenames changed by the patch. Returns 0
934 934 for a clean patch, -1 if any rejects were found and 1 if there was
935 935 any fuzz."""
936 936
937 937 rejects = 0
938 938 err = 0
939 939 current_file = None
940 940 gitpatches = None
941 941
942 942 def closefile():
943 943 if not current_file:
944 944 return 0
945 945 current_file.close()
946 946 return len(current_file.rej)
947 947
948 948 for state, values in iterhunks(ui, fp, sourcefile):
949 949 if state == 'hunk':
950 950 if not current_file:
951 951 continue
952 952 current_hunk = values
953 953 ret = current_file.apply(current_hunk, reverse)
954 954 if ret >= 0:
955 955 changed.setdefault(current_file.fname, None)
956 956 if ret > 0:
957 957 err = 1
958 958 elif state == 'file':
959 959 rejects += closefile()
960 960 afile, bfile, first_hunk = values
961 961 try:
962 962 if sourcefile:
963 963 current_file = patchfile(ui, sourcefile)
964 964 else:
965 965 current_file, missing = selectfile(afile, bfile, first_hunk,
966 966 strip, reverse)
967 967 current_file = patchfile(ui, current_file, missing)
968 968 except PatchError, err:
969 969 ui.warn(str(err) + '\n')
970 970 current_file, current_hunk = None, None
971 971 rejects += 1
972 972 continue
973 973 elif state == 'git':
974 974 gitpatches = values
975 975 cwd = os.getcwd()
976 976 for gp in gitpatches:
977 977 if gp.op in ('COPY', 'RENAME'):
978 978 src, dst = [util.canonpath(cwd, cwd, x)
979 979 for x in [gp.oldpath, gp.path]]
980 980 copyfile(src, dst)
981 981 changed[gp.path] = gp
982 982 else:
983 983 raise util.Abort(_('unsupported parser state: %s') % state)
984 984
985 985 rejects += closefile()
986 986
987 987 if rejects:
988 988 return -1
989 989 return err
990 990
991 991 def diffopts(ui, opts={}, untrusted=False):
992 992 def get(key, name=None, getter=ui.configbool):
993 993 return (opts.get(key) or
994 994 getter('diff', name or key, None, untrusted=untrusted))
995 995 return mdiff.diffopts(
996 996 text=opts.get('text'),
997 997 git=get('git'),
998 998 nodates=get('nodates'),
999 999 showfunc=get('show_function', 'showfunc'),
1000 1000 ignorews=get('ignore_all_space', 'ignorews'),
1001 1001 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1002 1002 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1003 1003 context=get('unified', getter=ui.config))
1004 1004
1005 1005 def updatedir(ui, repo, patches):
1006 1006 '''Update dirstate after patch application according to metadata'''
1007 1007 if not patches:
1008 1008 return
1009 1009 copies = []
1010 1010 removes = {}
1011 1011 cfiles = patches.keys()
1012 1012 cwd = repo.getcwd()
1013 1013 if cwd:
1014 1014 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1015 1015 for f in patches:
1016 1016 gp = patches[f]
1017 1017 if not gp:
1018 1018 continue
1019 1019 if gp.op == 'RENAME':
1020 1020 copies.append((gp.oldpath, gp.path))
1021 1021 removes[gp.oldpath] = 1
1022 1022 elif gp.op == 'COPY':
1023 1023 copies.append((gp.oldpath, gp.path))
1024 1024 elif gp.op == 'DELETE':
1025 1025 removes[gp.path] = 1
1026 1026 for src, dst in copies:
1027 1027 repo.copy(src, dst)
1028 1028 removes = removes.keys()
1029 1029 if removes:
1030 1030 repo.remove(util.sort(removes), True)
1031 1031 for f in patches:
1032 1032 gp = patches[f]
1033 1033 if gp and gp.mode:
1034 1034 islink, isexec = gp.mode
1035 1035 dst = os.path.join(repo.root, gp.path)
1036 1036 # patch won't create empty files
1037 1037 if gp.op == 'ADD' and not os.path.exists(dst):
1038 1038 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1039 1039 repo.wwrite(gp.path, '', flags)
1040 1040 else:
1041 1041 util.set_flags(dst, islink, isexec)
1042 1042 cmdutil.addremove(repo, cfiles)
1043 1043 files = patches.keys()
1044 1044 files.extend([r for r in removes if r not in files])
1045 1045 return util.sort(files)
1046 1046
1047 1047 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1048 1048 """use <patcher> to apply <patchname> to the working directory.
1049 1049 returns whether patch was applied with fuzz factor."""
1050 1050
1051 1051 fuzz = False
1052 1052 if cwd:
1053 1053 args.append('-d %s' % util.shellquote(cwd))
1054 1054 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1055 1055 util.shellquote(patchname)))
1056 1056
1057 1057 for line in fp:
1058 1058 line = line.rstrip()
1059 1059 ui.note(line + '\n')
1060 1060 if line.startswith('patching file '):
1061 1061 pf = util.parse_patch_output(line)
1062 1062 printed_file = False
1063 1063 files.setdefault(pf, None)
1064 1064 elif line.find('with fuzz') >= 0:
1065 1065 fuzz = True
1066 1066 if not printed_file:
1067 1067 ui.warn(pf + '\n')
1068 1068 printed_file = True
1069 1069 ui.warn(line + '\n')
1070 1070 elif line.find('saving rejects to file') >= 0:
1071 1071 ui.warn(line + '\n')
1072 1072 elif line.find('FAILED') >= 0:
1073 1073 if not printed_file:
1074 1074 ui.warn(pf + '\n')
1075 1075 printed_file = True
1076 1076 ui.warn(line + '\n')
1077 1077 code = fp.close()
1078 1078 if code:
1079 1079 raise PatchError(_("patch command failed: %s") %
1080 1080 util.explain_exit(code)[0])
1081 1081 return fuzz
1082 1082
1083 1083 def internalpatch(patchobj, ui, strip, cwd, files={}):
1084 1084 """use builtin patch to apply <patchobj> to the working directory.
1085 1085 returns whether patch was applied with fuzz factor."""
1086 1086 try:
1087 1087 fp = file(patchobj, 'rb')
1088 1088 except TypeError:
1089 1089 fp = patchobj
1090 1090 if cwd:
1091 1091 curdir = os.getcwd()
1092 1092 os.chdir(cwd)
1093 1093 try:
1094 1094 ret = applydiff(ui, fp, files, strip=strip)
1095 1095 finally:
1096 1096 if cwd:
1097 1097 os.chdir(curdir)
1098 1098 if ret < 0:
1099 1099 raise PatchError
1100 1100 return ret > 0
1101 1101
1102 1102 def patch(patchname, ui, strip=1, cwd=None, files={}):
1103 1103 """apply <patchname> to the working directory.
1104 1104 returns whether patch was applied with fuzz factor."""
1105 1105 patcher = ui.config('ui', 'patch')
1106 1106 args = []
1107 1107 try:
1108 1108 if patcher:
1109 1109 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1110 1110 files)
1111 1111 else:
1112 1112 try:
1113 1113 return internalpatch(patchname, ui, strip, cwd, files)
1114 1114 except NoHunks:
1115 1115 patcher = util.find_exe('gpatch') or util.find_exe('patch')
1116 1116 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1117 1117 patcher)
1118 1118 if util.needbinarypatch():
1119 1119 args.append('--binary')
1120 1120 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1121 1121 files)
1122 1122 except PatchError, err:
1123 1123 s = str(err)
1124 1124 if s:
1125 1125 raise util.Abort(s)
1126 1126 else:
1127 1127 raise util.Abort(_('patch failed to apply'))
1128 1128
1129 1129 def b85diff(to, tn):
1130 1130 '''print base85-encoded binary diff'''
1131 1131 def gitindex(text):
1132 1132 if not text:
1133 1133 return '0' * 40
1134 1134 l = len(text)
1135 1135 s = util.sha1('blob %d\0' % l)
1136 1136 s.update(text)
1137 1137 return s.hexdigest()
1138 1138
1139 1139 def fmtline(line):
1140 1140 l = len(line)
1141 1141 if l <= 26:
1142 1142 l = chr(ord('A') + l - 1)
1143 1143 else:
1144 1144 l = chr(l - 26 + ord('a') - 1)
1145 1145 return '%c%s\n' % (l, base85.b85encode(line, True))
1146 1146
1147 1147 def chunk(text, csize=52):
1148 1148 l = len(text)
1149 1149 i = 0
1150 1150 while i < l:
1151 1151 yield text[i:i+csize]
1152 1152 i += csize
1153 1153
1154 1154 tohash = gitindex(to)
1155 1155 tnhash = gitindex(tn)
1156 1156 if tohash == tnhash:
1157 1157 return ""
1158 1158
1159 1159 # TODO: deltas
1160 1160 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1161 1161 (tohash, tnhash, len(tn))]
1162 1162 for l in chunk(zlib.compress(tn)):
1163 1163 ret.append(fmtline(l))
1164 1164 ret.append('\n')
1165 1165 return ''.join(ret)
1166 1166
1167 1167 def _addmodehdr(header, omode, nmode):
1168 1168 if omode != nmode:
1169 1169 header.append('old mode %s\n' % omode)
1170 1170 header.append('new mode %s\n' % nmode)
1171 1171
1172 1172 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1173 1173 '''yields diff of changes to files between two nodes, or node and
1174 1174 working directory.
1175 1175
1176 1176 if node1 is None, use first dirstate parent instead.
1177 1177 if node2 is None, compare node1 with working directory.'''
1178 1178
1179 1179 if not match:
1180 1180 match = cmdutil.matchall(repo)
1181 1181
1182 1182 if opts is None:
1183 1183 opts = mdiff.defaultopts
1184 1184
1185 1185 if not node1:
1186 1186 node1 = repo.dirstate.parents()[0]
1187 1187
1188 1188 flcache = {}
1189 1189 def getfilectx(f, ctx):
1190 1190 flctx = ctx.filectx(f, filelog=flcache.get(f))
1191 1191 if f not in flcache:
1192 1192 flcache[f] = flctx._filelog
1193 1193 return flctx
1194 1194
1195 1195 ctx1 = repo[node1]
1196 1196 ctx2 = repo[node2]
1197 1197
1198 1198 if not changes:
1199 1199 changes = repo.status(ctx1, ctx2, match=match)
1200 1200 modified, added, removed = changes[:3]
1201 1201
1202 1202 if not modified and not added and not removed:
1203 1203 return
1204 1204
1205 1205 date1 = util.datestr(ctx1.date())
1206 1206 man1 = ctx1.manifest()
1207 1207
1208 1208 if repo.ui.quiet:
1209 1209 r = None
1210 1210 else:
1211 1211 hexfunc = repo.ui.debugflag and hex or short
1212 1212 r = [hexfunc(node) for node in [node1, node2] if node]
1213 1213
1214 1214 if opts.git:
1215 1215 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1216 1216 for k, v in copy.items():
1217 1217 copy[v] = k
1218 1218
1219 1219 gone = {}
1220 1220 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1221 1221
1222 1222 for f in util.sort(modified + added + removed):
1223 1223 to = None
1224 1224 tn = None
1225 1225 dodiff = True
1226 1226 header = []
1227 1227 if f in man1:
1228 1228 to = getfilectx(f, ctx1).data()
1229 1229 if f not in removed:
1230 1230 tn = getfilectx(f, ctx2).data()
1231 1231 a, b = f, f
1232 1232 if opts.git:
1233 1233 if f in added:
1234 1234 mode = gitmode[ctx2.flags(f)]
1235 1235 if f in copy:
1236 1236 a = copy[f]
1237 1237 omode = gitmode[man1.flags(a)]
1238 1238 _addmodehdr(header, omode, mode)
1239 1239 if a in removed and a not in gone:
1240 1240 op = 'rename'
1241 1241 gone[a] = 1
1242 1242 else:
1243 1243 op = 'copy'
1244 1244 header.append('%s from %s\n' % (op, a))
1245 1245 header.append('%s to %s\n' % (op, f))
1246 1246 to = getfilectx(a, ctx1).data()
1247 1247 else:
1248 1248 header.append('new file mode %s\n' % mode)
1249 1249 if util.binary(tn):
1250 1250 dodiff = 'binary'
1251 1251 elif f in removed:
1252 1252 # have we already reported a copy above?
1253 1253 if f in copy and copy[f] in added and copy[copy[f]] == f:
1254 1254 dodiff = False
1255 1255 else:
1256 1256 header.append('deleted file mode %s\n' %
1257 1257 gitmode[man1.flags(f)])
1258 1258 else:
1259 1259 omode = gitmode[man1.flags(f)]
1260 1260 nmode = gitmode[ctx2.flags(f)]
1261 1261 _addmodehdr(header, omode, nmode)
1262 1262 if util.binary(to) or util.binary(tn):
1263 1263 dodiff = 'binary'
1264 1264 r = None
1265 1265 header.insert(0, mdiff.diffline(r, a, b, opts))
1266 1266 if dodiff:
1267 1267 if dodiff == 'binary':
1268 1268 text = b85diff(to, tn)
1269 1269 else:
1270 1270 text = mdiff.unidiff(to, date1,
1271 1271 # ctx2 date may be dynamic
1272 1272 tn, util.datestr(ctx2.date()),
1273 1273 a, b, r, opts=opts)
1274 1274 if header and (text or len(header) > 1):
1275 1275 yield ''.join(header)
1276 1276 if text:
1277 1277 yield text
1278 1278
1279 1279 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1280 1280 opts=None):
1281 1281 '''export changesets as hg patches.'''
1282 1282
1283 1283 total = len(revs)
1284 1284 revwidth = max([len(str(rev)) for rev in revs])
1285 1285
1286 1286 def single(rev, seqno, fp):
1287 1287 ctx = repo[rev]
1288 1288 node = ctx.node()
1289 1289 parents = [p.node() for p in ctx.parents() if p]
1290 1290 branch = ctx.branch()
1291 1291 if switch_parent:
1292 1292 parents.reverse()
1293 1293 prev = (parents and parents[0]) or nullid
1294 1294
1295 1295 if not fp:
1296 1296 fp = cmdutil.make_file(repo, template, node, total=total,
1297 seqno=seqno, revwidth=revwidth)
1297 seqno=seqno, revwidth=revwidth,
1298 mode='ab')
1298 1299 if fp != sys.stdout and hasattr(fp, 'name'):
1299 1300 repo.ui.note("%s\n" % fp.name)
1300 1301
1301 1302 fp.write("# HG changeset patch\n")
1302 1303 fp.write("# User %s\n" % ctx.user())
1303 1304 fp.write("# Date %d %d\n" % ctx.date())
1304 1305 if branch and (branch != 'default'):
1305 1306 fp.write("# Branch %s\n" % branch)
1306 1307 fp.write("# Node ID %s\n" % hex(node))
1307 1308 fp.write("# Parent %s\n" % hex(prev))
1308 1309 if len(parents) > 1:
1309 1310 fp.write("# Parent %s\n" % hex(parents[1]))
1310 1311 fp.write(ctx.description().rstrip())
1311 1312 fp.write("\n\n")
1312 1313
1313 1314 for chunk in diff(repo, prev, node, opts=opts):
1314 1315 fp.write(chunk)
1315 1316 if fp not in (sys.stdout, repo.ui):
1316 1317 fp.close()
1317 1318
1318 1319 for seqno, rev in enumerate(revs):
1319 1320 single(rev, seqno+1, fp)
1320 1321
1321 1322 def diffstat(patchlines):
1322 1323 if not util.find_exe('diffstat'):
1323 1324 return
1324 1325 output = util.filter('\n'.join(patchlines),
1325 1326 'diffstat -p1 -w79 2>%s' % util.nulldev)
1326 1327 stat = [l.lstrip() for l in output.splitlines(True)]
1327 1328 last = stat.pop()
1328 1329 stat.insert(0, last)
1329 1330 stat = ''.join(stat)
1330 1331 return stat
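The export change above passes mode='ab' to cmdutil.make_file, so when several revisions end up in the same output file (for instance an -o template with no per-revision placeholders, as in the test below) each patch is appended rather than each open silently truncating what earlier revisions wrote. A minimal sketch of the difference in plain Python 2, using a hypothetical file name and ordinary file I/O instead of Mercurial's helpers:

import os, tempfile

def write_patch(path, header, mode):
    # each exported revision re-opens the output file; the mode decides
    # whether content written for earlier revisions survives
    fp = open(path, mode)
    try:
        fp.write(header)
    finally:
        fp.close()

path = os.path.join(tempfile.mkdtemp(), 'export_all.patch')
for rev in (1, 2, 3, 4):
    write_patch(path, '# HG changeset patch (rev %d)\n' % rev, 'ab')
# with 'ab' the file now holds four headers, one per revision; with 'wb'
# every open() would have truncated it and only rev 4 would remain
print len([l for l in open(path) if l.startswith('# HG changeset patch')])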
@@ -1,15 +1,21 b''
1 1 #!/bin/sh
2 2
3 3 hg init repo
4 4 cd repo
5 5 touch foo
6 6 hg add foo
7 7 for i in 0 1 2 3 4 5 6 7 8 9 10 11; do
8 8 echo "foo-$i" >> foo
9 9 hg ci -m "foo-$i" -d "0 0"
10 10 done
11 11
12 12 for out in "%nof%N" "%%%H" "%b-%R" "%h" "%r"; do
13 13 echo "# foo-$out.patch"
14 14 hg export -v -o "foo-$out.patch" 2:tip
15 15 done
16
17 echo "# exporting 4 changesets to a file"
18 hg export -o export_internal 1 2 3 4
19 grep HG export_internal | wc -l
20 echo "# exporting 4 changesets to a file"
21 hg export 1 2 3 4 | grep HG | wc -l
@@ -1,60 +1,64 b''
1 1 # foo-%nof%N.patch
2 2 exporting patches:
3 3 foo-01of10.patch
4 4 foo-02of10.patch
5 5 foo-03of10.patch
6 6 foo-04of10.patch
7 7 foo-05of10.patch
8 8 foo-06of10.patch
9 9 foo-07of10.patch
10 10 foo-08of10.patch
11 11 foo-09of10.patch
12 12 foo-10of10.patch
13 13 # foo-%%%H.patch
14 14 exporting patches:
15 15 foo-%617188a1c80f869a7b66c85134da88a6fb145f67.patch
16 16 foo-%dd41a5ff707a5225204105611ba49cc5c229d55f.patch
17 17 foo-%f95a5410f8664b6e1490a4af654e4b7d41a7b321.patch
18 18 foo-%4346bcfde53b4d9042489078bcfa9c3e28201db2.patch
19 19 foo-%afda8c3a009cc99449a05ad8aa4655648c4ecd34.patch
20 20 foo-%35284ce2b6b99c9d2ac66268fe99e68e1974e1aa.patch
21 21 foo-%9688c41894e6931305fa7165a37f6568050b4e9b.patch
22 22 foo-%747d3c68f8ec44bb35816bfcd59aeb50b9654c2f.patch
23 23 foo-%5f17a83f5fbd9414006a5e563eab4c8a00729efd.patch
24 24 foo-%f3acbafac161ec68f1598af38f794f28847ca5d3.patch
25 25 # foo-%b-%R.patch
26 26 exporting patches:
27 27 foo-repo-2.patch
28 28 foo-repo-3.patch
29 29 foo-repo-4.patch
30 30 foo-repo-5.patch
31 31 foo-repo-6.patch
32 32 foo-repo-7.patch
33 33 foo-repo-8.patch
34 34 foo-repo-9.patch
35 35 foo-repo-10.patch
36 36 foo-repo-11.patch
37 37 # foo-%h.patch
38 38 exporting patches:
39 39 foo-617188a1c80f.patch
40 40 foo-dd41a5ff707a.patch
41 41 foo-f95a5410f866.patch
42 42 foo-4346bcfde53b.patch
43 43 foo-afda8c3a009c.patch
44 44 foo-35284ce2b6b9.patch
45 45 foo-9688c41894e6.patch
46 46 foo-747d3c68f8ec.patch
47 47 foo-5f17a83f5fbd.patch
48 48 foo-f3acbafac161.patch
49 49 # foo-%r.patch
50 50 exporting patches:
51 51 foo-02.patch
52 52 foo-03.patch
53 53 foo-04.patch
54 54 foo-05.patch
55 55 foo-06.patch
56 56 foo-07.patch
57 57 foo-08.patch
58 58 foo-09.patch
59 59 foo-10.patch
60 60 foo-11.patch
61 # exporting 4 changesets to a file
62 4
63 # exporting 4 changesets to a file
64 4
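The two added '4' lines correspond to the two new test commands: the export_internal file written by 'hg export -o export_internal 1 2 3 4' and the piped output of 'hg export 1 2 3 4' each contain four patch headers, one per exported changeset, so nothing was overwritten. A rough Python 2 equivalent of the test's 'grep HG | wc -l' check, reusing the file name from the test script:

count = 0
for line in open('export_internal'):
    if 'HG' in line:       # the test greps for 'HG', which here only matches
        count += 1         # the '# HG changeset patch' header of each revision
print count                # expected: 4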