Make util.find_exe always return an existing file, fixing issue1459...
Mads Kiilerich
r7732:3793802e default
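The single modified line in the patch.py hunk below (in the NoHunks fallback near the end of patch()) adds a literal 'patch' default, because util.find_exe now only returns a path when the executable actually exists and returns None otherwise. A minimal sketch of that contract, assuming a plain PATH lookup and ignoring platform-specific details such as executable extensions on Windows (illustrative only, not the util.py code of this changeset):

# Hedged sketch of the post-fix find_exe behaviour; not the real implementation.
import os

def find_exe(command):
    '''return the path of an existing executable, or None'''
    if os.sep in command:
        if os.path.isfile(command):
            return command
        return None
    for path in os.environ.get('PATH', '').split(os.pathsep):
        candidate = os.path.join(path, command)
        if os.path.isfile(candidate):
            return candidate
    return None

# Callers that previously relied on find_exe('patch') echoing back 'patch'
# even when nothing was found now need an explicit default:
patcher = find_exe('gpatch') or find_exe('patch') or 'patch'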
@@ -1,1398 +1,1398 @@ patch.py
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import hex, nullid, short
11 11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
12 12 import cStringIO, email.Parser, os, re, errno, math
13 13 import sys, tempfile, zlib
14 14
15 15 gitre = re.compile('diff --git a/(.*) b/(.*)')
16 16
17 17 class PatchError(Exception):
18 18 pass
19 19
20 20 class NoHunks(PatchError):
21 21 pass
22 22
23 23 # helper functions
24 24
25 25 def copyfile(src, dst, basedir):
26 26 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
27 27 if os.path.exists(absdst):
28 28 raise util.Abort(_("cannot create %s: destination already exists") %
29 29 dst)
30 30
31 31 dstdir = os.path.dirname(absdst)
32 32 if dstdir and not os.path.isdir(dstdir):
33 33 try:
34 34 os.makedirs(dstdir)
35 35 except IOError:
36 36 raise util.Abort(
37 37 _("cannot create %s: unable to create destination directory")
38 38 % dst)
39 39
40 40 util.copyfile(abssrc, absdst)
41 41
42 42 # public functions
43 43
44 44 def extract(ui, fileobj):
45 45 '''extract patch from data read from fileobj.
46 46
47 47 patch can be a normal patch or contained in an email message.
48 48
49 49 return tuple (filename, message, user, date, node, p1, p2).
50 50 Any item in the returned tuple can be None. If filename is None,
51 51 fileobj did not contain a patch. Caller must unlink filename when done.'''
52 52
53 53 # attempt to detect the start of a patch
54 54 # (this heuristic is borrowed from quilt)
55 55 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
56 56 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
57 57 '(---|\*\*\*)[ \t])', re.MULTILINE)
58 58
59 59 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
60 60 tmpfp = os.fdopen(fd, 'w')
61 61 try:
62 62 msg = email.Parser.Parser().parse(fileobj)
63 63
64 64 subject = msg['Subject']
65 65 user = msg['From']
66 66 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
67 67 # should try to parse msg['Date']
68 68 date = None
69 69 nodeid = None
70 70 branch = None
71 71 parents = []
72 72
73 73 if subject:
74 74 if subject.startswith('[PATCH'):
75 75 pend = subject.find(']')
76 76 if pend >= 0:
77 77 subject = subject[pend+1:].lstrip()
78 78 subject = subject.replace('\n\t', ' ')
79 79 ui.debug('Subject: %s\n' % subject)
80 80 if user:
81 81 ui.debug('From: %s\n' % user)
82 82 diffs_seen = 0
83 83 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
84 84 message = ''
85 85 for part in msg.walk():
86 86 content_type = part.get_content_type()
87 87 ui.debug('Content-Type: %s\n' % content_type)
88 88 if content_type not in ok_types:
89 89 continue
90 90 payload = part.get_payload(decode=True)
91 91 m = diffre.search(payload)
92 92 if m:
93 93 hgpatch = False
94 94 ignoretext = False
95 95
96 96 ui.debug(_('found patch at byte %d\n') % m.start(0))
97 97 diffs_seen += 1
98 98 cfp = cStringIO.StringIO()
99 99 for line in payload[:m.start(0)].splitlines():
100 100 if line.startswith('# HG changeset patch'):
101 101 ui.debug(_('patch generated by hg export\n'))
102 102 hgpatch = True
103 103 # drop earlier commit message content
104 104 cfp.seek(0)
105 105 cfp.truncate()
106 106 subject = None
107 107 elif hgpatch:
108 108 if line.startswith('# User '):
109 109 user = line[7:]
110 110 ui.debug('From: %s\n' % user)
111 111 elif line.startswith("# Date "):
112 112 date = line[7:]
113 113 elif line.startswith("# Branch "):
114 114 branch = line[9:]
115 115 elif line.startswith("# Node ID "):
116 116 nodeid = line[10:]
117 117 elif line.startswith("# Parent "):
118 118 parents.append(line[10:])
119 119 elif line == '---' and gitsendmail:
120 120 ignoretext = True
121 121 if not line.startswith('# ') and not ignoretext:
122 122 cfp.write(line)
123 123 cfp.write('\n')
124 124 message = cfp.getvalue()
125 125 if tmpfp:
126 126 tmpfp.write(payload)
127 127 if not payload.endswith('\n'):
128 128 tmpfp.write('\n')
129 129 elif not diffs_seen and message and content_type == 'text/plain':
130 130 message += '\n' + payload
131 131 except:
132 132 tmpfp.close()
133 133 os.unlink(tmpname)
134 134 raise
135 135
136 136 if subject and not message.startswith(subject):
137 137 message = '%s\n%s' % (subject, message)
138 138 tmpfp.close()
139 139 if not diffs_seen:
140 140 os.unlink(tmpname)
141 141 return None, message, user, date, branch, None, None, None
142 142 p1 = parents and parents.pop(0) or None
143 143 p2 = parents and parents.pop(0) or None
144 144 return tmpname, message, user, date, branch, nodeid, p1, p2
145 145
146 146 GP_PATCH = 1 << 0 # we have to run patch
147 147 GP_FILTER = 1 << 1 # there's some copy/rename operation
148 148 GP_BINARY = 1 << 2 # there's a binary patch
149 149
150 150 class patchmeta:
151 151 """Patched file metadata
152 152
153 153 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
154 154 or COPY. 'path' is patched file path. 'oldpath' is set to the
155 155 origin file when 'op' is either COPY or RENAME, None otherwise. If
156 156 file mode is changed, 'mode' is a tuple (islink, isexec) where
157 157 'islink' is True if the file is a symlink and 'isexec' is True if
158 158 the file is executable. Otherwise, 'mode' is None.
159 159 """
160 160 def __init__(self, path):
161 161 self.path = path
162 162 self.oldpath = None
163 163 self.mode = None
164 164 self.op = 'MODIFY'
165 165 self.lineno = 0
166 166 self.binary = False
167 167
168 168 def setmode(self, mode):
169 169 islink = mode & 020000
170 170 isexec = mode & 0100
171 171 self.mode = (islink, isexec)
172 172
173 173 def readgitpatch(lr):
174 174 """extract git-style metadata about patches from <patchname>"""
175 175
176 176 # Filter patch for git information
177 177 gp = None
178 178 gitpatches = []
179 179 # Can have a git patch with only metadata, causing patch to complain
180 180 dopatch = 0
181 181
182 182 lineno = 0
183 183 for line in lr:
184 184 lineno += 1
185 185 if line.startswith('diff --git'):
186 186 m = gitre.match(line)
187 187 if m:
188 188 if gp:
189 189 gitpatches.append(gp)
190 190 src, dst = m.group(1, 2)
191 191 gp = patchmeta(dst)
192 192 gp.lineno = lineno
193 193 elif gp:
194 194 if line.startswith('--- '):
195 195 if gp.op in ('COPY', 'RENAME'):
196 196 dopatch |= GP_FILTER
197 197 gitpatches.append(gp)
198 198 gp = None
199 199 dopatch |= GP_PATCH
200 200 continue
201 201 if line.startswith('rename from '):
202 202 gp.op = 'RENAME'
203 203 gp.oldpath = line[12:].rstrip()
204 204 elif line.startswith('rename to '):
205 205 gp.path = line[10:].rstrip()
206 206 elif line.startswith('copy from '):
207 207 gp.op = 'COPY'
208 208 gp.oldpath = line[10:].rstrip()
209 209 elif line.startswith('copy to '):
210 210 gp.path = line[8:].rstrip()
211 211 elif line.startswith('deleted file'):
212 212 gp.op = 'DELETE'
213 213 # is the deleted file a symlink?
214 214 gp.setmode(int(line.rstrip()[-6:], 8))
215 215 elif line.startswith('new file mode '):
216 216 gp.op = 'ADD'
217 217 gp.setmode(int(line.rstrip()[-6:], 8))
218 218 elif line.startswith('new mode '):
219 219 gp.setmode(int(line.rstrip()[-6:], 8))
220 220 elif line.startswith('GIT binary patch'):
221 221 dopatch |= GP_BINARY
222 222 gp.binary = True
223 223 if gp:
224 224 gitpatches.append(gp)
225 225
226 226 if not gitpatches:
227 227 dopatch = GP_PATCH
228 228
229 229 return (dopatch, gitpatches)
230 230
231 231 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
232 232 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
233 233 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
234 234
235 235 class patchfile:
236 236 def __init__(self, ui, fname, opener, missing=False):
237 237 self.fname = fname
238 238 self.opener = opener
239 239 self.ui = ui
240 240 self.lines = []
241 241 self.exists = False
242 242 self.missing = missing
243 243 if not missing:
244 244 try:
245 245 self.lines = self.readlines(fname)
246 246 self.exists = True
247 247 except IOError:
248 248 pass
249 249 else:
250 250 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
251 251
252 252 self.hash = {}
253 253 self.dirty = 0
254 254 self.offset = 0
255 255 self.rej = []
256 256 self.fileprinted = False
257 257 self.printfile(False)
258 258 self.hunks = 0
259 259
260 260 def readlines(self, fname):
261 261 fp = self.opener(fname, 'r')
262 262 try:
263 263 return fp.readlines()
264 264 finally:
265 265 fp.close()
266 266
267 267 def writelines(self, fname, lines):
268 268 fp = self.opener(fname, 'w')
269 269 try:
270 270 fp.writelines(lines)
271 271 finally:
272 272 fp.close()
273 273
274 274 def unlink(self, fname):
275 275 os.unlink(fname)
276 276
277 277 def printfile(self, warn):
278 278 if self.fileprinted:
279 279 return
280 280 if warn or self.ui.verbose:
281 281 self.fileprinted = True
282 282 s = _("patching file %s\n") % self.fname
283 283 if warn:
284 284 self.ui.warn(s)
285 285 else:
286 286 self.ui.note(s)
287 287
288 288
289 289 def findlines(self, l, linenum):
290 290 # looks through the hash and finds candidate lines. The
291 291 # result is a list of line numbers sorted based on distance
292 292 # from linenum
293 293 def sorter(a, b):
294 294 vala = abs(a - linenum)
295 295 valb = abs(b - linenum)
296 296 return cmp(vala, valb)
297 297
298 298 try:
299 299 cand = self.hash[l]
300 300 except:
301 301 return []
302 302
303 303 if len(cand) > 1:
304 304 # resort our list of potentials forward then back.
305 305 cand.sort(sorter)
306 306 return cand
307 307
308 308 def hashlines(self):
309 309 self.hash = {}
310 310 for x in xrange(len(self.lines)):
311 311 s = self.lines[x]
312 312 self.hash.setdefault(s, []).append(x)
313 313
314 314 def write_rej(self):
315 315 # our rejects are a little different from patch(1). This always
316 316 # creates rejects in the same form as the original patch. A file
317 317 # header is inserted so that you can run the reject through patch again
318 318 # without having to type the filename.
319 319
320 320 if not self.rej:
321 321 return
322 322
323 323 fname = self.fname + ".rej"
324 324 self.ui.warn(
325 325 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
326 326 (len(self.rej), self.hunks, fname))
327 327
328 328 def rejlines():
329 329 base = os.path.basename(self.fname)
330 330 yield "--- %s\n+++ %s\n" % (base, base)
331 331 for x in self.rej:
332 332 for l in x.hunk:
333 333 yield l
334 334 if l[-1] != '\n':
335 335 yield "\n\ No newline at end of file\n"
336 336
337 337 self.writelines(fname, rejlines())
338 338
339 339 def write(self, dest=None):
340 340 if not self.dirty:
341 341 return
342 342 if not dest:
343 343 dest = self.fname
344 344 self.writelines(dest, self.lines)
345 345
346 346 def close(self):
347 347 self.write()
348 348 self.write_rej()
349 349
350 350 def apply(self, h, reverse):
351 351 if not h.complete():
352 352 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
353 353 (h.number, h.desc, len(h.a), h.lena, len(h.b),
354 354 h.lenb))
355 355
356 356 self.hunks += 1
357 357 if reverse:
358 358 h.reverse()
359 359
360 360 if self.missing:
361 361 self.rej.append(h)
362 362 return -1
363 363
364 364 if self.exists and h.createfile():
365 365 self.ui.warn(_("file %s already exists\n") % self.fname)
366 366 self.rej.append(h)
367 367 return -1
368 368
369 369 if isinstance(h, githunk):
370 370 if h.rmfile():
371 371 self.unlink(self.fname)
372 372 else:
373 373 self.lines[:] = h.new()
374 374 self.offset += len(h.new())
375 375 self.dirty = 1
376 376 return 0
377 377
378 378 # fast case first, no offsets, no fuzz
379 379 old = h.old()
380 380 # patch starts counting at 1 unless we are adding the file
381 381 if h.starta == 0:
382 382 start = 0
383 383 else:
384 384 start = h.starta + self.offset - 1
385 385 orig_start = start
386 386 if diffhelpers.testhunk(old, self.lines, start) == 0:
387 387 if h.rmfile():
388 388 self.unlink(self.fname)
389 389 else:
390 390 self.lines[start : start + h.lena] = h.new()
391 391 self.offset += h.lenb - h.lena
392 392 self.dirty = 1
393 393 return 0
394 394
395 395 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
396 396 self.hashlines()
397 397 if h.hunk[-1][0] != ' ':
398 398 # if the hunk tried to put something at the bottom of the file
399 399 # override the start line and use eof here
400 400 search_start = len(self.lines)
401 401 else:
402 402 search_start = orig_start
403 403
404 404 for fuzzlen in xrange(3):
405 405 for toponly in [ True, False ]:
406 406 old = h.old(fuzzlen, toponly)
407 407
408 408 cand = self.findlines(old[0][1:], search_start)
409 409 for l in cand:
410 410 if diffhelpers.testhunk(old, self.lines, l) == 0:
411 411 newlines = h.new(fuzzlen, toponly)
412 412 self.lines[l : l + len(old)] = newlines
413 413 self.offset += len(newlines) - len(old)
414 414 self.dirty = 1
415 415 if fuzzlen:
416 416 fuzzstr = "with fuzz %d " % fuzzlen
417 417 f = self.ui.warn
418 418 self.printfile(True)
419 419 else:
420 420 fuzzstr = ""
421 421 f = self.ui.note
422 422 offset = l - orig_start - fuzzlen
423 423 if offset == 1:
424 424 linestr = "line"
425 425 else:
426 426 linestr = "lines"
427 427 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
428 428 (h.number, l+1, fuzzstr, offset, linestr))
429 429 return fuzzlen
430 430 self.printfile(True)
431 431 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
432 432 self.rej.append(h)
433 433 return -1
434 434
435 435 class hunk:
436 436 def __init__(self, desc, num, lr, context, create=False, remove=False):
437 437 self.number = num
438 438 self.desc = desc
439 439 self.hunk = [ desc ]
440 440 self.a = []
441 441 self.b = []
442 442 if context:
443 443 self.read_context_hunk(lr)
444 444 else:
445 445 self.read_unified_hunk(lr)
446 446 self.create = create
447 447 self.remove = remove and not create
448 448
449 449 def read_unified_hunk(self, lr):
450 450 m = unidesc.match(self.desc)
451 451 if not m:
452 452 raise PatchError(_("bad hunk #%d") % self.number)
453 453 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
454 454 if self.lena == None:
455 455 self.lena = 1
456 456 else:
457 457 self.lena = int(self.lena)
458 458 if self.lenb == None:
459 459 self.lenb = 1
460 460 else:
461 461 self.lenb = int(self.lenb)
462 462 self.starta = int(self.starta)
463 463 self.startb = int(self.startb)
464 464 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
465 465 # if we hit eof before finishing out the hunk, the last line will
466 466 # be zero length. Lets try to fix it up.
467 467 while len(self.hunk[-1]) == 0:
468 468 del self.hunk[-1]
469 469 del self.a[-1]
470 470 del self.b[-1]
471 471 self.lena -= 1
472 472 self.lenb -= 1
473 473
474 474 def read_context_hunk(self, lr):
475 475 self.desc = lr.readline()
476 476 m = contextdesc.match(self.desc)
477 477 if not m:
478 478 raise PatchError(_("bad hunk #%d") % self.number)
479 479 foo, self.starta, foo2, aend, foo3 = m.groups()
480 480 self.starta = int(self.starta)
481 481 if aend == None:
482 482 aend = self.starta
483 483 self.lena = int(aend) - self.starta
484 484 if self.starta:
485 485 self.lena += 1
486 486 for x in xrange(self.lena):
487 487 l = lr.readline()
488 488 if l.startswith('---'):
489 489 lr.push(l)
490 490 break
491 491 s = l[2:]
492 492 if l.startswith('- ') or l.startswith('! '):
493 493 u = '-' + s
494 494 elif l.startswith(' '):
495 495 u = ' ' + s
496 496 else:
497 497 raise PatchError(_("bad hunk #%d old text line %d") %
498 498 (self.number, x))
499 499 self.a.append(u)
500 500 self.hunk.append(u)
501 501
502 502 l = lr.readline()
503 503 if l.startswith('\ '):
504 504 s = self.a[-1][:-1]
505 505 self.a[-1] = s
506 506 self.hunk[-1] = s
507 507 l = lr.readline()
508 508 m = contextdesc.match(l)
509 509 if not m:
510 510 raise PatchError(_("bad hunk #%d") % self.number)
511 511 foo, self.startb, foo2, bend, foo3 = m.groups()
512 512 self.startb = int(self.startb)
513 513 if bend == None:
514 514 bend = self.startb
515 515 self.lenb = int(bend) - self.startb
516 516 if self.startb:
517 517 self.lenb += 1
518 518 hunki = 1
519 519 for x in xrange(self.lenb):
520 520 l = lr.readline()
521 521 if l.startswith('\ '):
522 522 s = self.b[-1][:-1]
523 523 self.b[-1] = s
524 524 self.hunk[hunki-1] = s
525 525 continue
526 526 if not l:
527 527 lr.push(l)
528 528 break
529 529 s = l[2:]
530 530 if l.startswith('+ ') or l.startswith('! '):
531 531 u = '+' + s
532 532 elif l.startswith(' '):
533 533 u = ' ' + s
534 534 elif len(self.b) == 0:
535 535 # this can happen when the hunk does not add any lines
536 536 lr.push(l)
537 537 break
538 538 else:
539 539 raise PatchError(_("bad hunk #%d old text line %d") %
540 540 (self.number, x))
541 541 self.b.append(s)
542 542 while True:
543 543 if hunki >= len(self.hunk):
544 544 h = ""
545 545 else:
546 546 h = self.hunk[hunki]
547 547 hunki += 1
548 548 if h == u:
549 549 break
550 550 elif h.startswith('-'):
551 551 continue
552 552 else:
553 553 self.hunk.insert(hunki-1, u)
554 554 break
555 555
556 556 if not self.a:
557 557 # this happens when lines were only added to the hunk
558 558 for x in self.hunk:
559 559 if x.startswith('-') or x.startswith(' '):
560 560 self.a.append(x)
561 561 if not self.b:
562 562 # this happens when lines were only deleted from the hunk
563 563 for x in self.hunk:
564 564 if x.startswith('+') or x.startswith(' '):
565 565 self.b.append(x[1:])
566 566 # @@ -start,len +start,len @@
567 567 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
568 568 self.startb, self.lenb)
569 569 self.hunk[0] = self.desc
570 570
571 571 def reverse(self):
572 572 self.create, self.remove = self.remove, self.create
573 573 origlena = self.lena
574 574 origstarta = self.starta
575 575 self.lena = self.lenb
576 576 self.starta = self.startb
577 577 self.lenb = origlena
578 578 self.startb = origstarta
579 579 self.a = []
580 580 self.b = []
581 581 # self.hunk[0] is the @@ description
582 582 for x in xrange(1, len(self.hunk)):
583 583 o = self.hunk[x]
584 584 if o.startswith('-'):
585 585 n = '+' + o[1:]
586 586 self.b.append(o[1:])
587 587 elif o.startswith('+'):
588 588 n = '-' + o[1:]
589 589 self.a.append(n)
590 590 else:
591 591 n = o
592 592 self.b.append(o[1:])
593 593 self.a.append(o)
594 594 self.hunk[x] = n
595 595
596 596 def fix_newline(self):
597 597 diffhelpers.fix_newline(self.hunk, self.a, self.b)
598 598
599 599 def complete(self):
600 600 return len(self.a) == self.lena and len(self.b) == self.lenb
601 601
602 602 def createfile(self):
603 603 return self.starta == 0 and self.lena == 0 and self.create
604 604
605 605 def rmfile(self):
606 606 return self.startb == 0 and self.lenb == 0 and self.remove
607 607
608 608 def fuzzit(self, l, fuzz, toponly):
609 609 # this removes context lines from the top and bottom of list 'l'. It
610 610 # checks the hunk to make sure only context lines are removed, and then
611 611 # returns a new shortened list of lines.
612 612 fuzz = min(fuzz, len(l)-1)
613 613 if fuzz:
614 614 top = 0
615 615 bot = 0
616 616 hlen = len(self.hunk)
617 617 for x in xrange(hlen-1):
618 618 # the hunk starts with the @@ line, so use x+1
619 619 if self.hunk[x+1][0] == ' ':
620 620 top += 1
621 621 else:
622 622 break
623 623 if not toponly:
624 624 for x in xrange(hlen-1):
625 625 if self.hunk[hlen-bot-1][0] == ' ':
626 626 bot += 1
627 627 else:
628 628 break
629 629
630 630 # top and bot now count context in the hunk
631 631 # adjust them if either one is short
632 632 context = max(top, bot, 3)
633 633 if bot < context:
634 634 bot = max(0, fuzz - (context - bot))
635 635 else:
636 636 bot = min(fuzz, bot)
637 637 if top < context:
638 638 top = max(0, fuzz - (context - top))
639 639 else:
640 640 top = min(fuzz, top)
641 641
642 642 return l[top:len(l)-bot]
643 643 return l
644 644
645 645 def old(self, fuzz=0, toponly=False):
646 646 return self.fuzzit(self.a, fuzz, toponly)
647 647
648 648 def newctrl(self):
649 649 res = []
650 650 for x in self.hunk:
651 651 c = x[0]
652 652 if c == ' ' or c == '+':
653 653 res.append(x)
654 654 return res
655 655
656 656 def new(self, fuzz=0, toponly=False):
657 657 return self.fuzzit(self.b, fuzz, toponly)
658 658
659 659 class githunk(object):
660 660 """A git hunk"""
661 661 def __init__(self, gitpatch):
662 662 self.gitpatch = gitpatch
663 663 self.text = None
664 664 self.hunk = []
665 665
666 666 def createfile(self):
667 667 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
668 668
669 669 def rmfile(self):
670 670 return self.gitpatch.op == 'DELETE'
671 671
672 672 def complete(self):
673 673 return self.text is not None
674 674
675 675 def new(self):
676 676 return [self.text]
677 677
678 678 class binhunk(githunk):
679 679 'A binary patch file. Only understands literals so far.'
680 680 def __init__(self, gitpatch):
681 681 super(binhunk, self).__init__(gitpatch)
682 682 self.hunk = ['GIT binary patch\n']
683 683
684 684 def extract(self, lr):
685 685 line = lr.readline()
686 686 self.hunk.append(line)
687 687 while line and not line.startswith('literal '):
688 688 line = lr.readline()
689 689 self.hunk.append(line)
690 690 if not line:
691 691 raise PatchError(_('could not extract binary patch'))
692 692 size = int(line[8:].rstrip())
693 693 dec = []
694 694 line = lr.readline()
695 695 self.hunk.append(line)
696 696 while len(line) > 1:
697 697 l = line[0]
698 698 if l <= 'Z' and l >= 'A':
699 699 l = ord(l) - ord('A') + 1
700 700 else:
701 701 l = ord(l) - ord('a') + 27
702 702 dec.append(base85.b85decode(line[1:-1])[:l])
703 703 line = lr.readline()
704 704 self.hunk.append(line)
705 705 text = zlib.decompress(''.join(dec))
706 706 if len(text) != size:
707 707 raise PatchError(_('binary patch is %d bytes, not %d') %
708 708 (len(text), size))
709 709 self.text = text
710 710
711 711 class symlinkhunk(githunk):
712 712 """A git symlink hunk"""
713 713 def __init__(self, gitpatch, hunk):
714 714 super(symlinkhunk, self).__init__(gitpatch)
715 715 self.hunk = hunk
716 716
717 717 def complete(self):
718 718 return True
719 719
720 720 def fix_newline(self):
721 721 return
722 722
723 723 def parsefilename(str):
724 724 # --- filename \t|space stuff
725 725 s = str[4:].rstrip('\r\n')
726 726 i = s.find('\t')
727 727 if i < 0:
728 728 i = s.find(' ')
729 729 if i < 0:
730 730 return s
731 731 return s[:i]
732 732
733 733 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
734 734 def pathstrip(path, count=1):
735 735 pathlen = len(path)
736 736 i = 0
737 737 if count == 0:
738 738 return '', path.rstrip()
739 739 while count > 0:
740 740 i = path.find('/', i)
741 741 if i == -1:
742 742 raise PatchError(_("unable to strip away %d dirs from %s") %
743 743 (count, path))
744 744 i += 1
745 745 # consume '//' in the path
746 746 while i < pathlen - 1 and path[i] == '/':
747 747 i += 1
748 748 count -= 1
749 749 return path[:i].lstrip(), path[i:].rstrip()
750 750
751 751 nulla = afile_orig == "/dev/null"
752 752 nullb = bfile_orig == "/dev/null"
753 753 abase, afile = pathstrip(afile_orig, strip)
754 754 gooda = not nulla and os.path.exists(afile)
755 755 bbase, bfile = pathstrip(bfile_orig, strip)
756 756 if afile == bfile:
757 757 goodb = gooda
758 758 else:
759 759 goodb = not nullb and os.path.exists(bfile)
760 760 createfunc = hunk.createfile
761 761 if reverse:
762 762 createfunc = hunk.rmfile
763 763 missing = not goodb and not gooda and not createfunc()
764 764 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
765 765 # diff is between a file and its backup. In this case, the original
766 766 # file should be patched (see original mpatch code).
767 767 isbackup = (abase == bbase and bfile.startswith(afile))
768 768 fname = None
769 769 if not missing:
770 770 if gooda and goodb:
771 771 fname = isbackup and afile or bfile
772 772 elif gooda:
773 773 fname = afile
774 774
775 775 if not fname:
776 776 if not nullb:
777 777 fname = isbackup and afile or bfile
778 778 elif not nulla:
779 779 fname = afile
780 780 else:
781 781 raise PatchError(_("undefined source and destination files"))
782 782
783 783 return fname, missing
784 784
785 785 class linereader:
786 786 # simple class to allow pushing lines back into the input stream
787 787 def __init__(self, fp):
788 788 self.fp = fp
789 789 self.buf = []
790 790
791 791 def push(self, line):
792 792 if line is not None:
793 793 self.buf.append(line)
794 794
795 795 def readline(self):
796 796 if self.buf:
797 797 return self.buf.pop(0)
798 798 return self.fp.readline()
799 799
800 800 def __iter__(self):
801 801 while 1:
802 802 l = self.readline()
803 803 if not l:
804 804 break
805 805 yield l
806 806
807 807 def scangitpatch(lr, firstline):
808 808 """
809 809 Git patches can emit:
810 810 - rename a to b
811 811 - change b
812 812 - copy a to c
813 813 - change c
814 814
815 815 We cannot apply this sequence as-is, the renamed 'a' could not be
816 816 found for it would have been renamed already. And we cannot copy
817 817 from 'b' instead because 'b' would have been changed already. So
818 818 we scan the git patch for copy and rename commands so we can
819 819 perform the copies ahead of time.
820 820 """
821 821 pos = 0
822 822 try:
823 823 pos = lr.fp.tell()
824 824 fp = lr.fp
825 825 except IOError:
826 826 fp = cStringIO.StringIO(lr.fp.read())
827 827 gitlr = linereader(fp)
828 828 gitlr.push(firstline)
829 829 (dopatch, gitpatches) = readgitpatch(gitlr)
830 830 fp.seek(pos)
831 831 return dopatch, gitpatches
832 832
833 833 def iterhunks(ui, fp, sourcefile=None):
834 834 """Read a patch and yield the following events:
835 835 - ("file", afile, bfile, firsthunk): select a new target file.
836 836 - ("hunk", hunk): a new hunk is ready to be applied, follows a
837 837 "file" event.
838 838 - ("git", gitchanges): current diff is in git format, gitchanges
839 839 maps filenames to gitpatch records. Unique event.
840 840 """
841 841 changed = {}
842 842 current_hunk = None
843 843 afile = ""
844 844 bfile = ""
845 845 state = None
846 846 hunknum = 0
847 847 emitfile = False
848 848 git = False
849 849
850 850 # our states
851 851 BFILE = 1
852 852 context = None
853 853 lr = linereader(fp)
854 854 dopatch = True
855 855 # gitworkdone is True if a git operation (copy, rename, ...) was
856 856 # performed already for the current file. Useful when the file
857 857 # section may have no hunk.
858 858 gitworkdone = False
859 859
860 860 while True:
861 861 newfile = False
862 862 x = lr.readline()
863 863 if not x:
864 864 break
865 865 if current_hunk:
866 866 if x.startswith('\ '):
867 867 current_hunk.fix_newline()
868 868 yield 'hunk', current_hunk
869 869 current_hunk = None
870 870 gitworkdone = False
871 871 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
872 872 ((context or context == None) and x.startswith('***************')))):
873 873 try:
874 874 if context == None and x.startswith('***************'):
875 875 context = True
876 876 gpatch = changed.get(bfile)
877 877 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
878 878 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
879 879 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
880 880 if remove:
881 881 gpatch = changed.get(afile[2:])
882 882 if gpatch and gpatch.mode[0]:
883 883 current_hunk = symlinkhunk(gpatch, current_hunk)
884 884 except PatchError, err:
885 885 ui.debug(err)
886 886 current_hunk = None
887 887 continue
888 888 hunknum += 1
889 889 if emitfile:
890 890 emitfile = False
891 891 yield 'file', (afile, bfile, current_hunk)
892 892 elif state == BFILE and x.startswith('GIT binary patch'):
893 893 current_hunk = binhunk(changed[bfile])
894 894 hunknum += 1
895 895 if emitfile:
896 896 emitfile = False
897 897 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
898 898 current_hunk.extract(lr)
899 899 elif x.startswith('diff --git'):
900 900 # check for git diff, scanning the whole patch file if needed
901 901 m = gitre.match(x)
902 902 if m:
903 903 afile, bfile = m.group(1, 2)
904 904 if not git:
905 905 git = True
906 906 dopatch, gitpatches = scangitpatch(lr, x)
907 907 yield 'git', gitpatches
908 908 for gp in gitpatches:
909 909 changed[gp.path] = gp
910 910 # else error?
911 911 # copy/rename + modify should modify target, not source
912 912 gp = changed.get(bfile)
913 913 if gp and gp.op in ('COPY', 'DELETE', 'RENAME'):
914 914 afile = bfile
915 915 gitworkdone = True
916 916 newfile = True
917 917 elif x.startswith('---'):
918 918 # check for a unified diff
919 919 l2 = lr.readline()
920 920 if not l2.startswith('+++'):
921 921 lr.push(l2)
922 922 continue
923 923 newfile = True
924 924 context = False
925 925 afile = parsefilename(x)
926 926 bfile = parsefilename(l2)
927 927 elif x.startswith('***'):
928 928 # check for a context diff
929 929 l2 = lr.readline()
930 930 if not l2.startswith('---'):
931 931 lr.push(l2)
932 932 continue
933 933 l3 = lr.readline()
934 934 lr.push(l3)
935 935 if not l3.startswith("***************"):
936 936 lr.push(l2)
937 937 continue
938 938 newfile = True
939 939 context = True
940 940 afile = parsefilename(x)
941 941 bfile = parsefilename(l2)
942 942
943 943 if newfile:
944 944 emitfile = True
945 945 state = BFILE
946 946 hunknum = 0
947 947 if current_hunk:
948 948 if current_hunk.complete():
949 949 yield 'hunk', current_hunk
950 950 else:
951 951 raise PatchError(_("malformed patch %s %s") % (afile,
952 952 current_hunk.desc))
953 953
954 954 if hunknum == 0 and dopatch and not gitworkdone:
955 955 raise NoHunks
956 956
957 957 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False):
958 958 """reads a patch from fp and tries to apply it. The dict 'changed' is
959 959 filled in with all of the filenames changed by the patch. Returns 0
960 960 for a clean patch, -1 if any rejects were found and 1 if there was
961 961 any fuzz."""
962 962
963 963 rejects = 0
964 964 err = 0
965 965 current_file = None
966 966 gitpatches = None
967 967 opener = util.opener(os.getcwd())
968 968
969 969 def closefile():
970 970 if not current_file:
971 971 return 0
972 972 current_file.close()
973 973 return len(current_file.rej)
974 974
975 975 for state, values in iterhunks(ui, fp, sourcefile):
976 976 if state == 'hunk':
977 977 if not current_file:
978 978 continue
979 979 current_hunk = values
980 980 ret = current_file.apply(current_hunk, reverse)
981 981 if ret >= 0:
982 982 changed.setdefault(current_file.fname, None)
983 983 if ret > 0:
984 984 err = 1
985 985 elif state == 'file':
986 986 rejects += closefile()
987 987 afile, bfile, first_hunk = values
988 988 try:
989 989 if sourcefile:
990 990 current_file = patchfile(ui, sourcefile, opener)
991 991 else:
992 992 current_file, missing = selectfile(afile, bfile, first_hunk,
993 993 strip, reverse)
994 994 current_file = patchfile(ui, current_file, opener, missing)
995 995 except PatchError, err:
996 996 ui.warn(str(err) + '\n')
997 997 current_file, current_hunk = None, None
998 998 rejects += 1
999 999 continue
1000 1000 elif state == 'git':
1001 1001 gitpatches = values
1002 1002 cwd = os.getcwd()
1003 1003 for gp in gitpatches:
1004 1004 if gp.op in ('COPY', 'RENAME'):
1005 1005 copyfile(gp.oldpath, gp.path, cwd)
1006 1006 changed[gp.path] = gp
1007 1007 else:
1008 1008 raise util.Abort(_('unsupported parser state: %s') % state)
1009 1009
1010 1010 rejects += closefile()
1011 1011
1012 1012 if rejects:
1013 1013 return -1
1014 1014 return err
1015 1015
1016 1016 def diffopts(ui, opts={}, untrusted=False):
1017 1017 def get(key, name=None, getter=ui.configbool):
1018 1018 return (opts.get(key) or
1019 1019 getter('diff', name or key, None, untrusted=untrusted))
1020 1020 return mdiff.diffopts(
1021 1021 text=opts.get('text'),
1022 1022 git=get('git'),
1023 1023 nodates=get('nodates'),
1024 1024 showfunc=get('show_function', 'showfunc'),
1025 1025 ignorews=get('ignore_all_space', 'ignorews'),
1026 1026 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1027 1027 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1028 1028 context=get('unified', getter=ui.config))
1029 1029
1030 1030 def updatedir(ui, repo, patches, similarity=0):
1031 1031 '''Update dirstate after patch application according to metadata'''
1032 1032 if not patches:
1033 1033 return
1034 1034 copies = []
1035 1035 removes = {}
1036 1036 cfiles = patches.keys()
1037 1037 cwd = repo.getcwd()
1038 1038 if cwd:
1039 1039 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1040 1040 for f in patches:
1041 1041 gp = patches[f]
1042 1042 if not gp:
1043 1043 continue
1044 1044 if gp.op == 'RENAME':
1045 1045 copies.append((gp.oldpath, gp.path))
1046 1046 removes[gp.oldpath] = 1
1047 1047 elif gp.op == 'COPY':
1048 1048 copies.append((gp.oldpath, gp.path))
1049 1049 elif gp.op == 'DELETE':
1050 1050 removes[gp.path] = 1
1051 1051 for src, dst in copies:
1052 1052 repo.copy(src, dst)
1053 1053 removes = removes.keys()
1054 1054 if (not similarity) and removes:
1055 1055 repo.remove(util.sort(removes), True)
1056 1056 for f in patches:
1057 1057 gp = patches[f]
1058 1058 if gp and gp.mode:
1059 1059 islink, isexec = gp.mode
1060 1060 dst = repo.wjoin(gp.path)
1061 1061 # patch won't create empty files
1062 1062 if gp.op == 'ADD' and not os.path.exists(dst):
1063 1063 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1064 1064 repo.wwrite(gp.path, '', flags)
1065 1065 elif gp.op != 'DELETE':
1066 1066 util.set_flags(dst, islink, isexec)
1067 1067 cmdutil.addremove(repo, cfiles, similarity=similarity)
1068 1068 files = patches.keys()
1069 1069 files.extend([r for r in removes if r not in files])
1070 1070 return util.sort(files)
1071 1071
1072 1072 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1073 1073 """use <patcher> to apply <patchname> to the working directory.
1074 1074 returns whether patch was applied with fuzz factor."""
1075 1075
1076 1076 fuzz = False
1077 1077 if cwd:
1078 1078 args.append('-d %s' % util.shellquote(cwd))
1079 1079 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1080 1080 util.shellquote(patchname)))
1081 1081
1082 1082 for line in fp:
1083 1083 line = line.rstrip()
1084 1084 ui.note(line + '\n')
1085 1085 if line.startswith('patching file '):
1086 1086 pf = util.parse_patch_output(line)
1087 1087 printed_file = False
1088 1088 files.setdefault(pf, None)
1089 1089 elif line.find('with fuzz') >= 0:
1090 1090 fuzz = True
1091 1091 if not printed_file:
1092 1092 ui.warn(pf + '\n')
1093 1093 printed_file = True
1094 1094 ui.warn(line + '\n')
1095 1095 elif line.find('saving rejects to file') >= 0:
1096 1096 ui.warn(line + '\n')
1097 1097 elif line.find('FAILED') >= 0:
1098 1098 if not printed_file:
1099 1099 ui.warn(pf + '\n')
1100 1100 printed_file = True
1101 1101 ui.warn(line + '\n')
1102 1102 code = fp.close()
1103 1103 if code:
1104 1104 raise PatchError(_("patch command failed: %s") %
1105 1105 util.explain_exit(code)[0])
1106 1106 return fuzz
1107 1107
1108 1108 def internalpatch(patchobj, ui, strip, cwd, files={}):
1109 1109 """use builtin patch to apply <patchobj> to the working directory.
1110 1110 returns whether patch was applied with fuzz factor."""
1111 1111 try:
1112 1112 fp = file(patchobj, 'rb')
1113 1113 except TypeError:
1114 1114 fp = patchobj
1115 1115 if cwd:
1116 1116 curdir = os.getcwd()
1117 1117 os.chdir(cwd)
1118 1118 try:
1119 1119 ret = applydiff(ui, fp, files, strip=strip)
1120 1120 finally:
1121 1121 if cwd:
1122 1122 os.chdir(curdir)
1123 1123 if ret < 0:
1124 1124 raise PatchError
1125 1125 return ret > 0
1126 1126
1127 1127 def patch(patchname, ui, strip=1, cwd=None, files={}):
1128 1128 """apply <patchname> to the working directory.
1129 1129 returns whether patch was applied with fuzz factor."""
1130 1130 patcher = ui.config('ui', 'patch')
1131 1131 args = []
1132 1132 try:
1133 1133 if patcher:
1134 1134 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1135 1135 files)
1136 1136 else:
1137 1137 try:
1138 1138 return internalpatch(patchname, ui, strip, cwd, files)
1139 1139 except NoHunks:
1140 patcher = util.find_exe('gpatch') or util.find_exe('patch')
1140 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1141 1141 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1142 1142 patcher)
1143 1143 if util.needbinarypatch():
1144 1144 args.append('--binary')
1145 1145 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1146 1146 files)
1147 1147 except PatchError, err:
1148 1148 s = str(err)
1149 1149 if s:
1150 1150 raise util.Abort(s)
1151 1151 else:
1152 1152 raise util.Abort(_('patch failed to apply'))
1153 1153
1154 1154 def b85diff(to, tn):
1155 1155 '''print base85-encoded binary diff'''
1156 1156 def gitindex(text):
1157 1157 if not text:
1158 1158 return '0' * 40
1159 1159 l = len(text)
1160 1160 s = util.sha1('blob %d\0' % l)
1161 1161 s.update(text)
1162 1162 return s.hexdigest()
1163 1163
1164 1164 def fmtline(line):
1165 1165 l = len(line)
1166 1166 if l <= 26:
1167 1167 l = chr(ord('A') + l - 1)
1168 1168 else:
1169 1169 l = chr(l - 26 + ord('a') - 1)
1170 1170 return '%c%s\n' % (l, base85.b85encode(line, True))
1171 1171
1172 1172 def chunk(text, csize=52):
1173 1173 l = len(text)
1174 1174 i = 0
1175 1175 while i < l:
1176 1176 yield text[i:i+csize]
1177 1177 i += csize
1178 1178
1179 1179 tohash = gitindex(to)
1180 1180 tnhash = gitindex(tn)
1181 1181 if tohash == tnhash:
1182 1182 return ""
1183 1183
1184 1184 # TODO: deltas
1185 1185 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1186 1186 (tohash, tnhash, len(tn))]
1187 1187 for l in chunk(zlib.compress(tn)):
1188 1188 ret.append(fmtline(l))
1189 1189 ret.append('\n')
1190 1190 return ''.join(ret)
1191 1191
1192 1192 def _addmodehdr(header, omode, nmode):
1193 1193 if omode != nmode:
1194 1194 header.append('old mode %s\n' % omode)
1195 1195 header.append('new mode %s\n' % nmode)
1196 1196
1197 1197 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1198 1198 '''yields diff of changes to files between two nodes, or node and
1199 1199 working directory.
1200 1200
1201 1201 if node1 is None, use first dirstate parent instead.
1202 1202 if node2 is None, compare node1 with working directory.'''
1203 1203
1204 1204 if not match:
1205 1205 match = cmdutil.matchall(repo)
1206 1206
1207 1207 if opts is None:
1208 1208 opts = mdiff.defaultopts
1209 1209
1210 1210 if not node1:
1211 1211 node1 = repo.dirstate.parents()[0]
1212 1212
1213 1213 flcache = {}
1214 1214 def getfilectx(f, ctx):
1215 1215 flctx = ctx.filectx(f, filelog=flcache.get(f))
1216 1216 if f not in flcache:
1217 1217 flcache[f] = flctx._filelog
1218 1218 return flctx
1219 1219
1220 1220 ctx1 = repo[node1]
1221 1221 ctx2 = repo[node2]
1222 1222
1223 1223 if not changes:
1224 1224 changes = repo.status(ctx1, ctx2, match=match)
1225 1225 modified, added, removed = changes[:3]
1226 1226
1227 1227 if not modified and not added and not removed:
1228 1228 return
1229 1229
1230 1230 date1 = util.datestr(ctx1.date())
1231 1231 man1 = ctx1.manifest()
1232 1232
1233 1233 if repo.ui.quiet:
1234 1234 r = None
1235 1235 else:
1236 1236 hexfunc = repo.ui.debugflag and hex or short
1237 1237 r = [hexfunc(node) for node in [node1, node2] if node]
1238 1238
1239 1239 if opts.git:
1240 1240 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1241 1241 for k, v in copy.items():
1242 1242 copy[v] = k
1243 1243
1244 1244 gone = {}
1245 1245 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1246 1246
1247 1247 for f in util.sort(modified + added + removed):
1248 1248 to = None
1249 1249 tn = None
1250 1250 dodiff = True
1251 1251 header = []
1252 1252 if f in man1:
1253 1253 to = getfilectx(f, ctx1).data()
1254 1254 if f not in removed:
1255 1255 tn = getfilectx(f, ctx2).data()
1256 1256 a, b = f, f
1257 1257 if opts.git:
1258 1258 if f in added:
1259 1259 mode = gitmode[ctx2.flags(f)]
1260 1260 if f in copy:
1261 1261 a = copy[f]
1262 1262 omode = gitmode[man1.flags(a)]
1263 1263 _addmodehdr(header, omode, mode)
1264 1264 if a in removed and a not in gone:
1265 1265 op = 'rename'
1266 1266 gone[a] = 1
1267 1267 else:
1268 1268 op = 'copy'
1269 1269 header.append('%s from %s\n' % (op, a))
1270 1270 header.append('%s to %s\n' % (op, f))
1271 1271 to = getfilectx(a, ctx1).data()
1272 1272 else:
1273 1273 header.append('new file mode %s\n' % mode)
1274 1274 if util.binary(tn):
1275 1275 dodiff = 'binary'
1276 1276 elif f in removed:
1277 1277 # have we already reported a copy above?
1278 1278 if f in copy and copy[f] in added and copy[copy[f]] == f:
1279 1279 dodiff = False
1280 1280 else:
1281 1281 header.append('deleted file mode %s\n' %
1282 1282 gitmode[man1.flags(f)])
1283 1283 else:
1284 1284 omode = gitmode[man1.flags(f)]
1285 1285 nmode = gitmode[ctx2.flags(f)]
1286 1286 _addmodehdr(header, omode, nmode)
1287 1287 if util.binary(to) or util.binary(tn):
1288 1288 dodiff = 'binary'
1289 1289 r = None
1290 1290 header.insert(0, mdiff.diffline(r, a, b, opts))
1291 1291 if dodiff:
1292 1292 if dodiff == 'binary':
1293 1293 text = b85diff(to, tn)
1294 1294 else:
1295 1295 text = mdiff.unidiff(to, date1,
1296 1296 # ctx2 date may be dynamic
1297 1297 tn, util.datestr(ctx2.date()),
1298 1298 a, b, r, opts=opts)
1299 1299 if header and (text or len(header) > 1):
1300 1300 yield ''.join(header)
1301 1301 if text:
1302 1302 yield text
1303 1303
1304 1304 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1305 1305 opts=None):
1306 1306 '''export changesets as hg patches.'''
1307 1307
1308 1308 total = len(revs)
1309 1309 revwidth = max([len(str(rev)) for rev in revs])
1310 1310
1311 1311 def single(rev, seqno, fp):
1312 1312 ctx = repo[rev]
1313 1313 node = ctx.node()
1314 1314 parents = [p.node() for p in ctx.parents() if p]
1315 1315 branch = ctx.branch()
1316 1316 if switch_parent:
1317 1317 parents.reverse()
1318 1318 prev = (parents and parents[0]) or nullid
1319 1319
1320 1320 if not fp:
1321 1321 fp = cmdutil.make_file(repo, template, node, total=total,
1322 1322 seqno=seqno, revwidth=revwidth,
1323 1323 mode='ab')
1324 1324 if fp != sys.stdout and hasattr(fp, 'name'):
1325 1325 repo.ui.note("%s\n" % fp.name)
1326 1326
1327 1327 fp.write("# HG changeset patch\n")
1328 1328 fp.write("# User %s\n" % ctx.user())
1329 1329 fp.write("# Date %d %d\n" % ctx.date())
1330 1330 if branch and (branch != 'default'):
1331 1331 fp.write("# Branch %s\n" % branch)
1332 1332 fp.write("# Node ID %s\n" % hex(node))
1333 1333 fp.write("# Parent %s\n" % hex(prev))
1334 1334 if len(parents) > 1:
1335 1335 fp.write("# Parent %s\n" % hex(parents[1]))
1336 1336 fp.write(ctx.description().rstrip())
1337 1337 fp.write("\n\n")
1338 1338
1339 1339 for chunk in diff(repo, prev, node, opts=opts):
1340 1340 fp.write(chunk)
1341 1341
1342 1342 for seqno, rev in enumerate(revs):
1343 1343 single(rev, seqno+1, fp)
1344 1344
1345 1345 def diffstatdata(lines):
1346 1346 filename, adds, removes = None, 0, 0
1347 1347 for line in lines:
1348 1348 if line.startswith('diff'):
1349 1349 if filename:
1350 1350 yield (filename, adds, removes)
1351 1351 # set numbers to 0 anyway when starting new file
1352 1352 adds, removes = 0, 0
1353 1353 if line.startswith('diff --git'):
1354 1354 filename = gitre.search(line).group(1)
1355 1355 else:
1356 1356 # format: "diff -r ... -r ... file name"
1357 1357 filename = line.split(None, 5)[-1]
1358 1358 elif line.startswith('+') and not line.startswith('+++'):
1359 1359 adds += 1
1360 1360 elif line.startswith('-') and not line.startswith('---'):
1361 1361 removes += 1
1362 1362 if filename:
1363 1363 yield (filename, adds, removes)
1364 1364
1365 1365 def diffstat(lines):
1366 1366 output = []
1367 1367 stats = list(diffstatdata(lines))
1368 1368 width = util.termwidth() - 2
1369 1369
1370 1370 maxtotal, maxname = 0, 0
1371 1371 totaladds, totalremoves = 0, 0
1372 1372 for filename, adds, removes in stats:
1373 1373 totaladds += adds
1374 1374 totalremoves += removes
1375 1375 maxname = max(maxname, len(filename))
1376 1376 maxtotal = max(maxtotal, adds+removes)
1377 1377
1378 1378 countwidth = len(str(maxtotal))
1379 1379 graphwidth = width - countwidth - maxname
1380 1380 if graphwidth < 10:
1381 1381 graphwidth = 10
1382 1382
1383 1383 factor = int(math.ceil(float(maxtotal) / graphwidth))
1384 1384
1385 1385 for filename, adds, removes in stats:
1386 1386 # If diffstat runs out of room it doesn't print anything, which
1387 1387 # isn't very useful, so always print at least one + or - if there
1388 1388 # were at least some changes
1389 1389 pluses = '+' * max(adds/factor, int(bool(adds)))
1390 1390 minuses = '-' * max(removes/factor, int(bool(removes)))
1391 1391 output.append(' %-*s | %*.d %s%s\n' % (maxname, filename, countwidth,
1392 1392 adds+removes, pluses, minuses))
1393 1393
1394 1394 if stats:
1395 1395 output.append(' %d files changed, %d insertions(+), %d deletions(-)\n' %
1396 1396 (len(stats), totaladds, totalremoves))
1397 1397
1398 1398 return ''.join(output)
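For context, the changed line sits in the NoHunks fallback of patch(): when internalpatch() finds no hunks to apply (for instance a git patch carrying only metadata), the patch is re-run through an external gpatch/patch binary, which after this change defaults to plain 'patch' when neither executable is found on PATH. A hedged usage sketch of that entry point; the patch file name 'change.diff' is made up for illustration:

# Illustrative only; 'change.diff' is a hypothetical patch file applied in
# the current working directory of a Mercurial checkout.
from mercurial import ui as uimod
from mercurial import patch as patchmod

u = uimod.ui()
files = {}
# patch() tries internalpatch() first and falls back to an external patcher
# on NoHunks; it returns whether any hunk was applied with fuzz.
fuzz = patchmod.patch('change.diff', u, strip=1, cwd=None, files=files)
if fuzz:
    u.warn('patch applied with fuzz\n')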
@@ -1,2017 +1,2020 @@ util.py
1 1 """
2 2 util.py - Mercurial utility functions and platform specific implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, getpass, re, shutil, sys, tempfile, traceback, error
17 17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 18 import imp, unicodedata
19 19
20 20 # Python compatibility
21 21
22 22 try:
23 23 set = set
24 24 frozenset = frozenset
25 25 except NameError:
26 26 from sets import Set as set, ImmutableSet as frozenset
27 27
28 28 _md5 = None
29 29 def md5(s):
30 30 global _md5
31 31 if _md5 is None:
32 32 try:
33 33 import hashlib
34 34 _md5 = hashlib.md5
35 35 except ImportError:
36 36 import md5
37 37 _md5 = md5.md5
38 38 return _md5(s)
39 39
40 40 _sha1 = None
41 41 def sha1(s):
42 42 global _sha1
43 43 if _sha1 is None:
44 44 try:
45 45 import hashlib
46 46 _sha1 = hashlib.sha1
47 47 except ImportError:
48 48 import sha
49 49 _sha1 = sha.sha
50 50 return _sha1(s)
51 51
52 52 try:
53 53 import subprocess
54 54 subprocess.Popen # trigger ImportError early
55 55 closefds = os.name == 'posix'
56 56 def popen2(cmd, mode='t', bufsize=-1):
57 57 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
58 58 close_fds=closefds,
59 59 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
60 60 return p.stdin, p.stdout
61 61 def popen3(cmd, mode='t', bufsize=-1):
62 62 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
63 63 close_fds=closefds,
64 64 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
65 65 stderr=subprocess.PIPE)
66 66 return p.stdin, p.stdout, p.stderr
67 67 def Popen3(cmd, capturestderr=False, bufsize=-1):
68 68 stderr = capturestderr and subprocess.PIPE or None
69 69 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
70 70 close_fds=closefds,
71 71 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
72 72 stderr=stderr)
73 73 p.fromchild = p.stdout
74 74 p.tochild = p.stdin
75 75 p.childerr = p.stderr
76 76 return p
77 77 except ImportError:
78 78 subprocess = None
79 79 from popen2 import Popen3
80 80 popen2 = os.popen2
81 81 popen3 = os.popen3
82 82
83 83
84 84 _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'}
85 85
86 86 try:
87 87 _encoding = os.environ.get("HGENCODING")
88 88 if sys.platform == 'darwin' and not _encoding:
89 89 # On darwin, getpreferredencoding ignores the locale environment and
90 90 # always returns mac-roman. We override this if the environment is
91 91 # not C (has been customized by the user).
92 92 locale.setlocale(locale.LC_CTYPE, '')
93 93 _encoding = locale.getlocale()[1]
94 94 if not _encoding:
95 95 _encoding = locale.getpreferredencoding() or 'ascii'
96 96 _encoding = _encodingfixup.get(_encoding, _encoding)
97 97 except locale.Error:
98 98 _encoding = 'ascii'
99 99 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
100 100 _fallbackencoding = 'ISO-8859-1'
101 101
102 102 def tolocal(s):
103 103 """
104 104 Convert a string from internal UTF-8 to local encoding
105 105
106 106 All internal strings should be UTF-8 but some repos before the
107 107 implementation of locale support may contain latin1 or possibly
108 108 other character sets. We attempt to decode everything strictly
109 109 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
110 110 replace unknown characters.
111 111 """
112 112 for e in ('UTF-8', _fallbackencoding):
113 113 try:
114 114 u = s.decode(e) # attempt strict decoding
115 115 return u.encode(_encoding, "replace")
116 116 except LookupError, k:
117 117 raise Abort(_("%s, please check your locale settings") % k)
118 118 except UnicodeDecodeError:
119 119 pass
120 120 u = s.decode("utf-8", "replace") # last ditch
121 121 return u.encode(_encoding, "replace")
122 122
123 123 def fromlocal(s):
124 124 """
125 125 Convert a string from the local character encoding to UTF-8
126 126
127 127 We attempt to decode strings using the encoding mode set by
128 128 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
129 129 characters will cause an error message. Other modes include
130 130 'replace', which replaces unknown characters with a special
131 131 Unicode character, and 'ignore', which drops the character.
132 132 """
133 133 try:
134 134 return s.decode(_encoding, _encodingmode).encode("utf-8")
135 135 except UnicodeDecodeError, inst:
136 136 sub = s[max(0, inst.start-10):inst.start+10]
137 137 raise Abort("decoding near '%s': %s!" % (sub, inst))
138 138 except LookupError, k:
139 139 raise Abort(_("%s, please check your locale settings") % k)
140 140
141 141 def colwidth(s):
142 142 "Find the column width of a UTF-8 string for display"
143 143 d = s.decode(_encoding, 'replace')
144 144 if hasattr(unicodedata, 'east_asian_width'):
145 145 w = unicodedata.east_asian_width
146 146 return sum([w(c) in 'WF' and 2 or 1 for c in d])
147 147 return len(d)
148 148
149 149 def version():
150 150 """Return version information if available."""
151 151 try:
152 152 import __version__
153 153 return __version__.version
154 154 except ImportError:
155 155 return 'unknown'
156 156
157 157 # used by parsedate
158 158 defaultdateformats = (
159 159 '%Y-%m-%d %H:%M:%S',
160 160 '%Y-%m-%d %I:%M:%S%p',
161 161 '%Y-%m-%d %H:%M',
162 162 '%Y-%m-%d %I:%M%p',
163 163 '%Y-%m-%d',
164 164 '%m-%d',
165 165 '%m/%d',
166 166 '%m/%d/%y',
167 167 '%m/%d/%Y',
168 168 '%a %b %d %H:%M:%S %Y',
169 169 '%a %b %d %I:%M:%S%p %Y',
170 170 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
171 171 '%b %d %H:%M:%S %Y',
172 172 '%b %d %I:%M:%S%p %Y',
173 173 '%b %d %H:%M:%S',
174 174 '%b %d %I:%M:%S%p',
175 175 '%b %d %H:%M',
176 176 '%b %d %I:%M%p',
177 177 '%b %d %Y',
178 178 '%b %d',
179 179 '%H:%M:%S',
180 180 '%I:%M:%S%p',
181 181 '%H:%M',
182 182 '%I:%M%p',
183 183 )
184 184
185 185 extendeddateformats = defaultdateformats + (
186 186 "%Y",
187 187 "%Y-%m",
188 188 "%b",
189 189 "%b %Y",
190 190 )
191 191
192 192 # differences from SafeConfigParser:
193 193 # - case-sensitive keys
194 194 # - allows values that are not strings (this means that you may not
195 195 # be able to save the configuration to a file)
196 196 class configparser(ConfigParser.SafeConfigParser):
197 197 def optionxform(self, optionstr):
198 198 return optionstr
199 199
200 200 def set(self, section, option, value):
201 201 return ConfigParser.ConfigParser.set(self, section, option, value)
202 202
203 203 def _interpolate(self, section, option, rawval, vars):
204 204 if not isinstance(rawval, basestring):
205 205 return rawval
206 206 return ConfigParser.SafeConfigParser._interpolate(self, section,
207 207 option, rawval, vars)
208 208
209 209 def cachefunc(func):
210 210 '''cache the result of function calls'''
211 211 # XXX doesn't handle keywords args
212 212 cache = {}
213 213 if func.func_code.co_argcount == 1:
214 214 # we gain a small amount of time because
215 215 # we don't need to pack/unpack the list
216 216 def f(arg):
217 217 if arg not in cache:
218 218 cache[arg] = func(arg)
219 219 return cache[arg]
220 220 else:
221 221 def f(*args):
222 222 if args not in cache:
223 223 cache[args] = func(*args)
224 224 return cache[args]
225 225
226 226 return f
227 227
228 228 def pipefilter(s, cmd):
229 229 '''filter string S through command CMD, returning its output'''
230 230 (pin, pout) = popen2(cmd, 'b')
231 231 def writer():
232 232 try:
233 233 pin.write(s)
234 234 pin.close()
235 235 except IOError, inst:
236 236 if inst.errno != errno.EPIPE:
237 237 raise
238 238
239 239 # we should use select instead on UNIX, but this will work on most
240 240 # systems, including Windows
241 241 w = threading.Thread(target=writer)
242 242 w.start()
243 243 f = pout.read()
244 244 pout.close()
245 245 w.join()
246 246 return f
247 247
248 248 def tempfilter(s, cmd):
249 249 '''filter string S through a pair of temporary files with CMD.
250 250 CMD is used as a template to create the real command to be run,
251 251 with the strings INFILE and OUTFILE replaced by the real names of
252 252 the temporary files generated.'''
253 253 inname, outname = None, None
254 254 try:
255 255 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
256 256 fp = os.fdopen(infd, 'wb')
257 257 fp.write(s)
258 258 fp.close()
259 259 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
260 260 os.close(outfd)
261 261 cmd = cmd.replace('INFILE', inname)
262 262 cmd = cmd.replace('OUTFILE', outname)
263 263 code = os.system(cmd)
264 264 if sys.platform == 'OpenVMS' and code & 1:
265 265 code = 0
266 266 if code: raise Abort(_("command '%s' failed: %s") %
267 267 (cmd, explain_exit(code)))
268 268 return open(outname, 'rb').read()
269 269 finally:
270 270 try:
271 271 if inname: os.unlink(inname)
272 272 except: pass
273 273 try:
274 274 if outname: os.unlink(outname)
275 275 except: pass
276 276
277 277 filtertable = {
278 278 'tempfile:': tempfilter,
279 279 'pipe:': pipefilter,
280 280 }
281 281
282 282 def filter(s, cmd):
283 283 "filter a string through a command that transforms its input to its output"
284 284 for name, fn in filtertable.iteritems():
285 285 if cmd.startswith(name):
286 286 return fn(s, cmd[len(name):].lstrip())
287 287 return pipefilter(s, cmd)
288 288
289 289 def binary(s):
290 290 """return true if a string is binary data"""
291 291 if s and '\0' in s:
292 292 return True
293 293 return False
294 294
295 295 def unique(g):
296 296 """return the uniq elements of iterable g"""
297 297 return dict.fromkeys(g).keys()
298 298
299 299 def sort(l):
300 300 if not isinstance(l, list):
301 301 l = list(l)
302 302 l.sort()
303 303 return l
304 304
305 305 def increasingchunks(source, min=1024, max=65536):
306 306 '''return no less than min bytes per chunk while data remains,
307 307 doubling min after each chunk until it reaches max'''
308 308 def log2(x):
309 309 if not x:
310 310 return 0
311 311 i = 0
312 312 while x:
313 313 x >>= 1
314 314 i += 1
315 315 return i - 1
316 316
317 317 buf = []
318 318 blen = 0
319 319 for chunk in source:
320 320 buf.append(chunk)
321 321 blen += len(chunk)
322 322 if blen >= min:
323 323 if min < max:
324 324 min = min << 1
325 325 nmin = 1 << log2(blen)
326 326 if nmin > min:
327 327 min = nmin
328 328 if min > max:
329 329 min = max
330 330 yield ''.join(buf)
331 331 blen = 0
332 332 buf = []
333 333 if buf:
334 334 yield ''.join(buf)
335 335
336 336 class Abort(Exception):
337 337 """Raised if a command needs to print an error and exit."""
338 338
339 339 def always(fn): return True
340 340 def never(fn): return False
341 341
342 342 def expand_glob(pats):
343 343 '''On Windows, expand the implicit globs in a list of patterns'''
344 344 if os.name != 'nt':
345 345 return list(pats)
346 346 ret = []
347 347 for p in pats:
348 348 kind, name = patkind(p, None)
349 349 if kind is None:
350 350 globbed = glob.glob(name)
351 351 if globbed:
352 352 ret.extend(globbed)
353 353 continue
354 354 # if we couldn't expand the glob, just keep it around
355 355 ret.append(p)
356 356 return ret
357 357
358 358 def patkind(name, default):
359 359 """Split a string into an optional pattern kind prefix and the
360 360 actual pattern."""
361 361 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
362 362 if name.startswith(prefix + ':'): return name.split(':', 1)
363 363 return default, name
364 364
365 365 def globre(pat, head='^', tail='$'):
366 366 "convert a glob pattern into a regexp"
367 367 i, n = 0, len(pat)
368 368 res = ''
369 369 group = 0
370 370 def peek(): return i < n and pat[i]
371 371 while i < n:
372 372 c = pat[i]
373 373 i = i+1
374 374 if c == '*':
375 375 if peek() == '*':
376 376 i += 1
377 377 res += '.*'
378 378 else:
379 379 res += '[^/]*'
380 380 elif c == '?':
381 381 res += '.'
382 382 elif c == '[':
383 383 j = i
384 384 if j < n and pat[j] in '!]':
385 385 j += 1
386 386 while j < n and pat[j] != ']':
387 387 j += 1
388 388 if j >= n:
389 389 res += '\\['
390 390 else:
391 391 stuff = pat[i:j].replace('\\','\\\\')
392 392 i = j + 1
393 393 if stuff[0] == '!':
394 394 stuff = '^' + stuff[1:]
395 395 elif stuff[0] == '^':
396 396 stuff = '\\' + stuff
397 397 res = '%s[%s]' % (res, stuff)
398 398 elif c == '{':
399 399 group += 1
400 400 res += '(?:'
401 401 elif c == '}' and group:
402 402 res += ')'
403 403 group -= 1
404 404 elif c == ',' and group:
405 405 res += '|'
406 406 elif c == '\\':
407 407 p = peek()
408 408 if p:
409 409 i += 1
410 410 res += re.escape(p)
411 411 else:
412 412 res += re.escape(c)
413 413 else:
414 414 res += re.escape(c)
415 415 return head + res + tail
416 416
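As an aside (not part of the patch), a hedged sketch of what globre() produces, assuming mercurial.util is importable:

import re
from mercurial import util   # assumption

pat = util.globre('src/**/*.py')             # '**' crosses '/', '*' does not
print bool(re.match(pat, 'src/a/b/c.py'))    # True
print bool(re.match(pat, 'src.py'))          # False
print bool(re.match(util.globre('*.{c,h}'), 'foo.h'))   # True: {a,b} becomes an alternation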
417 417 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
418 418
419 419 def pathto(root, n1, n2):
420 420 '''return the relative path from one place to another.
421 421 root should use os.sep to separate directories
422 422 n1 should use os.sep to separate directories
423 423 n2 should use "/" to separate directories
424 424 returns an os.sep-separated path.
425 425
426 426 If n1 is a relative path, it's assumed it's
427 427 relative to root.
428 428 n2 should always be relative to root.
429 429 '''
430 430 if not n1: return localpath(n2)
431 431 if os.path.isabs(n1):
432 432 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
433 433 return os.path.join(root, localpath(n2))
434 434 n2 = '/'.join((pconvert(root), n2))
435 435 a, b = splitpath(n1), n2.split('/')
436 436 a.reverse()
437 437 b.reverse()
438 438 while a and b and a[-1] == b[-1]:
439 439 a.pop()
440 440 b.pop()
441 441 b.reverse()
442 442 return os.sep.join((['..'] * len(a)) + b) or '.'
443 443
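A minimal sketch of pathto() in action, assuming mercurial.util is importable and a hypothetical '/repo' root (POSIX separators shown):

from mercurial import util   # assumption

# from /repo/sub/dir, how do we reach /repo/other/file.txt?
print util.pathto('/repo', 'sub/dir', 'other/file.txt')
# -> '../../other/file.txt' (joined with os.sep on the platform in use)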
444 444 def canonpath(root, cwd, myname):
445 445 """return the canonical path of myname, given cwd and root"""
446 446 if root == os.sep:
447 447 rootsep = os.sep
448 448 elif endswithsep(root):
449 449 rootsep = root
450 450 else:
451 451 rootsep = root + os.sep
452 452 name = myname
453 453 if not os.path.isabs(name):
454 454 name = os.path.join(root, cwd, name)
455 455 name = os.path.normpath(name)
456 456 audit_path = path_auditor(root)
457 457 if name != rootsep and name.startswith(rootsep):
458 458 name = name[len(rootsep):]
459 459 audit_path(name)
460 460 return pconvert(name)
461 461 elif name == root:
462 462 return ''
463 463 else:
464 464 # Determine whether `name' is in the hierarchy at or beneath `root',
465 465 # by iterating name=dirname(name) until that causes no change (can't
466 466 # check name == '/', because that doesn't work on windows). For each
467 467 # `name', compare dev/inode numbers. If they match, the list `rel'
468 468 # holds the reversed list of components making up the relative file
469 469 # name we want.
470 470 root_st = os.stat(root)
471 471 rel = []
472 472 while True:
473 473 try:
474 474 name_st = os.stat(name)
475 475 except OSError:
476 476 break
477 477 if samestat(name_st, root_st):
478 478 if not rel:
479 479 # name was actually the same as root (maybe a symlink)
480 480 return ''
481 481 rel.reverse()
482 482 name = os.path.join(*rel)
483 483 audit_path(name)
484 484 return pconvert(name)
485 485 dirname, basename = os.path.split(name)
486 486 rel.append(basename)
487 487 if dirname == name:
488 488 break
489 489 name = dirname
490 490
491 491 raise Abort('%s not under root' % myname)
492 492
493 493 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
494 494 """build a function to match a set of file patterns
495 495
496 496 arguments:
497 497 canonroot - the canonical root of the tree you're matching against
498 498 cwd - the current working directory, if relevant
499 499 names - patterns to find
500 500 inc - patterns to include
501 501 exc - patterns to exclude
502 502 dflt_pat - if a pattern in names has no explicit type, assume this one
503 503 src - where these patterns came from (e.g. .hgignore)
504 504
505 505 a pattern is one of:
506 506 'glob:<glob>' - a glob relative to cwd
507 507 're:<regexp>' - a regular expression
508 508 'path:<path>' - a path relative to canonroot
509 509 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
510 510 'relpath:<path>' - a path relative to cwd
511 511 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
512 512 '<something>' - one of the cases above, selected by the dflt_pat argument
513 513
514 514 returns:
515 515 a 3-tuple containing
516 516 - list of roots (places where one should start a recursive walk of the fs);
517 517 this often matches the explicit non-pattern names passed in, but also
518 518 includes the initial part of glob: patterns that has no glob characters
519 519 - a bool match(filename) function
520 520 - a bool indicating if any patterns were passed in
521 521 """
522 522
523 523 # a common case: no patterns at all
524 524 if not names and not inc and not exc:
525 525 return [], always, False
526 526
527 527 def contains_glob(name):
528 528 for c in name:
529 529 if c in _globchars: return True
530 530 return False
531 531
532 532 def regex(kind, name, tail):
533 533 '''convert a pattern into a regular expression'''
534 534 if not name:
535 535 return ''
536 536 if kind == 're':
537 537 return name
538 538 elif kind == 'path':
539 539 return '^' + re.escape(name) + '(?:/|$)'
540 540 elif kind == 'relglob':
541 541 return globre(name, '(?:|.*/)', tail)
542 542 elif kind == 'relpath':
543 543 return re.escape(name) + '(?:/|$)'
544 544 elif kind == 'relre':
545 545 if name.startswith('^'):
546 546 return name
547 547 return '.*' + name
548 548 return globre(name, '', tail)
549 549
550 550 def matchfn(pats, tail):
551 551 """build a matching function from a set of patterns"""
552 552 if not pats:
553 553 return
554 554 try:
555 555 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
556 556 if len(pat) > 20000:
557 557 raise OverflowError()
558 558 return re.compile(pat).match
559 559 except OverflowError:
560 560 # We're using a Python with a tiny regex engine and we
561 561 # made it explode, so we'll divide the pattern list in two
562 562 # until it works
563 563 l = len(pats)
564 564 if l < 2:
565 565 raise
566 566 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
567 567 return lambda s: a(s) or b(s)
568 568 except re.error:
569 569 for k, p in pats:
570 570 try:
571 571 re.compile('(?:%s)' % regex(k, p, tail))
572 572 except re.error:
573 573 if src:
574 574 raise Abort("%s: invalid pattern (%s): %s" %
575 575 (src, k, p))
576 576 else:
577 577 raise Abort("invalid pattern (%s): %s" % (k, p))
578 578 raise Abort("invalid pattern")
579 579
580 580 def globprefix(pat):
581 581 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
582 582 root = []
583 583 for p in pat.split('/'):
584 584 if contains_glob(p): break
585 585 root.append(p)
586 586 return '/'.join(root) or '.'
587 587
588 588 def normalizepats(names, default):
589 589 pats = []
590 590 roots = []
591 591 anypats = False
592 592 for kind, name in [patkind(p, default) for p in names]:
593 593 if kind in ('glob', 'relpath'):
594 594 name = canonpath(canonroot, cwd, name)
595 595 elif kind in ('relglob', 'path'):
596 596 name = normpath(name)
597 597
598 598 pats.append((kind, name))
599 599
600 600 if kind in ('glob', 're', 'relglob', 'relre'):
601 601 anypats = True
602 602
603 603 if kind == 'glob':
604 604 root = globprefix(name)
605 605 roots.append(root)
606 606 elif kind in ('relpath', 'path'):
607 607 roots.append(name or '.')
608 608 elif kind == 'relglob':
609 609 roots.append('.')
610 610 return roots, pats, anypats
611 611
612 612 roots, pats, anypats = normalizepats(names, dflt_pat)
613 613
614 614 patmatch = matchfn(pats, '$') or always
615 615 incmatch = always
616 616 if inc:
617 617 dummy, inckinds, dummy = normalizepats(inc, 'glob')
618 618 incmatch = matchfn(inckinds, '(?:/|$)')
619 619 excmatch = never
620 620 if exc:
621 621 dummy, exckinds, dummy = normalizepats(exc, 'glob')
622 622 excmatch = matchfn(exckinds, '(?:/|$)')
623 623
624 624 if not names and inc and not exc:
625 625 # common case: hgignore patterns
626 626 match = incmatch
627 627 else:
628 628 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
629 629
630 630 return (roots, match, (inc or exc or anypats) and True)
631 631
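For readers unfamiliar with the (roots, match, anypats) contract, a hedged sketch using a hypothetical '/repo' root (POSIX paths); it assumes mercurial.util is importable:

from mercurial import util   # assumption

roots, match, anypats = util.matcher('/repo', names=['glob:src/*.c'])
print roots                                           # ['src'] - where a walk should start
print bool(match('src/main.c')), bool(match('doc/x.txt'))   # True False
print anypats                                         # True - a real pattern was given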
632 632 _hgexecutable = None
633 633
634 634 def main_is_frozen():
635 635 """return True if we are a frozen executable.
636 636
637 637 The code supports py2exe (most common, Windows only) and tools/freeze
638 638 (portable, not much used).
639 639 """
640 640 return (hasattr(sys, "frozen") or # new py2exe
641 641 hasattr(sys, "importers") or # old py2exe
642 642 imp.is_frozen("__main__")) # tools/freeze
643 643
644 644 def hgexecutable():
645 645 """return location of the 'hg' executable.
646 646
647 647 Defaults to $HG or 'hg' in the search path.
648 648 """
649 649 if _hgexecutable is None:
650 650 hg = os.environ.get('HG')
651 651 if hg:
652 652 set_hgexecutable(hg)
653 653 elif main_is_frozen():
654 654 set_hgexecutable(sys.executable)
655 655 else:
656 set_hgexecutable(find_exe('hg', 'hg'))
656 set_hgexecutable(find_exe('hg') or 'hg')
657 657 return _hgexecutable
658 658
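The changed line above is the point of this changeset: find_exe() now returns an existing file or None, so callers supply their own fallback. A hedged sketch of the idiom (mercurial.util assumed importable):

from mercurial import util   # assumption

hg = util.find_exe('hg') or 'hg'   # prefer the path that exists, else the bare name
util.set_hgexecutable(hg)
print util.hgexecutable()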
659 659 def set_hgexecutable(path):
660 660 """set location of the 'hg' executable"""
661 661 global _hgexecutable
662 662 _hgexecutable = path
663 663
664 664 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
665 665 '''enhanced shell command execution.
666 666 run with environment maybe modified, maybe in different dir.
667 667
668 668 if command fails and onerr is None, return status. if ui object,
669 669 print error message and return status, else raise onerr object as
670 670 exception.'''
671 671 def py2shell(val):
672 672 'convert python object into string that is useful to shell'
673 673 if val in (None, False):
674 674 return '0'
675 675 if val == True:
676 676 return '1'
677 677 return str(val)
678 678 oldenv = {}
679 679 for k in environ:
680 680 oldenv[k] = os.environ.get(k)
681 681 if cwd is not None:
682 682 oldcwd = os.getcwd()
683 683 origcmd = cmd
684 684 if os.name == 'nt':
685 685 cmd = '"%s"' % cmd
686 686 try:
687 687 for k, v in environ.iteritems():
688 688 os.environ[k] = py2shell(v)
689 689 os.environ['HG'] = hgexecutable()
690 690 if cwd is not None and oldcwd != cwd:
691 691 os.chdir(cwd)
692 692 rc = os.system(cmd)
693 693 if sys.platform == 'OpenVMS' and rc & 1:
694 694 rc = 0
695 695 if rc and onerr:
696 696 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
697 697 explain_exit(rc)[0])
698 698 if errprefix:
699 699 errmsg = '%s: %s' % (errprefix, errmsg)
700 700 try:
701 701 onerr.warn(errmsg + '\n')
702 702 except AttributeError:
703 703 raise onerr(errmsg)
704 704 return rc
705 705 finally:
706 706 for k, v in oldenv.iteritems():
707 707 if v is None:
708 708 del os.environ[k]
709 709 else:
710 710 os.environ[k] = v
711 711 if cwd is not None and oldcwd != cwd:
712 712 os.chdir(oldcwd)
713 713
714 714 def checksignature(func):
715 715 '''wrap a function with code to check for calling errors'''
716 716 def check(*args, **kwargs):
717 717 try:
718 718 return func(*args, **kwargs)
719 719 except TypeError:
720 720 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
721 721 raise error.SignatureError
722 722 raise
723 723
724 724 return check
725 725
726 726 # os.path.lexists is not available on python2.3
727 727 def lexists(filename):
728 728 "test whether a file with this name exists. does not follow symlinks"
729 729 try:
730 730 os.lstat(filename)
731 731 except:
732 732 return False
733 733 return True
734 734
735 735 def rename(src, dst):
736 736 """forcibly rename a file"""
737 737 try:
738 738 os.rename(src, dst)
739 739 except OSError, err: # FIXME: check err (EEXIST ?)
740 740 # on windows, rename to existing file is not allowed, so we
741 741 # must delete destination first. but if file is open, unlink
742 742 # schedules it for delete but does not delete it. rename
743 743 # happens immediately even for open files, so we create
744 744 # temporary file, delete it, rename destination to that name,
745 745 # then delete that. then rename is safe to do.
746 746 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
747 747 os.close(fd)
748 748 os.unlink(temp)
749 749 os.rename(dst, temp)
750 750 os.unlink(temp)
751 751 os.rename(src, dst)
752 752
753 753 def unlink(f):
754 754 """unlink and remove the directory if it is empty"""
755 755 os.unlink(f)
756 756 # try removing directories that might now be empty
757 757 try:
758 758 os.removedirs(os.path.dirname(f))
759 759 except OSError:
760 760 pass
761 761
762 762 def copyfile(src, dest):
763 763 "copy a file, preserving mode"
764 764 if os.path.islink(src):
765 765 try:
766 766 os.unlink(dest)
767 767 except:
768 768 pass
769 769 os.symlink(os.readlink(src), dest)
770 770 else:
771 771 try:
772 772 shutil.copyfile(src, dest)
773 773 shutil.copymode(src, dest)
774 774 except shutil.Error, inst:
775 775 raise Abort(str(inst))
776 776
777 777 def copyfiles(src, dst, hardlink=None):
778 778 """Copy a directory tree using hardlinks if possible"""
779 779
780 780 if hardlink is None:
781 781 hardlink = (os.stat(src).st_dev ==
782 782 os.stat(os.path.dirname(dst)).st_dev)
783 783
784 784 if os.path.isdir(src):
785 785 os.mkdir(dst)
786 786 for name, kind in osutil.listdir(src):
787 787 srcname = os.path.join(src, name)
788 788 dstname = os.path.join(dst, name)
789 789 copyfiles(srcname, dstname, hardlink)
790 790 else:
791 791 if hardlink:
792 792 try:
793 793 os_link(src, dst)
794 794 except (IOError, OSError):
795 795 hardlink = False
796 796 shutil.copy(src, dst)
797 797 else:
798 798 shutil.copy(src, dst)
799 799
800 800 class path_auditor(object):
801 801 '''ensure that a filesystem path contains no banned components.
802 802 the following properties of a path are checked:
803 803
804 804 - under top-level .hg
805 805 - starts at the root of a windows drive
806 806 - contains ".."
807 807 - traverses a symlink (e.g. a/symlink_here/b)
808 808 - inside a nested repository'''
809 809
810 810 def __init__(self, root):
811 811 self.audited = set()
812 812 self.auditeddir = set()
813 813 self.root = root
814 814
815 815 def __call__(self, path):
816 816 if path in self.audited:
817 817 return
818 818 normpath = os.path.normcase(path)
819 819 parts = splitpath(normpath)
820 820 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '.hg.', '')
821 821 or os.pardir in parts):
822 822 raise Abort(_("path contains illegal component: %s") % path)
823 823 if '.hg' in path:
824 824 for p in '.hg', '.hg.':
825 825 if p in parts[1:-1]:
826 826 pos = parts.index(p)
827 827 base = os.path.join(*parts[:pos])
828 828 raise Abort(_('path %r is inside repo %r') % (path, base))
829 829 def check(prefix):
830 830 curpath = os.path.join(self.root, prefix)
831 831 try:
832 832 st = os.lstat(curpath)
833 833 except OSError, err:
834 834 # EINVAL can be raised for invalid path syntax under win32.
835 835 # Such errors must be ignored so that patterns can still be checked.
836 836 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
837 837 raise
838 838 else:
839 839 if stat.S_ISLNK(st.st_mode):
840 840 raise Abort(_('path %r traverses symbolic link %r') %
841 841 (path, prefix))
842 842 elif (stat.S_ISDIR(st.st_mode) and
843 843 os.path.isdir(os.path.join(curpath, '.hg'))):
844 844 raise Abort(_('path %r is inside repo %r') %
845 845 (path, prefix))
846 846 parts.pop()
847 847 prefixes = []
848 848 for n in range(len(parts)):
849 849 prefix = os.sep.join(parts)
850 850 if prefix in self.auditeddir:
851 851 break
852 852 check(prefix)
853 853 prefixes.append(prefix)
854 854 parts.pop()
855 855
856 856 self.audited.add(path)
857 857 # only add prefixes to the cache after checking everything: we don't
858 858 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
859 859 self.auditeddir.update(prefixes)
860 860
861 861 def _makelock_file(info, pathname):
862 862 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
863 863 os.write(ld, info)
864 864 os.close(ld)
865 865
866 866 def _readlock_file(pathname):
867 867 return posixfile(pathname).read()
868 868
869 869 def nlinks(pathname):
870 870 """Return number of hardlinks for the given file."""
871 871 return os.lstat(pathname).st_nlink
872 872
873 873 if hasattr(os, 'link'):
874 874 os_link = os.link
875 875 else:
876 876 def os_link(src, dst):
877 877 raise OSError(0, _("Hardlinks not supported"))
878 878
879 879 def fstat(fp):
880 880 '''stat file object that may not have fileno method.'''
881 881 try:
882 882 return os.fstat(fp.fileno())
883 883 except AttributeError:
884 884 return os.stat(fp.name)
885 885
886 886 posixfile = file
887 887
888 888 def openhardlinks():
889 889 '''return true if it is safe to hold open file handles to hardlinks'''
890 890 return True
891 891
892 892 def _statfiles(files):
893 893 'Stat each file in files and yield stat or None if file does not exist.'
894 894 lstat = os.lstat
895 895 for nf in files:
896 896 try:
897 897 st = lstat(nf)
898 898 except OSError, err:
899 899 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
900 900 raise
901 901 st = None
902 902 yield st
903 903
904 904 def _statfiles_clustered(files):
905 905 '''Stat each file in files and yield stat or None if file does not exist.
906 906 Cluster and cache stat per directory to minimize number of OS stat calls.'''
907 907 lstat = os.lstat
908 908 ncase = os.path.normcase
909 909 sep = os.sep
910 910 dircache = {} # dirname -> filename -> status | None if file does not exist
911 911 for nf in files:
912 912 nf = ncase(nf)
913 913 pos = nf.rfind(sep)
914 914 if pos == -1:
915 915 dir, base = '.', nf
916 916 else:
917 917 dir, base = nf[:pos+1], nf[pos+1:]
918 918 cache = dircache.get(dir, None)
919 919 if cache is None:
920 920 try:
921 921 dmap = dict([(ncase(n), s)
922 922 for n, k, s in osutil.listdir(dir, True)])
923 923 except OSError, err:
924 924 # handle directory not found in Python versions prior to 2.5
925 925 # Python <= 2.4 returns native Windows code 3 in errno
926 926 # Python >= 2.5 returns ENOENT and adds winerror field
927 927 # EINVAL is raised if dir is not a directory.
928 928 if err.errno not in (3, errno.ENOENT, errno.EINVAL,
929 929 errno.ENOTDIR):
930 930 raise
931 931 dmap = {}
932 932 cache = dircache.setdefault(dir, dmap)
933 933 yield cache.get(base, None)
934 934
935 935 if sys.platform == 'win32':
936 936 statfiles = _statfiles_clustered
937 937 else:
938 938 statfiles = _statfiles
939 939
940 940 getuser_fallback = None
941 941
942 942 def getuser():
943 943 '''return name of current user'''
944 944 try:
945 945 return getpass.getuser()
946 946 except ImportError:
947 947 # import of pwd will fail on windows - try fallback
948 948 if getuser_fallback:
949 949 return getuser_fallback()
950 950 # raised if win32api not available
951 951 raise Abort(_('user name not available - set USERNAME '
952 952 'environment variable'))
953 953
954 954 def username(uid=None):
955 955 """Return the name of the user with the given uid.
956 956
957 957 If uid is None, return the name of the current user."""
958 958 try:
959 959 import pwd
960 960 if uid is None:
961 961 uid = os.getuid()
962 962 try:
963 963 return pwd.getpwuid(uid)[0]
964 964 except KeyError:
965 965 return str(uid)
966 966 except ImportError:
967 967 return None
968 968
969 969 def groupname(gid=None):
970 970 """Return the name of the group with the given gid.
971 971
972 972 If gid is None, return the name of the current group."""
973 973 try:
974 974 import grp
975 975 if gid is None:
976 976 gid = os.getgid()
977 977 try:
978 978 return grp.getgrgid(gid)[0]
979 979 except KeyError:
980 980 return str(gid)
981 981 except ImportError:
982 982 return None
983 983
984 984 # File system features
985 985
986 986 def checkcase(path):
987 987 """
988 988 Check whether the given path is on a case-sensitive filesystem
989 989
990 990 Requires a path (like /foo/.hg) ending with a foldable final
991 991 directory component.
992 992 """
993 993 s1 = os.stat(path)
994 994 d, b = os.path.split(path)
995 995 p2 = os.path.join(d, b.upper())
996 996 if path == p2:
997 997 p2 = os.path.join(d, b.lower())
998 998 try:
999 999 s2 = os.stat(p2)
1000 1000 if s2 == s1:
1001 1001 return False
1002 1002 return True
1003 1003 except:
1004 1004 return True
1005 1005
1006 1006 _fspathcache = {}
1007 1007 def fspath(name, root):
1008 1008 '''Get name in the case stored in the filesystem
1009 1009
1010 1010 The name is either relative to root, or it is an absolute path starting
1011 1011 with root. Note that this function is unnecessary, and should not be
1012 1012 called, for case-sensitive filesystems (simply because it's expensive).
1013 1013 '''
1014 1014 # If name is absolute, make it relative
1015 1015 if name.lower().startswith(root.lower()):
1016 1016 l = len(root)
1017 1017 if name[l] == os.sep or name[l] == os.altsep:
1018 1018 l = l + 1
1019 1019 name = name[l:]
1020 1020
1021 1021 if not os.path.exists(os.path.join(root, name)):
1022 1022 return None
1023 1023
1024 1024 seps = os.sep
1025 1025 if os.altsep:
1026 1026 seps = seps + os.altsep
1027 1027 # Protect backslashes. This gets silly very quickly.
1028 1028 seps.replace('\\','\\\\')
1029 1029 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
1030 1030 dir = os.path.normcase(os.path.normpath(root))
1031 1031 result = []
1032 1032 for part, sep in pattern.findall(name):
1033 1033 if sep:
1034 1034 result.append(sep)
1035 1035 continue
1036 1036
1037 1037 if dir not in _fspathcache:
1038 1038 _fspathcache[dir] = os.listdir(dir)
1039 1039 contents = _fspathcache[dir]
1040 1040
1041 1041 lpart = part.lower()
1042 1042 for n in contents:
1043 1043 if n.lower() == lpart:
1044 1044 result.append(n)
1045 1045 break
1046 1046 else:
1047 1047 # Cannot happen, as the file exists!
1048 1048 result.append(part)
1049 1049 dir = os.path.join(dir, lpart)
1050 1050
1051 1051 return ''.join(result)
1052 1052
1053 1053 def checkexec(path):
1054 1054 """
1055 1055 Check whether the given path is on a filesystem with UNIX-like exec flags
1056 1056
1057 1057 Requires a directory (like /foo/.hg)
1058 1058 """
1059 1059
1060 1060 # VFAT on some Linux versions can flip mode but it doesn't persist
1061 1061 # a FS remount. Frequently we can detect it if files are created
1062 1062 # with exec bit on.
1063 1063
1064 1064 try:
1065 1065 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
1066 1066 fh, fn = tempfile.mkstemp("", "", path)
1067 1067 try:
1068 1068 os.close(fh)
1069 1069 m = os.stat(fn).st_mode & 0777
1070 1070 new_file_has_exec = m & EXECFLAGS
1071 1071 os.chmod(fn, m ^ EXECFLAGS)
1072 1072 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
1073 1073 finally:
1074 1074 os.unlink(fn)
1075 1075 except (IOError, OSError):
1076 1076 # we don't care, the user probably won't be able to commit anyway
1077 1077 return False
1078 1078 return not (new_file_has_exec or exec_flags_cannot_flip)
1079 1079
1080 1080 def checklink(path):
1081 1081 """check whether the given path is on a symlink-capable filesystem"""
1082 1082 # mktemp is not racy because symlink creation will fail if the
1083 1083 # file already exists
1084 1084 name = tempfile.mktemp(dir=path)
1085 1085 try:
1086 1086 os.symlink(".", name)
1087 1087 os.unlink(name)
1088 1088 return True
1089 1089 except (OSError, AttributeError):
1090 1090 return False
1091 1091
1092 1092 _umask = os.umask(0)
1093 1093 os.umask(_umask)
1094 1094
1095 1095 def needbinarypatch():
1096 1096 """return True if patches should be applied in binary mode by default."""
1097 1097 return os.name == 'nt'
1098 1098
1099 1099 def endswithsep(path):
1100 1100 '''Check path ends with os.sep or os.altsep.'''
1101 1101 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1102 1102
1103 1103 def splitpath(path):
1104 1104 '''Split path by os.sep.
1105 1105 Note that this function does not use os.altsep because this is
1106 1106 an alternative of simple "xxx.split(os.sep)".
1107 1107 It is recommended to use os.path.normpath() before using this
1108 1108 function if needed.'''
1109 1109 return path.split(os.sep)
1110 1110
1111 1111 def gui():
1112 1112 '''Are we running in a GUI?'''
1113 1113 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
1114 1114
1115 1115 def lookup_reg(key, name=None, scope=None):
1116 1116 return None
1117 1117
1118 1118 # Platform specific variants
1119 1119 if os.name == 'nt':
1120 1120 import msvcrt
1121 1121 nulldev = 'NUL:'
1122 1122
1123 1123 class winstdout:
1124 1124 '''stdout on windows misbehaves if sent through a pipe'''
1125 1125
1126 1126 def __init__(self, fp):
1127 1127 self.fp = fp
1128 1128
1129 1129 def __getattr__(self, key):
1130 1130 return getattr(self.fp, key)
1131 1131
1132 1132 def close(self):
1133 1133 try:
1134 1134 self.fp.close()
1135 1135 except: pass
1136 1136
1137 1137 def write(self, s):
1138 1138 try:
1139 1139 # This is a workaround for the "Not enough space" error when
1140 1140 # writing a large amount of data to the console.
1141 1141 limit = 16000
1142 1142 l = len(s)
1143 1143 start = 0
1144 1144 while start < l:
1145 1145 end = start + limit
1146 1146 self.fp.write(s[start:end])
1147 1147 start = end
1148 1148 except IOError, inst:
1149 1149 if inst.errno != 0: raise
1150 1150 self.close()
1151 1151 raise IOError(errno.EPIPE, 'Broken pipe')
1152 1152
1153 1153 def flush(self):
1154 1154 try:
1155 1155 return self.fp.flush()
1156 1156 except IOError, inst:
1157 1157 if inst.errno != errno.EINVAL: raise
1158 1158 self.close()
1159 1159 raise IOError(errno.EPIPE, 'Broken pipe')
1160 1160
1161 1161 sys.stdout = winstdout(sys.stdout)
1162 1162
1163 1163 def _is_win_9x():
1164 1164 '''return true if run on windows 95, 98 or me.'''
1165 1165 try:
1166 1166 return sys.getwindowsversion()[3] == 1
1167 1167 except AttributeError:
1168 1168 return 'command' in os.environ.get('comspec', '')
1169 1169
1170 1170 def openhardlinks():
1171 1171 return not _is_win_9x and "win32api" in locals()
1172 1172
1173 1173 def system_rcpath():
1174 1174 try:
1175 1175 return system_rcpath_win32()
1176 1176 except:
1177 1177 return [r'c:\mercurial\mercurial.ini']
1178 1178
1179 1179 def user_rcpath():
1180 1180 '''return os-specific hgrc search path to the user dir'''
1181 1181 try:
1182 1182 path = user_rcpath_win32()
1183 1183 except:
1184 1184 home = os.path.expanduser('~')
1185 1185 path = [os.path.join(home, 'mercurial.ini'),
1186 1186 os.path.join(home, '.hgrc')]
1187 1187 userprofile = os.environ.get('USERPROFILE')
1188 1188 if userprofile:
1189 1189 path.append(os.path.join(userprofile, 'mercurial.ini'))
1190 1190 path.append(os.path.join(userprofile, '.hgrc'))
1191 1191 return path
1192 1192
1193 1193 def parse_patch_output(output_line):
1194 1194 """parses the output produced by patch and returns the file name"""
1195 1195 pf = output_line[14:]
1196 1196 if pf[0] == '`':
1197 1197 pf = pf[1:-1] # Remove the quotes
1198 1198 return pf
1199 1199
1200 1200 def sshargs(sshcmd, host, user, port):
1201 1201 '''Build argument list for ssh or Plink'''
1202 1202 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
1203 1203 args = user and ("%s@%s" % (user, host)) or host
1204 1204 return port and ("%s %s %s" % (args, pflag, port)) or args
1205 1205
1206 1206 def testpid(pid):
1207 1207 '''return False if pid dead, True if running or not known'''
1208 1208 return True
1209 1209
1210 1210 def set_flags(f, l, x):
1211 1211 pass
1212 1212
1213 1213 def set_binary(fd):
1214 1214 # When run without console, pipes may expose invalid
1215 1215 # fileno(), usually set to -1.
1216 1216 if hasattr(fd, 'fileno') and fd.fileno() >= 0:
1217 1217 msvcrt.setmode(fd.fileno(), os.O_BINARY)
1218 1218
1219 1219 def pconvert(path):
1220 1220 return '/'.join(splitpath(path))
1221 1221
1222 1222 def localpath(path):
1223 1223 return path.replace('/', '\\')
1224 1224
1225 1225 def normpath(path):
1226 1226 return pconvert(os.path.normpath(path))
1227 1227
1228 1228 makelock = _makelock_file
1229 1229 readlock = _readlock_file
1230 1230
1231 1231 def samestat(s1, s2):
1232 1232 return False
1233 1233
1234 1234 # A sequence of backslashes is special iff it precedes a double quote:
1235 1235 # - if there's an even number of backslashes, the double quote is not
1236 1236 # quoted (i.e. it ends the quoted region)
1237 1237 # - if there's an odd number of backslashes, the double quote is quoted
1238 1238 # - in both cases, every pair of backslashes is unquoted into a single
1239 1239 # backslash
1240 1240 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1241 1241 # So, to quote a string, we must surround it in double quotes, double
1242 1242 # the number of backslashes that precede double quotes and add another
1243 1243 # backslash before every double quote (being careful with the double
1244 1244 # quote we've appended to the end)
1245 1245 _quotere = None
1246 1246 def shellquote(s):
1247 1247 global _quotere
1248 1248 if _quotere is None:
1249 1249 _quotere = re.compile(r'(\\*)("|\\$)')
1250 1250 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1251 1251
1252 1252 def quotecommand(cmd):
1253 1253 """Build a command string suitable for os.popen* calls."""
1254 1254 # The extra quotes are needed because popen* runs the command
1255 1255 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
1256 1256 return '"' + cmd + '"'
1257 1257
1258 1258 def popen(command, mode='r'):
1259 1259 # Work around "popen spawned process may not write to stdout
1260 1260 # under windows"
1261 1261 # http://bugs.python.org/issue1366
1262 1262 command += " 2> %s" % nulldev
1263 1263 return os.popen(quotecommand(command), mode)
1264 1264
1265 1265 def explain_exit(code):
1266 1266 return _("exited with status %d") % code, code
1267 1267
1268 1268 # if you change this stub into a real check, please try to implement the
1269 1269 # username and groupname functions above, too.
1270 1270 def isowner(fp, st=None):
1271 1271 return True
1272 1272
1273 def find_in_path(name, path, default=None):
1274 '''find name in search path. path can be string (will be split
1275 with os.pathsep), or iterable thing that returns strings. if name
1276 found, return path to name. else return default. name is looked up
1277 using cmd.exe rules, using PATHEXT.'''
1278 if isinstance(path, str):
1279 path = path.split(os.pathsep)
1280
1273 def find_exe(command):
1274 '''Find executable for command searching like cmd.exe does.
1275 If command is a basename then PATH is searched for command.
1276 PATH isn't searched if command is an absolute or relative path.
1277 An extension from PATHEXT is found and added if not present.
1278 If command isn't found None is returned.'''
1281 1279 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1282 pathext = pathext.lower().split(os.pathsep)
1283 isexec = os.path.splitext(name)[1].lower() in pathext
1280 pathexts = [ext for ext in pathext.lower().split(os.pathsep)]
1281 if os.path.splitext(command)[1].lower() in pathexts:
1282 pathexts = ['']
1283
1284 def findexisting(pathcommand):
1285 'Will append extension (if needed) and return existing file'
1286 for ext in pathexts:
1287 executable = pathcommand + ext
1288 if os.path.exists(executable):
1289 return executable
1290 return None
1284 1291
1285 for p in path:
1286 p_name = os.path.join(p, name)
1287
1288 if isexec and os.path.exists(p_name):
1289 return p_name
1290
1291 for ext in pathext:
1292 p_name_ext = p_name + ext
1293 if os.path.exists(p_name_ext):
1294 return p_name_ext
1295 return default
1292 if os.sep in command:
1293 return findexisting(command)
1294
1295 for path in os.environ.get('PATH', '').split(os.pathsep):
1296 executable = findexisting(os.path.join(path, command))
1297 if executable is not None:
1298 return executable
1299 return None
1296 1300
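A hedged illustration of the new contract on Windows (not part of the patch): only files that actually exist come back, with a PATHEXT extension appended when needed, and None otherwise. It assumes mercurial.util is importable:

from mercurial import util   # assumption

print util.find_exe('python')              # e.g. 'C:\\Python25\\python.exe', if it is on PATH
print util.find_exe(r'C:\no\such\tool')    # None - nothing is invented any more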
1297 1301 def set_signal_handler():
1298 1302 try:
1299 1303 set_signal_handler_win32()
1300 1304 except NameError:
1301 1305 pass
1302 1306
1303 1307 try:
1304 1308 # override functions with win32 versions if possible
1305 1309 from util_win32 import *
1306 1310 if not _is_win_9x():
1307 1311 posixfile = posixfile_nt
1308 1312 except ImportError:
1309 1313 pass
1310 1314
1311 1315 else:
1312 1316 nulldev = '/dev/null'
1313 1317
1314 1318 def rcfiles(path):
1315 1319 rcs = [os.path.join(path, 'hgrc')]
1316 1320 rcdir = os.path.join(path, 'hgrc.d')
1317 1321 try:
1318 1322 rcs.extend([os.path.join(rcdir, f)
1319 1323 for f, kind in osutil.listdir(rcdir)
1320 1324 if f.endswith(".rc")])
1321 1325 except OSError:
1322 1326 pass
1323 1327 return rcs
1324 1328
1325 1329 def system_rcpath():
1326 1330 path = []
1327 1331 # old mod_python does not set sys.argv
1328 1332 if len(getattr(sys, 'argv', [])) > 0:
1329 1333 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1330 1334 '/../etc/mercurial'))
1331 1335 path.extend(rcfiles('/etc/mercurial'))
1332 1336 return path
1333 1337
1334 1338 def user_rcpath():
1335 1339 return [os.path.expanduser('~/.hgrc')]
1336 1340
1337 1341 def parse_patch_output(output_line):
1338 1342 """parses the output produced by patch and returns the file name"""
1339 1343 pf = output_line[14:]
1340 1344 if os.sys.platform == 'OpenVMS':
1341 1345 if pf[0] == '`':
1342 1346 pf = pf[1:-1] # Remove the quotes
1343 1347 else:
1344 1348 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1345 1349 pf = pf[1:-1] # Remove the quotes
1346 1350 return pf
1347 1351
1348 1352 def sshargs(sshcmd, host, user, port):
1349 1353 '''Build argument list for ssh'''
1350 1354 args = user and ("%s@%s" % (user, host)) or host
1351 1355 return port and ("%s -p %s" % (args, port)) or args
1352 1356
1353 1357 def is_exec(f):
1354 1358 """check whether a file is executable"""
1355 1359 return (os.lstat(f).st_mode & 0100 != 0)
1356 1360
1357 1361 def set_flags(f, l, x):
1358 1362 s = os.lstat(f).st_mode
1359 1363 if l:
1360 1364 if not stat.S_ISLNK(s):
1361 1365 # switch file to link
1362 1366 data = file(f).read()
1363 1367 os.unlink(f)
1364 1368 try:
1365 1369 os.symlink(data, f)
1366 1370 except:
1367 1371 # failed to make a link, rewrite file
1368 1372 file(f, "w").write(data)
1369 1373 # no chmod needed at this point
1370 1374 return
1371 1375 if stat.S_ISLNK(s):
1372 1376 # switch link to file
1373 1377 data = os.readlink(f)
1374 1378 os.unlink(f)
1375 1379 file(f, "w").write(data)
1376 1380 s = 0666 & ~_umask # avoid restatting for chmod
1377 1381
1378 1382 sx = s & 0100
1379 1383 if x and not sx:
1380 1384 # Turn on +x for every +r bit when making a file executable
1381 1385 # and obey umask.
1382 1386 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1383 1387 elif not x and sx:
1384 1388 # Turn off all +x bits
1385 1389 os.chmod(f, s & 0666)
1386 1390
1387 1391 def set_binary(fd):
1388 1392 pass
1389 1393
1390 1394 def pconvert(path):
1391 1395 return path
1392 1396
1393 1397 def localpath(path):
1394 1398 return path
1395 1399
1396 1400 normpath = os.path.normpath
1397 1401 samestat = os.path.samestat
1398 1402
1399 1403 def makelock(info, pathname):
1400 1404 try:
1401 1405 os.symlink(info, pathname)
1402 1406 except OSError, why:
1403 1407 if why.errno == errno.EEXIST:
1404 1408 raise
1405 1409 else:
1406 1410 _makelock_file(info, pathname)
1407 1411
1408 1412 def readlock(pathname):
1409 1413 try:
1410 1414 return os.readlink(pathname)
1411 1415 except OSError, why:
1412 1416 if why.errno in (errno.EINVAL, errno.ENOSYS):
1413 1417 return _readlock_file(pathname)
1414 1418 else:
1415 1419 raise
1416 1420
1417 1421 def shellquote(s):
1418 1422 if os.sys.platform == 'OpenVMS':
1419 1423 return '"%s"' % s
1420 1424 else:
1421 1425 return "'%s'" % s.replace("'", "'\\''")
1422 1426
1423 1427 def quotecommand(cmd):
1424 1428 return cmd
1425 1429
1426 1430 def popen(command, mode='r'):
1427 1431 return os.popen(command, mode)
1428 1432
1429 1433 def testpid(pid):
1430 1434 '''return False if pid dead, True if running or not sure'''
1431 1435 if os.sys.platform == 'OpenVMS':
1432 1436 return True
1433 1437 try:
1434 1438 os.kill(pid, 0)
1435 1439 return True
1436 1440 except OSError, inst:
1437 1441 return inst.errno != errno.ESRCH
1438 1442
1439 1443 def explain_exit(code):
1440 1444 """return a 2-tuple (desc, code) describing a process's status"""
1441 1445 if os.WIFEXITED(code):
1442 1446 val = os.WEXITSTATUS(code)
1443 1447 return _("exited with status %d") % val, val
1444 1448 elif os.WIFSIGNALED(code):
1445 1449 val = os.WTERMSIG(code)
1446 1450 return _("killed by signal %d") % val, val
1447 1451 elif os.WIFSTOPPED(code):
1448 1452 val = os.WSTOPSIG(code)
1449 1453 return _("stopped by signal %d") % val, val
1450 1454 raise ValueError(_("invalid exit code"))
1451 1455
1452 1456 def isowner(fp, st=None):
1453 1457 """Return True if the file object f belongs to the current user.
1454 1458
1455 1459 The return value of a util.fstat(f) may be passed as the st argument.
1456 1460 """
1457 1461 if st is None:
1458 1462 st = fstat(fp)
1459 1463 return st.st_uid == os.getuid()
1460 1464
1461 def find_in_path(name, path, default=None):
1462 '''find name in search path. path can be string (will be split
1463 with os.pathsep), or iterable thing that returns strings. if name
1464 found, return path to name. else return default.'''
1465 if isinstance(path, str):
1466 path = path.split(os.pathsep)
1467 for p in path:
1468 p_name = os.path.join(p, name)
1469 if os.path.exists(p_name):
1470 return p_name
1471 return default
1465 def find_exe(command):
1466 '''Find executable for command searching like which does.
1467 If command is a basename then PATH is searched for command.
1468 PATH isn't searched if command is an absolute or relative path.
1469 If command isn't found None is returned.'''
1470 if sys.platform == 'OpenVMS':
1471 return command
1472
1473 def findexisting(executable):
1474 'Will return executable if existing file'
1475 if os.path.exists(executable):
1476 return executable
1477 return None
1478
1479 if os.sep in command:
1480 return findexisting(command)
1481
1482 for path in os.environ.get('PATH', '').split(os.pathsep):
1483 executable = findexisting(os.path.join(path, command))
1484 if executable is not None:
1485 return executable
1486 return None
1472 1487
1473 1488 def set_signal_handler():
1474 1489 pass
1475 1490
1476 def find_exe(name, default=None):
1477 '''find path of an executable.
1478 if name contains a path component, return it as is. otherwise,
1479 use normal executable search path.'''
1480
1481 if os.sep in name or sys.platform == 'OpenVMS':
1482 # don't check the executable bit. if the file isn't
1483 # executable, whoever tries to actually run it will give a
1484 # much more useful error message.
1485 return name
1486 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1487
1488 1491 def mktempcopy(name, emptyok=False, createmode=None):
1489 1492 """Create a temporary file with the same contents from name
1490 1493
1491 1494 The permission bits are copied from the original file.
1492 1495
1493 1496 If the temporary file is going to be truncated immediately, you
1494 1497 can use emptyok=True as an optimization.
1495 1498
1496 1499 Returns the name of the temporary file.
1497 1500 """
1498 1501 d, fn = os.path.split(name)
1499 1502 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1500 1503 os.close(fd)
1501 1504 # Temporary files are created with mode 0600, which is usually not
1502 1505 # what we want. If the original file already exists, just copy
1503 1506 # its mode. Otherwise, manually obey umask.
1504 1507 try:
1505 1508 st_mode = os.lstat(name).st_mode & 0777
1506 1509 except OSError, inst:
1507 1510 if inst.errno != errno.ENOENT:
1508 1511 raise
1509 1512 st_mode = createmode
1510 1513 if st_mode is None:
1511 1514 st_mode = ~_umask
1512 1515 st_mode &= 0666
1513 1516 os.chmod(temp, st_mode)
1514 1517 if emptyok:
1515 1518 return temp
1516 1519 try:
1517 1520 try:
1518 1521 ifp = posixfile(name, "rb")
1519 1522 except IOError, inst:
1520 1523 if inst.errno == errno.ENOENT:
1521 1524 return temp
1522 1525 if not getattr(inst, 'filename', None):
1523 1526 inst.filename = name
1524 1527 raise
1525 1528 ofp = posixfile(temp, "wb")
1526 1529 for chunk in filechunkiter(ifp):
1527 1530 ofp.write(chunk)
1528 1531 ifp.close()
1529 1532 ofp.close()
1530 1533 except:
1531 1534 try: os.unlink(temp)
1532 1535 except: pass
1533 1536 raise
1534 1537 return temp
1535 1538
1536 1539 class atomictempfile(posixfile):
1537 1540 """file-like object that atomically updates a file
1538 1541
1539 1542 All writes will be redirected to a temporary copy of the original
1540 1543 file. When rename is called, the copy is renamed to the original
1541 1544 name, making the changes visible.
1542 1545 """
1543 1546 def __init__(self, name, mode, createmode):
1544 1547 self.__name = name
1545 1548 self.temp = mktempcopy(name, emptyok=('w' in mode),
1546 1549 createmode=createmode)
1547 1550 posixfile.__init__(self, self.temp, mode)
1548 1551
1549 1552 def rename(self):
1550 1553 if not self.closed:
1551 1554 posixfile.close(self)
1552 1555 rename(self.temp, localpath(self.__name))
1553 1556
1554 1557 def __del__(self):
1555 1558 if not self.closed:
1556 1559 try:
1557 1560 os.unlink(self.temp)
1558 1561 except: pass
1559 1562 posixfile.close(self)
1560 1563
1561 1564 def makedirs(name, mode=None):
1562 1565 """recursive directory creation with parent mode inheritance"""
1563 1566 try:
1564 1567 os.mkdir(name)
1565 1568 if mode is not None:
1566 1569 os.chmod(name, mode)
1567 1570 return
1568 1571 except OSError, err:
1569 1572 if err.errno == errno.EEXIST:
1570 1573 return
1571 1574 if err.errno != errno.ENOENT:
1572 1575 raise
1573 1576 parent = os.path.abspath(os.path.dirname(name))
1574 1577 makedirs(parent, mode)
1575 1578 makedirs(name, mode)
1576 1579
1577 1580 class opener(object):
1578 1581 """Open files relative to a base directory
1579 1582
1580 1583 This class is used to hide the details of COW semantics and
1581 1584 remote file access from higher level code.
1582 1585 """
1583 1586 def __init__(self, base, audit=True):
1584 1587 self.base = base
1585 1588 if audit:
1586 1589 self.audit_path = path_auditor(base)
1587 1590 else:
1588 1591 self.audit_path = always
1589 1592 self.createmode = None
1590 1593
1591 1594 def __getattr__(self, name):
1592 1595 if name == '_can_symlink':
1593 1596 self._can_symlink = checklink(self.base)
1594 1597 return self._can_symlink
1595 1598 raise AttributeError(name)
1596 1599
1597 1600 def _fixfilemode(self, name):
1598 1601 if self.createmode is None:
1599 1602 return
1600 1603 os.chmod(name, self.createmode & 0666)
1601 1604
1602 1605 def __call__(self, path, mode="r", text=False, atomictemp=False):
1603 1606 self.audit_path(path)
1604 1607 f = os.path.join(self.base, path)
1605 1608
1606 1609 if not text and "b" not in mode:
1607 1610 mode += "b" # for that other OS
1608 1611
1609 1612 nlink = -1
1610 1613 if mode not in ("r", "rb"):
1611 1614 try:
1612 1615 nlink = nlinks(f)
1613 1616 except OSError:
1614 1617 nlink = 0
1615 1618 d = os.path.dirname(f)
1616 1619 if not os.path.isdir(d):
1617 1620 makedirs(d, self.createmode)
1618 1621 if atomictemp:
1619 1622 return atomictempfile(f, mode, self.createmode)
1620 1623 if nlink > 1:
1621 1624 rename(mktempcopy(f), f)
1622 1625 fp = posixfile(f, mode)
1623 1626 if nlink == 0:
1624 1627 self._fixfilemode(f)
1625 1628 return fp
1626 1629
1627 1630 def symlink(self, src, dst):
1628 1631 self.audit_path(dst)
1629 1632 linkname = os.path.join(self.base, dst)
1630 1633 try:
1631 1634 os.unlink(linkname)
1632 1635 except OSError:
1633 1636 pass
1634 1637
1635 1638 dirname = os.path.dirname(linkname)
1636 1639 if not os.path.exists(dirname):
1637 1640 makedirs(dirname, self.createmode)
1638 1641
1639 1642 if self._can_symlink:
1640 1643 try:
1641 1644 os.symlink(src, linkname)
1642 1645 except OSError, err:
1643 1646 raise OSError(err.errno, _('could not symlink to %r: %s') %
1644 1647 (src, err.strerror), linkname)
1645 1648 else:
1646 1649 f = self(dst, "w")
1647 1650 f.write(src)
1648 1651 f.close()
1649 1652 self._fixfilemode(dst)
1650 1653
1651 1654 class chunkbuffer(object):
1652 1655 """Allow arbitrary sized chunks of data to be efficiently read from an
1653 1656 iterator over chunks of arbitrary size."""
1654 1657
1655 1658 def __init__(self, in_iter):
1656 1659 """in_iter is the iterator that's iterating over the input chunks.
1657 1660 targetsize is how big a buffer to try to maintain."""
1658 1661 self.iter = iter(in_iter)
1659 1662 self.buf = ''
1660 1663 self.targetsize = 2**16
1661 1664
1662 1665 def read(self, l):
1663 1666 """Read L bytes of data from the iterator of chunks of data.
1664 1667 Returns less than L bytes if the iterator runs dry."""
1665 1668 if l > len(self.buf) and self.iter:
1666 1669 # Clamp to a multiple of self.targetsize
1667 1670 targetsize = max(l, self.targetsize)
1668 1671 collector = cStringIO.StringIO()
1669 1672 collector.write(self.buf)
1670 1673 collected = len(self.buf)
1671 1674 for chunk in self.iter:
1672 1675 collector.write(chunk)
1673 1676 collected += len(chunk)
1674 1677 if collected >= targetsize:
1675 1678 break
1676 1679 if collected < targetsize:
1677 1680 self.iter = False
1678 1681 self.buf = collector.getvalue()
1679 1682 if len(self.buf) == l:
1680 1683 s, self.buf = str(self.buf), ''
1681 1684 else:
1682 1685 s, self.buf = self.buf[:l], buffer(self.buf, l)
1683 1686 return s
1684 1687
1685 1688 def filechunkiter(f, size=65536, limit=None):
1686 1689 """Create a generator that produces the data in the file size
1687 1690 (default 65536) bytes at a time, up to optional limit (default is
1688 1691 to read all data). Chunks may be less than size bytes if the
1689 1692 chunk is the last chunk in the file, or the file is a socket or
1690 1693 some other type of file that sometimes reads less data than is
1691 1694 requested."""
1692 1695 assert size >= 0
1693 1696 assert limit is None or limit >= 0
1694 1697 while True:
1695 1698 if limit is None: nbytes = size
1696 1699 else: nbytes = min(limit, size)
1697 1700 s = nbytes and f.read(nbytes)
1698 1701 if not s: break
1699 1702 if limit: limit -= len(s)
1700 1703 yield s
1701 1704
1702 1705 def makedate():
1703 1706 lt = time.localtime()
1704 1707 if lt[8] == 1 and time.daylight:
1705 1708 tz = time.altzone
1706 1709 else:
1707 1710 tz = time.timezone
1708 1711 return time.mktime(lt), tz
1709 1712
1710 1713 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1711 1714 """represent a (unixtime, offset) tuple as a localized time.
1712 1715 unixtime is seconds since the epoch, and offset is the time zone's
1713 1716 number of seconds away from UTC. if timezone is false, do not
1714 1717 append time zone to string."""
1715 1718 t, tz = date or makedate()
1716 1719 if "%1" in format or "%2" in format:
1717 1720 sign = (tz > 0) and "-" or "+"
1718 1721 minutes = abs(tz) / 60
1719 1722 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1720 1723 format = format.replace("%2", "%02d" % (minutes % 60))
1721 1724 s = time.strftime(format, time.gmtime(float(t) - tz))
1722 1725 return s
1723 1726
1724 1727 def shortdate(date=None):
1725 1728 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1726 1729 return datestr(date, format='%Y-%m-%d')
1727 1730
1728 1731 def strdate(string, format, defaults=[]):
1729 1732 """parse a localized time string and return a (unixtime, offset) tuple.
1730 1733 if the string cannot be parsed, ValueError is raised."""
1731 1734 def timezone(string):
1732 1735 tz = string.split()[-1]
1733 1736 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1734 1737 sign = (tz[0] == "+") and 1 or -1
1735 1738 hours = int(tz[1:3])
1736 1739 minutes = int(tz[3:5])
1737 1740 return -sign * (hours * 60 + minutes) * 60
1738 1741 if tz == "GMT" or tz == "UTC":
1739 1742 return 0
1740 1743 return None
1741 1744
1742 1745 # NOTE: unixtime = localunixtime + offset
1743 1746 offset, date = timezone(string), string
1744 1747 if offset != None:
1745 1748 date = " ".join(string.split()[:-1])
1746 1749
1747 1750 # add missing elements from defaults
1748 1751 for part in defaults:
1749 1752 found = [True for p in part if ("%"+p) in format]
1750 1753 if not found:
1751 1754 date += "@" + defaults[part]
1752 1755 format += "@%" + part[0]
1753 1756
1754 1757 timetuple = time.strptime(date, format)
1755 1758 localunixtime = int(calendar.timegm(timetuple))
1756 1759 if offset is None:
1757 1760 # local timezone
1758 1761 unixtime = int(time.mktime(timetuple))
1759 1762 offset = unixtime - localunixtime
1760 1763 else:
1761 1764 unixtime = localunixtime + offset
1762 1765 return unixtime, offset
1763 1766
1764 1767 def parsedate(date, formats=None, defaults=None):
1765 1768 """parse a localized date/time string and return a (unixtime, offset) tuple.
1766 1769
1767 1770 The date may be a "unixtime offset" string or in one of the specified
1768 1771 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1769 1772 """
1770 1773 if not date:
1771 1774 return 0, 0
1772 1775 if isinstance(date, tuple) and len(date) == 2:
1773 1776 return date
1774 1777 if not formats:
1775 1778 formats = defaultdateformats
1776 1779 date = date.strip()
1777 1780 try:
1778 1781 when, offset = map(int, date.split(' '))
1779 1782 except ValueError:
1780 1783 # fill out defaults
1781 1784 if not defaults:
1782 1785 defaults = {}
1783 1786 now = makedate()
1784 1787 for part in "d mb yY HI M S".split():
1785 1788 if part not in defaults:
1786 1789 if part[0] in "HMS":
1787 1790 defaults[part] = "00"
1788 1791 else:
1789 1792 defaults[part] = datestr(now, "%" + part[0])
1790 1793
1791 1794 for format in formats:
1792 1795 try:
1793 1796 when, offset = strdate(date, format, defaults)
1794 1797 except (ValueError, OverflowError):
1795 1798 pass
1796 1799 else:
1797 1800 break
1798 1801 else:
1799 1802 raise Abort(_('invalid date: %r ') % date)
1800 1803 # validate explicit (probably user-specified) date and
1801 1804 # time zone offset. values must fit in signed 32 bits for
1802 1805 # current 32-bit linux runtimes. timezones go from UTC-12
1803 1806 # to UTC+14
1804 1807 if abs(when) > 0x7fffffff:
1805 1808 raise Abort(_('date exceeds 32 bits: %d') % when)
1806 1809 if offset < -50400 or offset > 43200:
1807 1810 raise Abort(_('impossible time zone offset: %d') % offset)
1808 1811 return when, offset
1809 1812
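A hedged round-trip sketch, assuming mercurial.util is importable and that '%Y-%m-%d %H:%M' is among defaultdateformats (as it is in this era of the code):

from mercurial import util   # assumption

when, offset = util.parsedate('2008-12-30 10:15 +0100')
print offset                                                 # -3600: seconds away from UTC
print util.datestr((when, offset), '%Y-%m-%d %H:%M %1%2')    # '2008-12-30 10:15 +0100'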
1810 1813 def matchdate(date):
1811 1814 """Return a function that matches a given date match specifier
1812 1815
1813 1816 Formats include:
1814 1817
1815 1818 '{date}' match a given date to the accuracy provided
1816 1819
1817 1820 '<{date}' on or before a given date
1818 1821
1819 1822 '>{date}' on or after a given date
1820 1823
1821 1824 """
1822 1825
1823 1826 def lower(date):
1824 1827 d = dict(mb="1", d="1")
1825 1828 return parsedate(date, extendeddateformats, d)[0]
1826 1829
1827 1830 def upper(date):
1828 1831 d = dict(mb="12", HI="23", M="59", S="59")
1829 1832 for days in "31 30 29".split():
1830 1833 try:
1831 1834 d["d"] = days
1832 1835 return parsedate(date, extendeddateformats, d)[0]
1833 1836 except:
1834 1837 pass
1835 1838 d["d"] = "28"
1836 1839 return parsedate(date, extendeddateformats, d)[0]
1837 1840
1838 1841 if date[0] == "<":
1839 1842 when = upper(date[1:])
1840 1843 return lambda x: x <= when
1841 1844 elif date[0] == ">":
1842 1845 when = lower(date[1:])
1843 1846 return lambda x: x >= when
1844 1847 elif date[0] == "-":
1845 1848 try:
1846 1849 days = int(date[1:])
1847 1850 except ValueError:
1848 1851 raise Abort(_("invalid day spec: %s") % date[1:])
1849 1852 when = makedate()[0] - days * 3600 * 24
1850 1853 return lambda x: x >= when
1851 1854 elif " to " in date:
1852 1855 a, b = date.split(" to ")
1853 1856 start, stop = lower(a), upper(b)
1854 1857 return lambda x: x >= start and x <= stop
1855 1858 else:
1856 1859 start, stop = lower(date), upper(date)
1857 1860 return lambda x: x >= start and x <= stop
1858 1861
1859 1862 def shortuser(user):
1860 1863 """Return a short representation of a user name or email address."""
1861 1864 f = user.find('@')
1862 1865 if f >= 0:
1863 1866 user = user[:f]
1864 1867 f = user.find('<')
1865 1868 if f >= 0:
1866 1869 user = user[f+1:]
1867 1870 f = user.find(' ')
1868 1871 if f >= 0:
1869 1872 user = user[:f]
1870 1873 f = user.find('.')
1871 1874 if f >= 0:
1872 1875 user = user[:f]
1873 1876 return user
1874 1877
1875 1878 def email(author):
1876 1879 '''get email of author.'''
1877 1880 r = author.find('>')
1878 1881 if r == -1: r = None
1879 1882 return author[author.find('<')+1:r]
1880 1883
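Two tiny, hedged examples of the helpers above (mercurial.util assumed importable):

from mercurial import util   # assumption

author = 'Jane Doe <jane.doe@example.com>'
print util.shortuser(author)   # 'jane'
print util.email(author)       # 'jane.doe@example.com'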
1881 1884 def ellipsis(text, maxlength=400):
1882 1885 """Trim string to at most maxlength (default: 400) characters."""
1883 1886 if len(text) <= maxlength:
1884 1887 return text
1885 1888 else:
1886 1889 return "%s..." % (text[:maxlength-3])
1887 1890
1888 1891 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1889 1892 '''yield every hg repository under path, recursively.'''
1890 1893 def errhandler(err):
1891 1894 if err.filename == path:
1892 1895 raise err
1893 1896 if followsym and hasattr(os.path, 'samestat'):
1894 1897 def _add_dir_if_not_there(dirlst, dirname):
1895 1898 match = False
1896 1899 samestat = os.path.samestat
1897 1900 dirstat = os.stat(dirname)
1898 1901 for lstdirstat in dirlst:
1899 1902 if samestat(dirstat, lstdirstat):
1900 1903 match = True
1901 1904 break
1902 1905 if not match:
1903 1906 dirlst.append(dirstat)
1904 1907 return not match
1905 1908 else:
1906 1909 followsym = False
1907 1910
1908 1911 if (seen_dirs is None) and followsym:
1909 1912 seen_dirs = []
1910 1913 _add_dir_if_not_there(seen_dirs, path)
1911 1914 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1912 1915 if '.hg' in dirs:
1913 1916 yield root # found a repository
1914 1917 qroot = os.path.join(root, '.hg', 'patches')
1915 1918 if os.path.isdir(os.path.join(qroot, '.hg')):
1916 1919 yield qroot # we have a patch queue repo here
1917 1920 if recurse:
1918 1921 # avoid recursing inside the .hg directory
1919 1922 dirs.remove('.hg')
1920 1923 else:
1921 1924 dirs[:] = [] # don't descend further
1922 1925 elif followsym:
1923 1926 newdirs = []
1924 1927 for d in dirs:
1925 1928 fname = os.path.join(root, d)
1926 1929 if _add_dir_if_not_there(seen_dirs, fname):
1927 1930 if os.path.islink(fname):
1928 1931 for hgname in walkrepos(fname, True, seen_dirs):
1929 1932 yield hgname
1930 1933 else:
1931 1934 newdirs.append(d)
1932 1935 dirs[:] = newdirs
1933 1936
1934 1937 _rcpath = None
1935 1938
1936 1939 def os_rcpath():
1937 1940 '''return default os-specific hgrc search path'''
1938 1941 path = system_rcpath()
1939 1942 path.extend(user_rcpath())
1940 1943 path = [os.path.normpath(f) for f in path]
1941 1944 return path
1942 1945
1943 1946 def rcpath():
1944 1947 '''return hgrc search path. if env var HGRCPATH is set, use it.
1945 1948 for each item in path, if directory, use files ending in .rc,
1946 1949 else use item.
1947 1950 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1948 1951 if no HGRCPATH, use default os-specific path.'''
1949 1952 global _rcpath
1950 1953 if _rcpath is None:
1951 1954 if 'HGRCPATH' in os.environ:
1952 1955 _rcpath = []
1953 1956 for p in os.environ['HGRCPATH'].split(os.pathsep):
1954 1957 if not p: continue
1955 1958 if os.path.isdir(p):
1956 1959 for f, kind in osutil.listdir(p):
1957 1960 if f.endswith('.rc'):
1958 1961 _rcpath.append(os.path.join(p, f))
1959 1962 else:
1960 1963 _rcpath.append(p)
1961 1964 else:
1962 1965 _rcpath = os_rcpath()
1963 1966 return _rcpath
1964 1967
1965 1968 def bytecount(nbytes):
1966 1969 '''return byte count formatted as readable string, with units'''
1967 1970
1968 1971 units = (
1969 1972 (100, 1<<30, _('%.0f GB')),
1970 1973 (10, 1<<30, _('%.1f GB')),
1971 1974 (1, 1<<30, _('%.2f GB')),
1972 1975 (100, 1<<20, _('%.0f MB')),
1973 1976 (10, 1<<20, _('%.1f MB')),
1974 1977 (1, 1<<20, _('%.2f MB')),
1975 1978 (100, 1<<10, _('%.0f KB')),
1976 1979 (10, 1<<10, _('%.1f KB')),
1977 1980 (1, 1<<10, _('%.2f KB')),
1978 1981 (1, 1, _('%.0f bytes')),
1979 1982 )
1980 1983
1981 1984 for multiplier, divisor, format in units:
1982 1985 if nbytes >= divisor * multiplier:
1983 1986 return format % (nbytes / float(divisor))
1984 1987 return units[-1][2] % nbytes
1985 1988
1986 1989 def drop_scheme(scheme, path):
1987 1990 sc = scheme + ':'
1988 1991 if path.startswith(sc):
1989 1992 path = path[len(sc):]
1990 1993 if path.startswith('//'):
1991 1994 path = path[2:]
1992 1995 return path
1993 1996
1994 1997 def uirepr(s):
1995 1998 # Avoid double backslash in Windows path repr()
1996 1999 return repr(s).replace('\\\\', '\\')
1997 2000
1998 2001 def termwidth():
1999 2002 if 'COLUMNS' in os.environ:
2000 2003 try:
2001 2004 return int(os.environ['COLUMNS'])
2002 2005 except ValueError:
2003 2006 pass
2004 2007 try:
2005 2008 import termios, array, fcntl
2006 2009 for dev in (sys.stdout, sys.stdin):
2007 2010 try:
2008 2011 fd = dev.fileno()
2009 2012 if not os.isatty(fd):
2010 2013 continue
2011 2014 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
2012 2015 return array.array('h', arri)[1]
2013 2016 except ValueError:
2014 2017 pass
2015 2018 except ImportError:
2016 2019 pass
2017 2020 return 80
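
As a quick sanity check of the bytecount unit table in the hunk above, a standalone sketch (assuming plain format strings in place of the translated _() messages, which are not reproduced here) behaves like this:

def bytecount(nbytes):
    # same thresholds as the table above; each triple is
    # (multiplier, divisor, format) and the first match wins
    units = (
        (100, 1 << 30, '%.0f GB'),
        (10,  1 << 30, '%.1f GB'),
        (1,   1 << 30, '%.2f GB'),
        (100, 1 << 20, '%.0f MB'),
        (10,  1 << 20, '%.1f MB'),
        (1,   1 << 20, '%.2f MB'),
        (100, 1 << 10, '%.0f KB'),
        (10,  1 << 10, '%.1f KB'),
        (1,   1 << 10, '%.2f KB'),
        (1,   1,       '%.0f bytes'),
    )
    for multiplier, divisor, fmt in units:
        if nbytes >= divisor * multiplier:
            return fmt % (nbytes / float(divisor))
    return units[-1][2] % nbytes

# e.g. bytecount(1536) -> '1.50 KB', bytecount(123456789) -> '118 MB'
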
@@ -1,400 +1,399 b''
1 1 # revision 0
2 2 adding f
3 3 # revision 1
4 4 # revision 2
5 5 created new head
6 6 # revision 3 - simple to merge
7 7 created new head
8 8
9 9
10 10 Tool selection
11 11
12 12 # default is internal merge:
13 13 [merge-tools]
14 14 # hg update -C 1
15 15 # hg merge -r 2
16 16 merging f
17 17 warning: conflicts during merge.
18 18 merging f failed!
19 19 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
20 20 use 'hg resolve' to retry unresolved file merges
21 21 # cat f
22 22 <<<<<<< local
23 23 revision 1
24 24 =======
25 25 revision 2
26 26 >>>>>>> other
27 27 space
28 28 # hg stat
29 29 M f
30 30 ? f.orig
31 31
32 32 # simplest hgrc using false for merge:
33 33 [merge-tools]
34 34 false.whatever=
35 35 # hg update -C 1
36 36 # hg merge -r 2
37 37 merging f
38 38 merging f failed!
39 39 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
40 40 use 'hg resolve' to retry unresolved file merges
41 41 # cat f
42 42 revision 1
43 43 space
44 44 # hg stat
45 45 M f
46 46 ? f.orig
47 47
48 48 # true with higher .priority gets precedence:
49 49 [merge-tools]
50 50 false.whatever=
51 51 true.priority=1
52 52 # hg update -C 1
53 53 # hg merge -r 2
54 54 merging f
55 55 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
56 56 (branch merge, don't forget to commit)
57 57 # cat f
58 58 revision 1
59 59 space
60 60 # hg stat
61 61 M f
62 62
63 63 # unless lowered on command line:
64 64 [merge-tools]
65 65 false.whatever=
66 66 true.priority=1
67 67 # hg update -C 1
68 68 # hg merge -r 2 --config merge-tools.true.priority=-7
69 69 merging f
70 70 merging f failed!
71 71 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
72 72 use 'hg resolve' to retry unresolved file merges
73 73 # cat f
74 74 revision 1
75 75 space
76 76 # hg stat
77 77 M f
78 78 ? f.orig
79 79
80 80 # or false set higher on command line:
81 81 [merge-tools]
82 82 false.whatever=
83 83 true.priority=1
84 84 # hg update -C 1
85 85 # hg merge -r 2 --config merge-tools.false.priority=117
86 86 merging f
87 87 merging f failed!
88 88 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
89 89 use 'hg resolve' to retry unresolved file merges
90 90 # cat f
91 91 revision 1
92 92 space
93 93 # hg stat
94 94 M f
95 95 ? f.orig
96 96
97 97 # or true.executable not found in PATH:
98 98 [merge-tools]
99 99 false.whatever=
100 100 true.priority=1
101 101 # hg update -C 1
102 102 # hg merge -r 2 --config merge-tools.true.executable=nonexistingmergetool
103 103 merging f
104 104 merging f failed!
105 105 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
106 106 use 'hg resolve' to retry unresolved file merges
107 107 # cat f
108 108 revision 1
109 109 space
110 110 # hg stat
111 111 M f
112 112 ? f.orig
113 113
114 114 # or true.executable with bogus path:
115 115 [merge-tools]
116 116 false.whatever=
117 117 true.priority=1
118 118 # hg update -C 1
119 119 # hg merge -r 2 --config merge-tools.true.executable=/bin/nonexistingmergetool
120 sh: /bin/nonexistingmergetool: No such file or directory
121 120 merging f
122 121 merging f failed!
123 122 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
124 123 use 'hg resolve' to retry unresolved file merges
125 124 # cat f
126 125 revision 1
127 126 space
128 127 # hg stat
129 128 M f
130 129 ? f.orig
131 130
132 131 # but true.executable set to cat found in PATH works:
133 132 [merge-tools]
134 133 false.whatever=
135 134 true.priority=1
136 135 true.executable=cat
137 136 # hg update -C 1
138 137 # hg merge -r 2
139 138 revision 1
140 139 space
141 140 revision 0
142 141 space
143 142 revision 2
144 143 space
145 144 merging f
146 145 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
147 146 (branch merge, don't forget to commit)
148 147 # cat f
149 148 revision 1
150 149 space
151 150 # hg stat
152 151 M f
153 152
154 153 # and true.executable set to cat with path works:
155 154 [merge-tools]
156 155 false.whatever=
157 156 true.priority=1
158 157 true.executable=cat
159 158 # hg update -C 1
160 159 # hg merge -r 2 --config merge-tools.true.executable=/bin/cat
161 160 revision 1
162 161 space
163 162 revision 0
164 163 space
165 164 revision 2
166 165 space
167 166 merging f
168 167 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
169 168 (branch merge, don't forget to commit)
170 169 # cat f
171 170 revision 1
172 171 space
173 172 # hg stat
174 173 M f
175 174
176 175
177 176 Tool selection and merge-patterns
178 177
179 178 # merge-patterns specifies new tool tac:
180 179 [merge-tools]
181 180 false.whatever=
182 181 true.priority=1
183 182 true.executable=cat
184 183 # hg update -C 1
185 184 # hg merge -r 2 --config merge-patterns.f=tac
186 185 space
187 186 revision 1
188 187 space
189 188 revision 0
190 189 space
191 190 revision 2
192 191 merging f
193 192 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
194 193 (branch merge, don't forget to commit)
195 194 # cat f
196 195 revision 1
197 196 space
198 197 # hg stat
199 198 M f
200 199
201 200 # merge-patterns specifies executable not found in PATH and gets warning:
202 201 [merge-tools]
203 202 false.whatever=
204 203 true.priority=1
205 204 true.executable=cat
206 205 # hg update -C 1
207 206 # hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistingmergetool
208 207 couldn't find merge tool true specified for f
209 208 merging f
210 209 merging f failed!
211 210 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
212 211 use 'hg resolve' to retry unresolved file merges
213 212 # cat f
214 213 revision 1
215 214 space
216 215 # hg stat
217 216 M f
218 217 ? f.orig
219 218
220 219 # merge-patterns specifies executable with bogus path and gets warning:
221 220 [merge-tools]
222 221 false.whatever=
223 222 true.priority=1
224 223 true.executable=cat
225 224 # hg update -C 1
226 225 # hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/bin/nonexistingmergetool
227 sh: /bin/nonexistingmergetool: No such file or directory
226 couldn't find merge tool true specified for f
228 227 merging f
229 228 merging f failed!
230 229 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
231 230 use 'hg resolve' to retry unresolved file merges
232 231 # cat f
233 232 revision 1
234 233 space
235 234 # hg stat
236 235 M f
237 236 ? f.orig
238 237
239 238
240 239 Premerge
241 240
242 241 # Default is silent simplemerge:
243 242 [merge-tools]
244 243 false.whatever=
245 244 true.priority=1
246 245 true.executable=cat
247 246 # hg update -C 1
248 247 # hg merge -r 3
249 248 merging f
250 249 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
251 250 (branch merge, don't forget to commit)
252 251 # cat f
253 252 revision 1
254 253 space
255 254 revision 3
256 255 # hg stat
257 256 M f
258 257
259 258 # .premerge=True is same:
260 259 [merge-tools]
261 260 false.whatever=
262 261 true.priority=1
263 262 true.executable=cat
264 263 # hg update -C 1
265 264 # hg merge -r 3 --config merge-tools.true.premerge=True
266 265 merging f
267 266 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
268 267 (branch merge, don't forget to commit)
269 268 # cat f
270 269 revision 1
271 270 space
272 271 revision 3
273 272 # hg stat
274 273 M f
275 274
276 275 # .premerge=False executes merge-tool:
277 276 [merge-tools]
278 277 false.whatever=
279 278 true.priority=1
280 279 true.executable=cat
281 280 # hg update -C 1
282 281 # hg merge -r 3 --config merge-tools.true.premerge=False
283 282 revision 1
284 283 space
285 284 revision 0
286 285 space
287 286 revision 0
288 287 space
289 288 revision 3
290 289 merging f
291 290 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
292 291 (branch merge, don't forget to commit)
293 292 # cat f
294 293 revision 1
295 294 space
296 295 # hg stat
297 296 M f
298 297
299 298
300 299 Tool execution
301 300
302 301 # set tools.args explicit to include $base $local $other $output:
303 302 [merge-tools]
304 303 false.whatever=
305 304 true.priority=1
306 305 true.executable=cat
307 306 # hg update -C 1
308 307 ==> ... <==
309 308 revision 0
310 309 space
311 310
312 311 ==> ... <==
313 312 revision 1
314 313 space
315 314
316 315 ==> ... <==
317 316 revision 2
318 317 space
319 318
320 319 ==> ... <==
321 320 revision 1
322 321 space
323 322 merging f
324 323 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
325 324 (branch merge, don't forget to commit)
326 325 # cat f
327 326 revision 1
328 327 space
329 328 # hg stat
330 329 M f
331 330
332 331 # Merge with "echo mergeresult > $local":
333 332 [merge-tools]
334 333 false.whatever=
335 334 true.priority=1
336 335 true.executable=cat
337 336 # hg update -C 1
338 337 merging f
339 338 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
340 339 (branch merge, don't forget to commit)
341 340 # cat f
342 341 mergeresult
343 342 # hg stat
344 343 M f
345 344
346 345 # - and $local is the file f:
347 346 [merge-tools]
348 347 false.whatever=
349 348 true.priority=1
350 349 true.executable=cat
351 350 # hg update -C 1
352 351 merging f
353 352 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
354 353 (branch merge, don't forget to commit)
355 354 # cat f
356 355 mergeresult
357 356 # hg stat
358 357 M f
359 358
360 359 # Merge with "echo mergeresult > $output" - the variable is a bit magic:
361 360 [merge-tools]
362 361 false.whatever=
363 362 true.priority=1
364 363 true.executable=cat
365 364 # hg update -C 1
366 365 merging f
367 366 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
368 367 (branch merge, don't forget to commit)
369 368 # cat f
370 369 mergeresult
371 370 # hg stat
372 371 M f
373 372
374 373
375 374 Merge post-processing
376 375
377 376 # cat is a bad merge-tool and doesn't change:
378 377 [merge-tools]
379 378 false.whatever=
380 379 true.priority=1
381 380 true.executable=cat
382 381 # hg update -C 1
383 382 # hg merge -r 2 --config merge-tools.true.checkchanged=1
384 383 revision 1
385 384 space
386 385 revision 0
387 386 space
388 387 revision 2
389 388 space
390 389 merging f
391 390 merging f failed!
392 391 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
393 392 use 'hg resolve' to retry unresolved file merges
394 393 # cat f
395 394 revision 1
396 395 space
397 396 # hg stat
398 397 M f
399 398 ? f.orig
400 399
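In the bogus-path cases above, the old output's "sh: /bin/nonexistingmergetool: No such file or directory" line is gone and the "couldn't find merge tool" warning fires instead, which is consistent with the executable lookup now refusing to return paths that do not exist. A minimal sketch of that kind of lookup, using a hypothetical standalone helper (not Mercurial's actual util code), could look like:

import os

def find_existing_exe(command, path=None):
    # Hypothetical helper: resolve `command` against PATH, but only
    # return a candidate that actually exists on disk.
    if os.sep in command:
        # explicit path given: accept it only if the file is there
        return command if os.path.isfile(command) else None
    for d in (path or os.environ.get('PATH', '')).split(os.pathsep):
        candidate = os.path.join(d, command)
        if os.path.isfile(candidate):
            return candidate
    return None

# With this behaviour a tool configured as /bin/nonexistingmergetool is
# rejected up front, so the caller can warn and fall back instead of
# letting the shell fail with "No such file or directory".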