Make util.find_exe alway returns existing file, fixing issue1459...
Mads Kiilerich
r7732:3793802e default
@@ -1,1398 +1,1398 @@
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import hex, nullid, short
10 from node import hex, nullid, short
11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
12 import cStringIO, email.Parser, os, re, errno, math
12 import cStringIO, email.Parser, os, re, errno, math
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
15 gitre = re.compile('diff --git a/(.*) b/(.*)')
15 gitre = re.compile('diff --git a/(.*) b/(.*)')
16
16
17 class PatchError(Exception):
17 class PatchError(Exception):
18 pass
18 pass
19
19
20 class NoHunks(PatchError):
20 class NoHunks(PatchError):
21 pass
21 pass
22
22
23 # helper functions
23 # helper functions
24
24
25 def copyfile(src, dst, basedir):
25 def copyfile(src, dst, basedir):
26 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
26 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
27 if os.path.exists(absdst):
27 if os.path.exists(absdst):
28 raise util.Abort(_("cannot create %s: destination already exists") %
28 raise util.Abort(_("cannot create %s: destination already exists") %
29 dst)
29 dst)
30
30
31 dstdir = os.path.dirname(absdst)
31 dstdir = os.path.dirname(absdst)
32 if dstdir and not os.path.isdir(dstdir):
32 if dstdir and not os.path.isdir(dstdir):
33 try:
33 try:
34 os.makedirs(dstdir)
34 os.makedirs(dstdir)
35 except (IOError, OSError):
36 raise util.Abort(
36 raise util.Abort(
37 _("cannot create %s: unable to create destination directory")
37 _("cannot create %s: unable to create destination directory")
38 % dst)
38 % dst)
39
39
40 util.copyfile(abssrc, absdst)
40 util.copyfile(abssrc, absdst)
41
41
42 # public functions
42 # public functions
43
43
44 def extract(ui, fileobj):
44 def extract(ui, fileobj):
45 '''extract patch from data read from fileobj.
45 '''extract patch from data read from fileobj.
46
46
47 patch can be a normal patch or contained in an email message.
47 patch can be a normal patch or contained in an email message.
48
48
49 return tuple (filename, message, user, date, branch, node, p1, p2).
50 Any item in the returned tuple can be None. If filename is None,
51 fileobj did not contain a patch. Caller must unlink filename when done.'''
52
52
53 # attempt to detect the start of a patch
53 # attempt to detect the start of a patch
54 # (this heuristic is borrowed from quilt)
54 # (this heuristic is borrowed from quilt)
55 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
55 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
56 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
56 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
57 '(---|\*\*\*)[ \t])', re.MULTILINE)
57 '(---|\*\*\*)[ \t])', re.MULTILINE)
58
58
59 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
59 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
60 tmpfp = os.fdopen(fd, 'w')
60 tmpfp = os.fdopen(fd, 'w')
61 try:
61 try:
62 msg = email.Parser.Parser().parse(fileobj)
62 msg = email.Parser.Parser().parse(fileobj)
63
63
64 subject = msg['Subject']
64 subject = msg['Subject']
65 user = msg['From']
65 user = msg['From']
66 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
66 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
67 # should try to parse msg['Date']
67 # should try to parse msg['Date']
68 date = None
68 date = None
69 nodeid = None
69 nodeid = None
70 branch = None
70 branch = None
71 parents = []
71 parents = []
72
72
73 if subject:
73 if subject:
74 if subject.startswith('[PATCH'):
74 if subject.startswith('[PATCH'):
75 pend = subject.find(']')
75 pend = subject.find(']')
76 if pend >= 0:
76 if pend >= 0:
77 subject = subject[pend+1:].lstrip()
77 subject = subject[pend+1:].lstrip()
78 subject = subject.replace('\n\t', ' ')
78 subject = subject.replace('\n\t', ' ')
79 ui.debug('Subject: %s\n' % subject)
79 ui.debug('Subject: %s\n' % subject)
80 if user:
80 if user:
81 ui.debug('From: %s\n' % user)
81 ui.debug('From: %s\n' % user)
82 diffs_seen = 0
82 diffs_seen = 0
83 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
83 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
84 message = ''
84 message = ''
85 for part in msg.walk():
85 for part in msg.walk():
86 content_type = part.get_content_type()
86 content_type = part.get_content_type()
87 ui.debug('Content-Type: %s\n' % content_type)
87 ui.debug('Content-Type: %s\n' % content_type)
88 if content_type not in ok_types:
88 if content_type not in ok_types:
89 continue
89 continue
90 payload = part.get_payload(decode=True)
90 payload = part.get_payload(decode=True)
91 m = diffre.search(payload)
91 m = diffre.search(payload)
92 if m:
92 if m:
93 hgpatch = False
93 hgpatch = False
94 ignoretext = False
94 ignoretext = False
95
95
96 ui.debug(_('found patch at byte %d\n') % m.start(0))
96 ui.debug(_('found patch at byte %d\n') % m.start(0))
97 diffs_seen += 1
97 diffs_seen += 1
98 cfp = cStringIO.StringIO()
98 cfp = cStringIO.StringIO()
99 for line in payload[:m.start(0)].splitlines():
99 for line in payload[:m.start(0)].splitlines():
100 if line.startswith('# HG changeset patch'):
100 if line.startswith('# HG changeset patch'):
101 ui.debug(_('patch generated by hg export\n'))
101 ui.debug(_('patch generated by hg export\n'))
102 hgpatch = True
102 hgpatch = True
103 # drop earlier commit message content
103 # drop earlier commit message content
104 cfp.seek(0)
104 cfp.seek(0)
105 cfp.truncate()
105 cfp.truncate()
106 subject = None
106 subject = None
107 elif hgpatch:
107 elif hgpatch:
108 if line.startswith('# User '):
108 if line.startswith('# User '):
109 user = line[7:]
109 user = line[7:]
110 ui.debug('From: %s\n' % user)
110 ui.debug('From: %s\n' % user)
111 elif line.startswith("# Date "):
111 elif line.startswith("# Date "):
112 date = line[7:]
112 date = line[7:]
113 elif line.startswith("# Branch "):
113 elif line.startswith("# Branch "):
114 branch = line[9:]
114 branch = line[9:]
115 elif line.startswith("# Node ID "):
115 elif line.startswith("# Node ID "):
116 nodeid = line[10:]
116 nodeid = line[10:]
117 elif line.startswith("# Parent "):
117 elif line.startswith("# Parent "):
118 parents.append(line[10:])
118 parents.append(line[10:])
119 elif line == '---' and gitsendmail:
119 elif line == '---' and gitsendmail:
120 ignoretext = True
120 ignoretext = True
121 if not line.startswith('# ') and not ignoretext:
121 if not line.startswith('# ') and not ignoretext:
122 cfp.write(line)
122 cfp.write(line)
123 cfp.write('\n')
123 cfp.write('\n')
124 message = cfp.getvalue()
124 message = cfp.getvalue()
125 if tmpfp:
125 if tmpfp:
126 tmpfp.write(payload)
126 tmpfp.write(payload)
127 if not payload.endswith('\n'):
127 if not payload.endswith('\n'):
128 tmpfp.write('\n')
128 tmpfp.write('\n')
129 elif not diffs_seen and message and content_type == 'text/plain':
129 elif not diffs_seen and message and content_type == 'text/plain':
130 message += '\n' + payload
130 message += '\n' + payload
131 except:
131 except:
132 tmpfp.close()
132 tmpfp.close()
133 os.unlink(tmpname)
133 os.unlink(tmpname)
134 raise
134 raise
135
135
136 if subject and not message.startswith(subject):
136 if subject and not message.startswith(subject):
137 message = '%s\n%s' % (subject, message)
137 message = '%s\n%s' % (subject, message)
138 tmpfp.close()
138 tmpfp.close()
139 if not diffs_seen:
139 if not diffs_seen:
140 os.unlink(tmpname)
140 os.unlink(tmpname)
141 return None, message, user, date, branch, None, None, None
141 return None, message, user, date, branch, None, None, None
142 p1 = parents and parents.pop(0) or None
142 p1 = parents and parents.pop(0) or None
143 p2 = parents and parents.pop(0) or None
143 p2 = parents and parents.pop(0) or None
144 return tmpname, message, user, date, branch, nodeid, p1, p2
144 return tmpname, message, user, date, branch, nodeid, p1, p2
145
145
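# Usage sketch (illustrative only, not part of patch.py). Assumes 'ui' is a
# ui object and 'fp' is an open file containing a patch or an email message
# carrying one:
#
#   tmpname, message, user, date, branch, nodeid, p1, p2 = extract(ui, fp)
#   if tmpname is None:
#       pass            # fp did not contain a patch
#   else:
#       try:
#           pass        # feed tmpname to the patching machinery
#       finally:
#           os.unlink(tmpname)   # the caller must remove the temporary file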
146 GP_PATCH = 1 << 0 # we have to run patch
146 GP_PATCH = 1 << 0 # we have to run patch
147 GP_FILTER = 1 << 1 # there's some copy/rename operation
147 GP_FILTER = 1 << 1 # there's some copy/rename operation
148 GP_BINARY = 1 << 2 # there's a binary patch
148 GP_BINARY = 1 << 2 # there's a binary patch
149
149
150 class patchmeta:
150 class patchmeta:
151 """Patched file metadata
151 """Patched file metadata
152
152
153 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
153 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
154 or COPY. 'path' is patched file path. 'oldpath' is set to the
154 or COPY. 'path' is patched file path. 'oldpath' is set to the
155 origin file when 'op' is either COPY or RENAME, None otherwise. If
155 origin file when 'op' is either COPY or RENAME, None otherwise. If
156 file mode is changed, 'mode' is a tuple (islink, isexec) where
156 file mode is changed, 'mode' is a tuple (islink, isexec) where
157 'islink' is True if the file is a symlink and 'isexec' is True if
157 'islink' is True if the file is a symlink and 'isexec' is True if
158 the file is executable. Otherwise, 'mode' is None.
158 the file is executable. Otherwise, 'mode' is None.
159 """
159 """
160 def __init__(self, path):
160 def __init__(self, path):
161 self.path = path
161 self.path = path
162 self.oldpath = None
162 self.oldpath = None
163 self.mode = None
163 self.mode = None
164 self.op = 'MODIFY'
164 self.op = 'MODIFY'
165 self.lineno = 0
165 self.lineno = 0
166 self.binary = False
166 self.binary = False
167
167
168 def setmode(self, mode):
168 def setmode(self, mode):
169 islink = mode & 020000
169 islink = mode & 020000
170 isexec = mode & 0100
170 isexec = mode & 0100
171 self.mode = (islink, isexec)
171 self.mode = (islink, isexec)
172
172
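# Illustrative sketch of how patchmeta records mode changes (not part of the
# module); setmode() keeps only the symlink and exec bits of a git file mode:
#
#   gp = patchmeta('bin/run.sh')   # hypothetical path
#   gp.setmode(0100755)            # regular, executable file
#   # -> gp.mode == (0, 0100): not a link, exec bit set
#   gp.setmode(0120000)            # symlink
#   # -> gp.mode == (020000, 0): link bit set, not executable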
173 def readgitpatch(lr):
173 def readgitpatch(lr):
174 """extract git-style metadata about patches from <patchname>"""
174 """extract git-style metadata about patches from <patchname>"""
175
175
176 # Filter patch for git information
176 # Filter patch for git information
177 gp = None
177 gp = None
178 gitpatches = []
178 gitpatches = []
179 # Can have a git patch with only metadata, causing patch to complain
179 # Can have a git patch with only metadata, causing patch to complain
180 dopatch = 0
180 dopatch = 0
181
181
182 lineno = 0
182 lineno = 0
183 for line in lr:
183 for line in lr:
184 lineno += 1
184 lineno += 1
185 if line.startswith('diff --git'):
185 if line.startswith('diff --git'):
186 m = gitre.match(line)
186 m = gitre.match(line)
187 if m:
187 if m:
188 if gp:
188 if gp:
189 gitpatches.append(gp)
189 gitpatches.append(gp)
190 src, dst = m.group(1, 2)
190 src, dst = m.group(1, 2)
191 gp = patchmeta(dst)
191 gp = patchmeta(dst)
192 gp.lineno = lineno
192 gp.lineno = lineno
193 elif gp:
193 elif gp:
194 if line.startswith('--- '):
194 if line.startswith('--- '):
195 if gp.op in ('COPY', 'RENAME'):
195 if gp.op in ('COPY', 'RENAME'):
196 dopatch |= GP_FILTER
196 dopatch |= GP_FILTER
197 gitpatches.append(gp)
197 gitpatches.append(gp)
198 gp = None
198 gp = None
199 dopatch |= GP_PATCH
199 dopatch |= GP_PATCH
200 continue
200 continue
201 if line.startswith('rename from '):
201 if line.startswith('rename from '):
202 gp.op = 'RENAME'
202 gp.op = 'RENAME'
203 gp.oldpath = line[12:].rstrip()
203 gp.oldpath = line[12:].rstrip()
204 elif line.startswith('rename to '):
204 elif line.startswith('rename to '):
205 gp.path = line[10:].rstrip()
205 gp.path = line[10:].rstrip()
206 elif line.startswith('copy from '):
206 elif line.startswith('copy from '):
207 gp.op = 'COPY'
207 gp.op = 'COPY'
208 gp.oldpath = line[10:].rstrip()
208 gp.oldpath = line[10:].rstrip()
209 elif line.startswith('copy to '):
209 elif line.startswith('copy to '):
210 gp.path = line[8:].rstrip()
210 gp.path = line[8:].rstrip()
211 elif line.startswith('deleted file'):
211 elif line.startswith('deleted file'):
212 gp.op = 'DELETE'
212 gp.op = 'DELETE'
213 # is the deleted file a symlink?
213 # is the deleted file a symlink?
214 gp.setmode(int(line.rstrip()[-6:], 8))
214 gp.setmode(int(line.rstrip()[-6:], 8))
215 elif line.startswith('new file mode '):
215 elif line.startswith('new file mode '):
216 gp.op = 'ADD'
216 gp.op = 'ADD'
217 gp.setmode(int(line.rstrip()[-6:], 8))
217 gp.setmode(int(line.rstrip()[-6:], 8))
218 elif line.startswith('new mode '):
218 elif line.startswith('new mode '):
219 gp.setmode(int(line.rstrip()[-6:], 8))
219 gp.setmode(int(line.rstrip()[-6:], 8))
220 elif line.startswith('GIT binary patch'):
220 elif line.startswith('GIT binary patch'):
221 dopatch |= GP_BINARY
221 dopatch |= GP_BINARY
222 gp.binary = True
222 gp.binary = True
223 if gp:
223 if gp:
224 gitpatches.append(gp)
224 gitpatches.append(gp)
225
225
226 if not gitpatches:
226 if not gitpatches:
227 dopatch = GP_PATCH
227 dopatch = GP_PATCH
228
228
229 return (dopatch, gitpatches)
229 return (dopatch, gitpatches)
230
230
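# Illustrative sketch (not part of the module): fed the metadata lines of a
# git-style rename, readgitpatch() returns the dopatch flags plus one
# patchmeta per touched file. It only iterates its argument, so a plain list
# of lines works here:
#
#   lines = ['diff --git a/old.txt b/new.txt\n',
#            'rename from old.txt\n',
#            'rename to new.txt\n',
#            '--- a/old.txt\n',
#            '+++ b/new.txt\n']
#   dopatch, gitpatches = readgitpatch(lines)
#   # dopatch == GP_PATCH | GP_FILTER, gitpatches[0].op == 'RENAME',
#   # gitpatches[0].oldpath == 'old.txt', gitpatches[0].path == 'new.txt'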
231 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
231 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
232 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
232 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
233 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
233 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
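# For example (illustrative): unidesc.match('@@ -1,5 +1,6 @@').groups() is
# ('1', ',5', '5', '1', ',6', '6'), while a one-line hunk header such as
# '@@ -1 +1 @@' leaves the length groups as None, which read_unified_hunk()
# below then defaults to 1.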
234
234
235 class patchfile:
235 class patchfile:
236 def __init__(self, ui, fname, opener, missing=False):
236 def __init__(self, ui, fname, opener, missing=False):
237 self.fname = fname
237 self.fname = fname
238 self.opener = opener
238 self.opener = opener
239 self.ui = ui
239 self.ui = ui
240 self.lines = []
240 self.lines = []
241 self.exists = False
241 self.exists = False
242 self.missing = missing
242 self.missing = missing
243 if not missing:
243 if not missing:
244 try:
244 try:
245 self.lines = self.readlines(fname)
245 self.lines = self.readlines(fname)
246 self.exists = True
246 self.exists = True
247 except IOError:
247 except IOError:
248 pass
248 pass
249 else:
249 else:
250 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
250 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
251
251
252 self.hash = {}
252 self.hash = {}
253 self.dirty = 0
253 self.dirty = 0
254 self.offset = 0
254 self.offset = 0
255 self.rej = []
255 self.rej = []
256 self.fileprinted = False
256 self.fileprinted = False
257 self.printfile(False)
257 self.printfile(False)
258 self.hunks = 0
258 self.hunks = 0
259
259
260 def readlines(self, fname):
260 def readlines(self, fname):
261 fp = self.opener(fname, 'r')
261 fp = self.opener(fname, 'r')
262 try:
262 try:
263 return fp.readlines()
263 return fp.readlines()
264 finally:
264 finally:
265 fp.close()
265 fp.close()
266
266
267 def writelines(self, fname, lines):
267 def writelines(self, fname, lines):
268 fp = self.opener(fname, 'w')
268 fp = self.opener(fname, 'w')
269 try:
269 try:
270 fp.writelines(lines)
270 fp.writelines(lines)
271 finally:
271 finally:
272 fp.close()
272 fp.close()
273
273
274 def unlink(self, fname):
274 def unlink(self, fname):
275 os.unlink(fname)
275 os.unlink(fname)
276
276
277 def printfile(self, warn):
277 def printfile(self, warn):
278 if self.fileprinted:
278 if self.fileprinted:
279 return
279 return
280 if warn or self.ui.verbose:
280 if warn or self.ui.verbose:
281 self.fileprinted = True
281 self.fileprinted = True
282 s = _("patching file %s\n") % self.fname
282 s = _("patching file %s\n") % self.fname
283 if warn:
283 if warn:
284 self.ui.warn(s)
284 self.ui.warn(s)
285 else:
285 else:
286 self.ui.note(s)
286 self.ui.note(s)
287
287
288
288
289 def findlines(self, l, linenum):
289 def findlines(self, l, linenum):
290 # looks through the hash and finds candidate lines. The
290 # looks through the hash and finds candidate lines. The
291 # result is a list of line numbers sorted based on distance
291 # result is a list of line numbers sorted based on distance
292 # from linenum
292 # from linenum
293 def sorter(a, b):
293 def sorter(a, b):
294 vala = abs(a - linenum)
294 vala = abs(a - linenum)
295 valb = abs(b - linenum)
295 valb = abs(b - linenum)
296 return cmp(vala, valb)
296 return cmp(vala, valb)
297
297
298 try:
298 try:
299 cand = self.hash[l]
299 cand = self.hash[l]
300 except:
300 except:
301 return []
301 return []
302
302
303 if len(cand) > 1:
303 if len(cand) > 1:
304 # resort our list of potentials forward then back.
304 # resort our list of potentials forward then back.
305 cand.sort(sorter)
305 cand.sort(sorter)
306 return cand
306 return cand
307
307
308 def hashlines(self):
308 def hashlines(self):
309 self.hash = {}
309 self.hash = {}
310 for x in xrange(len(self.lines)):
310 for x in xrange(len(self.lines)):
311 s = self.lines[x]
311 s = self.lines[x]
312 self.hash.setdefault(s, []).append(x)
312 self.hash.setdefault(s, []).append(x)
313
313
314 def write_rej(self):
314 def write_rej(self):
315 # our rejects are a little different from patch(1). This always
315 # our rejects are a little different from patch(1). This always
316 # creates rejects in the same form as the original patch. A file
316 # creates rejects in the same form as the original patch. A file
317 # header is inserted so that you can run the reject through patch again
317 # header is inserted so that you can run the reject through patch again
318 # without having to type the filename.
318 # without having to type the filename.
319
319
320 if not self.rej:
320 if not self.rej:
321 return
321 return
322
322
323 fname = self.fname + ".rej"
323 fname = self.fname + ".rej"
324 self.ui.warn(
324 self.ui.warn(
325 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
325 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
326 (len(self.rej), self.hunks, fname))
326 (len(self.rej), self.hunks, fname))
327
327
328 def rejlines():
328 def rejlines():
329 base = os.path.basename(self.fname)
329 base = os.path.basename(self.fname)
330 yield "--- %s\n+++ %s\n" % (base, base)
330 yield "--- %s\n+++ %s\n" % (base, base)
331 for x in self.rej:
331 for x in self.rej:
332 for l in x.hunk:
332 for l in x.hunk:
333 yield l
333 yield l
334 if l[-1] != '\n':
334 if l[-1] != '\n':
335 yield "\n\ No newline at end of file\n"
335 yield "\n\ No newline at end of file\n"
336
336
337 self.writelines(fname, rejlines())
337 self.writelines(fname, rejlines())
338
338
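# Shape of the generated .rej file (illustrative sketch): a minimal
# unified-diff header naming the file, followed by every failed hunk
# verbatim, so the rejects can be fed straight back to patch(1):
#
#   --- foo.c
#   +++ foo.c
#   @@ -10,3 +10,4 @@
#    context line
#   +added line that did not apply
#    context line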
339 def write(self, dest=None):
339 def write(self, dest=None):
340 if not self.dirty:
340 if not self.dirty:
341 return
341 return
342 if not dest:
342 if not dest:
343 dest = self.fname
343 dest = self.fname
344 self.writelines(dest, self.lines)
344 self.writelines(dest, self.lines)
345
345
346 def close(self):
346 def close(self):
347 self.write()
347 self.write()
348 self.write_rej()
348 self.write_rej()
349
349
350 def apply(self, h, reverse):
350 def apply(self, h, reverse):
351 if not h.complete():
351 if not h.complete():
352 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
352 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
353 (h.number, h.desc, len(h.a), h.lena, len(h.b),
353 (h.number, h.desc, len(h.a), h.lena, len(h.b),
354 h.lenb))
354 h.lenb))
355
355
356 self.hunks += 1
356 self.hunks += 1
357 if reverse:
357 if reverse:
358 h.reverse()
358 h.reverse()
359
359
360 if self.missing:
360 if self.missing:
361 self.rej.append(h)
361 self.rej.append(h)
362 return -1
362 return -1
363
363
364 if self.exists and h.createfile():
364 if self.exists and h.createfile():
365 self.ui.warn(_("file %s already exists\n") % self.fname)
365 self.ui.warn(_("file %s already exists\n") % self.fname)
366 self.rej.append(h)
366 self.rej.append(h)
367 return -1
367 return -1
368
368
369 if isinstance(h, githunk):
369 if isinstance(h, githunk):
370 if h.rmfile():
370 if h.rmfile():
371 self.unlink(self.fname)
371 self.unlink(self.fname)
372 else:
372 else:
373 self.lines[:] = h.new()
373 self.lines[:] = h.new()
374 self.offset += len(h.new())
374 self.offset += len(h.new())
375 self.dirty = 1
375 self.dirty = 1
376 return 0
376 return 0
377
377
378 # fast case first, no offsets, no fuzz
378 # fast case first, no offsets, no fuzz
379 old = h.old()
379 old = h.old()
380 # patch starts counting at 1 unless we are adding the file
380 # patch starts counting at 1 unless we are adding the file
381 if h.starta == 0:
381 if h.starta == 0:
382 start = 0
382 start = 0
383 else:
383 else:
384 start = h.starta + self.offset - 1
384 start = h.starta + self.offset - 1
385 orig_start = start
385 orig_start = start
386 if diffhelpers.testhunk(old, self.lines, start) == 0:
386 if diffhelpers.testhunk(old, self.lines, start) == 0:
387 if h.rmfile():
387 if h.rmfile():
388 self.unlink(self.fname)
388 self.unlink(self.fname)
389 else:
389 else:
390 self.lines[start : start + h.lena] = h.new()
390 self.lines[start : start + h.lena] = h.new()
391 self.offset += h.lenb - h.lena
391 self.offset += h.lenb - h.lena
392 self.dirty = 1
392 self.dirty = 1
393 return 0
393 return 0
394
394
395 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
395 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
396 self.hashlines()
396 self.hashlines()
397 if h.hunk[-1][0] != ' ':
397 if h.hunk[-1][0] != ' ':
398 # if the hunk tried to put something at the bottom of the file
398 # if the hunk tried to put something at the bottom of the file
399 # override the start line and use eof here
399 # override the start line and use eof here
400 search_start = len(self.lines)
400 search_start = len(self.lines)
401 else:
401 else:
402 search_start = orig_start
402 search_start = orig_start
403
403
404 for fuzzlen in xrange(3):
404 for fuzzlen in xrange(3):
405 for toponly in [ True, False ]:
405 for toponly in [ True, False ]:
406 old = h.old(fuzzlen, toponly)
406 old = h.old(fuzzlen, toponly)
407
407
408 cand = self.findlines(old[0][1:], search_start)
408 cand = self.findlines(old[0][1:], search_start)
409 for l in cand:
409 for l in cand:
410 if diffhelpers.testhunk(old, self.lines, l) == 0:
410 if diffhelpers.testhunk(old, self.lines, l) == 0:
411 newlines = h.new(fuzzlen, toponly)
411 newlines = h.new(fuzzlen, toponly)
412 self.lines[l : l + len(old)] = newlines
412 self.lines[l : l + len(old)] = newlines
413 self.offset += len(newlines) - len(old)
413 self.offset += len(newlines) - len(old)
414 self.dirty = 1
414 self.dirty = 1
415 if fuzzlen:
415 if fuzzlen:
416 fuzzstr = "with fuzz %d " % fuzzlen
416 fuzzstr = "with fuzz %d " % fuzzlen
417 f = self.ui.warn
417 f = self.ui.warn
418 self.printfile(True)
418 self.printfile(True)
419 else:
419 else:
420 fuzzstr = ""
420 fuzzstr = ""
421 f = self.ui.note
421 f = self.ui.note
422 offset = l - orig_start - fuzzlen
422 offset = l - orig_start - fuzzlen
423 if offset == 1:
423 if offset == 1:
424 linestr = "line"
424 linestr = "line"
425 else:
425 else:
426 linestr = "lines"
426 linestr = "lines"
427 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
427 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
428 (h.number, l+1, fuzzstr, offset, linestr))
428 (h.number, l+1, fuzzstr, offset, linestr))
429 return fuzzlen
429 return fuzzlen
430 self.printfile(True)
430 self.printfile(True)
431 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
431 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
432 self.rej.append(h)
432 self.rej.append(h)
433 return -1
433 return -1
434
434
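# Matching strategy used by apply() above (summary, no extra behaviour):
# 1. try the hunk exactly at starta adjusted by the running offset;
# 2. otherwise hash every line of the target and, for fuzzlen 0..2 and for
#    toponly in (True, False), trim that much context via old()/new() and
#    probe the candidate positions returned by findlines(), nearest first;
# 3. the first position where diffhelpers.testhunk() succeeds is patched in,
#    reporting e.g. "Hunk #2 succeeded at 40 with fuzz 2 (offset 7 lines).",
#    and the fuzz amount is returned; if nothing matches, the hunk is
#    rejected and -1 is returned.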
435 class hunk:
435 class hunk:
436 def __init__(self, desc, num, lr, context, create=False, remove=False):
436 def __init__(self, desc, num, lr, context, create=False, remove=False):
437 self.number = num
437 self.number = num
438 self.desc = desc
438 self.desc = desc
439 self.hunk = [ desc ]
439 self.hunk = [ desc ]
440 self.a = []
440 self.a = []
441 self.b = []
441 self.b = []
442 if context:
442 if context:
443 self.read_context_hunk(lr)
443 self.read_context_hunk(lr)
444 else:
444 else:
445 self.read_unified_hunk(lr)
445 self.read_unified_hunk(lr)
446 self.create = create
446 self.create = create
447 self.remove = remove and not create
447 self.remove = remove and not create
448
448
449 def read_unified_hunk(self, lr):
449 def read_unified_hunk(self, lr):
450 m = unidesc.match(self.desc)
450 m = unidesc.match(self.desc)
451 if not m:
451 if not m:
452 raise PatchError(_("bad hunk #%d") % self.number)
452 raise PatchError(_("bad hunk #%d") % self.number)
453 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
453 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
454 if self.lena == None:
454 if self.lena == None:
455 self.lena = 1
455 self.lena = 1
456 else:
456 else:
457 self.lena = int(self.lena)
457 self.lena = int(self.lena)
458 if self.lenb == None:
458 if self.lenb == None:
459 self.lenb = 1
459 self.lenb = 1
460 else:
460 else:
461 self.lenb = int(self.lenb)
461 self.lenb = int(self.lenb)
462 self.starta = int(self.starta)
462 self.starta = int(self.starta)
463 self.startb = int(self.startb)
463 self.startb = int(self.startb)
464 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
464 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
465 # if we hit eof before finishing out the hunk, the last line will
465 # if we hit eof before finishing out the hunk, the last line will
466 # be zero length. Lets try to fix it up.
466 # be zero length. Lets try to fix it up.
467 while len(self.hunk[-1]) == 0:
467 while len(self.hunk[-1]) == 0:
468 del self.hunk[-1]
468 del self.hunk[-1]
469 del self.a[-1]
469 del self.a[-1]
470 del self.b[-1]
470 del self.b[-1]
471 self.lena -= 1
471 self.lena -= 1
472 self.lenb -= 1
472 self.lenb -= 1
473
473
474 def read_context_hunk(self, lr):
474 def read_context_hunk(self, lr):
475 self.desc = lr.readline()
475 self.desc = lr.readline()
476 m = contextdesc.match(self.desc)
476 m = contextdesc.match(self.desc)
477 if not m:
477 if not m:
478 raise PatchError(_("bad hunk #%d") % self.number)
478 raise PatchError(_("bad hunk #%d") % self.number)
479 foo, self.starta, foo2, aend, foo3 = m.groups()
479 foo, self.starta, foo2, aend, foo3 = m.groups()
480 self.starta = int(self.starta)
480 self.starta = int(self.starta)
481 if aend == None:
481 if aend == None:
482 aend = self.starta
482 aend = self.starta
483 self.lena = int(aend) - self.starta
483 self.lena = int(aend) - self.starta
484 if self.starta:
484 if self.starta:
485 self.lena += 1
485 self.lena += 1
486 for x in xrange(self.lena):
486 for x in xrange(self.lena):
487 l = lr.readline()
487 l = lr.readline()
488 if l.startswith('---'):
488 if l.startswith('---'):
489 lr.push(l)
489 lr.push(l)
490 break
490 break
491 s = l[2:]
491 s = l[2:]
492 if l.startswith('- ') or l.startswith('! '):
492 if l.startswith('- ') or l.startswith('! '):
493 u = '-' + s
493 u = '-' + s
494 elif l.startswith(' '):
494 elif l.startswith(' '):
495 u = ' ' + s
495 u = ' ' + s
496 else:
496 else:
497 raise PatchError(_("bad hunk #%d old text line %d") %
497 raise PatchError(_("bad hunk #%d old text line %d") %
498 (self.number, x))
498 (self.number, x))
499 self.a.append(u)
499 self.a.append(u)
500 self.hunk.append(u)
500 self.hunk.append(u)
501
501
502 l = lr.readline()
502 l = lr.readline()
503 if l.startswith('\ '):
503 if l.startswith('\ '):
504 s = self.a[-1][:-1]
504 s = self.a[-1][:-1]
505 self.a[-1] = s
505 self.a[-1] = s
506 self.hunk[-1] = s
506 self.hunk[-1] = s
507 l = lr.readline()
507 l = lr.readline()
508 m = contextdesc.match(l)
508 m = contextdesc.match(l)
509 if not m:
509 if not m:
510 raise PatchError(_("bad hunk #%d") % self.number)
510 raise PatchError(_("bad hunk #%d") % self.number)
511 foo, self.startb, foo2, bend, foo3 = m.groups()
511 foo, self.startb, foo2, bend, foo3 = m.groups()
512 self.startb = int(self.startb)
512 self.startb = int(self.startb)
513 if bend == None:
513 if bend == None:
514 bend = self.startb
514 bend = self.startb
515 self.lenb = int(bend) - self.startb
515 self.lenb = int(bend) - self.startb
516 if self.startb:
516 if self.startb:
517 self.lenb += 1
517 self.lenb += 1
518 hunki = 1
518 hunki = 1
519 for x in xrange(self.lenb):
519 for x in xrange(self.lenb):
520 l = lr.readline()
520 l = lr.readline()
521 if l.startswith('\ '):
521 if l.startswith('\ '):
522 s = self.b[-1][:-1]
522 s = self.b[-1][:-1]
523 self.b[-1] = s
523 self.b[-1] = s
524 self.hunk[hunki-1] = s
524 self.hunk[hunki-1] = s
525 continue
525 continue
526 if not l:
526 if not l:
527 lr.push(l)
527 lr.push(l)
528 break
528 break
529 s = l[2:]
529 s = l[2:]
530 if l.startswith('+ ') or l.startswith('! '):
530 if l.startswith('+ ') or l.startswith('! '):
531 u = '+' + s
531 u = '+' + s
532 elif l.startswith(' '):
532 elif l.startswith(' '):
533 u = ' ' + s
533 u = ' ' + s
534 elif len(self.b) == 0:
534 elif len(self.b) == 0:
535 # this can happen when the hunk does not add any lines
535 # this can happen when the hunk does not add any lines
536 lr.push(l)
536 lr.push(l)
537 break
537 break
538 else:
538 else:
539 raise PatchError(_("bad hunk #%d old text line %d") %
539 raise PatchError(_("bad hunk #%d old text line %d") %
540 (self.number, x))
540 (self.number, x))
541 self.b.append(s)
541 self.b.append(s)
542 while True:
542 while True:
543 if hunki >= len(self.hunk):
543 if hunki >= len(self.hunk):
544 h = ""
544 h = ""
545 else:
545 else:
546 h = self.hunk[hunki]
546 h = self.hunk[hunki]
547 hunki += 1
547 hunki += 1
548 if h == u:
548 if h == u:
549 break
549 break
550 elif h.startswith('-'):
550 elif h.startswith('-'):
551 continue
551 continue
552 else:
552 else:
553 self.hunk.insert(hunki-1, u)
553 self.hunk.insert(hunki-1, u)
554 break
554 break
555
555
556 if not self.a:
556 if not self.a:
557 # this happens when lines were only added to the hunk
557 # this happens when lines were only added to the hunk
558 for x in self.hunk:
558 for x in self.hunk:
559 if x.startswith('-') or x.startswith(' '):
559 if x.startswith('-') or x.startswith(' '):
560 self.a.append(x)
560 self.a.append(x)
561 if not self.b:
561 if not self.b:
562 # this happens when lines were only deleted from the hunk
562 # this happens when lines were only deleted from the hunk
563 for x in self.hunk:
563 for x in self.hunk:
564 if x.startswith('+') or x.startswith(' '):
564 if x.startswith('+') or x.startswith(' '):
565 self.b.append(x[1:])
565 self.b.append(x[1:])
566 # @@ -start,len +start,len @@
566 # @@ -start,len +start,len @@
567 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
567 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
568 self.startb, self.lenb)
568 self.startb, self.lenb)
569 self.hunk[0] = self.desc
569 self.hunk[0] = self.desc
570
570
571 def reverse(self):
571 def reverse(self):
572 self.create, self.remove = self.remove, self.create
572 self.create, self.remove = self.remove, self.create
573 origlena = self.lena
573 origlena = self.lena
574 origstarta = self.starta
574 origstarta = self.starta
575 self.lena = self.lenb
575 self.lena = self.lenb
576 self.starta = self.startb
576 self.starta = self.startb
577 self.lenb = origlena
577 self.lenb = origlena
578 self.startb = origstarta
578 self.startb = origstarta
579 self.a = []
579 self.a = []
580 self.b = []
580 self.b = []
581 # self.hunk[0] is the @@ description
581 # self.hunk[0] is the @@ description
582 for x in xrange(1, len(self.hunk)):
582 for x in xrange(1, len(self.hunk)):
583 o = self.hunk[x]
583 o = self.hunk[x]
584 if o.startswith('-'):
584 if o.startswith('-'):
585 n = '+' + o[1:]
585 n = '+' + o[1:]
586 self.b.append(o[1:])
586 self.b.append(o[1:])
587 elif o.startswith('+'):
587 elif o.startswith('+'):
588 n = '-' + o[1:]
588 n = '-' + o[1:]
589 self.a.append(n)
589 self.a.append(n)
590 else:
590 else:
591 n = o
591 n = o
592 self.b.append(o[1:])
592 self.b.append(o[1:])
593 self.a.append(o)
593 self.a.append(o)
594 self.hunk[x] = o
594 self.hunk[x] = o
595
595
596 def fix_newline(self):
596 def fix_newline(self):
597 diffhelpers.fix_newline(self.hunk, self.a, self.b)
597 diffhelpers.fix_newline(self.hunk, self.a, self.b)
598
598
599 def complete(self):
599 def complete(self):
600 return len(self.a) == self.lena and len(self.b) == self.lenb
600 return len(self.a) == self.lena and len(self.b) == self.lenb
601
601
602 def createfile(self):
602 def createfile(self):
603 return self.starta == 0 and self.lena == 0 and self.create
603 return self.starta == 0 and self.lena == 0 and self.create
604
604
605 def rmfile(self):
605 def rmfile(self):
606 return self.startb == 0 and self.lenb == 0 and self.remove
606 return self.startb == 0 and self.lenb == 0 and self.remove
607
607
608 def fuzzit(self, l, fuzz, toponly):
608 def fuzzit(self, l, fuzz, toponly):
609 # this removes context lines from the top and bottom of list 'l'. It
609 # this removes context lines from the top and bottom of list 'l'. It
610 # checks the hunk to make sure only context lines are removed, and then
610 # checks the hunk to make sure only context lines are removed, and then
611 # returns a new shortened list of lines.
611 # returns a new shortened list of lines.
612 fuzz = min(fuzz, len(l)-1)
612 fuzz = min(fuzz, len(l)-1)
613 if fuzz:
613 if fuzz:
614 top = 0
614 top = 0
615 bot = 0
615 bot = 0
616 hlen = len(self.hunk)
616 hlen = len(self.hunk)
617 for x in xrange(hlen-1):
617 for x in xrange(hlen-1):
618 # the hunk starts with the @@ line, so use x+1
618 # the hunk starts with the @@ line, so use x+1
619 if self.hunk[x+1][0] == ' ':
619 if self.hunk[x+1][0] == ' ':
620 top += 1
620 top += 1
621 else:
621 else:
622 break
622 break
623 if not toponly:
623 if not toponly:
624 for x in xrange(hlen-1):
624 for x in xrange(hlen-1):
625 if self.hunk[hlen-bot-1][0] == ' ':
625 if self.hunk[hlen-bot-1][0] == ' ':
626 bot += 1
626 bot += 1
627 else:
627 else:
628 break
628 break
629
629
630 # top and bot now count context in the hunk
630 # top and bot now count context in the hunk
631 # adjust them if either one is short
631 # adjust them if either one is short
632 context = max(top, bot, 3)
632 context = max(top, bot, 3)
633 if bot < context:
633 if bot < context:
634 bot = max(0, fuzz - (context - bot))
634 bot = max(0, fuzz - (context - bot))
635 else:
635 else:
636 bot = min(fuzz, bot)
636 bot = min(fuzz, bot)
637 if top < context:
637 if top < context:
638 top = max(0, fuzz - (context - top))
638 top = max(0, fuzz - (context - top))
639 else:
639 else:
640 top = min(fuzz, top)
640 top = min(fuzz, top)
641
641
642 return l[top:len(l)-bot]
642 return l[top:len(l)-bot]
643 return l
643 return l
644
644
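# Illustrative effect (not part of the module): for a hunk carrying three
# context lines at each end, old(fuzz=1) and new(fuzz=1) drop one context
# line from the top and one from the bottom, while toponly=True trims only
# the top, mirroring how patch(1) applies fuzz.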
645 def old(self, fuzz=0, toponly=False):
645 def old(self, fuzz=0, toponly=False):
646 return self.fuzzit(self.a, fuzz, toponly)
646 return self.fuzzit(self.a, fuzz, toponly)
647
647
648 def newctrl(self):
648 def newctrl(self):
649 res = []
649 res = []
650 for x in self.hunk:
650 for x in self.hunk:
651 c = x[0]
651 c = x[0]
652 if c == ' ' or c == '+':
652 if c == ' ' or c == '+':
653 res.append(x)
653 res.append(x)
654 return res
654 return res
655
655
656 def new(self, fuzz=0, toponly=False):
656 def new(self, fuzz=0, toponly=False):
657 return self.fuzzit(self.b, fuzz, toponly)
657 return self.fuzzit(self.b, fuzz, toponly)
658
658
659 class githunk(object):
659 class githunk(object):
660 """A git hunk"""
660 """A git hunk"""
661 def __init__(self, gitpatch):
661 def __init__(self, gitpatch):
662 self.gitpatch = gitpatch
662 self.gitpatch = gitpatch
663 self.text = None
663 self.text = None
664 self.hunk = []
664 self.hunk = []
665
665
666 def createfile(self):
666 def createfile(self):
667 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
667 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
668
668
669 def rmfile(self):
669 def rmfile(self):
670 return self.gitpatch.op == 'DELETE'
670 return self.gitpatch.op == 'DELETE'
671
671
672 def complete(self):
672 def complete(self):
673 return self.text is not None
673 return self.text is not None
674
674
675 def new(self):
675 def new(self):
676 return [self.text]
676 return [self.text]
677
677
678 class binhunk(githunk):
678 class binhunk(githunk):
679 'A binary patch file. Only understands literals so far.'
679 'A binary patch file. Only understands literals so far.'
680 def __init__(self, gitpatch):
680 def __init__(self, gitpatch):
681 super(binhunk, self).__init__(gitpatch)
681 super(binhunk, self).__init__(gitpatch)
682 self.hunk = ['GIT binary patch\n']
682 self.hunk = ['GIT binary patch\n']
683
683
684 def extract(self, lr):
684 def extract(self, lr):
685 line = lr.readline()
685 line = lr.readline()
686 self.hunk.append(line)
686 self.hunk.append(line)
687 while line and not line.startswith('literal '):
687 while line and not line.startswith('literal '):
688 line = lr.readline()
688 line = lr.readline()
689 self.hunk.append(line)
689 self.hunk.append(line)
690 if not line:
690 if not line:
691 raise PatchError(_('could not extract binary patch'))
691 raise PatchError(_('could not extract binary patch'))
692 size = int(line[8:].rstrip())
692 size = int(line[8:].rstrip())
693 dec = []
693 dec = []
694 line = lr.readline()
694 line = lr.readline()
695 self.hunk.append(line)
695 self.hunk.append(line)
696 while len(line) > 1:
696 while len(line) > 1:
697 l = line[0]
697 l = line[0]
698 if l <= 'Z' and l >= 'A':
698 if l <= 'Z' and l >= 'A':
699 l = ord(l) - ord('A') + 1
699 l = ord(l) - ord('A') + 1
700 else:
700 else:
701 l = ord(l) - ord('a') + 27
701 l = ord(l) - ord('a') + 27
702 dec.append(base85.b85decode(line[1:-1])[:l])
702 dec.append(base85.b85decode(line[1:-1])[:l])
703 line = lr.readline()
703 line = lr.readline()
704 self.hunk.append(line)
704 self.hunk.append(line)
705 text = zlib.decompress(''.join(dec))
705 text = zlib.decompress(''.join(dec))
706 if len(text) != size:
707 raise PatchError(_('binary patch is %d bytes, not %d') %
708 (len(text), size))
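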
709 self.text = text
709 self.text = text
710
710
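# Decoding detail (illustrative): in a 'GIT binary patch' literal, the first
# character of each data line encodes its decoded length -- 'A'..'Z' mean
# 1..26 bytes and 'a'..'z' mean 27..52 bytes -- e.g. a line starting with 'M'
# carries ord('M') - ord('A') + 1 == 13 bytes once the rest of the line is
# base85-decoded; the concatenated payload is then zlib-decompressed and
# checked against the advertised size.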
711 class symlinkhunk(githunk):
711 class symlinkhunk(githunk):
712 """A git symlink hunk"""
712 """A git symlink hunk"""
713 def __init__(self, gitpatch, hunk):
713 def __init__(self, gitpatch, hunk):
714 super(symlinkhunk, self).__init__(gitpatch)
714 super(symlinkhunk, self).__init__(gitpatch)
715 self.hunk = hunk
715 self.hunk = hunk
716
716
717 def complete(self):
717 def complete(self):
718 return True
718 return True
719
719
720 def fix_newline(self):
720 def fix_newline(self):
721 return
721 return
722
722
723 def parsefilename(str):
723 def parsefilename(str):
724 # --- filename \t|space stuff
724 # --- filename \t|space stuff
725 s = str[4:].rstrip('\r\n')
725 s = str[4:].rstrip('\r\n')
726 i = s.find('\t')
726 i = s.find('\t')
727 if i < 0:
727 if i < 0:
728 i = s.find(' ')
728 i = s.find(' ')
729 if i < 0:
729 if i < 0:
730 return s
730 return s
731 return s[:i]
731 return s[:i]
732
732
733 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
733 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
734 def pathstrip(path, count=1):
734 def pathstrip(path, count=1):
735 pathlen = len(path)
735 pathlen = len(path)
736 i = 0
736 i = 0
737 if count == 0:
737 if count == 0:
738 return '', path.rstrip()
738 return '', path.rstrip()
739 while count > 0:
739 while count > 0:
740 i = path.find('/', i)
740 i = path.find('/', i)
741 if i == -1:
741 if i == -1:
742 raise PatchError(_("unable to strip away %d dirs from %s") %
742 raise PatchError(_("unable to strip away %d dirs from %s") %
743 (count, path))
743 (count, path))
744 i += 1
744 i += 1
745 # consume '//' in the path
745 # consume '//' in the path
746 while i < pathlen - 1 and path[i] == '/':
746 while i < pathlen - 1 and path[i] == '/':
747 i += 1
747 i += 1
748 count -= 1
748 count -= 1
749 return path[:i].lstrip(), path[i:].rstrip()
749 return path[:i].lstrip(), path[i:].rstrip()
750
750
751 nulla = afile_orig == "/dev/null"
751 nulla = afile_orig == "/dev/null"
752 nullb = bfile_orig == "/dev/null"
752 nullb = bfile_orig == "/dev/null"
753 abase, afile = pathstrip(afile_orig, strip)
753 abase, afile = pathstrip(afile_orig, strip)
754 gooda = not nulla and os.path.exists(afile)
754 gooda = not nulla and os.path.exists(afile)
755 bbase, bfile = pathstrip(bfile_orig, strip)
755 bbase, bfile = pathstrip(bfile_orig, strip)
756 if afile == bfile:
756 if afile == bfile:
757 goodb = gooda
757 goodb = gooda
758 else:
758 else:
759 goodb = not nullb and os.path.exists(bfile)
759 goodb = not nullb and os.path.exists(bfile)
760 createfunc = hunk.createfile
760 createfunc = hunk.createfile
761 if reverse:
761 if reverse:
762 createfunc = hunk.rmfile
762 createfunc = hunk.rmfile
763 missing = not goodb and not gooda and not createfunc()
763 missing = not goodb and not gooda and not createfunc()
764 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
764 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
765 # diff is between a file and its backup. In this case, the original
765 # diff is between a file and its backup. In this case, the original
766 # file should be patched (see original mpatch code).
766 # file should be patched (see original mpatch code).
767 isbackup = (abase == bbase and bfile.startswith(afile))
767 isbackup = (abase == bbase and bfile.startswith(afile))
768 fname = None
768 fname = None
769 if not missing:
769 if not missing:
770 if gooda and goodb:
770 if gooda and goodb:
771 fname = isbackup and afile or bfile
771 fname = isbackup and afile or bfile
772 elif gooda:
772 elif gooda:
773 fname = afile
773 fname = afile
774
774
775 if not fname:
775 if not fname:
776 if not nullb:
776 if not nullb:
777 fname = isbackup and afile or bfile
777 fname = isbackup and afile or bfile
778 elif not nulla:
778 elif not nulla:
779 fname = afile
779 fname = afile
780 else:
780 else:
781 raise PatchError(_("undefined source and destination files"))
781 raise PatchError(_("undefined source and destination files"))
782
782
783 return fname, missing
783 return fname, missing
784
784
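# Illustrative behaviour of the inner pathstrip() helper (not extra code):
#
#   pathstrip('a/mercurial/patch.py', 1)  -> ('a/', 'mercurial/patch.py')
#   pathstrip('a/b/c/d.txt', 2)           -> ('a/b/', 'c/d.txt')
#   pathstrip('foo.c', 0)                 -> ('', 'foo.c')
#
# selectfile() uses the stripped names to decide whether the patch target
# already exists and which of afile/bfile should actually be patched.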
785 class linereader:
785 class linereader:
786 # simple class to allow pushing lines back into the input stream
786 # simple class to allow pushing lines back into the input stream
787 def __init__(self, fp):
787 def __init__(self, fp):
788 self.fp = fp
788 self.fp = fp
789 self.buf = []
789 self.buf = []
790
790
791 def push(self, line):
791 def push(self, line):
792 if line is not None:
792 if line is not None:
793 self.buf.append(line)
793 self.buf.append(line)
794
794
795 def readline(self):
795 def readline(self):
796 if self.buf:
796 if self.buf:
797 return self.buf.pop(0)
797 return self.buf.pop(0)
798 return self.fp.readline()
798 return self.fp.readline()
799
799
800 def __iter__(self):
800 def __iter__(self):
801 while 1:
801 while 1:
802 l = self.readline()
802 l = self.readline()
803 if not l:
803 if not l:
804 break
804 break
805 yield l
805 yield l
806
806
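# Usage sketch (illustrative): linereader lets the parser peek ahead and
# then un-read a line, which the unified/context detection below relies on.
#
#   lr = linereader(fp)
#   l = lr.readline()
#   if not l.startswith('+++'):
#       lr.push(l)        # the next readline() (or iteration) returns l again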
807 def scangitpatch(lr, firstline):
807 def scangitpatch(lr, firstline):
808 """
808 """
809 Git patches can emit:
809 Git patches can emit:
810 - rename a to b
810 - rename a to b
811 - change b
811 - change b
812 - copy a to c
812 - copy a to c
813 - change c
813 - change c
814
814
815 We cannot apply this sequence as-is, the renamed 'a' could not be
815 We cannot apply this sequence as-is, the renamed 'a' could not be
816 found for it would have been renamed already. And we cannot copy
816 found for it would have been renamed already. And we cannot copy
817 from 'b' instead because 'b' would have been changed already. So
817 from 'b' instead because 'b' would have been changed already. So
818 we scan the git patch for copy and rename commands so we can
818 we scan the git patch for copy and rename commands so we can
819 perform the copies ahead of time.
819 perform the copies ahead of time.
820 """
820 """
821 pos = 0
821 pos = 0
822 try:
822 try:
823 pos = lr.fp.tell()
823 pos = lr.fp.tell()
824 fp = lr.fp
824 fp = lr.fp
825 except IOError:
825 except IOError:
826 fp = cStringIO.StringIO(lr.fp.read())
826 fp = cStringIO.StringIO(lr.fp.read())
827 gitlr = linereader(fp)
827 gitlr = linereader(fp)
828 gitlr.push(firstline)
828 gitlr.push(firstline)
829 (dopatch, gitpatches) = readgitpatch(gitlr)
829 (dopatch, gitpatches) = readgitpatch(gitlr)
830 fp.seek(pos)
830 fp.seek(pos)
831 return dopatch, gitpatches
831 return dopatch, gitpatches
832
832
833 def iterhunks(ui, fp, sourcefile=None):
833 def iterhunks(ui, fp, sourcefile=None):
834 """Read a patch and yield the following events:
834 """Read a patch and yield the following events:
835 - ("file", afile, bfile, firsthunk): select a new target file.
835 - ("file", afile, bfile, firsthunk): select a new target file.
836 - ("hunk", hunk): a new hunk is ready to be applied, follows a
836 - ("hunk", hunk): a new hunk is ready to be applied, follows a
837 "file" event.
837 "file" event.
838 - ("git", gitchanges): current diff is in git format, gitchanges
838 - ("git", gitchanges): current diff is in git format, gitchanges
839 maps filenames to gitpatch records. Unique event.
839 maps filenames to gitpatch records. Unique event.
840 """
840 """
841 changed = {}
841 changed = {}
842 current_hunk = None
842 current_hunk = None
843 afile = ""
843 afile = ""
844 bfile = ""
844 bfile = ""
845 state = None
845 state = None
846 hunknum = 0
846 hunknum = 0
847 emitfile = False
847 emitfile = False
848 git = False
848 git = False
849
849
850 # our states
850 # our states
851 BFILE = 1
851 BFILE = 1
852 context = None
852 context = None
853 lr = linereader(fp)
853 lr = linereader(fp)
854 dopatch = True
854 dopatch = True
855 # gitworkdone is True if a git operation (copy, rename, ...) was
855 # gitworkdone is True if a git operation (copy, rename, ...) was
856 # performed already for the current file. Useful when the file
856 # performed already for the current file. Useful when the file
857 # section may have no hunk.
857 # section may have no hunk.
858 gitworkdone = False
858 gitworkdone = False
859
859
860 while True:
860 while True:
861 newfile = False
861 newfile = False
862 x = lr.readline()
862 x = lr.readline()
863 if not x:
863 if not x:
864 break
864 break
865 if current_hunk:
865 if current_hunk:
866 if x.startswith('\ '):
866 if x.startswith('\ '):
867 current_hunk.fix_newline()
867 current_hunk.fix_newline()
868 yield 'hunk', current_hunk
868 yield 'hunk', current_hunk
869 current_hunk = None
869 current_hunk = None
870 gitworkdone = False
870 gitworkdone = False
871 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
871 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
872 ((context or context == None) and x.startswith('***************')))):
872 ((context or context == None) and x.startswith('***************')))):
873 try:
873 try:
874 if context == None and x.startswith('***************'):
874 if context == None and x.startswith('***************'):
875 context = True
875 context = True
876 gpatch = changed.get(bfile)
876 gpatch = changed.get(bfile)
877 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
877 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
878 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
878 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
879 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
879 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
880 if remove:
880 if remove:
881 gpatch = changed.get(afile[2:])
881 gpatch = changed.get(afile[2:])
882 if gpatch and gpatch.mode[0]:
882 if gpatch and gpatch.mode[0]:
883 current_hunk = symlinkhunk(gpatch, current_hunk)
883 current_hunk = symlinkhunk(gpatch, current_hunk)
884 except PatchError, err:
884 except PatchError, err:
885 ui.debug(err)
885 ui.debug(err)
886 current_hunk = None
886 current_hunk = None
887 continue
887 continue
888 hunknum += 1
888 hunknum += 1
889 if emitfile:
889 if emitfile:
890 emitfile = False
890 emitfile = False
891 yield 'file', (afile, bfile, current_hunk)
891 yield 'file', (afile, bfile, current_hunk)
892 elif state == BFILE and x.startswith('GIT binary patch'):
892 elif state == BFILE and x.startswith('GIT binary patch'):
893 current_hunk = binhunk(changed[bfile])
893 current_hunk = binhunk(changed[bfile])
894 hunknum += 1
894 hunknum += 1
895 if emitfile:
895 if emitfile:
896 emitfile = False
896 emitfile = False
897 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
897 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
898 current_hunk.extract(lr)
898 current_hunk.extract(lr)
899 elif x.startswith('diff --git'):
899 elif x.startswith('diff --git'):
900 # check for git diff, scanning the whole patch file if needed
900 # check for git diff, scanning the whole patch file if needed
901 m = gitre.match(x)
901 m = gitre.match(x)
902 if m:
902 if m:
903 afile, bfile = m.group(1, 2)
903 afile, bfile = m.group(1, 2)
904 if not git:
904 if not git:
905 git = True
905 git = True
906 dopatch, gitpatches = scangitpatch(lr, x)
906 dopatch, gitpatches = scangitpatch(lr, x)
907 yield 'git', gitpatches
907 yield 'git', gitpatches
908 for gp in gitpatches:
908 for gp in gitpatches:
909 changed[gp.path] = gp
909 changed[gp.path] = gp
910 # else error?
910 # else error?
911 # copy/rename + modify should modify target, not source
911 # copy/rename + modify should modify target, not source
912 gp = changed.get(bfile)
912 gp = changed.get(bfile)
913 if gp and gp.op in ('COPY', 'DELETE', 'RENAME'):
913 if gp and gp.op in ('COPY', 'DELETE', 'RENAME'):
914 afile = bfile
914 afile = bfile
915 gitworkdone = True
915 gitworkdone = True
916 newfile = True
916 newfile = True
917 elif x.startswith('---'):
917 elif x.startswith('---'):
918 # check for a unified diff
918 # check for a unified diff
919 l2 = lr.readline()
919 l2 = lr.readline()
920 if not l2.startswith('+++'):
920 if not l2.startswith('+++'):
921 lr.push(l2)
921 lr.push(l2)
922 continue
922 continue
923 newfile = True
923 newfile = True
924 context = False
924 context = False
925 afile = parsefilename(x)
925 afile = parsefilename(x)
926 bfile = parsefilename(l2)
926 bfile = parsefilename(l2)
927 elif x.startswith('***'):
927 elif x.startswith('***'):
928 # check for a context diff
928 # check for a context diff
929 l2 = lr.readline()
929 l2 = lr.readline()
930 if not l2.startswith('---'):
930 if not l2.startswith('---'):
931 lr.push(l2)
931 lr.push(l2)
932 continue
932 continue
933 l3 = lr.readline()
933 l3 = lr.readline()
934 lr.push(l3)
934 lr.push(l3)
935 if not l3.startswith("***************"):
935 if not l3.startswith("***************"):
936 lr.push(l2)
936 lr.push(l2)
937 continue
937 continue
938 newfile = True
938 newfile = True
939 context = True
939 context = True
940 afile = parsefilename(x)
940 afile = parsefilename(x)
941 bfile = parsefilename(l2)
941 bfile = parsefilename(l2)
942
942
943 if newfile:
943 if newfile:
944 emitfile = True
944 emitfile = True
945 state = BFILE
945 state = BFILE
946 hunknum = 0
946 hunknum = 0
947 if current_hunk:
947 if current_hunk:
948 if current_hunk.complete():
948 if current_hunk.complete():
949 yield 'hunk', current_hunk
949 yield 'hunk', current_hunk
950 else:
950 else:
951 raise PatchError(_("malformed patch %s %s") % (afile,
951 raise PatchError(_("malformed patch %s %s") % (afile,
952 current_hunk.desc))
952 current_hunk.desc))
953
953
954 if hunknum == 0 and dopatch and not gitworkdone:
954 if hunknum == 0 and dopatch and not gitworkdone:
955 raise NoHunks
955 raise NoHunks
956
956
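# Consumption sketch (illustrative, simplified from applydiff() below):
#
#   for state, values in iterhunks(ui, fp):
#       if state == 'file':
#           afile, bfile, first_hunk = values    # open the target file
#       elif state == 'hunk':
#           pass                                 # apply 'values' to it
#       elif state == 'git':
#           pass                                 # pre-perform copies/renames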
957 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False):
957 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False):
958 """reads a patch from fp and tries to apply it. The dict 'changed' is
958 """reads a patch from fp and tries to apply it. The dict 'changed' is
959 filled in with all of the filenames changed by the patch. Returns 0
959 filled in with all of the filenames changed by the patch. Returns 0
960 for a clean patch, -1 if any rejects were found and 1 if there was
960 for a clean patch, -1 if any rejects were found and 1 if there was
961 any fuzz."""
961 any fuzz."""
962
962
963 rejects = 0
963 rejects = 0
964 err = 0
964 err = 0
965 current_file = None
965 current_file = None
966 gitpatches = None
966 gitpatches = None
967 opener = util.opener(os.getcwd())
967 opener = util.opener(os.getcwd())
968
968
969 def closefile():
969 def closefile():
970 if not current_file:
970 if not current_file:
971 return 0
971 return 0
972 current_file.close()
972 current_file.close()
973 return len(current_file.rej)
973 return len(current_file.rej)
974
974
975 for state, values in iterhunks(ui, fp, sourcefile):
975 for state, values in iterhunks(ui, fp, sourcefile):
976 if state == 'hunk':
976 if state == 'hunk':
977 if not current_file:
977 if not current_file:
978 continue
978 continue
979 current_hunk = values
979 current_hunk = values
980 ret = current_file.apply(current_hunk, reverse)
980 ret = current_file.apply(current_hunk, reverse)
981 if ret >= 0:
981 if ret >= 0:
982 changed.setdefault(current_file.fname, None)
982 changed.setdefault(current_file.fname, None)
983 if ret > 0:
983 if ret > 0:
984 err = 1
984 err = 1
985 elif state == 'file':
985 elif state == 'file':
986 rejects += closefile()
986 rejects += closefile()
987 afile, bfile, first_hunk = values
987 afile, bfile, first_hunk = values
988 try:
988 try:
989 if sourcefile:
989 if sourcefile:
990 current_file = patchfile(ui, sourcefile, opener)
990 current_file = patchfile(ui, sourcefile, opener)
991 else:
991 else:
992 current_file, missing = selectfile(afile, bfile, first_hunk,
992 current_file, missing = selectfile(afile, bfile, first_hunk,
993 strip, reverse)
993 strip, reverse)
994 current_file = patchfile(ui, current_file, opener, missing)
994 current_file = patchfile(ui, current_file, opener, missing)
995 except PatchError, err:
995 except PatchError, err:
996 ui.warn(str(err) + '\n')
996 ui.warn(str(err) + '\n')
997 current_file, current_hunk = None, None
997 current_file, current_hunk = None, None
998 rejects += 1
998 rejects += 1
999 continue
999 continue
1000 elif state == 'git':
1000 elif state == 'git':
1001 gitpatches = values
1001 gitpatches = values
1002 cwd = os.getcwd()
1002 cwd = os.getcwd()
1003 for gp in gitpatches:
1003 for gp in gitpatches:
1004 if gp.op in ('COPY', 'RENAME'):
1004 if gp.op in ('COPY', 'RENAME'):
1005 copyfile(gp.oldpath, gp.path, cwd)
1005 copyfile(gp.oldpath, gp.path, cwd)
1006 changed[gp.path] = gp
1006 changed[gp.path] = gp
1007 else:
1007 else:
1008 raise util.Abort(_('unsupported parser state: %s') % state)
1008 raise util.Abort(_('unsupported parser state: %s') % state)
1009
1009
1010 rejects += closefile()
1010 rejects += closefile()
1011
1011
1012 if rejects:
1012 if rejects:
1013 return -1
1013 return -1
1014 return err
1014 return err
1015
1015
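# Minimal usage sketch (assumptions: the current directory is the repository
# root, 'ui' is a ui object, 'repo' a localrepository, and 'fix.patch' is a
# hypothetical patch file):
#
#   changed = {}
#   fp = open('fix.patch')
#   ret = applydiff(ui, fp, changed, strip=1)
#   # ret ==  0: applied cleanly
#   # ret ==  1: applied, but with fuzz
#   # ret == -1: some hunks were rejected (*.rej files written)
#   updatedir(ui, repo, changed)   # then record copies, removals, mode bits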
1016 def diffopts(ui, opts={}, untrusted=False):
1016 def diffopts(ui, opts={}, untrusted=False):
1017 def get(key, name=None, getter=ui.configbool):
1017 def get(key, name=None, getter=ui.configbool):
1018 return (opts.get(key) or
1018 return (opts.get(key) or
1019 getter('diff', name or key, None, untrusted=untrusted))
1019 getter('diff', name or key, None, untrusted=untrusted))
1020 return mdiff.diffopts(
1020 return mdiff.diffopts(
1021 text=opts.get('text'),
1021 text=opts.get('text'),
1022 git=get('git'),
1022 git=get('git'),
1023 nodates=get('nodates'),
1023 nodates=get('nodates'),
1024 showfunc=get('show_function', 'showfunc'),
1024 showfunc=get('show_function', 'showfunc'),
1025 ignorews=get('ignore_all_space', 'ignorews'),
1025 ignorews=get('ignore_all_space', 'ignorews'),
1026 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1026 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1027 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1027 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1028 context=get('unified', getter=ui.config))
1028 context=get('unified', getter=ui.config))
1029
1029
1030 def updatedir(ui, repo, patches, similarity=0):
1030 def updatedir(ui, repo, patches, similarity=0):
1031 '''Update dirstate after patch application according to metadata'''
1031 '''Update dirstate after patch application according to metadata'''
1032 if not patches:
1032 if not patches:
1033 return
1033 return
1034 copies = []
1034 copies = []
1035 removes = {}
1035 removes = {}
1036 cfiles = patches.keys()
1036 cfiles = patches.keys()
1037 cwd = repo.getcwd()
1037 cwd = repo.getcwd()
1038 if cwd:
1038 if cwd:
1039 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1039 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1040 for f in patches:
1040 for f in patches:
1041 gp = patches[f]
1041 gp = patches[f]
1042 if not gp:
1042 if not gp:
1043 continue
1043 continue
1044 if gp.op == 'RENAME':
1044 if gp.op == 'RENAME':
1045 copies.append((gp.oldpath, gp.path))
1045 copies.append((gp.oldpath, gp.path))
1046 removes[gp.oldpath] = 1
1046 removes[gp.oldpath] = 1
1047 elif gp.op == 'COPY':
1047 elif gp.op == 'COPY':
1048 copies.append((gp.oldpath, gp.path))
1048 copies.append((gp.oldpath, gp.path))
1049 elif gp.op == 'DELETE':
1049 elif gp.op == 'DELETE':
1050 removes[gp.path] = 1
1050 removes[gp.path] = 1
1051 for src, dst in copies:
1051 for src, dst in copies:
1052 repo.copy(src, dst)
1052 repo.copy(src, dst)
1053 removes = removes.keys()
1053 removes = removes.keys()
1054 if (not similarity) and removes:
1054 if (not similarity) and removes:
1055 repo.remove(util.sort(removes), True)
1055 repo.remove(util.sort(removes), True)
1056 for f in patches:
1056 for f in patches:
1057 gp = patches[f]
1057 gp = patches[f]
1058 if gp and gp.mode:
1058 if gp and gp.mode:
1059 islink, isexec = gp.mode
1059 islink, isexec = gp.mode
1060 dst = repo.wjoin(gp.path)
1060 dst = repo.wjoin(gp.path)
1061 # patch won't create empty files
1061 # patch won't create empty files
1062 if gp.op == 'ADD' and not os.path.exists(dst):
1062 if gp.op == 'ADD' and not os.path.exists(dst):
1063 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1063 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1064 repo.wwrite(gp.path, '', flags)
1064 repo.wwrite(gp.path, '', flags)
1065 elif gp.op != 'DELETE':
1065 elif gp.op != 'DELETE':
1066 util.set_flags(dst, islink, isexec)
1066 util.set_flags(dst, islink, isexec)
1067 cmdutil.addremove(repo, cfiles, similarity=similarity)
1067 cmdutil.addremove(repo, cfiles, similarity=similarity)
1068 files = patches.keys()
1068 files = patches.keys()
1069 files.extend([r for r in removes if r not in files])
1069 files.extend([r for r in removes if r not in files])
1070 return util.sort(files)
1070 return util.sort(files)
1071
1071
1072 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1072 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1073 """use <patcher> to apply <patchname> to the working directory.
1073 """use <patcher> to apply <patchname> to the working directory.
1074 returns whether patch was applied with fuzz factor."""
1074 returns whether patch was applied with fuzz factor."""
1075
1075
1076 fuzz = False
1076 fuzz = False
1077 if cwd:
1077 if cwd:
1078 args.append('-d %s' % util.shellquote(cwd))
1078 args.append('-d %s' % util.shellquote(cwd))
1079 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1079 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1080 util.shellquote(patchname)))
1080 util.shellquote(patchname)))
1081
1081
1082 for line in fp:
1082 for line in fp:
1083 line = line.rstrip()
1083 line = line.rstrip()
1084 ui.note(line + '\n')
1084 ui.note(line + '\n')
1085 if line.startswith('patching file '):
1085 if line.startswith('patching file '):
1086 pf = util.parse_patch_output(line)
1086 pf = util.parse_patch_output(line)
1087 printed_file = False
1087 printed_file = False
1088 files.setdefault(pf, None)
1088 files.setdefault(pf, None)
1089 elif line.find('with fuzz') >= 0:
1089 elif line.find('with fuzz') >= 0:
1090 fuzz = True
1090 fuzz = True
1091 if not printed_file:
1091 if not printed_file:
1092 ui.warn(pf + '\n')
1092 ui.warn(pf + '\n')
1093 printed_file = True
1093 printed_file = True
1094 ui.warn(line + '\n')
1094 ui.warn(line + '\n')
1095 elif line.find('saving rejects to file') >= 0:
1095 elif line.find('saving rejects to file') >= 0:
1096 ui.warn(line + '\n')
1096 ui.warn(line + '\n')
1097 elif line.find('FAILED') >= 0:
1097 elif line.find('FAILED') >= 0:
1098 if not printed_file:
1098 if not printed_file:
1099 ui.warn(pf + '\n')
1099 ui.warn(pf + '\n')
1100 printed_file = True
1100 printed_file = True
1101 ui.warn(line + '\n')
1101 ui.warn(line + '\n')
1102 code = fp.close()
1102 code = fp.close()
1103 if code:
1103 if code:
1104 raise PatchError(_("patch command failed: %s") %
1104 raise PatchError(_("patch command failed: %s") %
1105 util.explain_exit(code)[0])
1105 util.explain_exit(code)[0])
1106 return fuzz
1106 return fuzz
1107
1107
1108 def internalpatch(patchobj, ui, strip, cwd, files={}):
1108 def internalpatch(patchobj, ui, strip, cwd, files={}):
1109 """use builtin patch to apply <patchobj> to the working directory.
1109 """use builtin patch to apply <patchobj> to the working directory.
1110 returns whether patch was applied with fuzz factor."""
1110 returns whether patch was applied with fuzz factor."""
1111 try:
1111 try:
1112 fp = file(patchobj, 'rb')
1112 fp = file(patchobj, 'rb')
1113 except TypeError:
1113 except TypeError:
1114 fp = patchobj
1114 fp = patchobj
1115 if cwd:
1115 if cwd:
1116 curdir = os.getcwd()
1116 curdir = os.getcwd()
1117 os.chdir(cwd)
1117 os.chdir(cwd)
1118 try:
1118 try:
1119 ret = applydiff(ui, fp, files, strip=strip)
1119 ret = applydiff(ui, fp, files, strip=strip)
1120 finally:
1120 finally:
1121 if cwd:
1121 if cwd:
1122 os.chdir(curdir)
1122 os.chdir(curdir)
1123 if ret < 0:
1123 if ret < 0:
1124 raise PatchError
1124 raise PatchError
1125 return ret > 0
1125 return ret > 0
1126
1126
1127 def patch(patchname, ui, strip=1, cwd=None, files={}):
1127 def patch(patchname, ui, strip=1, cwd=None, files={}):
1128 """apply <patchname> to the working directory.
1128 """apply <patchname> to the working directory.
1129 returns whether patch was applied with fuzz factor."""
1129 returns whether patch was applied with fuzz factor."""
1130 patcher = ui.config('ui', 'patch')
1130 patcher = ui.config('ui', 'patch')
1131 args = []
1131 args = []
1132 try:
1132 try:
1133 if patcher:
1133 if patcher:
1134 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1134 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1135 files)
1135 files)
1136 else:
1136 else:
1137 try:
1137 try:
1138 return internalpatch(patchname, ui, strip, cwd, files)
1138 return internalpatch(patchname, ui, strip, cwd, files)
1139 except NoHunks:
1139 except NoHunks:
1140 patcher = util.find_exe('gpatch') or util.find_exe('patch')
1140 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1141 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1141 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1142 patcher)
1142 patcher)
1143 if util.needbinarypatch():
1143 if util.needbinarypatch():
1144 args.append('--binary')
1144 args.append('--binary')
1145 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1145 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1146 files)
1146 files)
1147 except PatchError, err:
1147 except PatchError, err:
1148 s = str(err)
1148 s = str(err)
1149 if s:
1149 if s:
1150 raise util.Abort(s)
1150 raise util.Abort(s)
1151 else:
1151 else:
1152 raise util.Abort(_('patch failed to apply'))
1152 raise util.Abort(_('patch failed to apply'))
1153
1153
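The fallback chain fixed on line 1140 above is the point of this changeset: util.find_exe now only ever returns a path to an existing file, so a literal command name has to be appended as the last resort. A minimal standalone sketch of the pattern, where find_exe_sketch is a hypothetical stand-in for util.find_exe (it ignores Windows executable extensions for brevity):

    import os

    def find_exe_sketch(command):
        # return the first existing, executable match on PATH, or None
        for d in os.environ.get('PATH', '').split(os.pathsep):
            candidate = os.path.join(d, command)
            if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
                return candidate
        return None

    # mirrors the corrected line 1140: try GNU patch, then plain patch,
    # and keep the bare name so the external invocation still works
    patcher = find_exe_sketch('gpatch') or find_exe_sketch('patch') or 'patch'
    print(patcher)
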
1154 def b85diff(to, tn):
1154 def b85diff(to, tn):
1155 '''print base85-encoded binary diff'''
1155 '''print base85-encoded binary diff'''
1156 def gitindex(text):
1156 def gitindex(text):
1157 if not text:
1157 if not text:
1158 return '0' * 40
1158 return '0' * 40
1159 l = len(text)
1159 l = len(text)
1160 s = util.sha1('blob %d\0' % l)
1160 s = util.sha1('blob %d\0' % l)
1161 s.update(text)
1161 s.update(text)
1162 return s.hexdigest()
1162 return s.hexdigest()
1163
1163
1164 def fmtline(line):
1164 def fmtline(line):
1165 l = len(line)
1165 l = len(line)
1166 if l <= 26:
1166 if l <= 26:
1167 l = chr(ord('A') + l - 1)
1167 l = chr(ord('A') + l - 1)
1168 else:
1168 else:
1169 l = chr(l - 26 + ord('a') - 1)
1169 l = chr(l - 26 + ord('a') - 1)
1170 return '%c%s\n' % (l, base85.b85encode(line, True))
1170 return '%c%s\n' % (l, base85.b85encode(line, True))
1171
1171
1172 def chunk(text, csize=52):
1172 def chunk(text, csize=52):
1173 l = len(text)
1173 l = len(text)
1174 i = 0
1174 i = 0
1175 while i < l:
1175 while i < l:
1176 yield text[i:i+csize]
1176 yield text[i:i+csize]
1177 i += csize
1177 i += csize
1178
1178
1179 tohash = gitindex(to)
1179 tohash = gitindex(to)
1180 tnhash = gitindex(tn)
1180 tnhash = gitindex(tn)
1181 if tohash == tnhash:
1181 if tohash == tnhash:
1182 return ""
1182 return ""
1183
1183
1184 # TODO: deltas
1184 # TODO: deltas
1185 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1185 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1186 (tohash, tnhash, len(tn))]
1186 (tohash, tnhash, len(tn))]
1187 for l in chunk(zlib.compress(tn)):
1187 for l in chunk(zlib.compress(tn)):
1188 ret.append(fmtline(l))
1188 ret.append(fmtline(l))
1189 ret.append('\n')
1189 ret.append('\n')
1190 return ''.join(ret)
1190 return ''.join(ret)
1191
1191
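The gitindex helper above reproduces git's blob hashing: SHA-1 over a 'blob <length>\0' header followed by the raw contents, so the index line of the binary patch matches what git itself would compute; fmtline then prefixes each base85 line with a letter encoding its raw length (A-Z for 1-26 bytes, a-z for 27-52). A self-contained check of the hashing scheme using hashlib directly (git_blob_id is an illustrative name):

    import hashlib

    def git_blob_id(data):
        # same scheme as gitindex: sha1("blob <len>\0" + data)
        h = hashlib.sha1(('blob %d\0' % len(data)).encode('ascii'))
        h.update(data)
        return h.hexdigest()

    # agrees with `git hash-object` for the same content
    print(git_blob_id(b'hello\n'))  # ce013625030ba8dba906f756967f9e9ca394464a
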
1192 def _addmodehdr(header, omode, nmode):
1192 def _addmodehdr(header, omode, nmode):
1193 if omode != nmode:
1193 if omode != nmode:
1194 header.append('old mode %s\n' % omode)
1194 header.append('old mode %s\n' % omode)
1195 header.append('new mode %s\n' % nmode)
1195 header.append('new mode %s\n' % nmode)
1196
1196
1197 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1197 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1198 '''yields diff of changes to files between two nodes, or node and
1198 '''yields diff of changes to files between two nodes, or node and
1199 working directory.
1199 working directory.
1200
1200
1201 if node1 is None, use first dirstate parent instead.
1201 if node1 is None, use first dirstate parent instead.
1202 if node2 is None, compare node1 with working directory.'''
1202 if node2 is None, compare node1 with working directory.'''
1203
1203
1204 if not match:
1204 if not match:
1205 match = cmdutil.matchall(repo)
1205 match = cmdutil.matchall(repo)
1206
1206
1207 if opts is None:
1207 if opts is None:
1208 opts = mdiff.defaultopts
1208 opts = mdiff.defaultopts
1209
1209
1210 if not node1:
1210 if not node1:
1211 node1 = repo.dirstate.parents()[0]
1211 node1 = repo.dirstate.parents()[0]
1212
1212
1213 flcache = {}
1213 flcache = {}
1214 def getfilectx(f, ctx):
1214 def getfilectx(f, ctx):
1215 flctx = ctx.filectx(f, filelog=flcache.get(f))
1215 flctx = ctx.filectx(f, filelog=flcache.get(f))
1216 if f not in flcache:
1216 if f not in flcache:
1217 flcache[f] = flctx._filelog
1217 flcache[f] = flctx._filelog
1218 return flctx
1218 return flctx
1219
1219
1220 ctx1 = repo[node1]
1220 ctx1 = repo[node1]
1221 ctx2 = repo[node2]
1221 ctx2 = repo[node2]
1222
1222
1223 if not changes:
1223 if not changes:
1224 changes = repo.status(ctx1, ctx2, match=match)
1224 changes = repo.status(ctx1, ctx2, match=match)
1225 modified, added, removed = changes[:3]
1225 modified, added, removed = changes[:3]
1226
1226
1227 if not modified and not added and not removed:
1227 if not modified and not added and not removed:
1228 return
1228 return
1229
1229
1230 date1 = util.datestr(ctx1.date())
1230 date1 = util.datestr(ctx1.date())
1231 man1 = ctx1.manifest()
1231 man1 = ctx1.manifest()
1232
1232
1233 if repo.ui.quiet:
1233 if repo.ui.quiet:
1234 r = None
1234 r = None
1235 else:
1235 else:
1236 hexfunc = repo.ui.debugflag and hex or short
1236 hexfunc = repo.ui.debugflag and hex or short
1237 r = [hexfunc(node) for node in [node1, node2] if node]
1237 r = [hexfunc(node) for node in [node1, node2] if node]
1238
1238
1239 if opts.git:
1239 if opts.git:
1240 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1240 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1241 for k, v in copy.items():
1241 for k, v in copy.items():
1242 copy[v] = k
1242 copy[v] = k
1243
1243
1244 gone = {}
1244 gone = {}
1245 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1245 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1246
1246
1247 for f in util.sort(modified + added + removed):
1247 for f in util.sort(modified + added + removed):
1248 to = None
1248 to = None
1249 tn = None
1249 tn = None
1250 dodiff = True
1250 dodiff = True
1251 header = []
1251 header = []
1252 if f in man1:
1252 if f in man1:
1253 to = getfilectx(f, ctx1).data()
1253 to = getfilectx(f, ctx1).data()
1254 if f not in removed:
1254 if f not in removed:
1255 tn = getfilectx(f, ctx2).data()
1255 tn = getfilectx(f, ctx2).data()
1256 a, b = f, f
1256 a, b = f, f
1257 if opts.git:
1257 if opts.git:
1258 if f in added:
1258 if f in added:
1259 mode = gitmode[ctx2.flags(f)]
1259 mode = gitmode[ctx2.flags(f)]
1260 if f in copy:
1260 if f in copy:
1261 a = copy[f]
1261 a = copy[f]
1262 omode = gitmode[man1.flags(a)]
1262 omode = gitmode[man1.flags(a)]
1263 _addmodehdr(header, omode, mode)
1263 _addmodehdr(header, omode, mode)
1264 if a in removed and a not in gone:
1264 if a in removed and a not in gone:
1265 op = 'rename'
1265 op = 'rename'
1266 gone[a] = 1
1266 gone[a] = 1
1267 else:
1267 else:
1268 op = 'copy'
1268 op = 'copy'
1269 header.append('%s from %s\n' % (op, a))
1269 header.append('%s from %s\n' % (op, a))
1270 header.append('%s to %s\n' % (op, f))
1270 header.append('%s to %s\n' % (op, f))
1271 to = getfilectx(a, ctx1).data()
1271 to = getfilectx(a, ctx1).data()
1272 else:
1272 else:
1273 header.append('new file mode %s\n' % mode)
1273 header.append('new file mode %s\n' % mode)
1274 if util.binary(tn):
1274 if util.binary(tn):
1275 dodiff = 'binary'
1275 dodiff = 'binary'
1276 elif f in removed:
1276 elif f in removed:
1277 # have we already reported a copy above?
1277 # have we already reported a copy above?
1278 if f in copy and copy[f] in added and copy[copy[f]] == f:
1278 if f in copy and copy[f] in added and copy[copy[f]] == f:
1279 dodiff = False
1279 dodiff = False
1280 else:
1280 else:
1281 header.append('deleted file mode %s\n' %
1281 header.append('deleted file mode %s\n' %
1282 gitmode[man1.flags(f)])
1282 gitmode[man1.flags(f)])
1283 else:
1283 else:
1284 omode = gitmode[man1.flags(f)]
1284 omode = gitmode[man1.flags(f)]
1285 nmode = gitmode[ctx2.flags(f)]
1285 nmode = gitmode[ctx2.flags(f)]
1286 _addmodehdr(header, omode, nmode)
1286 _addmodehdr(header, omode, nmode)
1287 if util.binary(to) or util.binary(tn):
1287 if util.binary(to) or util.binary(tn):
1288 dodiff = 'binary'
1288 dodiff = 'binary'
1289 r = None
1289 r = None
1290 header.insert(0, mdiff.diffline(r, a, b, opts))
1290 header.insert(0, mdiff.diffline(r, a, b, opts))
1291 if dodiff:
1291 if dodiff:
1292 if dodiff == 'binary':
1292 if dodiff == 'binary':
1293 text = b85diff(to, tn)
1293 text = b85diff(to, tn)
1294 else:
1294 else:
1295 text = mdiff.unidiff(to, date1,
1295 text = mdiff.unidiff(to, date1,
1296 # ctx2 date may be dynamic
1296 # ctx2 date may be dynamic
1297 tn, util.datestr(ctx2.date()),
1297 tn, util.datestr(ctx2.date()),
1298 a, b, r, opts=opts)
1298 a, b, r, opts=opts)
1299 if header and (text or len(header) > 1):
1299 if header and (text or len(header) > 1):
1300 yield ''.join(header)
1300 yield ''.join(header)
1301 if text:
1301 if text:
1302 yield text
1302 yield text
1303
1303
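diff() is a generator: each header and each hunk arrives as a separate string chunk, so callers can stream the output straight to a file object. A rough usage sketch for this era of the API, assuming it is executed from inside a Mercurial working copy:

    import sys
    from mercurial import hg, ui
    from mercurial.patch import diff

    repo = hg.repository(ui.ui(), '.')
    # with no nodes given: first dirstate parent vs. the working directory
    for chunk in diff(repo):
        sys.stdout.write(chunk)
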
1304 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1304 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1305 opts=None):
1305 opts=None):
1306 '''export changesets as hg patches.'''
1306 '''export changesets as hg patches.'''
1307
1307
1308 total = len(revs)
1308 total = len(revs)
1309 revwidth = max([len(str(rev)) for rev in revs])
1309 revwidth = max([len(str(rev)) for rev in revs])
1310
1310
1311 def single(rev, seqno, fp):
1311 def single(rev, seqno, fp):
1312 ctx = repo[rev]
1312 ctx = repo[rev]
1313 node = ctx.node()
1313 node = ctx.node()
1314 parents = [p.node() for p in ctx.parents() if p]
1314 parents = [p.node() for p in ctx.parents() if p]
1315 branch = ctx.branch()
1315 branch = ctx.branch()
1316 if switch_parent:
1316 if switch_parent:
1317 parents.reverse()
1317 parents.reverse()
1318 prev = (parents and parents[0]) or nullid
1318 prev = (parents and parents[0]) or nullid
1319
1319
1320 if not fp:
1320 if not fp:
1321 fp = cmdutil.make_file(repo, template, node, total=total,
1321 fp = cmdutil.make_file(repo, template, node, total=total,
1322 seqno=seqno, revwidth=revwidth,
1322 seqno=seqno, revwidth=revwidth,
1323 mode='ab')
1323 mode='ab')
1324 if fp != sys.stdout and hasattr(fp, 'name'):
1324 if fp != sys.stdout and hasattr(fp, 'name'):
1325 repo.ui.note("%s\n" % fp.name)
1325 repo.ui.note("%s\n" % fp.name)
1326
1326
1327 fp.write("# HG changeset patch\n")
1327 fp.write("# HG changeset patch\n")
1328 fp.write("# User %s\n" % ctx.user())
1328 fp.write("# User %s\n" % ctx.user())
1329 fp.write("# Date %d %d\n" % ctx.date())
1329 fp.write("# Date %d %d\n" % ctx.date())
1330 if branch and (branch != 'default'):
1330 if branch and (branch != 'default'):
1331 fp.write("# Branch %s\n" % branch)
1331 fp.write("# Branch %s\n" % branch)
1332 fp.write("# Node ID %s\n" % hex(node))
1332 fp.write("# Node ID %s\n" % hex(node))
1333 fp.write("# Parent %s\n" % hex(prev))
1333 fp.write("# Parent %s\n" % hex(prev))
1334 if len(parents) > 1:
1334 if len(parents) > 1:
1335 fp.write("# Parent %s\n" % hex(parents[1]))
1335 fp.write("# Parent %s\n" % hex(parents[1]))
1336 fp.write(ctx.description().rstrip())
1336 fp.write(ctx.description().rstrip())
1337 fp.write("\n\n")
1337 fp.write("\n\n")
1338
1338
1339 for chunk in diff(repo, prev, node, opts=opts):
1339 for chunk in diff(repo, prev, node, opts=opts):
1340 fp.write(chunk)
1340 fp.write(chunk)
1341
1341
1342 for seqno, rev in enumerate(revs):
1342 for seqno, rev in enumerate(revs):
1343 single(rev, seqno+1, fp)
1343 single(rev, seqno+1, fp)
1344
1344
1345 def diffstatdata(lines):
1345 def diffstatdata(lines):
1346 filename, adds, removes = None, 0, 0
1346 filename, adds, removes = None, 0, 0
1347 for line in lines:
1347 for line in lines:
1348 if line.startswith('diff'):
1348 if line.startswith('diff'):
1349 if filename:
1349 if filename:
1350 yield (filename, adds, removes)
1350 yield (filename, adds, removes)
1351 # set numbers to 0 anyway when starting new file
1351 # set numbers to 0 anyway when starting new file
1352 adds, removes = 0, 0
1352 adds, removes = 0, 0
1353 if line.startswith('diff --git'):
1353 if line.startswith('diff --git'):
1354 filename = gitre.search(line).group(1)
1354 filename = gitre.search(line).group(1)
1355 else:
1355 else:
1356 # format: "diff -r ... -r ... file name"
1356 # format: "diff -r ... -r ... file name"
1357 filename = line.split(None, 5)[-1]
1357 filename = line.split(None, 5)[-1]
1358 elif line.startswith('+') and not line.startswith('+++'):
1358 elif line.startswith('+') and not line.startswith('+++'):
1359 adds += 1
1359 adds += 1
1360 elif line.startswith('-') and not line.startswith('---'):
1360 elif line.startswith('-') and not line.startswith('---'):
1361 removes += 1
1361 removes += 1
1362 if filename:
1362 if filename:
1363 yield (filename, adds, removes)
1363 yield (filename, adds, removes)
1364
1364
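diffstatdata only needs an iterable of diff lines: the filename is taken from each 'diff' header (git or plain -r style) and added/removed lines are tallied until the next header. A quick illustration with a made-up single-file change, assuming the function is importable from mercurial.patch:

    from mercurial.patch import diffstatdata

    sample = [
        'diff -r 000000000000 -r 111111111111 foo.txt',
        '--- a/foo.txt',
        '+++ b/foo.txt',
        '+added line',
        '-removed line',
        '+another added line',
    ]
    for filename, adds, removes in diffstatdata(sample):
        print('%s: +%d, -%d' % (filename, adds, removes))
    # foo.txt: +2, -1
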
1365 def diffstat(lines):
1365 def diffstat(lines):
1366 output = []
1366 output = []
1367 stats = list(diffstatdata(lines))
1367 stats = list(diffstatdata(lines))
1368 width = util.termwidth() - 2
1368 width = util.termwidth() - 2
1369
1369
1370 maxtotal, maxname = 0, 0
1370 maxtotal, maxname = 0, 0
1371 totaladds, totalremoves = 0, 0
1371 totaladds, totalremoves = 0, 0
1372 for filename, adds, removes in stats:
1372 for filename, adds, removes in stats:
1373 totaladds += adds
1373 totaladds += adds
1374 totalremoves += removes
1374 totalremoves += removes
1375 maxname = max(maxname, len(filename))
1375 maxname = max(maxname, len(filename))
1376 maxtotal = max(maxtotal, adds+removes)
1376 maxtotal = max(maxtotal, adds+removes)
1377
1377
1378 countwidth = len(str(maxtotal))
1378 countwidth = len(str(maxtotal))
1379 graphwidth = width - countwidth - maxname
1379 graphwidth = width - countwidth - maxname
1380 if graphwidth < 10:
1380 if graphwidth < 10:
1381 graphwidth = 10
1381 graphwidth = 10
1382
1382
1383 factor = int(math.ceil(float(maxtotal) / graphwidth))
1383 factor = int(math.ceil(float(maxtotal) / graphwidth))
1384
1384
1385 for filename, adds, removes in stats:
1385 for filename, adds, removes in stats:
1386 # If diffstat runs out of room it doesn't print anything, which
1386 # If diffstat runs out of room it doesn't print anything, which
1387 # isn't very useful, so always print at least one + or - if there
1387 # isn't very useful, so always print at least one + or - if there
1388 # were at least some changes
1388 # were at least some changes
1389 pluses = '+' * max(adds/factor, int(bool(adds)))
1389 pluses = '+' * max(adds/factor, int(bool(adds)))
1390 minuses = '-' * max(removes/factor, int(bool(removes)))
1390 minuses = '-' * max(removes/factor, int(bool(removes)))
1391 output.append(' %-*s | %*.d %s%s\n' % (maxname, filename, countwidth,
1391 output.append(' %-*s | %*.d %s%s\n' % (maxname, filename, countwidth,
1392 adds+removes, pluses, minuses))
1392 adds+removes, pluses, minuses))
1393
1393
1394 if stats:
1394 if stats:
1395 output.append(' %d files changed, %d insertions(+), %d deletions(-)\n' %
1395 output.append(' %d files changed, %d insertions(+), %d deletions(-)\n' %
1396 (len(stats), totaladds, totalremoves))
1396 (len(stats), totaladds, totalremoves))
1397
1397
1398 return ''.join(output)
1398 return ''.join(output)
@@ -1,2017 +1,2020 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform-specific implementations
2 util.py - Mercurial utility functions and platform-specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, re, shutil, sys, tempfile, traceback, error
16 import cStringIO, errno, getpass, re, shutil, sys, tempfile, traceback, error
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 import imp, unicodedata
18 import imp, unicodedata
19
19
20 # Python compatibility
20 # Python compatibility
21
21
22 try:
22 try:
23 set = set
23 set = set
24 frozenset = frozenset
24 frozenset = frozenset
25 except NameError:
25 except NameError:
26 from sets import Set as set, ImmutableSet as frozenset
26 from sets import Set as set, ImmutableSet as frozenset
27
27
28 _md5 = None
28 _md5 = None
29 def md5(s):
29 def md5(s):
30 global _md5
30 global _md5
31 if _md5 is None:
31 if _md5 is None:
32 try:
32 try:
33 import hashlib
33 import hashlib
34 _md5 = hashlib.md5
34 _md5 = hashlib.md5
35 except ImportError:
35 except ImportError:
36 import md5
36 import md5
37 _md5 = md5.md5
37 _md5 = md5.md5
38 return _md5(s)
38 return _md5(s)
39
39
40 _sha1 = None
40 _sha1 = None
41 def sha1(s):
41 def sha1(s):
42 global _sha1
42 global _sha1
43 if _sha1 is None:
43 if _sha1 is None:
44 try:
44 try:
45 import hashlib
45 import hashlib
46 _sha1 = hashlib.sha1
46 _sha1 = hashlib.sha1
47 except ImportError:
47 except ImportError:
48 import sha
48 import sha
49 _sha1 = sha.sha
49 _sha1 = sha.sha
50 return _sha1(s)
50 return _sha1(s)
51
51
52 try:
52 try:
53 import subprocess
53 import subprocess
54 subprocess.Popen # trigger ImportError early
54 subprocess.Popen # trigger ImportError early
55 closefds = os.name == 'posix'
55 closefds = os.name == 'posix'
56 def popen2(cmd, mode='t', bufsize=-1):
56 def popen2(cmd, mode='t', bufsize=-1):
57 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
57 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
58 close_fds=closefds,
58 close_fds=closefds,
59 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
59 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
60 return p.stdin, p.stdout
60 return p.stdin, p.stdout
61 def popen3(cmd, mode='t', bufsize=-1):
61 def popen3(cmd, mode='t', bufsize=-1):
62 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
62 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
63 close_fds=closefds,
63 close_fds=closefds,
64 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
64 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
65 stderr=subprocess.PIPE)
65 stderr=subprocess.PIPE)
66 return p.stdin, p.stdout, p.stderr
66 return p.stdin, p.stdout, p.stderr
67 def Popen3(cmd, capturestderr=False, bufsize=-1):
67 def Popen3(cmd, capturestderr=False, bufsize=-1):
68 stderr = capturestderr and subprocess.PIPE or None
68 stderr = capturestderr and subprocess.PIPE or None
69 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
69 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
70 close_fds=closefds,
70 close_fds=closefds,
71 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
71 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
72 stderr=stderr)
72 stderr=stderr)
73 p.fromchild = p.stdout
73 p.fromchild = p.stdout
74 p.tochild = p.stdin
74 p.tochild = p.stdin
75 p.childerr = p.stderr
75 p.childerr = p.stderr
76 return p
76 return p
77 except ImportError:
77 except ImportError:
78 subprocess = None
78 subprocess = None
79 from popen2 import Popen3
79 from popen2 import Popen3
80 popen2 = os.popen2
80 popen2 = os.popen2
81 popen3 = os.popen3
81 popen3 = os.popen3
82
82
83
83
84 _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'}
84 _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'}
85
85
86 try:
86 try:
87 _encoding = os.environ.get("HGENCODING")
87 _encoding = os.environ.get("HGENCODING")
88 if sys.platform == 'darwin' and not _encoding:
88 if sys.platform == 'darwin' and not _encoding:
89 # On darwin, getpreferredencoding ignores the locale environment and
89 # On darwin, getpreferredencoding ignores the locale environment and
90 # always returns mac-roman. We override this if the environment is
90 # always returns mac-roman. We override this if the environment is
91 # not C (has been customized by the user).
91 # not C (has been customized by the user).
92 locale.setlocale(locale.LC_CTYPE, '')
92 locale.setlocale(locale.LC_CTYPE, '')
93 _encoding = locale.getlocale()[1]
93 _encoding = locale.getlocale()[1]
94 if not _encoding:
94 if not _encoding:
95 _encoding = locale.getpreferredencoding() or 'ascii'
95 _encoding = locale.getpreferredencoding() or 'ascii'
96 _encoding = _encodingfixup.get(_encoding, _encoding)
96 _encoding = _encodingfixup.get(_encoding, _encoding)
97 except locale.Error:
97 except locale.Error:
98 _encoding = 'ascii'
98 _encoding = 'ascii'
99 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
99 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
100 _fallbackencoding = 'ISO-8859-1'
100 _fallbackencoding = 'ISO-8859-1'
101
101
102 def tolocal(s):
102 def tolocal(s):
103 """
103 """
104 Convert a string from internal UTF-8 to local encoding
104 Convert a string from internal UTF-8 to local encoding
105
105
106 All internal strings should be UTF-8 but some repos before the
106 All internal strings should be UTF-8 but some repos before the
107 implementation of locale support may contain latin1 or possibly
107 implementation of locale support may contain latin1 or possibly
108 other character sets. We attempt to decode everything strictly
108 other character sets. We attempt to decode everything strictly
109 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
109 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
110 replace unknown characters.
110 replace unknown characters.
111 """
111 """
112 for e in ('UTF-8', _fallbackencoding):
112 for e in ('UTF-8', _fallbackencoding):
113 try:
113 try:
114 u = s.decode(e) # attempt strict decoding
114 u = s.decode(e) # attempt strict decoding
115 return u.encode(_encoding, "replace")
115 return u.encode(_encoding, "replace")
116 except LookupError, k:
116 except LookupError, k:
117 raise Abort(_("%s, please check your locale settings") % k)
117 raise Abort(_("%s, please check your locale settings") % k)
118 except UnicodeDecodeError:
118 except UnicodeDecodeError:
119 pass
119 pass
120 u = s.decode("utf-8", "replace") # last ditch
120 u = s.decode("utf-8", "replace") # last ditch
121 return u.encode(_encoding, "replace")
121 return u.encode(_encoding, "replace")
122
122
123 def fromlocal(s):
123 def fromlocal(s):
124 """
124 """
125 Convert a string from the local character encoding to UTF-8
125 Convert a string from the local character encoding to UTF-8
126
126
127 We attempt to decode strings using the encoding mode set by
127 We attempt to decode strings using the encoding mode set by
128 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
128 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
129 characters will cause an error message. Other modes include
129 characters will cause an error message. Other modes include
130 'replace', which replaces unknown characters with a special
130 'replace', which replaces unknown characters with a special
131 Unicode character, and 'ignore', which drops the character.
131 Unicode character, and 'ignore', which drops the character.
132 """
132 """
133 try:
133 try:
134 return s.decode(_encoding, _encodingmode).encode("utf-8")
134 return s.decode(_encoding, _encodingmode).encode("utf-8")
135 except UnicodeDecodeError, inst:
135 except UnicodeDecodeError, inst:
136 sub = s[max(0, inst.start-10):inst.start+10]
136 sub = s[max(0, inst.start-10):inst.start+10]
137 raise Abort("decoding near '%s': %s!" % (sub, inst))
137 raise Abort("decoding near '%s': %s!" % (sub, inst))
138 except LookupError, k:
138 except LookupError, k:
139 raise Abort(_("%s, please check your locale settings") % k)
139 raise Abort(_("%s, please check your locale settings") % k)
140
140
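tolocal and fromlocal implement the 'UTF-8 on the inside, local encoding at the edges' policy: strict UTF-8 first, then the ISO-8859-1 fallback, and only then lossy replacement. A standalone sketch of that decoding ladder (tolocal_sketch is illustrative and hard-codes UTF-8 as the local encoding instead of reading the module-level _encoding):

    def tolocal_sketch(s, local='utf-8'):
        # strict UTF-8, then the latin-1 fallback, then last-ditch replace
        for enc in ('utf-8', 'iso-8859-1'):
            try:
                return s.decode(enc).encode(local, 'replace')
            except UnicodeDecodeError:
                pass
        return s.decode('utf-8', 'replace').encode(local, 'replace')

    print(tolocal_sketch(b'caf\xc3\xa9'))  # valid UTF-8 passes through unchanged
    print(tolocal_sketch(b'caf\xe9'))      # legacy latin-1 bytes get re-encoded
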
141 def colwidth(s):
141 def colwidth(s):
142 "Find the column width of a UTF-8 string for display"
142 "Find the column width of a UTF-8 string for display"
143 d = s.decode(_encoding, 'replace')
143 d = s.decode(_encoding, 'replace')
144 if hasattr(unicodedata, 'east_asian_width'):
144 if hasattr(unicodedata, 'east_asian_width'):
145 w = unicodedata.east_asian_width
145 w = unicodedata.east_asian_width
146 return sum([w(c) in 'WF' and 2 or 1 for c in d])
146 return sum([w(c) in 'WF' and 2 or 1 for c in d])
147 return len(d)
147 return len(d)
148
148
149 def version():
149 def version():
150 """Return version information if available."""
150 """Return version information if available."""
151 try:
151 try:
152 import __version__
152 import __version__
153 return __version__.version
153 return __version__.version
154 except ImportError:
154 except ImportError:
155 return 'unknown'
155 return 'unknown'
156
156
157 # used by parsedate
157 # used by parsedate
158 defaultdateformats = (
158 defaultdateformats = (
159 '%Y-%m-%d %H:%M:%S',
159 '%Y-%m-%d %H:%M:%S',
160 '%Y-%m-%d %I:%M:%S%p',
160 '%Y-%m-%d %I:%M:%S%p',
161 '%Y-%m-%d %H:%M',
161 '%Y-%m-%d %H:%M',
162 '%Y-%m-%d %I:%M%p',
162 '%Y-%m-%d %I:%M%p',
163 '%Y-%m-%d',
163 '%Y-%m-%d',
164 '%m-%d',
164 '%m-%d',
165 '%m/%d',
165 '%m/%d',
166 '%m/%d/%y',
166 '%m/%d/%y',
167 '%m/%d/%Y',
167 '%m/%d/%Y',
168 '%a %b %d %H:%M:%S %Y',
168 '%a %b %d %H:%M:%S %Y',
169 '%a %b %d %I:%M:%S%p %Y',
169 '%a %b %d %I:%M:%S%p %Y',
170 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
170 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
171 '%b %d %H:%M:%S %Y',
171 '%b %d %H:%M:%S %Y',
172 '%b %d %I:%M:%S%p %Y',
172 '%b %d %I:%M:%S%p %Y',
173 '%b %d %H:%M:%S',
173 '%b %d %H:%M:%S',
174 '%b %d %I:%M:%S%p',
174 '%b %d %I:%M:%S%p',
175 '%b %d %H:%M',
175 '%b %d %H:%M',
176 '%b %d %I:%M%p',
176 '%b %d %I:%M%p',
177 '%b %d %Y',
177 '%b %d %Y',
178 '%b %d',
178 '%b %d',
179 '%H:%M:%S',
179 '%H:%M:%S',
180 '%I:%M:%SP',
180 '%I:%M:%SP',
181 '%H:%M',
181 '%H:%M',
182 '%I:%M%p',
182 '%I:%M%p',
183 )
183 )
184
184
185 extendeddateformats = defaultdateformats + (
185 extendeddateformats = defaultdateformats + (
186 "%Y",
186 "%Y",
187 "%Y-%m",
187 "%Y-%m",
188 "%b",
188 "%b",
189 "%b %Y",
189 "%b %Y",
190 )
190 )
191
191
192 # differences from SafeConfigParser:
192 # differences from SafeConfigParser:
193 # - case-sensitive keys
193 # - case-sensitive keys
194 # - allows values that are not strings (this means that you may not
194 # - allows values that are not strings (this means that you may not
195 # be able to save the configuration to a file)
195 # be able to save the configuration to a file)
196 class configparser(ConfigParser.SafeConfigParser):
196 class configparser(ConfigParser.SafeConfigParser):
197 def optionxform(self, optionstr):
197 def optionxform(self, optionstr):
198 return optionstr
198 return optionstr
199
199
200 def set(self, section, option, value):
200 def set(self, section, option, value):
201 return ConfigParser.ConfigParser.set(self, section, option, value)
201 return ConfigParser.ConfigParser.set(self, section, option, value)
202
202
203 def _interpolate(self, section, option, rawval, vars):
203 def _interpolate(self, section, option, rawval, vars):
204 if not isinstance(rawval, basestring):
204 if not isinstance(rawval, basestring):
205 return rawval
205 return rawval
206 return ConfigParser.SafeConfigParser._interpolate(self, section,
206 return ConfigParser.SafeConfigParser._interpolate(self, section,
207 option, rawval, vars)
207 option, rawval, vars)
208
208
209 def cachefunc(func):
209 def cachefunc(func):
210 '''cache the result of function calls'''
210 '''cache the result of function calls'''
211 # XXX doesn't handle keyword args
211 # XXX doesn't handle keyword args
212 cache = {}
212 cache = {}
213 if func.func_code.co_argcount == 1:
213 if func.func_code.co_argcount == 1:
214 # we gain a small amount of time because
214 # we gain a small amount of time because
215 # we don't need to pack/unpack the list
215 # we don't need to pack/unpack the list
216 def f(arg):
216 def f(arg):
217 if arg not in cache:
217 if arg not in cache:
218 cache[arg] = func(arg)
218 cache[arg] = func(arg)
219 return cache[arg]
219 return cache[arg]
220 else:
220 else:
221 def f(*args):
221 def f(*args):
222 if args not in cache:
222 if args not in cache:
223 cache[args] = func(*args)
223 cache[args] = func(*args)
224 return cache[args]
224 return cache[args]
225
225
226 return f
226 return f
227
227
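A usage sketch for cachefunc, assuming it is importable from mercurial.util: repeated calls with the same argument are served from the cache instead of re-running the wrapped function (the single-argument fast path simply avoids packing an args tuple). The square/calls names are illustrative:

    from mercurial.util import cachefunc

    calls = []
    def square(x):
        calls.append(x)   # record real invocations
        return x * x

    cached = cachefunc(square)
    cached(4)
    cached(4)             # cache hit, square() is not called again
    cached(5)
    print(calls)          # [4, 5]
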
228 def pipefilter(s, cmd):
228 def pipefilter(s, cmd):
229 '''filter string S through command CMD, returning its output'''
229 '''filter string S through command CMD, returning its output'''
230 (pin, pout) = popen2(cmd, 'b')
230 (pin, pout) = popen2(cmd, 'b')
231 def writer():
231 def writer():
232 try:
232 try:
233 pin.write(s)
233 pin.write(s)
234 pin.close()
234 pin.close()
235 except IOError, inst:
235 except IOError, inst:
236 if inst.errno != errno.EPIPE:
236 if inst.errno != errno.EPIPE:
237 raise
237 raise
238
238
239 # we should use select instead on UNIX, but this will work on most
239 # we should use select instead on UNIX, but this will work on most
240 # systems, including Windows
240 # systems, including Windows
241 w = threading.Thread(target=writer)
241 w = threading.Thread(target=writer)
242 w.start()
242 w.start()
243 f = pout.read()
243 f = pout.read()
244 pout.close()
244 pout.close()
245 w.join()
245 w.join()
246 return f
246 return f
247
247
248 def tempfilter(s, cmd):
248 def tempfilter(s, cmd):
249 '''filter string S through a pair of temporary files with CMD.
249 '''filter string S through a pair of temporary files with CMD.
250 CMD is used as a template to create the real command to be run,
250 CMD is used as a template to create the real command to be run,
251 with the strings INFILE and OUTFILE replaced by the real names of
251 with the strings INFILE and OUTFILE replaced by the real names of
252 the temporary files generated.'''
252 the temporary files generated.'''
253 inname, outname = None, None
253 inname, outname = None, None
254 try:
254 try:
255 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
255 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
256 fp = os.fdopen(infd, 'wb')
256 fp = os.fdopen(infd, 'wb')
257 fp.write(s)
257 fp.write(s)
258 fp.close()
258 fp.close()
259 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
259 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
260 os.close(outfd)
260 os.close(outfd)
261 cmd = cmd.replace('INFILE', inname)
261 cmd = cmd.replace('INFILE', inname)
262 cmd = cmd.replace('OUTFILE', outname)
262 cmd = cmd.replace('OUTFILE', outname)
263 code = os.system(cmd)
263 code = os.system(cmd)
264 if sys.platform == 'OpenVMS' and code & 1:
264 if sys.platform == 'OpenVMS' and code & 1:
265 code = 0
265 code = 0
266 if code: raise Abort(_("command '%s' failed: %s") %
266 if code: raise Abort(_("command '%s' failed: %s") %
267 (cmd, explain_exit(code)))
267 (cmd, explain_exit(code)))
268 return open(outname, 'rb').read()
268 return open(outname, 'rb').read()
269 finally:
269 finally:
270 try:
270 try:
271 if inname: os.unlink(inname)
271 if inname: os.unlink(inname)
272 except: pass
272 except: pass
273 try:
273 try:
274 if outname: os.unlink(outname)
274 if outname: os.unlink(outname)
275 except: pass
275 except: pass
276
276
277 filtertable = {
277 filtertable = {
278 'tempfile:': tempfilter,
278 'tempfile:': tempfilter,
279 'pipe:': pipefilter,
279 'pipe:': pipefilter,
280 }
280 }
281
281
282 def filter(s, cmd):
282 def filter(s, cmd):
283 "filter a string through a command that transforms its input to its output"
283 "filter a string through a command that transforms its input to its output"
284 for name, fn in filtertable.iteritems():
284 for name, fn in filtertable.iteritems():
285 if cmd.startswith(name):
285 if cmd.startswith(name):
286 return fn(s, cmd[len(name):].lstrip())
286 return fn(s, cmd[len(name):].lstrip())
287 return pipefilter(s, cmd)
287 return pipefilter(s, cmd)
288
288
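filter() dispatches on the command prefix: 'pipe:' streams the data through popen2, 'tempfile:' round-trips it through temporary files with INFILE and OUTFILE substituted into the command, and anything else defaults to the pipe path. An illustrative POSIX-only run, assuming tr and sed are on PATH (imported under another name to avoid shadowing the builtin filter):

    from mercurial.util import filter as cmdfilter

    print(cmdfilter('hello\n', 'pipe: tr a-z A-Z'))
    # returns 'HELLO\n'
    print(cmdfilter('hello\n', 'tempfile: sed s/hello/goodbye/ INFILE > OUTFILE'))
    # returns 'goodbye\n'
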
289 def binary(s):
289 def binary(s):
290 """return true if a string is binary data"""
290 """return true if a string is binary data"""
291 if s and '\0' in s:
291 if s and '\0' in s:
292 return True
292 return True
293 return False
293 return False
294
294
295 def unique(g):
295 def unique(g):
296 """return the uniq elements of iterable g"""
296 """return the uniq elements of iterable g"""
297 return dict.fromkeys(g).keys()
297 return dict.fromkeys(g).keys()
298
298
299 def sort(l):
299 def sort(l):
300 if not isinstance(l, list):
300 if not isinstance(l, list):
301 l = list(l)
301 l = list(l)
302 l.sort()
302 l.sort()
303 return l
303 return l
304
304
305 def increasingchunks(source, min=1024, max=65536):
305 def increasingchunks(source, min=1024, max=65536):
306 '''return no less than min bytes per chunk while data remains,
306 '''return no less than min bytes per chunk while data remains,
307 doubling min after each chunk until it reaches max'''
307 doubling min after each chunk until it reaches max'''
308 def log2(x):
308 def log2(x):
309 if not x:
309 if not x:
310 return 0
310 return 0
311 i = 0
311 i = 0
312 while x:
312 while x:
313 x >>= 1
313 x >>= 1
314 i += 1
314 i += 1
315 return i - 1
315 return i - 1
316
316
317 buf = []
317 buf = []
318 blen = 0
318 blen = 0
319 for chunk in source:
319 for chunk in source:
320 buf.append(chunk)
320 buf.append(chunk)
321 blen += len(chunk)
321 blen += len(chunk)
322 if blen >= min:
322 if blen >= min:
323 if min < max:
323 if min < max:
324 min = min << 1
324 min = min << 1
325 nmin = 1 << log2(blen)
325 nmin = 1 << log2(blen)
326 if nmin > min:
326 if nmin > min:
327 min = nmin
327 min = nmin
328 if min > max:
328 if min > max:
329 min = max
329 min = max
330 yield ''.join(buf)
330 yield ''.join(buf)
331 blen = 0
331 blen = 0
332 buf = []
332 buf = []
333 if buf:
333 if buf:
334 yield ''.join(buf)
334 yield ''.join(buf)
335
335
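increasingchunks exists so that a stream of many small chunks is written out as a few progressively larger buffers, starting at min and doubling toward max. A small check of the coalescing behaviour, assuming the generator above is importable from mercurial.util:

    from mercurial.util import increasingchunks

    pieces = ['x'] * 5000   # 5000 one-byte chunks
    sizes = [len(c) for c in increasingchunks(pieces, min=1024, max=4096)]
    print(sizes)            # [1024, 2048, 1928]
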
336 class Abort(Exception):
336 class Abort(Exception):
337 """Raised if a command needs to print an error and exit."""
337 """Raised if a command needs to print an error and exit."""
338
338
339 def always(fn): return True
339 def always(fn): return True
340 def never(fn): return False
340 def never(fn): return False
341
341
342 def expand_glob(pats):
342 def expand_glob(pats):
343 '''On Windows, expand the implicit globs in a list of patterns'''
343 '''On Windows, expand the implicit globs in a list of patterns'''
344 if os.name != 'nt':
344 if os.name != 'nt':
345 return list(pats)
345 return list(pats)
346 ret = []
346 ret = []
347 for p in pats:
347 for p in pats:
348 kind, name = patkind(p, None)
348 kind, name = patkind(p, None)
349 if kind is None:
349 if kind is None:
350 globbed = glob.glob(name)
350 globbed = glob.glob(name)
351 if globbed:
351 if globbed:
352 ret.extend(globbed)
352 ret.extend(globbed)
353 continue
353 continue
354 # if we couldn't expand the glob, just keep it around
354 # if we couldn't expand the glob, just keep it around
355 ret.append(p)
355 ret.append(p)
356 return ret
356 return ret
357
357
358 def patkind(name, default):
358 def patkind(name, default):
359 """Split a string into an optional pattern kind prefix and the
359 """Split a string into an optional pattern kind prefix and the
360 actual pattern."""
360 actual pattern."""
361 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
361 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
362 if name.startswith(prefix + ':'): return name.split(':', 1)
362 if name.startswith(prefix + ':'): return name.split(':', 1)
363 return default, name
363 return default, name
364
364
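patkind only strips a recognised 'kind:' prefix; anything else falls through to the supplied default. Note the small asymmetry: a recognised prefix comes back as the two-element list produced by split, while the default case returns a tuple:

    from mercurial.util import patkind

    print(patkind('glob:*.c', 'relpath'))   # ['glob', '*.c']
    print(patkind('Makefile', 'relpath'))   # ('relpath', 'Makefile')
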
365 def globre(pat, head='^', tail='$'):
365 def globre(pat, head='^', tail='$'):
366 "convert a glob pattern into a regexp"
366 "convert a glob pattern into a regexp"
367 i, n = 0, len(pat)
367 i, n = 0, len(pat)
368 res = ''
368 res = ''
369 group = 0
369 group = 0
370 def peek(): return i < n and pat[i]
370 def peek(): return i < n and pat[i]
371 while i < n:
371 while i < n:
372 c = pat[i]
372 c = pat[i]
373 i = i+1
373 i = i+1
374 if c == '*':
374 if c == '*':
375 if peek() == '*':
375 if peek() == '*':
376 i += 1
376 i += 1
377 res += '.*'
377 res += '.*'
378 else:
378 else:
379 res += '[^/]*'
379 res += '[^/]*'
380 elif c == '?':
380 elif c == '?':
381 res += '.'
381 res += '.'
382 elif c == '[':
382 elif c == '[':
383 j = i
383 j = i
384 if j < n and pat[j] in '!]':
384 if j < n and pat[j] in '!]':
385 j += 1
385 j += 1
386 while j < n and pat[j] != ']':
386 while j < n and pat[j] != ']':
387 j += 1
387 j += 1
388 if j >= n:
388 if j >= n:
389 res += '\\['
389 res += '\\['
390 else:
390 else:
391 stuff = pat[i:j].replace('\\','\\\\')
391 stuff = pat[i:j].replace('\\','\\\\')
392 i = j + 1
392 i = j + 1
393 if stuff[0] == '!':
393 if stuff[0] == '!':
394 stuff = '^' + stuff[1:]
394 stuff = '^' + stuff[1:]
395 elif stuff[0] == '^':
395 elif stuff[0] == '^':
396 stuff = '\\' + stuff
396 stuff = '\\' + stuff
397 res = '%s[%s]' % (res, stuff)
397 res = '%s[%s]' % (res, stuff)
398 elif c == '{':
398 elif c == '{':
399 group += 1
399 group += 1
400 res += '(?:'
400 res += '(?:'
401 elif c == '}' and group:
401 elif c == '}' and group:
402 res += ')'
402 res += ')'
403 group -= 1
403 group -= 1
404 elif c == ',' and group:
404 elif c == ',' and group:
405 res += '|'
405 res += '|'
406 elif c == '\\':
406 elif c == '\\':
407 p = peek()
407 p = peek()
408 if p:
408 if p:
409 i += 1
409 i += 1
410 res += re.escape(p)
410 res += re.escape(p)
411 else:
411 else:
412 res += re.escape(c)
412 res += re.escape(c)
413 else:
413 else:
414 res += re.escape(c)
414 res += re.escape(c)
415 return head + res + tail
415 return head + res + tail
416
416
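In the glob dialect handled by globre, '*' stops at path separators, '**' crosses them, '?' matches a single character and '{a,b}' is alternation. A quick behavioural check, assuming globre is importable from mercurial.util:

    import re
    from mercurial.util import globre

    print(bool(re.match(globre('foo/**/*.py'), 'foo/bar/baz.py')))  # True
    print(bool(re.match(globre('foo/**/*.py'), 'foo.py')))          # False
    print(bool(re.match(globre('*.{c,h}'), 'x.h')))                 # True
    print(bool(re.match(globre('*.{c,h}'), 'sub/x.h')))             # False
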
417 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
417 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
418
418
419 def pathto(root, n1, n2):
419 def pathto(root, n1, n2):
420 '''return the relative path from one place to another.
420 '''return the relative path from one place to another.
421 root should use os.sep to separate directories
421 root should use os.sep to separate directories
422 n1 should use os.sep to separate directories
422 n1 should use os.sep to separate directories
423 n2 should use "/" to separate directories
423 n2 should use "/" to separate directories
424 returns an os.sep-separated path.
424 returns an os.sep-separated path.
425
425
426 If n1 is a relative path, it's assumed it's
426 If n1 is a relative path, it's assumed it's
427 relative to root.
427 relative to root.
428 n2 should always be relative to root.
428 n2 should always be relative to root.
429 '''
429 '''
430 if not n1: return localpath(n2)
430 if not n1: return localpath(n2)
431 if os.path.isabs(n1):
431 if os.path.isabs(n1):
432 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
432 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
433 return os.path.join(root, localpath(n2))
433 return os.path.join(root, localpath(n2))
434 n2 = '/'.join((pconvert(root), n2))
434 n2 = '/'.join((pconvert(root), n2))
435 a, b = splitpath(n1), n2.split('/')
435 a, b = splitpath(n1), n2.split('/')
436 a.reverse()
436 a.reverse()
437 b.reverse()
437 b.reverse()
438 while a and b and a[-1] == b[-1]:
438 while a and b and a[-1] == b[-1]:
439 a.pop()
439 a.pop()
440 b.pop()
440 b.pop()
441 b.reverse()
441 b.reverse()
442 return os.sep.join((['..'] * len(a)) + b) or '.'
442 return os.sep.join((['..'] * len(a)) + b) or '.'
443
443
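A concrete case for pathto, with both n1 and n2 relative to the root as the docstring requires (shown with POSIX separators; the result is joined with os.sep):

    from mercurial.util import pathto

    # from <root>/src/lib, <root>/docs/readme.txt is two levels up
    print(pathto('/repo', 'src/lib', 'docs/readme.txt'))   # ../../docs/readme.txt
    # a file in the starting directory needs no '..' at all
    print(pathto('/repo', 'src/lib', 'src/lib/util.py'))   # util.py
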
444 def canonpath(root, cwd, myname):
444 def canonpath(root, cwd, myname):
445 """return the canonical path of myname, given cwd and root"""
445 """return the canonical path of myname, given cwd and root"""
446 if root == os.sep:
446 if root == os.sep:
447 rootsep = os.sep
447 rootsep = os.sep
448 elif endswithsep(root):
448 elif endswithsep(root):
449 rootsep = root
449 rootsep = root
450 else:
450 else:
451 rootsep = root + os.sep
451 rootsep = root + os.sep
452 name = myname
452 name = myname
453 if not os.path.isabs(name):
453 if not os.path.isabs(name):
454 name = os.path.join(root, cwd, name)
454 name = os.path.join(root, cwd, name)
455 name = os.path.normpath(name)
455 name = os.path.normpath(name)
456 audit_path = path_auditor(root)
456 audit_path = path_auditor(root)
457 if name != rootsep and name.startswith(rootsep):
457 if name != rootsep and name.startswith(rootsep):
458 name = name[len(rootsep):]
458 name = name[len(rootsep):]
459 audit_path(name)
459 audit_path(name)
460 return pconvert(name)
460 return pconvert(name)
461 elif name == root:
461 elif name == root:
462 return ''
462 return ''
463 else:
463 else:
464 # Determine whether `name' is in the hierarchy at or beneath `root',
464 # Determine whether `name' is in the hierarchy at or beneath `root',
465 # by iterating name=dirname(name) until that causes no change (can't
465 # by iterating name=dirname(name) until that causes no change (can't
466 # check name == '/', because that doesn't work on windows). For each
466 # check name == '/', because that doesn't work on windows). For each
467 # `name', compare dev/inode numbers. If they match, the list `rel'
467 # `name', compare dev/inode numbers. If they match, the list `rel'
468 # holds the reversed list of components making up the relative file
468 # holds the reversed list of components making up the relative file
469 # name we want.
469 # name we want.
470 root_st = os.stat(root)
470 root_st = os.stat(root)
471 rel = []
471 rel = []
472 while True:
472 while True:
473 try:
473 try:
474 name_st = os.stat(name)
474 name_st = os.stat(name)
475 except OSError:
475 except OSError:
476 break
476 break
477 if samestat(name_st, root_st):
477 if samestat(name_st, root_st):
478 if not rel:
478 if not rel:
479 # name was actually the same as root (maybe a symlink)
479 # name was actually the same as root (maybe a symlink)
480 return ''
480 return ''
481 rel.reverse()
481 rel.reverse()
482 name = os.path.join(*rel)
482 name = os.path.join(*rel)
483 audit_path(name)
483 audit_path(name)
484 return pconvert(name)
484 return pconvert(name)
485 dirname, basename = os.path.split(name)
485 dirname, basename = os.path.split(name)
486 rel.append(basename)
486 rel.append(basename)
487 if dirname == name:
487 if dirname == name:
488 break
488 break
489 name = dirname
489 name = dirname
490
490
491 raise Abort('%s not under root' % myname)
491 raise Abort('%s not under root' % myname)
492
492
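canonpath turns any user-supplied spelling of a path into a root-relative, '/'-separated name, or aborts if it escapes the root. A small illustration using the current directory as the root; it assumes the illustrative components ('a', 'x') do not collide with a symlink or a nested repository, which the path auditor would reject:

    import os
    from mercurial.util import canonpath

    root = os.getcwd()
    print(canonpath(root, '', 'a/b/../c.txt'))            # a/c.txt
    print(canonpath(root, '', os.path.join(root, 'x')))   # x
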
493 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
493 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
494 """build a function to match a set of file patterns
494 """build a function to match a set of file patterns
495
495
496 arguments:
496 arguments:
497 canonroot - the canonical root of the tree you're matching against
497 canonroot - the canonical root of the tree you're matching against
498 cwd - the current working directory, if relevant
498 cwd - the current working directory, if relevant
499 names - patterns to find
499 names - patterns to find
500 inc - patterns to include
500 inc - patterns to include
501 exc - patterns to exclude
501 exc - patterns to exclude
502 dflt_pat - if a pattern in names has no explicit type, assume this one
502 dflt_pat - if a pattern in names has no explicit type, assume this one
503 src - where these patterns came from (e.g. .hgignore)
503 src - where these patterns came from (e.g. .hgignore)
504
504
505 a pattern is one of:
505 a pattern is one of:
506 'glob:<glob>' - a glob relative to cwd
506 'glob:<glob>' - a glob relative to cwd
507 're:<regexp>' - a regular expression
507 're:<regexp>' - a regular expression
508 'path:<path>' - a path relative to canonroot
508 'path:<path>' - a path relative to canonroot
509 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
509 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
510 'relpath:<path>' - a path relative to cwd
510 'relpath:<path>' - a path relative to cwd
511 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
511 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
512 '<something>' - one of the cases above, selected by the dflt_pat argument
512 '<something>' - one of the cases above, selected by the dflt_pat argument
513
513
514 returns:
514 returns:
515 a 3-tuple containing
515 a 3-tuple containing
516 - list of roots (places where one should start a recursive walk of the fs);
516 - list of roots (places where one should start a recursive walk of the fs);
517 this often matches the explicit non-pattern names passed in, but also
517 this often matches the explicit non-pattern names passed in, but also
518 includes the initial part of glob: patterns that has no glob characters
518 includes the initial part of glob: patterns that has no glob characters
519 - a bool match(filename) function
519 - a bool match(filename) function
520 - a bool indicating if any patterns were passed in
520 - a bool indicating if any patterns were passed in
521 """
521 """
522
522
523 # a common case: no patterns at all
523 # a common case: no patterns at all
524 if not names and not inc and not exc:
524 if not names and not inc and not exc:
525 return [], always, False
525 return [], always, False
526
526
527 def contains_glob(name):
527 def contains_glob(name):
528 for c in name:
528 for c in name:
529 if c in _globchars: return True
529 if c in _globchars: return True
530 return False
530 return False
531
531
532 def regex(kind, name, tail):
532 def regex(kind, name, tail):
533 '''convert a pattern into a regular expression'''
533 '''convert a pattern into a regular expression'''
534 if not name:
534 if not name:
535 return ''
535 return ''
536 if kind == 're':
536 if kind == 're':
537 return name
537 return name
538 elif kind == 'path':
538 elif kind == 'path':
539 return '^' + re.escape(name) + '(?:/|$)'
539 return '^' + re.escape(name) + '(?:/|$)'
540 elif kind == 'relglob':
540 elif kind == 'relglob':
541 return globre(name, '(?:|.*/)', tail)
541 return globre(name, '(?:|.*/)', tail)
542 elif kind == 'relpath':
542 elif kind == 'relpath':
543 return re.escape(name) + '(?:/|$)'
543 return re.escape(name) + '(?:/|$)'
544 elif kind == 'relre':
544 elif kind == 'relre':
545 if name.startswith('^'):
545 if name.startswith('^'):
546 return name
546 return name
547 return '.*' + name
547 return '.*' + name
548 return globre(name, '', tail)
548 return globre(name, '', tail)
549
549
550 def matchfn(pats, tail):
550 def matchfn(pats, tail):
551 """build a matching function from a set of patterns"""
551 """build a matching function from a set of patterns"""
552 if not pats:
552 if not pats:
553 return
553 return
554 try:
554 try:
555 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
555 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
556 if len(pat) > 20000:
556 if len(pat) > 20000:
557 raise OverflowError()
557 raise OverflowError()
558 return re.compile(pat).match
558 return re.compile(pat).match
559 except OverflowError:
559 except OverflowError:
560 # We're using a Python with a tiny regex engine and we
560 # We're using a Python with a tiny regex engine and we
561 # made it explode, so we'll divide the pattern list in two
561 # made it explode, so we'll divide the pattern list in two
562 # until it works
562 # until it works
563 l = len(pats)
563 l = len(pats)
564 if l < 2:
564 if l < 2:
565 raise
565 raise
566 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
566 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
567 return lambda s: a(s) or b(s)
567 return lambda s: a(s) or b(s)
568 except re.error:
568 except re.error:
569 for k, p in pats:
569 for k, p in pats:
570 try:
570 try:
571 re.compile('(?:%s)' % regex(k, p, tail))
571 re.compile('(?:%s)' % regex(k, p, tail))
572 except re.error:
572 except re.error:
573 if src:
573 if src:
574 raise Abort("%s: invalid pattern (%s): %s" %
574 raise Abort("%s: invalid pattern (%s): %s" %
575 (src, k, p))
575 (src, k, p))
576 else:
576 else:
577 raise Abort("invalid pattern (%s): %s" % (k, p))
577 raise Abort("invalid pattern (%s): %s" % (k, p))
578 raise Abort("invalid pattern")
578 raise Abort("invalid pattern")
579
579
580 def globprefix(pat):
580 def globprefix(pat):
581 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
581 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
582 root = []
582 root = []
583 for p in pat.split('/'):
583 for p in pat.split('/'):
584 if contains_glob(p): break
584 if contains_glob(p): break
585 root.append(p)
585 root.append(p)
586 return '/'.join(root) or '.'
586 return '/'.join(root) or '.'
587
587
588 def normalizepats(names, default):
588 def normalizepats(names, default):
589 pats = []
589 pats = []
590 roots = []
590 roots = []
591 anypats = False
591 anypats = False
592 for kind, name in [patkind(p, default) for p in names]:
592 for kind, name in [patkind(p, default) for p in names]:
593 if kind in ('glob', 'relpath'):
593 if kind in ('glob', 'relpath'):
594 name = canonpath(canonroot, cwd, name)
594 name = canonpath(canonroot, cwd, name)
595 elif kind in ('relglob', 'path'):
595 elif kind in ('relglob', 'path'):
596 name = normpath(name)
596 name = normpath(name)
597
597
598 pats.append((kind, name))
598 pats.append((kind, name))
599
599
600 if kind in ('glob', 're', 'relglob', 'relre'):
600 if kind in ('glob', 're', 'relglob', 'relre'):
601 anypats = True
601 anypats = True
602
602
603 if kind == 'glob':
603 if kind == 'glob':
604 root = globprefix(name)
604 root = globprefix(name)
605 roots.append(root)
605 roots.append(root)
606 elif kind in ('relpath', 'path'):
606 elif kind in ('relpath', 'path'):
607 roots.append(name or '.')
607 roots.append(name or '.')
608 elif kind == 'relglob':
608 elif kind == 'relglob':
609 roots.append('.')
609 roots.append('.')
610 return roots, pats, anypats
610 return roots, pats, anypats
611
611
612 roots, pats, anypats = normalizepats(names, dflt_pat)
612 roots, pats, anypats = normalizepats(names, dflt_pat)
613
613
614 patmatch = matchfn(pats, '$') or always
614 patmatch = matchfn(pats, '$') or always
615 incmatch = always
615 incmatch = always
616 if inc:
616 if inc:
617 dummy, inckinds, dummy = normalizepats(inc, 'glob')
617 dummy, inckinds, dummy = normalizepats(inc, 'glob')
618 incmatch = matchfn(inckinds, '(?:/|$)')
618 incmatch = matchfn(inckinds, '(?:/|$)')
619 excmatch = never
619 excmatch = never
620 if exc:
620 if exc:
621 dummy, exckinds, dummy = normalizepats(exc, 'glob')
621 dummy, exckinds, dummy = normalizepats(exc, 'glob')
622 excmatch = matchfn(exckinds, '(?:/|$)')
622 excmatch = matchfn(exckinds, '(?:/|$)')
623
623
624 if not names and inc and not exc:
624 if not names and inc and not exc:
625 # common case: hgignore patterns
625 # common case: hgignore patterns
626 match = incmatch
626 match = incmatch
627 else:
627 else:
628 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
628 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
629
629
630 return (roots, match, (inc or exc or anypats) and True)
630 return (roots, match, (inc or exc or anypats) and True)
631
631
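# Editor's note: the OverflowError branch above splits an oversized pattern
# list in two and ORs the resulting matchers. A minimal standalone sketch of
# that divide-and-conquer technique (illustration only, not part of this
# changeset; pattern contents are hypothetical):
import re

def compile_alternation(patterns):
    try:
        return re.compile('(?:%s)' % '|'.join(patterns)).match
    except OverflowError:
        if len(patterns) < 2:
            raise
        mid = len(patterns) // 2
        a = compile_alternation(patterns[:mid])
        b = compile_alternation(patterns[mid:])
        return lambda s: a(s) or b(s)

# match = compile_alternation(['foo/.*', 'bar[0-9]+'])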
632 _hgexecutable = None
632 _hgexecutable = None
633
633
634 def main_is_frozen():
634 def main_is_frozen():
635 """return True if we are a frozen executable.
635 """return True if we are a frozen executable.
636
636
637 The code supports py2exe (most common, Windows only) and tools/freeze
637 The code supports py2exe (most common, Windows only) and tools/freeze
638 (portable, not much used).
638 (portable, not much used).
639 """
639 """
640 return (hasattr(sys, "frozen") or # new py2exe
640 return (hasattr(sys, "frozen") or # new py2exe
641 hasattr(sys, "importers") or # old py2exe
641 hasattr(sys, "importers") or # old py2exe
642 imp.is_frozen("__main__")) # tools/freeze
642 imp.is_frozen("__main__")) # tools/freeze
643
643
644 def hgexecutable():
644 def hgexecutable():
645 """return location of the 'hg' executable.
645 """return location of the 'hg' executable.
646
646
647 Defaults to $HG or 'hg' in the search path.
647 Defaults to $HG or 'hg' in the search path.
648 """
648 """
649 if _hgexecutable is None:
649 if _hgexecutable is None:
650 hg = os.environ.get('HG')
650 hg = os.environ.get('HG')
651 if hg:
651 if hg:
652 set_hgexecutable(hg)
652 set_hgexecutable(hg)
653 elif main_is_frozen():
653 elif main_is_frozen():
654 set_hgexecutable(sys.executable)
654 set_hgexecutable(sys.executable)
655 else:
655 else:
656 set_hgexecutable(find_exe('hg', 'hg'))
656 set_hgexecutable(find_exe('hg') or 'hg')
657 return _hgexecutable
657 return _hgexecutable
658
658
659 def set_hgexecutable(path):
659 def set_hgexecutable(path):
660 """set location of the 'hg' executable"""
660 """set location of the 'hg' executable"""
661 global _hgexecutable
661 global _hgexecutable
662 _hgexecutable = path
662 _hgexecutable = path
663
663
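# Editor's note: a minimal sketch of the resolution order above (illustration
# only; assumes this module is importable as mercurial.util and the paths are
# hypothetical):
import os
from mercurial import util

os.environ['HG'] = '/usr/local/bin/hg'
print(util.hgexecutable())        # $HG wins: '/usr/local/bin/hg'

del os.environ['HG']
util.set_hgexecutable(None)       # clear the cache so it is recomputed
print(util.hgexecutable())        # find_exe('hg') result, or the bare 'hg' fallback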
664 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
664 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
665 '''enhanced shell command execution.
665 '''enhanced shell command execution.
666 run with environment maybe modified, maybe in different dir.
666 run with environment maybe modified, maybe in different dir.
667
667
668 if command fails and onerr is None, return status. if ui object,
668 if command fails and onerr is None, return status. if ui object,
669 print error message and return status, else raise onerr object as
669 print error message and return status, else raise onerr object as
670 exception.'''
670 exception.'''
671 def py2shell(val):
671 def py2shell(val):
672 'convert python object into string that is useful to shell'
672 'convert python object into string that is useful to shell'
673 if val in (None, False):
673 if val in (None, False):
674 return '0'
674 return '0'
675 if val == True:
675 if val == True:
676 return '1'
676 return '1'
677 return str(val)
677 return str(val)
678 oldenv = {}
678 oldenv = {}
679 for k in environ:
679 for k in environ:
680 oldenv[k] = os.environ.get(k)
680 oldenv[k] = os.environ.get(k)
681 if cwd is not None:
681 if cwd is not None:
682 oldcwd = os.getcwd()
682 oldcwd = os.getcwd()
683 origcmd = cmd
683 origcmd = cmd
684 if os.name == 'nt':
684 if os.name == 'nt':
685 cmd = '"%s"' % cmd
685 cmd = '"%s"' % cmd
686 try:
686 try:
687 for k, v in environ.iteritems():
687 for k, v in environ.iteritems():
688 os.environ[k] = py2shell(v)
688 os.environ[k] = py2shell(v)
689 os.environ['HG'] = hgexecutable()
689 os.environ['HG'] = hgexecutable()
690 if cwd is not None and oldcwd != cwd:
690 if cwd is not None and oldcwd != cwd:
691 os.chdir(cwd)
691 os.chdir(cwd)
692 rc = os.system(cmd)
692 rc = os.system(cmd)
693 if sys.platform == 'OpenVMS' and rc & 1:
693 if sys.platform == 'OpenVMS' and rc & 1:
694 rc = 0
694 rc = 0
695 if rc and onerr:
695 if rc and onerr:
696 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
696 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
697 explain_exit(rc)[0])
697 explain_exit(rc)[0])
698 if errprefix:
698 if errprefix:
699 errmsg = '%s: %s' % (errprefix, errmsg)
699 errmsg = '%s: %s' % (errprefix, errmsg)
700 try:
700 try:
701 onerr.warn(errmsg + '\n')
701 onerr.warn(errmsg + '\n')
702 except AttributeError:
702 except AttributeError:
703 raise onerr(errmsg)
703 raise onerr(errmsg)
704 return rc
704 return rc
705 finally:
705 finally:
706 for k, v in oldenv.iteritems():
706 for k, v in oldenv.iteritems():
707 if v is None:
707 if v is None:
708 del os.environ[k]
708 del os.environ[k]
709 else:
709 else:
710 os.environ[k] = v
710 os.environ[k] = v
711 if cwd is not None and oldcwd != cwd:
711 if cwd is not None and oldcwd != cwd:
712 os.chdir(oldcwd)
712 os.chdir(oldcwd)
713
713
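# Editor's note: a short usage sketch for system() above (illustration only;
# util.Abort is the exception class raised elsewhere in this module):
from mercurial import util

# Run a command with a tweaked environment and working directory; raise
# util.Abort with a prefixed message if it exits non-zero.
util.system('ls no-such-file',
            environ={'LC_ALL': 'C'},
            cwd='/tmp',
            onerr=util.Abort,
            errprefix='listing failed')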
714 def checksignature(func):
714 def checksignature(func):
715 '''wrap a function with code to check for calling errors'''
715 '''wrap a function with code to check for calling errors'''
716 def check(*args, **kwargs):
716 def check(*args, **kwargs):
717 try:
717 try:
718 return func(*args, **kwargs)
718 return func(*args, **kwargs)
719 except TypeError:
719 except TypeError:
720 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
720 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
721 raise error.SignatureError
721 raise error.SignatureError
722 raise
722 raise
723
723
724 return check
724 return check
725
725
726 # os.path.lexists is not available on python2.3
726 # os.path.lexists is not available on python2.3
727 def lexists(filename):
727 def lexists(filename):
728 "test whether a file with this name exists. does not follow symlinks"
728 "test whether a file with this name exists. does not follow symlinks"
729 try:
729 try:
730 os.lstat(filename)
730 os.lstat(filename)
731 except:
731 except:
732 return False
732 return False
733 return True
733 return True
734
734
735 def rename(src, dst):
735 def rename(src, dst):
736 """forcibly rename a file"""
736 """forcibly rename a file"""
737 try:
737 try:
738 os.rename(src, dst)
738 os.rename(src, dst)
739 except OSError, err: # FIXME: check err (EEXIST ?)
739 except OSError, err: # FIXME: check err (EEXIST ?)
740 # on windows, rename to existing file is not allowed, so we
740 # on windows, rename to existing file is not allowed, so we
741 # must delete destination first. but if file is open, unlink
741 # must delete destination first. but if file is open, unlink
742 # schedules it for delete but does not delete it. rename
742 # schedules it for delete but does not delete it. rename
743 # happens immediately even for open files, so we create
743 # happens immediately even for open files, so we create
744 # temporary file, delete it, rename destination to that name,
744 # temporary file, delete it, rename destination to that name,
745 # then delete that. then rename is safe to do.
745 # then delete that. then rename is safe to do.
746 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
746 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
747 os.close(fd)
747 os.close(fd)
748 os.unlink(temp)
748 os.unlink(temp)
749 os.rename(dst, temp)
749 os.rename(dst, temp)
750 os.unlink(temp)
750 os.unlink(temp)
751 os.rename(src, dst)
751 os.rename(src, dst)
752
752
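# Editor's note: a tiny sketch of the forced rename above (illustration only):
from mercurial import util

open('a.txt', 'w').write('new contents\n')
open('b.txt', 'w').write('old contents\n')
util.rename('a.txt', 'b.txt')    # succeeds even though b.txt already exists
print(open('b.txt').read())      # -> 'new contents\n'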
753 def unlink(f):
753 def unlink(f):
754 """unlink and remove the directory if it is empty"""
754 """unlink and remove the directory if it is empty"""
755 os.unlink(f)
755 os.unlink(f)
756 # try removing directories that might now be empty
756 # try removing directories that might now be empty
757 try:
757 try:
758 os.removedirs(os.path.dirname(f))
758 os.removedirs(os.path.dirname(f))
759 except OSError:
759 except OSError:
760 pass
760 pass
761
761
762 def copyfile(src, dest):
762 def copyfile(src, dest):
763 "copy a file, preserving mode"
763 "copy a file, preserving mode"
764 if os.path.islink(src):
764 if os.path.islink(src):
765 try:
765 try:
766 os.unlink(dest)
766 os.unlink(dest)
767 except:
767 except:
768 pass
768 pass
769 os.symlink(os.readlink(src), dest)
769 os.symlink(os.readlink(src), dest)
770 else:
770 else:
771 try:
771 try:
772 shutil.copyfile(src, dest)
772 shutil.copyfile(src, dest)
773 shutil.copymode(src, dest)
773 shutil.copymode(src, dest)
774 except shutil.Error, inst:
774 except shutil.Error, inst:
775 raise Abort(str(inst))
775 raise Abort(str(inst))
776
776
777 def copyfiles(src, dst, hardlink=None):
777 def copyfiles(src, dst, hardlink=None):
778 """Copy a directory tree using hardlinks if possible"""
778 """Copy a directory tree using hardlinks if possible"""
779
779
780 if hardlink is None:
780 if hardlink is None:
781 hardlink = (os.stat(src).st_dev ==
781 hardlink = (os.stat(src).st_dev ==
782 os.stat(os.path.dirname(dst)).st_dev)
782 os.stat(os.path.dirname(dst)).st_dev)
783
783
784 if os.path.isdir(src):
784 if os.path.isdir(src):
785 os.mkdir(dst)
785 os.mkdir(dst)
786 for name, kind in osutil.listdir(src):
786 for name, kind in osutil.listdir(src):
787 srcname = os.path.join(src, name)
787 srcname = os.path.join(src, name)
788 dstname = os.path.join(dst, name)
788 dstname = os.path.join(dst, name)
789 copyfiles(srcname, dstname, hardlink)
789 copyfiles(srcname, dstname, hardlink)
790 else:
790 else:
791 if hardlink:
791 if hardlink:
792 try:
792 try:
793 os_link(src, dst)
793 os_link(src, dst)
794 except (IOError, OSError):
794 except (IOError, OSError):
795 hardlink = False
795 hardlink = False
796 shutil.copy(src, dst)
796 shutil.copy(src, dst)
797 else:
797 else:
798 shutil.copy(src, dst)
798 shutil.copy(src, dst)
799
799
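# Editor's note: a minimal sketch of copyfiles (illustration only; the source
# path is hypothetical). With hardlink=None linking is attempted only when
# src and the parent of dst share a device, and it silently falls back to
# plain copies if a link cannot be made.
import os
from mercurial import util

os.mkdir('backup')
util.copyfiles('repo/.hg/store/data', 'backup/data', hardlink=None)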
800 class path_auditor(object):
800 class path_auditor(object):
801 '''ensure that a filesystem path contains no banned components.
801 '''ensure that a filesystem path contains no banned components.
802 the following properties of a path are checked:
802 the following properties of a path are checked:
803
803
804 - under top-level .hg
804 - under top-level .hg
805 - starts at the root of a windows drive
805 - starts at the root of a windows drive
806 - contains ".."
806 - contains ".."
807 - traverses a symlink (e.g. a/symlink_here/b)
807 - traverses a symlink (e.g. a/symlink_here/b)
808 - inside a nested repository'''
808 - inside a nested repository'''
809
809
810 def __init__(self, root):
810 def __init__(self, root):
811 self.audited = set()
811 self.audited = set()
812 self.auditeddir = set()
812 self.auditeddir = set()
813 self.root = root
813 self.root = root
814
814
815 def __call__(self, path):
815 def __call__(self, path):
816 if path in self.audited:
816 if path in self.audited:
817 return
817 return
818 normpath = os.path.normcase(path)
818 normpath = os.path.normcase(path)
819 parts = splitpath(normpath)
819 parts = splitpath(normpath)
820 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '.hg.', '')
820 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '.hg.', '')
821 or os.pardir in parts):
821 or os.pardir in parts):
822 raise Abort(_("path contains illegal component: %s") % path)
822 raise Abort(_("path contains illegal component: %s") % path)
823 if '.hg' in path:
823 if '.hg' in path:
824 for p in '.hg', '.hg.':
824 for p in '.hg', '.hg.':
825 if p in parts[1:-1]:
825 if p in parts[1:-1]:
826 pos = parts.index(p)
826 pos = parts.index(p)
827 base = os.path.join(*parts[:pos])
827 base = os.path.join(*parts[:pos])
828 raise Abort(_('path %r is inside repo %r') % (path, base))
828 raise Abort(_('path %r is inside repo %r') % (path, base))
829 def check(prefix):
829 def check(prefix):
830 curpath = os.path.join(self.root, prefix)
830 curpath = os.path.join(self.root, prefix)
831 try:
831 try:
832 st = os.lstat(curpath)
832 st = os.lstat(curpath)
833 except OSError, err:
833 except OSError, err:
834 # EINVAL can be raised as invalid path syntax under win32.
834 # EINVAL can be raised as invalid path syntax under win32.
835 # They must be ignored for patterns can be checked too.
835 # They must be ignored for patterns can be checked too.
836 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
836 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
837 raise
837 raise
838 else:
838 else:
839 if stat.S_ISLNK(st.st_mode):
839 if stat.S_ISLNK(st.st_mode):
840 raise Abort(_('path %r traverses symbolic link %r') %
840 raise Abort(_('path %r traverses symbolic link %r') %
841 (path, prefix))
841 (path, prefix))
842 elif (stat.S_ISDIR(st.st_mode) and
842 elif (stat.S_ISDIR(st.st_mode) and
843 os.path.isdir(os.path.join(curpath, '.hg'))):
843 os.path.isdir(os.path.join(curpath, '.hg'))):
844 raise Abort(_('path %r is inside repo %r') %
844 raise Abort(_('path %r is inside repo %r') %
845 (path, prefix))
845 (path, prefix))
846 parts.pop()
846 parts.pop()
847 prefixes = []
847 prefixes = []
848 for n in range(len(parts)):
848 for n in range(len(parts)):
849 prefix = os.sep.join(parts)
849 prefix = os.sep.join(parts)
850 if prefix in self.auditeddir:
850 if prefix in self.auditeddir:
851 break
851 break
852 check(prefix)
852 check(prefix)
853 prefixes.append(prefix)
853 prefixes.append(prefix)
854 parts.pop()
854 parts.pop()
855
855
856 self.audited.add(path)
856 self.audited.add(path)
857 # only add prefixes to the cache after checking everything: we don't
857 # only add prefixes to the cache after checking everything: we don't
858 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
858 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
859 self.auditeddir.update(prefixes)
859 self.auditeddir.update(prefixes)
860
860
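# Editor's note: a short sketch of the auditor above (illustration only; the
# repository root is hypothetical):
from mercurial import util

audit = util.path_auditor('/home/user/repo')
for p in ('src/module.py', '.hg/store/data', '../escape.txt'):
    try:
        audit(p)
        print('%s: ok' % p)
    except util.Abort, inst:
        print('%s: rejected (%s)' % (p, inst))
# only the first path is accepted; the others hit the .hg and '..' checks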
861 def _makelock_file(info, pathname):
861 def _makelock_file(info, pathname):
862 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
862 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
863 os.write(ld, info)
863 os.write(ld, info)
864 os.close(ld)
864 os.close(ld)
865
865
866 def _readlock_file(pathname):
866 def _readlock_file(pathname):
867 return posixfile(pathname).read()
867 return posixfile(pathname).read()
868
868
869 def nlinks(pathname):
869 def nlinks(pathname):
870 """Return number of hardlinks for the given file."""
870 """Return number of hardlinks for the given file."""
871 return os.lstat(pathname).st_nlink
871 return os.lstat(pathname).st_nlink
872
872
873 if hasattr(os, 'link'):
873 if hasattr(os, 'link'):
874 os_link = os.link
874 os_link = os.link
875 else:
875 else:
876 def os_link(src, dst):
876 def os_link(src, dst):
877 raise OSError(0, _("Hardlinks not supported"))
877 raise OSError(0, _("Hardlinks not supported"))
878
878
879 def fstat(fp):
879 def fstat(fp):
880 '''stat file object that may not have fileno method.'''
880 '''stat file object that may not have fileno method.'''
881 try:
881 try:
882 return os.fstat(fp.fileno())
882 return os.fstat(fp.fileno())
883 except AttributeError:
883 except AttributeError:
884 return os.stat(fp.name)
884 return os.stat(fp.name)
885
885
886 posixfile = file
886 posixfile = file
887
887
888 def openhardlinks():
888 def openhardlinks():
889 '''return true if it is safe to hold open file handles to hardlinks'''
889 '''return true if it is safe to hold open file handles to hardlinks'''
890 return True
890 return True
891
891
892 def _statfiles(files):
892 def _statfiles(files):
893 'Stat each file in files and yield stat or None if file does not exist.'
893 'Stat each file in files and yield stat or None if file does not exist.'
894 lstat = os.lstat
894 lstat = os.lstat
895 for nf in files:
895 for nf in files:
896 try:
896 try:
897 st = lstat(nf)
897 st = lstat(nf)
898 except OSError, err:
898 except OSError, err:
899 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
899 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
900 raise
900 raise
901 st = None
901 st = None
902 yield st
902 yield st
903
903
904 def _statfiles_clustered(files):
904 def _statfiles_clustered(files):
905 '''Stat each file in files and yield stat or None if file does not exist.
905 '''Stat each file in files and yield stat or None if file does not exist.
906 Cluster and cache stat per directory to minimize number of OS stat calls.'''
906 Cluster and cache stat per directory to minimize number of OS stat calls.'''
907 lstat = os.lstat
907 lstat = os.lstat
908 ncase = os.path.normcase
908 ncase = os.path.normcase
909 sep = os.sep
909 sep = os.sep
910 dircache = {} # dirname -> filename -> status | None if file does not exist
910 dircache = {} # dirname -> filename -> status | None if file does not exist
911 for nf in files:
911 for nf in files:
912 nf = ncase(nf)
912 nf = ncase(nf)
913 pos = nf.rfind(sep)
913 pos = nf.rfind(sep)
914 if pos == -1:
914 if pos == -1:
915 dir, base = '.', nf
915 dir, base = '.', nf
916 else:
916 else:
917 dir, base = nf[:pos+1], nf[pos+1:]
917 dir, base = nf[:pos+1], nf[pos+1:]
918 cache = dircache.get(dir, None)
918 cache = dircache.get(dir, None)
919 if cache is None:
919 if cache is None:
920 try:
920 try:
921 dmap = dict([(ncase(n), s)
921 dmap = dict([(ncase(n), s)
922 for n, k, s in osutil.listdir(dir, True)])
922 for n, k, s in osutil.listdir(dir, True)])
923 except OSError, err:
923 except OSError, err:
924 # handle directory not found in Python version prior to 2.5
924 # handle directory not found in Python version prior to 2.5
925 # Python <= 2.4 returns native Windows code 3 in errno
925 # Python <= 2.4 returns native Windows code 3 in errno
926 # Python >= 2.5 returns ENOENT and adds winerror field
926 # Python >= 2.5 returns ENOENT and adds winerror field
927 # EINVAL is raised if dir is not a directory.
927 # EINVAL is raised if dir is not a directory.
928 if err.errno not in (3, errno.ENOENT, errno.EINVAL,
928 if err.errno not in (3, errno.ENOENT, errno.EINVAL,
929 errno.ENOTDIR):
929 errno.ENOTDIR):
930 raise
930 raise
931 dmap = {}
931 dmap = {}
932 cache = dircache.setdefault(dir, dmap)
932 cache = dircache.setdefault(dir, dmap)
933 yield cache.get(base, None)
933 yield cache.get(base, None)
934
934
935 if sys.platform == 'win32':
935 if sys.platform == 'win32':
936 statfiles = _statfiles_clustered
936 statfiles = _statfiles_clustered
937 else:
937 else:
938 statfiles = _statfiles
938 statfiles = _statfiles
939
939
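# Editor's note: statfiles yields one result per input path, an os.lstat
# result for existing files and None for missing ones; the clustered variant
# is selected on Windows to batch the stats per directory. A tiny sketch
# (illustration only):
from mercurial import util

paths = ['.hgignore', 'no-such-file']
for path, st in zip(paths, util.statfiles(paths)):
    print('%s exists: %s' % (path, st is not None))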
940 getuser_fallback = None
940 getuser_fallback = None
941
941
942 def getuser():
942 def getuser():
943 '''return name of current user'''
943 '''return name of current user'''
944 try:
944 try:
945 return getpass.getuser()
945 return getpass.getuser()
946 except ImportError:
946 except ImportError:
947 # import of pwd will fail on windows - try fallback
947 # import of pwd will fail on windows - try fallback
948 if getuser_fallback:
948 if getuser_fallback:
949 return getuser_fallback()
949 return getuser_fallback()
950 # raised if win32api not available
950 # raised if win32api not available
951 raise Abort(_('user name not available - set USERNAME '
951 raise Abort(_('user name not available - set USERNAME '
952 'environment variable'))
952 'environment variable'))
953
953
954 def username(uid=None):
954 def username(uid=None):
955 """Return the name of the user with the given uid.
955 """Return the name of the user with the given uid.
956
956
957 If uid is None, return the name of the current user."""
957 If uid is None, return the name of the current user."""
958 try:
958 try:
959 import pwd
959 import pwd
960 if uid is None:
960 if uid is None:
961 uid = os.getuid()
961 uid = os.getuid()
962 try:
962 try:
963 return pwd.getpwuid(uid)[0]
963 return pwd.getpwuid(uid)[0]
964 except KeyError:
964 except KeyError:
965 return str(uid)
965 return str(uid)
966 except ImportError:
966 except ImportError:
967 return None
967 return None
968
968
969 def groupname(gid=None):
969 def groupname(gid=None):
970 """Return the name of the group with the given gid.
970 """Return the name of the group with the given gid.
971
971
972 If gid is None, return the name of the current group."""
972 If gid is None, return the name of the current group."""
973 try:
973 try:
974 import grp
974 import grp
975 if gid is None:
975 if gid is None:
976 gid = os.getgid()
976 gid = os.getgid()
977 try:
977 try:
978 return grp.getgrgid(gid)[0]
978 return grp.getgrgid(gid)[0]
979 except KeyError:
979 except KeyError:
980 return str(gid)
980 return str(gid)
981 except ImportError:
981 except ImportError:
982 return None
982 return None
983
983
984 # File system features
984 # File system features
985
985
986 def checkcase(path):
986 def checkcase(path):
987 """
987 """
988 Check whether the given path is on a case-sensitive filesystem
988 Check whether the given path is on a case-sensitive filesystem
989
989
990 Requires a path (like /foo/.hg) ending with a foldable final
990 Requires a path (like /foo/.hg) ending with a foldable final
991 directory component.
991 directory component.
992 """
992 """
993 s1 = os.stat(path)
993 s1 = os.stat(path)
994 d, b = os.path.split(path)
994 d, b = os.path.split(path)
995 p2 = os.path.join(d, b.upper())
995 p2 = os.path.join(d, b.upper())
996 if path == p2:
996 if path == p2:
997 p2 = os.path.join(d, b.lower())
997 p2 = os.path.join(d, b.lower())
998 try:
998 try:
999 s2 = os.stat(p2)
999 s2 = os.stat(p2)
1000 if s2 == s1:
1000 if s2 == s1:
1001 return False
1001 return False
1002 return True
1002 return True
1003 except:
1003 except:
1004 return True
1004 return True
1005
1005
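# Editor's note: a minimal sketch (illustration only; the probed path is
# hypothetical and must exist, with a final component that has upper/lower
# case variants as the docstring above requires):
from mercurial import util

if util.checkcase('/home/user/repo/.hg'):
    print('case-sensitive filesystem')
else:
    print('case-insensitive filesystem')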
1006 _fspathcache = {}
1006 _fspathcache = {}
1007 def fspath(name, root):
1007 def fspath(name, root):
1008 '''Get name in the case stored in the filesystem
1008 '''Get name in the case stored in the filesystem
1009
1009
1010 The name is either relative to root, or it is an absolute path starting
1010 The name is either relative to root, or it is an absolute path starting
1011 with root. Note that this function is unnecessary, and should not be
1011 with root. Note that this function is unnecessary, and should not be
1012 called, for case-sensitive filesystems (simply because it's expensive).
1012 called, for case-sensitive filesystems (simply because it's expensive).
1013 '''
1013 '''
1014 # If name is absolute, make it relative
1014 # If name is absolute, make it relative
1015 if name.lower().startswith(root.lower()):
1015 if name.lower().startswith(root.lower()):
1016 l = len(root)
1016 l = len(root)
1017 if name[l] == os.sep or name[l] == os.altsep:
1017 if name[l] == os.sep or name[l] == os.altsep:
1018 l = l + 1
1018 l = l + 1
1019 name = name[l:]
1019 name = name[l:]
1020
1020
1021 if not os.path.exists(os.path.join(root, name)):
1021 if not os.path.exists(os.path.join(root, name)):
1022 return None
1022 return None
1023
1023
1024 seps = os.sep
1024 seps = os.sep
1025 if os.altsep:
1025 if os.altsep:
1026 seps = seps + os.altsep
1026 seps = seps + os.altsep
1027 # Protect backslashes. This gets silly very quickly.
1027 # Protect backslashes. This gets silly very quickly.
1028 seps.replace('\\','\\\\')
1028 seps.replace('\\','\\\\')
1029 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
1029 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
1030 dir = os.path.normcase(os.path.normpath(root))
1030 dir = os.path.normcase(os.path.normpath(root))
1031 result = []
1031 result = []
1032 for part, sep in pattern.findall(name):
1032 for part, sep in pattern.findall(name):
1033 if sep:
1033 if sep:
1034 result.append(sep)
1034 result.append(sep)
1035 continue
1035 continue
1036
1036
1037 if dir not in _fspathcache:
1037 if dir not in _fspathcache:
1038 _fspathcache[dir] = os.listdir(dir)
1038 _fspathcache[dir] = os.listdir(dir)
1039 contents = _fspathcache[dir]
1039 contents = _fspathcache[dir]
1040
1040
1041 lpart = part.lower()
1041 lpart = part.lower()
1042 for n in contents:
1042 for n in contents:
1043 if n.lower() == lpart:
1043 if n.lower() == lpart:
1044 result.append(n)
1044 result.append(n)
1045 break
1045 break
1046 else:
1046 else:
1047 # Cannot happen, as the file exists!
1047 # Cannot happen, as the file exists!
1048 result.append(part)
1048 result.append(part)
1049 dir = os.path.join(dir, lpart)
1049 dir = os.path.join(dir, lpart)
1050
1050
1051 return ''.join(result)
1051 return ''.join(result)
1052
1052
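# Editor's note: a small sketch of fspath (illustration only; the root and
# file are hypothetical). On a case-insensitive filesystem it recovers the
# on-disk spelling of a name relative to root:
from mercurial import util

# assuming /repo contains a file stored as 'README.txt'
util.fspath('readme.TXT', '/repo')    # -> 'README.txt'
util.fspath('missing.txt', '/repo')   # -> None, the file does not exist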
1053 def checkexec(path):
1053 def checkexec(path):
1054 """
1054 """
1055 Check whether the given path is on a filesystem with UNIX-like exec flags
1055 Check whether the given path is on a filesystem with UNIX-like exec flags
1056
1056
1057 Requires a directory (like /foo/.hg)
1057 Requires a directory (like /foo/.hg)
1058 """
1058 """
1059
1059
1060 # VFAT on some Linux versions can flip mode but it doesn't persist
1060 # VFAT on some Linux versions can flip mode but it doesn't persist
1061 # a FS remount. Frequently we can detect it if files are created
1061 # a FS remount. Frequently we can detect it if files are created
1062 # with exec bit on.
1062 # with exec bit on.
1063
1063
1064 try:
1064 try:
1065 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
1065 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
1066 fh, fn = tempfile.mkstemp("", "", path)
1066 fh, fn = tempfile.mkstemp("", "", path)
1067 try:
1067 try:
1068 os.close(fh)
1068 os.close(fh)
1069 m = os.stat(fn).st_mode & 0777
1069 m = os.stat(fn).st_mode & 0777
1070 new_file_has_exec = m & EXECFLAGS
1070 new_file_has_exec = m & EXECFLAGS
1071 os.chmod(fn, m ^ EXECFLAGS)
1071 os.chmod(fn, m ^ EXECFLAGS)
1072 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
1072 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
1073 finally:
1073 finally:
1074 os.unlink(fn)
1074 os.unlink(fn)
1075 except (IOError, OSError):
1075 except (IOError, OSError):
1076 # we don't care, the user probably won't be able to commit anyway
1076 # we don't care, the user probably won't be able to commit anyway
1077 return False
1077 return False
1078 return not (new_file_has_exec or exec_flags_cannot_flip)
1078 return not (new_file_has_exec or exec_flags_cannot_flip)
1079
1079
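# Editor's note: a minimal sketch (illustration only; the directory is
# hypothetical and must be writable, since the probe creates and removes a
# temporary file inside it):
from mercurial import util

if util.checkexec('/home/user/repo/.hg'):
    print('exec bit is tracked on this filesystem')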
1080 def checklink(path):
1080 def checklink(path):
1081 """check whether the given path is on a symlink-capable filesystem"""
1081 """check whether the given path is on a symlink-capable filesystem"""
1082 # mktemp is not racy because symlink creation will fail if the
1082 # mktemp is not racy because symlink creation will fail if the
1083 # file already exists
1083 # file already exists
1084 name = tempfile.mktemp(dir=path)
1084 name = tempfile.mktemp(dir=path)
1085 try:
1085 try:
1086 os.symlink(".", name)
1086 os.symlink(".", name)
1087 os.unlink(name)
1087 os.unlink(name)
1088 return True
1088 return True
1089 except (OSError, AttributeError):
1089 except (OSError, AttributeError):
1090 return False
1090 return False
1091
1091
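# Editor's note: companion probe to checkexec; a tiny sketch (illustration
# only) reporting whether the filesystem under the given directory supports
# symlinks:
from mercurial import util

print('symlinks supported: %s' % util.checklink('.'))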
1092 _umask = os.umask(0)
1092 _umask = os.umask(0)
1093 os.umask(_umask)
1093 os.umask(_umask)
1094
1094
1095 def needbinarypatch():
1095 def needbinarypatch():
1096 """return True if patches should be applied in binary mode by default."""
1096 """return True if patches should be applied in binary mode by default."""
1097 return os.name == 'nt'
1097 return os.name == 'nt'
1098
1098
1099 def endswithsep(path):
1099 def endswithsep(path):
1100 '''Check path ends with os.sep or os.altsep.'''
1100 '''Check path ends with os.sep or os.altsep.'''
1101 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1101 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1102
1102
1103 def splitpath(path):
1103 def splitpath(path):
1104 '''Split path by os.sep.
1104 '''Split path by os.sep.
1105 Note that this function does not use os.altsep because this is
1105 Note that this function does not use os.altsep because this is
1106 an alternative of simple "xxx.split(os.sep)".
1106 an alternative of simple "xxx.split(os.sep)".
1107 It is recommended to use os.path.normpath() before using this
1107 It is recommended to use os.path.normpath() before using this
1108 function if needed.'''
1108 function if needed.'''
1109 return path.split(os.sep)
1109 return path.split(os.sep)
1110
1110
1111 def gui():
1111 def gui():
1112 '''Are we running in a GUI?'''
1112 '''Are we running in a GUI?'''
1113 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
1113 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
1114
1114
1115 def lookup_reg(key, name=None, scope=None):
1115 def lookup_reg(key, name=None, scope=None):
1116 return None
1116 return None
1117
1117
1118 # Platform specific variants
1118 # Platform specific variants
1119 if os.name == 'nt':
1119 if os.name == 'nt':
1120 import msvcrt
1120 import msvcrt
1121 nulldev = 'NUL:'
1121 nulldev = 'NUL:'
1122
1122
1123 class winstdout:
1123 class winstdout:
1124 '''stdout on windows misbehaves if sent through a pipe'''
1124 '''stdout on windows misbehaves if sent through a pipe'''
1125
1125
1126 def __init__(self, fp):
1126 def __init__(self, fp):
1127 self.fp = fp
1127 self.fp = fp
1128
1128
1129 def __getattr__(self, key):
1129 def __getattr__(self, key):
1130 return getattr(self.fp, key)
1130 return getattr(self.fp, key)
1131
1131
1132 def close(self):
1132 def close(self):
1133 try:
1133 try:
1134 self.fp.close()
1134 self.fp.close()
1135 except: pass
1135 except: pass
1136
1136
1137 def write(self, s):
1137 def write(self, s):
1138 try:
1138 try:
1139 # This is workaround for "Not enough space" error on
1139 # This is workaround for "Not enough space" error on
1140 # writing large size of data to console.
1140 # writing large size of data to console.
1141 limit = 16000
1141 limit = 16000
1142 l = len(s)
1142 l = len(s)
1143 start = 0
1143 start = 0
1144 while start < l:
1144 while start < l:
1145 end = start + limit
1145 end = start + limit
1146 self.fp.write(s[start:end])
1146 self.fp.write(s[start:end])
1147 start = end
1147 start = end
1148 except IOError, inst:
1148 except IOError, inst:
1149 if inst.errno != 0: raise
1149 if inst.errno != 0: raise
1150 self.close()
1150 self.close()
1151 raise IOError(errno.EPIPE, 'Broken pipe')
1151 raise IOError(errno.EPIPE, 'Broken pipe')
1152
1152
1153 def flush(self):
1153 def flush(self):
1154 try:
1154 try:
1155 return self.fp.flush()
1155 return self.fp.flush()
1156 except IOError, inst:
1156 except IOError, inst:
1157 if inst.errno != errno.EINVAL: raise
1157 if inst.errno != errno.EINVAL: raise
1158 self.close()
1158 self.close()
1159 raise IOError(errno.EPIPE, 'Broken pipe')
1159 raise IOError(errno.EPIPE, 'Broken pipe')
1160
1160
1161 sys.stdout = winstdout(sys.stdout)
1161 sys.stdout = winstdout(sys.stdout)
1162
1162
1163 def _is_win_9x():
1163 def _is_win_9x():
1164 '''return true if run on windows 95, 98 or me.'''
1164 '''return true if run on windows 95, 98 or me.'''
1165 try:
1165 try:
1166 return sys.getwindowsversion()[3] == 1
1166 return sys.getwindowsversion()[3] == 1
1167 except AttributeError:
1167 except AttributeError:
1168 return 'command' in os.environ.get('comspec', '')
1168 return 'command' in os.environ.get('comspec', '')
1169
1169
1170 def openhardlinks():
1170 def openhardlinks():
1171 return not _is_win_9x and "win32api" in locals()
1171 return not _is_win_9x and "win32api" in locals()
1172
1172
1173 def system_rcpath():
1173 def system_rcpath():
1174 try:
1174 try:
1175 return system_rcpath_win32()
1175 return system_rcpath_win32()
1176 except:
1176 except:
1177 return [r'c:\mercurial\mercurial.ini']
1177 return [r'c:\mercurial\mercurial.ini']
1178
1178
1179 def user_rcpath():
1179 def user_rcpath():
1180 '''return os-specific hgrc search path to the user dir'''
1180 '''return os-specific hgrc search path to the user dir'''
1181 try:
1181 try:
1182 path = user_rcpath_win32()
1182 path = user_rcpath_win32()
1183 except:
1183 except:
1184 home = os.path.expanduser('~')
1184 home = os.path.expanduser('~')
1185 path = [os.path.join(home, 'mercurial.ini'),
1185 path = [os.path.join(home, 'mercurial.ini'),
1186 os.path.join(home, '.hgrc')]
1186 os.path.join(home, '.hgrc')]
1187 userprofile = os.environ.get('USERPROFILE')
1187 userprofile = os.environ.get('USERPROFILE')
1188 if userprofile:
1188 if userprofile:
1189 path.append(os.path.join(userprofile, 'mercurial.ini'))
1189 path.append(os.path.join(userprofile, 'mercurial.ini'))
1190 path.append(os.path.join(userprofile, '.hgrc'))
1190 path.append(os.path.join(userprofile, '.hgrc'))
1191 return path
1191 return path
1192
1192
1193 def parse_patch_output(output_line):
1193 def parse_patch_output(output_line):
1194 """parses the output produced by patch and returns the file name"""
1194 """parses the output produced by patch and returns the file name"""
1195 pf = output_line[14:]
1195 pf = output_line[14:]
1196 if pf[0] == '`':
1196 if pf[0] == '`':
1197 pf = pf[1:-1] # Remove the quotes
1197 pf = pf[1:-1] # Remove the quotes
1198 return pf
1198 return pf
1199
1199
1200 def sshargs(sshcmd, host, user, port):
1200 def sshargs(sshcmd, host, user, port):
1201 '''Build argument list for ssh or Plink'''
1201 '''Build argument list for ssh or Plink'''
1202 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
1202 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
1203 args = user and ("%s@%s" % (user, host)) or host
1203 args = user and ("%s@%s" % (user, host)) or host
1204 return port and ("%s %s %s" % (args, pflag, port)) or args
1204 return port and ("%s %s %s" % (args, pflag, port)) or args
1205
1205
1206 def testpid(pid):
1206 def testpid(pid):
1207 '''return False if pid dead, True if running or not known'''
1207 '''return False if pid dead, True if running or not known'''
1208 return True
1208 return True
1209
1209
1210 def set_flags(f, l, x):
1210 def set_flags(f, l, x):
1211 pass
1211 pass
1212
1212
1213 def set_binary(fd):
1213 def set_binary(fd):
1214 # When run without console, pipes may expose invalid
1214 # When run without console, pipes may expose invalid
1215 # fileno(), usually set to -1.
1215 # fileno(), usually set to -1.
1216 if hasattr(fd, 'fileno') and fd.fileno() >= 0:
1216 if hasattr(fd, 'fileno') and fd.fileno() >= 0:
1217 msvcrt.setmode(fd.fileno(), os.O_BINARY)
1217 msvcrt.setmode(fd.fileno(), os.O_BINARY)
1218
1218
1219 def pconvert(path):
1219 def pconvert(path):
1220 return '/'.join(splitpath(path))
1220 return '/'.join(splitpath(path))
1221
1221
1222 def localpath(path):
1222 def localpath(path):
1223 return path.replace('/', '\\')
1223 return path.replace('/', '\\')
1224
1224
1225 def normpath(path):
1225 def normpath(path):
1226 return pconvert(os.path.normpath(path))
1226 return pconvert(os.path.normpath(path))
1227
1227
1228 makelock = _makelock_file
1228 makelock = _makelock_file
1229 readlock = _readlock_file
1229 readlock = _readlock_file
1230
1230
1231 def samestat(s1, s2):
1231 def samestat(s1, s2):
1232 return False
1232 return False
1233
1233
1234 # A sequence of backslashes is special iff it precedes a double quote:
1234 # A sequence of backslashes is special iff it precedes a double quote:
1235 # - if there's an even number of backslashes, the double quote is not
1235 # - if there's an even number of backslashes, the double quote is not
1236 # quoted (i.e. it ends the quoted region)
1236 # quoted (i.e. it ends the quoted region)
1237 # - if there's an odd number of backslashes, the double quote is quoted
1237 # - if there's an odd number of backslashes, the double quote is quoted
1238 # - in both cases, every pair of backslashes is unquoted into a single
1238 # - in both cases, every pair of backslashes is unquoted into a single
1239 # backslash
1239 # backslash
1240 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1240 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1241 # So, to quote a string, we must surround it in double quotes, double
1241 # So, to quote a string, we must surround it in double quotes, double
1242 # the number of backslashes that precede double quotes and add another
1242 # the number of backslashes that precede double quotes and add another
1243 # backslash before every double quote (being careful with the double
1243 # backslash before every double quote (being careful with the double
1244 # quote we've appended to the end)
1244 # quote we've appended to the end)
1245 _quotere = None
1245 _quotere = None
1246 def shellquote(s):
1246 def shellquote(s):
1247 global _quotere
1247 global _quotere
1248 if _quotere is None:
1248 if _quotere is None:
1249 _quotere = re.compile(r'(\\*)("|\\$)')
1249 _quotere = re.compile(r'(\\*)("|\\$)')
1250 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1250 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1251
1251
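# Editor's note: a worked example of the quoting rule described above
# (illustration only; util.shellquote is this variant only on Windows):
from mercurial import util

print(util.shellquote('say "hi"'))     # -> "say \"hi\""  (embedded quotes escaped)
print(util.shellquote('C:\\path\\'))   # -> "C:\path\\"   (trailing backslash doubled
                                       #    so it cannot escape the closing quote)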
1252 def quotecommand(cmd):
1252 def quotecommand(cmd):
1253 """Build a command string suitable for os.popen* calls."""
1253 """Build a command string suitable for os.popen* calls."""
1254 # The extra quotes are needed because popen* runs the command
1254 # The extra quotes are needed because popen* runs the command
1255 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
1255 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
1256 return '"' + cmd + '"'
1256 return '"' + cmd + '"'
1257
1257
1258 def popen(command, mode='r'):
1258 def popen(command, mode='r'):
1259 # Work around "popen spawned process may not write to stdout
1259 # Work around "popen spawned process may not write to stdout
1260 # under windows"
1260 # under windows"
1261 # http://bugs.python.org/issue1366
1261 # http://bugs.python.org/issue1366
1262 command += " 2> %s" % nulldev
1262 command += " 2> %s" % nulldev
1263 return os.popen(quotecommand(command), mode)
1263 return os.popen(quotecommand(command), mode)
1264
1264
1265 def explain_exit(code):
1265 def explain_exit(code):
1266 return _("exited with status %d") % code, code
1266 return _("exited with status %d") % code, code
1267
1267
1268 # if you change this stub into a real check, please try to implement the
1268 # if you change this stub into a real check, please try to implement the
1269 # username and groupname functions above, too.
1269 # username and groupname functions above, too.
1270 def isowner(fp, st=None):
1270 def isowner(fp, st=None):
1271 return True
1271 return True
1272
1272
1273 def find_in_path(name, path, default=None):
1274     '''find name in search path. path can be string (will be split
1275     with os.pathsep), or iterable thing that returns strings. if name
1276     found, return path to name. else return default. name is looked up
1277     using cmd.exe rules, using PATHEXT.'''
1278     if isinstance(path, str):
1279         path = path.split(os.pathsep)
1280
1281     pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1282     pathext = pathext.lower().split(os.pathsep)
1283     isexec = os.path.splitext(name)[1].lower() in pathext
1284
1285     for p in path:
1286         p_name = os.path.join(p, name)
1287
1288         if isexec and os.path.exists(p_name):
1289             return p_name
1290
1291         for ext in pathext:
1292             p_name_ext = p_name + ext
1293             if os.path.exists(p_name_ext):
1294                 return p_name_ext
1295     return default
1296
1273 def find_exe(command):
1274     '''Find executable for command searching like cmd.exe does.
1275     If command is a basename then PATH is searched for command.
1276     PATH isn't searched if command is an absolute or relative path.
1277     An extension from PATHEXT is found and added if not present.
1278     If command isn't found None is returned.'''
1279     pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1280     pathexts = [ext for ext in pathext.lower().split(os.pathsep)]
1281     if os.path.splitext(command)[1].lower() in pathexts:
1282         pathexts = ['']
1283
1284     def findexisting(pathcommand):
1285         'Will append extension (if needed) and return existing file'
1286         for ext in pathexts:
1287             executable = pathcommand + ext
1288             if os.path.exists(executable):
1289                 return executable
1290         return None
1291
1292     if os.sep in command:
1293         return findexisting(command)
1294
1295     for path in os.environ.get('PATH', '').split(os.pathsep):
1296         executable = findexisting(os.path.join(path, command))
1297         if executable is not None:
1298             return executable
1299     return None
1300
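# Editor's note: a minimal sketch of the new Windows lookup above
# (illustration only; paths and PATH contents are hypothetical):
from mercurial import util

util.find_exe('python')           # searches PATH, trying 'python' + each PATHEXT
                                  # extension; returns the existing file or None
util.find_exe('scripts\\build')   # contains os.sep, so PATH is not searched;
                                  # only the PATHEXT extensions are tried there
util.find_exe('tool.bat')         # already carries a PATHEXT extension, so the
                                  # name is checked as-is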
1297 def set_signal_handler():
1301 def set_signal_handler():
1298 try:
1302 try:
1299 set_signal_handler_win32()
1303 set_signal_handler_win32()
1300 except NameError:
1304 except NameError:
1301 pass
1305 pass
1302
1306
1303 try:
1307 try:
1304 # override functions with win32 versions if possible
1308 # override functions with win32 versions if possible
1305 from util_win32 import *
1309 from util_win32 import *
1306 if not _is_win_9x():
1310 if not _is_win_9x():
1307 posixfile = posixfile_nt
1311 posixfile = posixfile_nt
1308 except ImportError:
1312 except ImportError:
1309 pass
1313 pass
1310
1314
1311 else:
1315 else:
1312 nulldev = '/dev/null'
1316 nulldev = '/dev/null'
1313
1317
1314 def rcfiles(path):
1318 def rcfiles(path):
1315 rcs = [os.path.join(path, 'hgrc')]
1319 rcs = [os.path.join(path, 'hgrc')]
1316 rcdir = os.path.join(path, 'hgrc.d')
1320 rcdir = os.path.join(path, 'hgrc.d')
1317 try:
1321 try:
1318 rcs.extend([os.path.join(rcdir, f)
1322 rcs.extend([os.path.join(rcdir, f)
1319 for f, kind in osutil.listdir(rcdir)
1323 for f, kind in osutil.listdir(rcdir)
1320 if f.endswith(".rc")])
1324 if f.endswith(".rc")])
1321 except OSError:
1325 except OSError:
1322 pass
1326 pass
1323 return rcs
1327 return rcs
1324
1328
1325 def system_rcpath():
1329 def system_rcpath():
1326 path = []
1330 path = []
1327 # old mod_python does not set sys.argv
1331 # old mod_python does not set sys.argv
1328 if len(getattr(sys, 'argv', [])) > 0:
1332 if len(getattr(sys, 'argv', [])) > 0:
1329 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1333 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1330 '/../etc/mercurial'))
1334 '/../etc/mercurial'))
1331 path.extend(rcfiles('/etc/mercurial'))
1335 path.extend(rcfiles('/etc/mercurial'))
1332 return path
1336 return path
1333
1337
1334 def user_rcpath():
1338 def user_rcpath():
1335 return [os.path.expanduser('~/.hgrc')]
1339 return [os.path.expanduser('~/.hgrc')]
1336
1340
1337 def parse_patch_output(output_line):
1341 def parse_patch_output(output_line):
1338 """parses the output produced by patch and returns the file name"""
1342 """parses the output produced by patch and returns the file name"""
1339 pf = output_line[14:]
1343 pf = output_line[14:]
1340 if os.sys.platform == 'OpenVMS':
1344 if os.sys.platform == 'OpenVMS':
1341 if pf[0] == '`':
1345 if pf[0] == '`':
1342 pf = pf[1:-1] # Remove the quotes
1346 pf = pf[1:-1] # Remove the quotes
1343 else:
1347 else:
1344 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1348 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1345 pf = pf[1:-1] # Remove the quotes
1349 pf = pf[1:-1] # Remove the quotes
1346 return pf
1350 return pf
1347
1351
1348 def sshargs(sshcmd, host, user, port):
1352 def sshargs(sshcmd, host, user, port):
1349 '''Build argument list for ssh'''
1353 '''Build argument list for ssh'''
1350 args = user and ("%s@%s" % (user, host)) or host
1354 args = user and ("%s@%s" % (user, host)) or host
1351 return port and ("%s -p %s" % (args, port)) or args
1355 return port and ("%s -p %s" % (args, port)) or args
1352
1356
1353 def is_exec(f):
1357 def is_exec(f):
1354 """check whether a file is executable"""
1358 """check whether a file is executable"""
1355 return (os.lstat(f).st_mode & 0100 != 0)
1359 return (os.lstat(f).st_mode & 0100 != 0)
1356
1360
1357 def set_flags(f, l, x):
1361 def set_flags(f, l, x):
1358 s = os.lstat(f).st_mode
1362 s = os.lstat(f).st_mode
1359 if l:
1363 if l:
1360 if not stat.S_ISLNK(s):
1364 if not stat.S_ISLNK(s):
1361 # switch file to link
1365 # switch file to link
1362 data = file(f).read()
1366 data = file(f).read()
1363 os.unlink(f)
1367 os.unlink(f)
1364 try:
1368 try:
1365 os.symlink(data, f)
1369 os.symlink(data, f)
1366 except:
1370 except:
1367 # failed to make a link, rewrite file
1371 # failed to make a link, rewrite file
1368 file(f, "w").write(data)
1372 file(f, "w").write(data)
1369 # no chmod needed at this point
1373 # no chmod needed at this point
1370 return
1374 return
1371 if stat.S_ISLNK(s):
1375 if stat.S_ISLNK(s):
1372 # switch link to file
1376 # switch link to file
1373 data = os.readlink(f)
1377 data = os.readlink(f)
1374 os.unlink(f)
1378 os.unlink(f)
1375 file(f, "w").write(data)
1379 file(f, "w").write(data)
1376 s = 0666 & ~_umask # avoid restatting for chmod
1380 s = 0666 & ~_umask # avoid restatting for chmod
1377
1381
1378 sx = s & 0100
1382 sx = s & 0100
1379 if x and not sx:
1383 if x and not sx:
1380 # Turn on +x for every +r bit when making a file executable
1384 # Turn on +x for every +r bit when making a file executable
1381 # and obey umask.
1385 # and obey umask.
1382 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1386 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1383 elif not x and sx:
1387 elif not x and sx:
1384 # Turn off all +x bits
1388 # Turn off all +x bits
1385 os.chmod(f, s & 0666)
1389 os.chmod(f, s & 0666)
1386
1390
1387 def set_binary(fd):
1391 def set_binary(fd):
1388 pass
1392 pass
1389
1393
1390 def pconvert(path):
1394 def pconvert(path):
1391 return path
1395 return path
1392
1396
1393 def localpath(path):
1397 def localpath(path):
1394 return path
1398 return path
1395
1399
1396 normpath = os.path.normpath
1400 normpath = os.path.normpath
1397 samestat = os.path.samestat
1401 samestat = os.path.samestat
1398
1402
1399 def makelock(info, pathname):
1403 def makelock(info, pathname):
1400 try:
1404 try:
1401 os.symlink(info, pathname)
1405 os.symlink(info, pathname)
1402 except OSError, why:
1406 except OSError, why:
1403 if why.errno == errno.EEXIST:
1407 if why.errno == errno.EEXIST:
1404 raise
1408 raise
1405 else:
1409 else:
1406 _makelock_file(info, pathname)
1410 _makelock_file(info, pathname)
1407
1411
1408 def readlock(pathname):
1412 def readlock(pathname):
1409 try:
1413 try:
1410 return os.readlink(pathname)
1414 return os.readlink(pathname)
1411 except OSError, why:
1415 except OSError, why:
1412 if why.errno in (errno.EINVAL, errno.ENOSYS):
1416 if why.errno in (errno.EINVAL, errno.ENOSYS):
1413 return _readlock_file(pathname)
1417 return _readlock_file(pathname)
1414 else:
1418 else:
1415 raise
1419 raise
1416
1420
1417 def shellquote(s):
1421 def shellquote(s):
1418 if os.sys.platform == 'OpenVMS':
1422 if os.sys.platform == 'OpenVMS':
1419 return '"%s"' % s
1423 return '"%s"' % s
1420 else:
1424 else:
1421 return "'%s'" % s.replace("'", "'\\''")
1425 return "'%s'" % s.replace("'", "'\\''")
1422
1426
1423 def quotecommand(cmd):
1427 def quotecommand(cmd):
1424 return cmd
1428 return cmd
1425
1429
1426 def popen(command, mode='r'):
1430 def popen(command, mode='r'):
1427 return os.popen(command, mode)
1431 return os.popen(command, mode)
1428
1432
1429 def testpid(pid):
1433 def testpid(pid):
1430 '''return False if pid dead, True if running or not sure'''
1434 '''return False if pid dead, True if running or not sure'''
1431 if os.sys.platform == 'OpenVMS':
1435 if os.sys.platform == 'OpenVMS':
1432 return True
1436 return True
1433 try:
1437 try:
1434 os.kill(pid, 0)
1438 os.kill(pid, 0)
1435 return True
1439 return True
1436 except OSError, inst:
1440 except OSError, inst:
1437 return inst.errno != errno.ESRCH
1441 return inst.errno != errno.ESRCH
1438
1442
1439 def explain_exit(code):
1443 def explain_exit(code):
1440 """return a 2-tuple (desc, code) describing a process's status"""
1444 """return a 2-tuple (desc, code) describing a process's status"""
1441 if os.WIFEXITED(code):
1445 if os.WIFEXITED(code):
1442 val = os.WEXITSTATUS(code)
1446 val = os.WEXITSTATUS(code)
1443 return _("exited with status %d") % val, val
1447 return _("exited with status %d") % val, val
1444 elif os.WIFSIGNALED(code):
1448 elif os.WIFSIGNALED(code):
1445 val = os.WTERMSIG(code)
1449 val = os.WTERMSIG(code)
1446 return _("killed by signal %d") % val, val
1450 return _("killed by signal %d") % val, val
1447 elif os.WIFSTOPPED(code):
1451 elif os.WIFSTOPPED(code):
1448 val = os.WSTOPSIG(code)
1452 val = os.WSTOPSIG(code)
1449 return _("stopped by signal %d") % val, val
1453 return _("stopped by signal %d") % val, val
1450 raise ValueError(_("invalid exit code"))
1454 raise ValueError(_("invalid exit code"))
1451
1455
1452 def isowner(fp, st=None):
1456 def isowner(fp, st=None):
1453 """Return True if the file object f belongs to the current user.
1457 """Return True if the file object f belongs to the current user.
1454
1458
1455 The return value of a util.fstat(f) may be passed as the st argument.
1459 The return value of a util.fstat(f) may be passed as the st argument.
1456 """
1460 """
1457 if st is None:
1461 if st is None:
1458 st = fstat(fp)
1462 st = fstat(fp)
1459 return st.st_uid == os.getuid()
1463 return st.st_uid == os.getuid()
1460
1464
1461 def find_in_path(name, path, default=None):
1462     '''find name in search path. path can be string (will be split
1463     with os.pathsep), or iterable thing that returns strings. if name
1464     found, return path to name. else return default.'''
1465     if isinstance(path, str):
1466         path = path.split(os.pathsep)
1467     for p in path:
1468         p_name = os.path.join(p, name)
1469         if os.path.exists(p_name):
1470             return p_name
1471     return default
1472
1465 def find_exe(command):
1466     '''Find executable for command searching like which does.
1467     If command is a basename then PATH is searched for command.
1468     PATH isn't searched if command is an absolute or relative path.
1469     If command isn't found None is returned.'''
1470     if sys.platform == 'OpenVMS':
1471         return command
1472
1473     def findexisting(executable):
1474         'Will return executable if existing file'
1475         if os.path.exists(executable):
1476             return executable
1477         return None
1478
1479     if os.sep in command:
1480         return findexisting(command)
1481
1482     for path in os.environ.get('PATH', '').split(os.pathsep):
1483         executable = findexisting(os.path.join(path, command))
1484         if executable is not None:
1485             return executable
1486     return None
1487
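# Editor's note: a short sketch of the POSIX variant above (illustration
# only; paths are hypothetical). Except on OpenVMS, the result is now always
# an existing file or None, even when the command contains os.sep:
from mercurial import util

util.find_exe('sh')               # -> e.g. '/bin/sh' after searching PATH
util.find_exe('./run-tests.py')   # contains os.sep: returned only if it exists
util.find_exe('no-such-tool')     # -> None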
1473 def set_signal_handler():
1488 def set_signal_handler():
1474 pass
1489 pass
1475
1490
1476 def find_exe(name, default=None):
1477 '''find path of an executable.
1478 if name contains a path component, return it as is. otherwise,
1479 use normal executable search path.'''
1480
1481 if os.sep in name or sys.platform == 'OpenVMS':
1482 # don't check the executable bit. if the file isn't
1483 # executable, whoever tries to actually run it will give a
1484 # much more useful error message.
1485 return name
1486 return find_in_path(name, os.environ.get('PATH', ''), default=default)
1487
1488 def mktempcopy(name, emptyok=False, createmode=None):
1491 def mktempcopy(name, emptyok=False, createmode=None):
1489 """Create a temporary file with the same contents from name
1492 """Create a temporary file with the same contents from name
1490
1493
1491 The permission bits are copied from the original file.
1494 The permission bits are copied from the original file.
1492
1495
1493 If the temporary file is going to be truncated immediately, you
1496 If the temporary file is going to be truncated immediately, you
1494 can use emptyok=True as an optimization.
1497 can use emptyok=True as an optimization.
1495
1498
1496 Returns the name of the temporary file.
1499 Returns the name of the temporary file.
1497 """
1500 """
1498 d, fn = os.path.split(name)
1501 d, fn = os.path.split(name)
1499 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1502 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1500 os.close(fd)
1503 os.close(fd)
1501 # Temporary files are created with mode 0600, which is usually not
1504 # Temporary files are created with mode 0600, which is usually not
1502 # what we want. If the original file already exists, just copy
1505 # what we want. If the original file already exists, just copy
1503 # its mode. Otherwise, manually obey umask.
1506 # its mode. Otherwise, manually obey umask.
1504 try:
1507 try:
1505 st_mode = os.lstat(name).st_mode & 0777
1508 st_mode = os.lstat(name).st_mode & 0777
1506 except OSError, inst:
1509 except OSError, inst:
1507 if inst.errno != errno.ENOENT:
1510 if inst.errno != errno.ENOENT:
1508 raise
1511 raise
1509 st_mode = createmode
1512 st_mode = createmode
1510 if st_mode is None:
1513 if st_mode is None:
1511 st_mode = ~_umask
1514 st_mode = ~_umask
1512 st_mode &= 0666
1515 st_mode &= 0666
1513 os.chmod(temp, st_mode)
1516 os.chmod(temp, st_mode)
1514 if emptyok:
1517 if emptyok:
1515 return temp
1518 return temp
1516 try:
1519 try:
1517 try:
1520 try:
1518 ifp = posixfile(name, "rb")
1521 ifp = posixfile(name, "rb")
1519 except IOError, inst:
1522 except IOError, inst:
1520 if inst.errno == errno.ENOENT:
1523 if inst.errno == errno.ENOENT:
1521 return temp
1524 return temp
1522 if not getattr(inst, 'filename', None):
1525 if not getattr(inst, 'filename', None):
1523 inst.filename = name
1526 inst.filename = name
1524 raise
1527 raise
1525 ofp = posixfile(temp, "wb")
1528 ofp = posixfile(temp, "wb")
1526 for chunk in filechunkiter(ifp):
1529 for chunk in filechunkiter(ifp):
1527 ofp.write(chunk)
1530 ofp.write(chunk)
1528 ifp.close()
1531 ifp.close()
1529 ofp.close()
1532 ofp.close()
1530 except:
1533 except:
1531 try: os.unlink(temp)
1534 try: os.unlink(temp)
1532 except: pass
1535 except: pass
1533 raise
1536 raise
1534 return temp
1537 return temp
1535
1538
1536 class atomictempfile(posixfile):
1539 class atomictempfile(posixfile):
1537 """file-like object that atomically updates a file
1540 """file-like object that atomically updates a file
1538
1541
1539 All writes will be redirected to a temporary copy of the original
1542 All writes will be redirected to a temporary copy of the original
1540 file. When rename is called, the copy is renamed to the original
1543 file. When rename is called, the copy is renamed to the original
1541 name, making the changes visible.
1544 name, making the changes visible.
1542 """
1545 """
1543 def __init__(self, name, mode, createmode):
1546 def __init__(self, name, mode, createmode):
1544 self.__name = name
1547 self.__name = name
1545 self.temp = mktempcopy(name, emptyok=('w' in mode),
1548 self.temp = mktempcopy(name, emptyok=('w' in mode),
1546 createmode=createmode)
1549 createmode=createmode)
1547 posixfile.__init__(self, self.temp, mode)
1550 posixfile.__init__(self, self.temp, mode)
1548
1551
1549 def rename(self):
1552 def rename(self):
1550 if not self.closed:
1553 if not self.closed:
1551 posixfile.close(self)
1554 posixfile.close(self)
1552 rename(self.temp, localpath(self.__name))
1555 rename(self.temp, localpath(self.__name))
1553
1556
1554 def __del__(self):
1557 def __del__(self):
1555 if not self.closed:
1558 if not self.closed:
1556 try:
1559 try:
1557 os.unlink(self.temp)
1560 os.unlink(self.temp)
1558 except: pass
1561 except: pass
1559 posixfile.close(self)
1562 posixfile.close(self)
1560
1563
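# Editor's note: a minimal usage sketch for atomictempfile above
# (illustration only; the file name is hypothetical):
from mercurial import util

f = util.atomictempfile('data.txt', 'w', createmode=None)
f.write('all or nothing\n')
f.rename()    # atomically replaces data.txt; without rename() the temporary
              # copy is simply discarded when the object is deleted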
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT:
            raise
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)

class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always
        self.createmode = None

    def __getattr__(self, name):
        if name == '_can_symlink':
            self._can_symlink = checklink(self.base)
            return self._can_symlink
        raise AttributeError(name)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ("r", "rb"):
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)

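# Illustrative sketch (not part of this changeset): opening files relative to
# a base directory; the repository path and file name below are hypothetical.
op = opener('/path/to/repo/.hg')
fp = op('somefile', 'w', atomictemp=True)   # returns an atomictempfile
fp.write('revlogv1\n')
fp.rename()                                 # contents become visible here
data = op('somefile').read()                # plain read, relative to base
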
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            s, self.buf = str(self.buf), ''
        else:
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

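# Illustrative sketch (not part of this changeset): chunkbuffer coalesces
# irregular input chunks into reads of the requested size (Python 2 strings).
buf = chunkbuffer(iter(['ab', 'cdef', 'g']))
buf.read(3)    # -> 'abc'
buf.read(10)   # -> 'defg' (less than requested once the iterator runs dry)
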
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) / 60
        format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8631 date."""
    return datestr(date, format='%Y-%m-%d')

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

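# Illustrative sketch (not part of this changeset): the two accepted input
# forms of parsedate(), and datestr() as the inverse direction.
parsedate('1165432709 0')     # "unixtime offset" string -> (1165432709, 0)
parsedate((1165432709, 0))    # (unixtime, offset) tuples pass through as-is
datestr((0, 0))               # -> 'Thu Jan 01 00:00:00 1970 +0000'
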
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

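# Illustrative sketch (not part of this changeset): the relative-days form
# builds a predicate over changeset timestamps.
m = matchdate('-30')                  # changesets from the last 30 days
m(makedate()[0])                      # -> True, "now" is inside the window
m(makedate()[0] - 60 * 24 * 3600)     # -> False, 60 days ago is outside it
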
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

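# Illustrative sketch (not part of this changeset), with a hypothetical author:
shortuser('John Doe <john.doe@example.com>')   # -> 'john'
email('John Doe <john.doe@example.com>')       # -> 'john.doe@example.com'
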
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

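# Illustrative sketch (not part of this changeset): walking a hypothetical
# source tree for repositories (Python 2 print statement, as in this module).
for repo in walkrepos('/home/user/src', followsym=True, recurse=True):
    print repo    # each working copy and patch queue repository found
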
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

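# Illustrative sketch (not part of this changeset): the unit table is scanned
# top-down, so the first scale that fits wins.
bytecount(2048)       # -> '2.00 KB'
bytecount(1234567)    # -> '1.18 MB'
bytecount(0)          # -> '0 bytes'
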
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80
@@ -1,400 +1,399 b''
 # revision 0
 adding f
 # revision 1
 # revision 2
 created new head
 # revision 3 - simple to merge
 created new head


 Tool selection

 # default is internal merge:
 [merge-tools]
 # hg update -C 1
 # hg merge -r 2
 merging f
 warning: conflicts during merge.
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 <<<<<<< local
 revision 1
 =======
 revision 2
 >>>>>>> other
 space
 # hg stat
 M f
 ? f.orig

 # simplest hgrc using false for merge:
 [merge-tools]
 false.whatever=
 # hg update -C 1
 # hg merge -r 2
 merging f
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 revision 1
 space
 # hg stat
 M f
 ? f.orig

 # true with higher .priority gets precedence:
 [merge-tools]
 false.whatever=
 true.priority=1
 # hg update -C 1
 # hg merge -r 2
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 revision 1
 space
 # hg stat
 M f

 # unless lowered on command line:
 [merge-tools]
 false.whatever=
 true.priority=1
 # hg update -C 1
 # hg merge -r 2 --config merge-tools.true.priority=-7
 merging f
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 revision 1
 space
 # hg stat
 M f
 ? f.orig

 # or false set higher on command line:
 [merge-tools]
 false.whatever=
 true.priority=1
 # hg update -C 1
 # hg merge -r 2 --config merge-tools.false.priority=117
 merging f
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 revision 1
 space
 # hg stat
 M f
 ? f.orig

 # or true.executable not found in PATH:
 [merge-tools]
 false.whatever=
 true.priority=1
 # hg update -C 1
 # hg merge -r 2 --config merge-tools.true.executable=nonexistingmergetool
 merging f
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 revision 1
 space
 # hg stat
 M f
 ? f.orig

 # or true.executable with bogus path:
 [merge-tools]
 false.whatever=
 true.priority=1
 # hg update -C 1
 # hg merge -r 2 --config merge-tools.true.executable=/bin/nonexistingmergetool
-sh: /bin/nonexistingmergetool: No such file or directory
 merging f
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 revision 1
 space
 # hg stat
 M f
 ? f.orig

 # but true.executable set to cat found in PATH works:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 2
 revision 1
 space
 revision 0
 space
 revision 2
 space
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 revision 1
 space
 # hg stat
 M f

 # and true.executable set to cat with path works:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 2 --config merge-tools.true.executable=/bin/cat
 revision 1
 space
 revision 0
 space
 revision 2
 space
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 revision 1
 space
 # hg stat
 M f


 Tool selection and merge-patterns

 # merge-patterns specifies new tool tac:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 2 --config merge-patterns.f=tac
 space
 revision 1
 space
 revision 0
 space
 revision 2
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 revision 1
 space
 # hg stat
 M f

 # merge-patterns specifies executable not found in PATH and gets warning:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistingmergetool
 couldn't find merge tool true specified for f
 merging f
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 revision 1
 space
 # hg stat
 M f
 ? f.orig

 # merge-patterns specifies executable with bogus path and gets warning:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/bin/nonexistingmergetool
-sh: /bin/nonexistingmergetool: No such file or directory
+couldn't find merge tool true specified for f
 merging f
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 revision 1
 space
 # hg stat
 M f
 ? f.orig


 Premerge

 # Default is silent simplemerge:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 3
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 revision 1
 space
 revision 3
 # hg stat
 M f

 # .premerge=True is same:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 3 --config merge-tools.true.premerge=True
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 revision 1
 space
 revision 3
 # hg stat
 M f

 # .premerge=False executes merge-tool:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 3 --config merge-tools.true.premerge=False
 revision 1
 space
 revision 0
 space
 revision 0
 space
 revision 3
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 revision 1
 space
 # hg stat
 M f


 Tool execution

 # set tools.args explicit to include $base $local $other $output:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 ==> ... <==
 revision 0
 space

 ==> ... <==
 revision 1
 space

 ==> ... <==
 revision 2
 space

 ==> ... <==
 revision 1
 space
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 revision 1
 space
 # hg stat
 M f

 # Merge with "echo mergeresult > $local":
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 mergeresult
 # hg stat
 M f

 # - and $local is the file f:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 mergeresult
 # hg stat
 M f

 # Merge with "echo mergeresult > $output" - the variable is a bit magic:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 merging f
 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 # cat f
 mergeresult
 # hg stat
 M f


 Merge post-processing

 # cat is a bad merge-tool and doesn't change:
 [merge-tools]
 false.whatever=
 true.priority=1
 true.executable=cat
 # hg update -C 1
 # hg merge -r 2 --config merge-tools.true.checkchanged=1
 revision 1
 space
 revision 0
 space
 revision 2
 space
 merging f
 merging f failed!
 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
 use 'hg resolve' to retry unresolved file merges
 # cat f
 revision 1
 space
 # hg stat
 M f
 ? f.orig
