Merge with main
Brendan Cully
r9282:f9087eea merge default
@@ -0,0 +1,4 @@
1 #!/bin/sh
2
3 hg clone --quiet $TESTDIR/test-path-normalization.hg t
4 exec hg st -R t
1 NO CONTENT: new file 100644, binary diff hidden
@@ -1,333 +1,340 @@ hgext/bookmarks.py
1 1 # Mercurial extension to provide the 'hg bookmark' command
2 2 #
3 3 # Copyright 2008 David Soria Parra <dsp@php.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 '''track a line of development with movable markers
9 9
10 10 Bookmarks are local movable markers to changesets. Every bookmark
11 11 points to a changeset identified by its hash. If you commit a
12 12 changeset that is based on a changeset that has a bookmark on it, the
13 13 bookmark shifts to the new changeset.
14 14
15 15 It is possible to use bookmark names in every revision lookup (e.g. hg
16 16 merge, hg update).
17 17
18 18 By default, when several bookmarks point to the same changeset, they
19 19 will all move forward together. It is possible to obtain a more
20 20 git-like experience by adding the following configuration option to
21 21 your .hgrc::
22 22
23 23 [bookmarks]
24 24 track.current = True
25 25
26 26 This will cause Mercurial to track the bookmark that you are currently
27 27 using, and only update it. This is similar to git's approach to
28 28 branching.
29 29 '''
30 30
31 31 from mercurial.i18n import _
32 32 from mercurial.node import nullid, nullrev, hex, short
33 33 from mercurial import util, commands, localrepo, repair, extensions
34 34 import os
35 35
36 36 def parse(repo):
37 37 '''Parse .hg/bookmarks file and return a dictionary
38 38
39 39 Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values
40 40 in the .hg/bookmarks file. They are read by the parse() method and
41 41 returned as a dictionary with name => hash values.
42 42
43 43 The parsed dictionary is cached until a write() operation is done.
44 44 '''
45 45 try:
46 46 if repo._bookmarks:
47 47 return repo._bookmarks
48 48 repo._bookmarks = {}
49 49 for line in repo.opener('bookmarks'):
50 50 sha, refspec = line.strip().split(' ', 1)
51 51 repo._bookmarks[refspec] = repo.lookup(sha)
52 52 except:
53 53 pass
54 54 return repo._bookmarks
55 55
56 56 def write(repo, refs):
57 57 '''Write bookmarks
58 58
59 59 Write the given bookmark => hash dictionary to the .hg/bookmarks file
60 60 in a format equal to those of localtags.
61 61
62 62 We also store a backup of the previous state in undo.bookmarks that
63 63 can be copied back on rollback.
64 64 '''
65 65 if os.path.exists(repo.join('bookmarks')):
66 66 util.copyfile(repo.join('bookmarks'), repo.join('undo.bookmarks'))
67 67 if current(repo) not in refs:
68 68 setcurrent(repo, None)
69 69 wlock = repo.wlock()
70 70 try:
71 71 file = repo.opener('bookmarks', 'w', atomictemp=True)
72 72 for refspec, node in refs.iteritems():
73 73 file.write("%s %s\n" % (hex(node), refspec))
74 74 file.rename()
75 75 finally:
76 76 wlock.release()
77 77
78 78 def current(repo):
79 79 '''Get the current bookmark
80 80
81 81 If we use git-like branches we have a current bookmark that
82 82 we are on. This function returns the name of the bookmark. It
83 83 is stored in .hg/bookmarks.current
84 84 '''
85 85 if repo._bookmarkcurrent:
86 86 return repo._bookmarkcurrent
87 87 mark = None
88 88 if os.path.exists(repo.join('bookmarks.current')):
89 89 file = repo.opener('bookmarks.current')
90 90 # No readline() in posixfile_nt, reading everything is cheap
91 91 mark = (file.readlines() or [''])[0]
92 92 if mark == '':
93 93 mark = None
94 94 file.close()
95 95 repo._bookmarkcurrent = mark
96 96 return mark
97 97
98 98 def setcurrent(repo, mark):
99 99 '''Set the name of the bookmark that we are currently on
100 100
101 101 Set the name of the bookmark that we are on (hg update <bookmark>).
102 102 The name is recorded in .hg/bookmarks.current
103 103 '''
104 104 if current(repo) == mark:
105 105 return
106 106
107 107 refs = parse(repo)
108 108
109 109 # do not update if we are updating to a rev equal to the current bookmark
110 110 if (mark and mark not in refs and
111 111 current(repo) and refs[current(repo)] == repo.changectx('.').node()):
112 112 return
113 113 if mark not in refs:
114 114 mark = ''
115 115 wlock = repo.wlock()
116 116 try:
117 117 file = repo.opener('bookmarks.current', 'w', atomictemp=True)
118 118 file.write(mark)
119 119 file.rename()
120 120 finally:
121 121 wlock.release()
122 122 repo._bookmarkcurrent = mark
123 123
124 124 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
125 125 '''track a line of development with movable markers
126 126
127 127 Bookmarks are pointers to certain commits that move when
128 128 committing. Bookmarks are local. They can be renamed, copied and
129 129 deleted. It is possible to use bookmark names in 'hg merge' and
130 130 'hg update' to merge and update respectively to a given bookmark.
131 131
132 132 You can use 'hg bookmark NAME' to set a bookmark on the working
133 133 directory's parent revision with the given name. If you specify
134 134 a revision using -r REV (where REV may be an existing bookmark),
135 135 the bookmark is assigned to that revision.
136 136 '''
137 137 hexfn = ui.debugflag and hex or short
138 138 marks = parse(repo)
139 139 cur = repo.changectx('.').node()
140 140
141 141 if rename:
142 142 if rename not in marks:
143 143 raise util.Abort(_("a bookmark of this name does not exist"))
144 144 if mark in marks and not force:
145 145 raise util.Abort(_("a bookmark of the same name already exists"))
146 146 if mark is None:
147 147 raise util.Abort(_("new bookmark name required"))
148 148 marks[mark] = marks[rename]
149 149 del marks[rename]
150 150 if current(repo) == rename:
151 151 setcurrent(repo, mark)
152 152 write(repo, marks)
153 153 return
154 154
155 155 if delete:
156 156 if mark is None:
157 157 raise util.Abort(_("bookmark name required"))
158 158 if mark not in marks:
159 159 raise util.Abort(_("a bookmark of this name does not exist"))
160 160 if mark == current(repo):
161 161 setcurrent(repo, None)
162 162 del marks[mark]
163 163 write(repo, marks)
164 164 return
165 165
166 166 if mark != None:
167 167 if "\n" in mark:
168 168 raise util.Abort(_("bookmark name cannot contain newlines"))
169 169 mark = mark.strip()
170 170 if mark in marks and not force:
171 171 raise util.Abort(_("a bookmark of the same name already exists"))
172 172 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
173 173 and not force):
174 174 raise util.Abort(
175 175 _("a bookmark cannot have the name of an existing branch"))
176 176 if rev:
177 177 marks[mark] = repo.lookup(rev)
178 178 else:
179 179 marks[mark] = repo.changectx('.').node()
180 180 setcurrent(repo, mark)
181 181 write(repo, marks)
182 182 return
183 183
184 184 if mark is None:
185 185 if rev:
186 186 raise util.Abort(_("bookmark name required"))
187 187 if len(marks) == 0:
188 188 ui.status("no bookmarks set\n")
189 189 else:
190 190 for bmark, n in marks.iteritems():
191 191 if ui.configbool('bookmarks', 'track.current'):
192 192 prefix = (bmark == current(repo) and n == cur) and '*' or ' '
193 193 else:
194 194 prefix = (n == cur) and '*' or ' '
195 195
196 196 ui.write(" %s %-25s %d:%s\n" % (
197 197 prefix, bmark, repo.changelog.rev(n), hexfn(n)))
198 198 return
199 199
200 200 def _revstostrip(changelog, node):
201 201 srev = changelog.rev(node)
202 202 tostrip = [srev]
203 203 saveheads = []
204 204 for r in xrange(srev, len(changelog)):
205 205 parents = changelog.parentrevs(r)
206 206 if parents[0] in tostrip or parents[1] in tostrip:
207 207 tostrip.append(r)
208 208 if parents[1] != nullrev:
209 209 for p in parents:
210 210 if p not in tostrip and p > srev:
211 211 saveheads.append(p)
212 212 return [r for r in tostrip if r not in saveheads]
213 213
214 214 def strip(oldstrip, ui, repo, node, backup="all"):
215 215 """Strip bookmarks if revisions are stripped using
216 216 the mercurial.strip method. This usually happens during
217 217 qpush and qpop"""
218 218 revisions = _revstostrip(repo.changelog, node)
219 219 marks = parse(repo)
220 220 update = []
221 221 for mark, n in marks.iteritems():
222 222 if repo.changelog.rev(n) in revisions:
223 223 update.append(mark)
224 224 oldstrip(ui, repo, node, backup)
225 225 if len(update) > 0:
226 226 for m in update:
227 227 marks[m] = repo.changectx('.').node()
228 228 write(repo, marks)
229 229
230 230 def reposetup(ui, repo):
231 231 if not isinstance(repo, localrepo.localrepository):
232 232 return
233 233
234 234 # init a bookmark cache as otherwise we would get an infinite reading
235 235 # in lookup()
236 236 repo._bookmarks = None
237 237 repo._bookmarkcurrent = None
238 238
239 239 class bookmark_repo(repo.__class__):
240 240 def rollback(self):
241 241 if os.path.exists(self.join('undo.bookmarks')):
242 242 util.rename(self.join('undo.bookmarks'), self.join('bookmarks'))
243 243 return super(bookmark_repo, self).rollback()
244 244
245 245 def lookup(self, key):
246 246 if self._bookmarks is None:
247 247 self._bookmarks = parse(self)
248 248 if key in self._bookmarks:
249 249 key = self._bookmarks[key]
250 250 return super(bookmark_repo, self).lookup(key)
251 251
252 def commit(self, *k, **kw):
252 def commitctx(self, ctx, error=False):
253 253 """Add a revision to the repository and
254 254 move the bookmark"""
255 255 wlock = self.wlock() # do both commit and bookmark with lock held
256 256 try:
257 node = super(bookmark_repo, self).commit(*k, **kw)
257 node = super(bookmark_repo, self).commitctx(ctx, error)
258 258 if node is None:
259 259 return None
260 260 parents = self.changelog.parents(node)
261 261 if parents[1] == nullid:
262 262 parents = (parents[0],)
263 263 marks = parse(self)
264 264 update = False
265 for mark, n in marks.items():
266 265 if ui.configbool('bookmarks', 'track.current'):
267 if mark == current(self) and n in parents:
266 mark = current(self)
267 if mark and marks[mark] in parents:
268 268 marks[mark] = node
269 269 update = True
270 270 else:
271 for mark, n in marks.items():
271 272 if n in parents:
272 273 marks[mark] = node
273 274 update = True
274 275 if update:
275 276 write(self, marks)
276 277 return node
277 278 finally:
278 279 wlock.release()
279 280
280 281 def addchangegroup(self, source, srctype, url, emptyok=False):
281 282 parents = self.dirstate.parents()
282 283
283 284 result = super(bookmark_repo, self).addchangegroup(
284 285 source, srctype, url, emptyok)
285 286 if result > 1:
286 287 # We have more heads than before
287 288 return result
288 289 node = self.changelog.tip()
289 290 marks = parse(self)
290 291 update = False
292 if ui.configbool('bookmarks', 'track.current'):
293 mark = current(self)
294 if mark and marks[mark] in parents:
295 marks[mark] = node
296 update = True
297 else:
291 298 for mark, n in marks.items():
292 299 if n in parents:
293 300 marks[mark] = node
294 301 update = True
295 302 if update:
296 303 write(self, marks)
297 304 return result
298 305
299 306 def _findtags(self):
300 307 """Merge bookmarks with normal tags"""
301 308 (tags, tagtypes) = super(bookmark_repo, self)._findtags()
302 309 tags.update(parse(self))
303 310 return (tags, tagtypes)
304 311
305 312 repo.__class__ = bookmark_repo
306 313
307 314 def uisetup(ui):
308 315 extensions.wrapfunction(repair, "strip", strip)
309 316 if ui.configbool('bookmarks', 'track.current'):
310 317 extensions.wrapcommand(commands.table, 'update', updatecurbookmark)
311 318
312 319 def updatecurbookmark(orig, ui, repo, *args, **opts):
313 320 '''Set the current bookmark
314 321
315 322 If the user updates to a bookmark we update the .hg/bookmarks.current
316 323 file.
317 324 '''
318 325 res = orig(ui, repo, *args, **opts)
319 326 rev = opts['rev']
320 327 if not rev and len(args) > 0:
321 328 rev = args[0]
322 329 setcurrent(repo, rev)
323 330 return res
324 331
325 332 cmdtable = {
326 333 "bookmarks":
327 334 (bookmark,
328 335 [('f', 'force', False, _('force')),
329 336 ('r', 'rev', '', _('revision')),
330 337 ('d', 'delete', False, _('delete a given bookmark')),
331 338 ('m', 'rename', '', _('rename a given bookmark'))],
332 339 _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
333 340 }
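The substantive bookmarks change in this merge is in commitctx() and addchangegroup(): with bookmarks.track.current enabled, only the currently active bookmark is advanced, and only if it sits on a parent of the new node; otherwise every bookmark that sits on a parent moves forward. A standalone toy sketch of that rule (no repo objects; plain strings stand in for changeset nodes):

    # Toy version of the bookmark-advance rule used in commitctx() and
    # addchangegroup() above. marks maps bookmark name -> node; parents are
    # the parents of the newly added node.
    def advance(marks, parents, node, track_current, active):
        if track_current:
            # only the active bookmark moves, and only if it is on a parent
            if active and marks.get(active) in parents:
                marks[active] = node
        else:
            # every bookmark sitting on a parent moves forward
            for mark, n in list(marks.items()):
                if n in parents:
                    marks[mark] = node
        return marks

    marks = {'work': 'p1', 'release': 'p1'}
    print(advance(dict(marks), ['p1'], 'new', True, 'work'))   # only 'work' moves
    print(advance(dict(marks), ['p1'], 'new', False, None))    # both bookmarks move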
@@ -1,601 +1,601 @@ mercurial/dirstate.py
1 1 # dirstate.py - working directory tracking for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import nullid
9 9 from i18n import _
10 10 import util, ignore, osutil, parsers
11 11 import struct, os, stat, errno
12 12 import cStringIO, sys
13 13
14 14 _unknown = ('?', 0, 0, 0)
15 15 _format = ">cllll"
16 16 propertycache = util.propertycache
17 17
18 18 def _finddirs(path):
19 19 pos = path.rfind('/')
20 20 while pos != -1:
21 21 yield path[:pos]
22 22 pos = path.rfind('/', 0, pos)
23 23
24 24 def _incdirs(dirs, path):
25 25 for base in _finddirs(path):
26 26 if base in dirs:
27 27 dirs[base] += 1
28 28 return
29 29 dirs[base] = 1
30 30
31 31 def _decdirs(dirs, path):
32 32 for base in _finddirs(path):
33 33 if dirs[base] > 1:
34 34 dirs[base] -= 1
35 35 return
36 36 del dirs[base]
37 37
38 38 class dirstate(object):
39 39
40 40 def __init__(self, opener, ui, root):
41 41 self._opener = opener
42 42 self._root = root
43 43 self._rootdir = os.path.join(root, '')
44 44 self._dirty = False
45 45 self._dirtypl = False
46 46 self._ui = ui
47 47
48 48 @propertycache
49 49 def _map(self):
50 50 self._read()
51 51 return self._map
52 52
53 53 @propertycache
54 54 def _copymap(self):
55 55 self._read()
56 56 return self._copymap
57 57
58 58 @propertycache
59 59 def _foldmap(self):
60 60 f = {}
61 61 for name in self._map:
62 f[os.path.normcase(name)] = name
62 f[util.realpath(self._join(name))] = name
63 63 return f
64 64
65 65 @propertycache
66 66 def _branch(self):
67 67 try:
68 68 return self._opener("branch").read().strip() or "default"
69 69 except IOError:
70 70 return "default"
71 71
72 72 @propertycache
73 73 def _pl(self):
74 74 try:
75 75 st = self._opener("dirstate").read(40)
76 76 l = len(st)
77 77 if l == 40:
78 78 return st[:20], st[20:40]
79 79 elif l > 0 and l < 40:
80 80 raise util.Abort(_('working directory state appears damaged!'))
81 81 except IOError, err:
82 82 if err.errno != errno.ENOENT: raise
83 83 return [nullid, nullid]
84 84
85 85 @propertycache
86 86 def _dirs(self):
87 87 dirs = {}
88 88 for f,s in self._map.iteritems():
89 89 if s[0] != 'r':
90 90 _incdirs(dirs, f)
91 91 return dirs
92 92
93 93 @propertycache
94 94 def _ignore(self):
95 95 files = [self._join('.hgignore')]
96 96 for name, path in self._ui.configitems("ui"):
97 97 if name == 'ignore' or name.startswith('ignore.'):
98 98 files.append(os.path.expanduser(path))
99 99 return ignore.ignore(self._root, files, self._ui.warn)
100 100
101 101 @propertycache
102 102 def _slash(self):
103 103 return self._ui.configbool('ui', 'slash') and os.sep != '/'
104 104
105 105 @propertycache
106 106 def _checklink(self):
107 107 return util.checklink(self._root)
108 108
109 109 @propertycache
110 110 def _checkexec(self):
111 111 return util.checkexec(self._root)
112 112
113 113 @propertycache
114 114 def _checkcase(self):
115 115 return not util.checkcase(self._join('.hg'))
116 116
117 117 def _join(self, f):
118 118 # much faster than os.path.join()
119 119 # it's safe because f is always a relative path
120 120 return self._rootdir + f
121 121
122 122 def flagfunc(self, fallback):
123 123 if self._checklink:
124 124 if self._checkexec:
125 125 def f(x):
126 126 p = self._join(x)
127 127 if os.path.islink(p):
128 128 return 'l'
129 129 if util.is_exec(p):
130 130 return 'x'
131 131 return ''
132 132 return f
133 133 def f(x):
134 134 if os.path.islink(self._join(x)):
135 135 return 'l'
136 136 if 'x' in fallback(x):
137 137 return 'x'
138 138 return ''
139 139 return f
140 140 if self._checkexec:
141 141 def f(x):
142 142 if 'l' in fallback(x):
143 143 return 'l'
144 144 if util.is_exec(self._join(x)):
145 145 return 'x'
146 146 return ''
147 147 return f
148 148 return fallback
149 149
150 150 def getcwd(self):
151 151 cwd = os.getcwd()
152 152 if cwd == self._root: return ''
153 153 # self._root ends with a path separator if self._root is '/' or 'C:\'
154 154 rootsep = self._root
155 155 if not util.endswithsep(rootsep):
156 156 rootsep += os.sep
157 157 if cwd.startswith(rootsep):
158 158 return cwd[len(rootsep):]
159 159 else:
160 160 # we're outside the repo. return an absolute path.
161 161 return cwd
162 162
163 163 def pathto(self, f, cwd=None):
164 164 if cwd is None:
165 165 cwd = self.getcwd()
166 166 path = util.pathto(self._root, cwd, f)
167 167 if self._slash:
168 168 return util.normpath(path)
169 169 return path
170 170
171 171 def __getitem__(self, key):
172 172 ''' current states:
173 173 n normal
174 174 m needs merging
175 175 r marked for removal
176 176 a marked for addition
177 177 ? not tracked'''
178 178 return self._map.get(key, ("?",))[0]
179 179
180 180 def __contains__(self, key):
181 181 return key in self._map
182 182
183 183 def __iter__(self):
184 184 for x in sorted(self._map):
185 185 yield x
186 186
187 187 def parents(self):
188 188 return self._pl
189 189
190 190 def branch(self):
191 191 return self._branch
192 192
193 193 def setparents(self, p1, p2=nullid):
194 194 self._dirty = self._dirtypl = True
195 195 self._pl = p1, p2
196 196
197 197 def setbranch(self, branch):
198 198 self._branch = branch
199 199 self._opener("branch", "w").write(branch + '\n')
200 200
201 201 def _read(self):
202 202 self._map = {}
203 203 self._copymap = {}
204 204 try:
205 205 st = self._opener("dirstate").read()
206 206 except IOError, err:
207 207 if err.errno != errno.ENOENT: raise
208 208 return
209 209 if not st:
210 210 return
211 211
212 212 p = parsers.parse_dirstate(self._map, self._copymap, st)
213 213 if not self._dirtypl:
214 214 self._pl = p
215 215
216 216 def invalidate(self):
217 217 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
218 218 if a in self.__dict__:
219 219 delattr(self, a)
220 220 self._dirty = False
221 221
222 222 def copy(self, source, dest):
223 223 """Mark dest as a copy of source. Unmark dest if source is None.
224 224 """
225 225 if source == dest:
226 226 return
227 227 self._dirty = True
228 228 if source is not None:
229 229 self._copymap[dest] = source
230 230 elif dest in self._copymap:
231 231 del self._copymap[dest]
232 232
233 233 def copied(self, file):
234 234 return self._copymap.get(file, None)
235 235
236 236 def copies(self):
237 237 return self._copymap
238 238
239 239 def _droppath(self, f):
240 240 if self[f] not in "?r" and "_dirs" in self.__dict__:
241 241 _decdirs(self._dirs, f)
242 242
243 243 def _addpath(self, f, check=False):
244 244 oldstate = self[f]
245 245 if check or oldstate == "r":
246 246 if '\r' in f or '\n' in f:
247 247 raise util.Abort(
248 248 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
249 249 if f in self._dirs:
250 250 raise util.Abort(_('directory %r already in dirstate') % f)
251 251 # shadows
252 252 for d in _finddirs(f):
253 253 if d in self._dirs:
254 254 break
255 255 if d in self._map and self[d] != 'r':
256 256 raise util.Abort(
257 257 _('file %r in dirstate clashes with %r') % (d, f))
258 258 if oldstate in "?r" and "_dirs" in self.__dict__:
259 259 _incdirs(self._dirs, f)
260 260
261 261 def normal(self, f):
262 262 'mark a file normal and clean'
263 263 self._dirty = True
264 264 self._addpath(f)
265 265 s = os.lstat(self._join(f))
266 266 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
267 267 if f in self._copymap:
268 268 del self._copymap[f]
269 269
270 270 def normallookup(self, f):
271 271 'mark a file normal, but possibly dirty'
272 272 if self._pl[1] != nullid and f in self._map:
273 273 # if there is a merge going on and the file was either
274 274 # in state 'm' or dirty before being removed, restore that state.
275 275 entry = self._map[f]
276 276 if entry[0] == 'r' and entry[2] in (-1, -2):
277 277 source = self._copymap.get(f)
278 278 if entry[2] == -1:
279 279 self.merge(f)
280 280 elif entry[2] == -2:
281 281 self.normaldirty(f)
282 282 if source:
283 283 self.copy(source, f)
284 284 return
285 285 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
286 286 return
287 287 self._dirty = True
288 288 self._addpath(f)
289 289 self._map[f] = ('n', 0, -1, -1)
290 290 if f in self._copymap:
291 291 del self._copymap[f]
292 292
293 293 def normaldirty(self, f):
294 294 'mark a file normal, but dirty'
295 295 self._dirty = True
296 296 self._addpath(f)
297 297 self._map[f] = ('n', 0, -2, -1)
298 298 if f in self._copymap:
299 299 del self._copymap[f]
300 300
301 301 def add(self, f):
302 302 'mark a file added'
303 303 self._dirty = True
304 304 self._addpath(f, True)
305 305 self._map[f] = ('a', 0, -1, -1)
306 306 if f in self._copymap:
307 307 del self._copymap[f]
308 308
309 309 def remove(self, f):
310 310 'mark a file removed'
311 311 self._dirty = True
312 312 self._droppath(f)
313 313 size = 0
314 314 if self._pl[1] != nullid and f in self._map:
315 315 entry = self._map[f]
316 316 if entry[0] == 'm':
317 317 size = -1
318 318 elif entry[0] == 'n' and entry[2] == -2:
319 319 size = -2
320 320 self._map[f] = ('r', 0, size, 0)
321 321 if size == 0 and f in self._copymap:
322 322 del self._copymap[f]
323 323
324 324 def merge(self, f):
325 325 'mark a file merged'
326 326 self._dirty = True
327 327 s = os.lstat(self._join(f))
328 328 self._addpath(f)
329 329 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
330 330 if f in self._copymap:
331 331 del self._copymap[f]
332 332
333 333 def forget(self, f):
334 334 'forget a file'
335 335 self._dirty = True
336 336 try:
337 337 self._droppath(f)
338 338 del self._map[f]
339 339 except KeyError:
340 340 self._ui.warn(_("not in dirstate: %s\n") % f)
341 341
342 342 def _normalize(self, path, knownpath):
343 norm_path = os.path.normcase(path)
343 norm_path = util.realpath(self._join(path))
344 344 fold_path = self._foldmap.get(norm_path, None)
345 345 if fold_path is None:
346 346 if knownpath or not os.path.exists(os.path.join(self._root, path)):
347 347 fold_path = path
348 348 else:
349 349 fold_path = self._foldmap.setdefault(norm_path,
350 350 util.fspath(path, self._root))
351 351 return fold_path
352 352
353 353 def clear(self):
354 354 self._map = {}
355 355 if "_dirs" in self.__dict__:
356 356 delattr(self, "_dirs");
357 357 self._copymap = {}
358 358 self._pl = [nullid, nullid]
359 359 self._dirty = True
360 360
361 361 def rebuild(self, parent, files):
362 362 self.clear()
363 363 for f in files:
364 364 if 'x' in files.flags(f):
365 365 self._map[f] = ('n', 0777, -1, 0)
366 366 else:
367 367 self._map[f] = ('n', 0666, -1, 0)
368 368 self._pl = (parent, nullid)
369 369 self._dirty = True
370 370
371 371 def write(self):
372 372 if not self._dirty:
373 373 return
374 374 st = self._opener("dirstate", "w", atomictemp=True)
375 375
376 376 try:
377 377 gran = int(self._ui.config('dirstate', 'granularity', 1))
378 378 except ValueError:
379 379 gran = 1
380 380 limit = sys.maxint
381 381 if gran > 0:
382 382 limit = util.fstat(st).st_mtime - gran
383 383
384 384 cs = cStringIO.StringIO()
385 385 copymap = self._copymap
386 386 pack = struct.pack
387 387 write = cs.write
388 388 write("".join(self._pl))
389 389 for f, e in self._map.iteritems():
390 390 if f in copymap:
391 391 f = "%s\0%s" % (f, copymap[f])
392 392 if e[3] > limit and e[0] == 'n':
393 393 e = (e[0], 0, -1, -1)
394 394 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
395 395 write(e)
396 396 write(f)
397 397 st.write(cs.getvalue())
398 398 st.rename()
399 399 self._dirty = self._dirtypl = False
400 400
401 401 def _dirignore(self, f):
402 402 if f == '.':
403 403 return False
404 404 if self._ignore(f):
405 405 return True
406 406 for p in _finddirs(f):
407 407 if self._ignore(p):
408 408 return True
409 409 return False
410 410
411 411 def walk(self, match, unknown, ignored):
412 412 '''
413 413 walk recursively through the directory tree, finding all files
414 414 matched by the match function
415 415
416 416 results are yielded in a tuple (filename, stat), where stat
417 417 is the stat result if the file was found in the directory.
418 418 '''
419 419
420 420 def fwarn(f, msg):
421 421 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
422 422 return False
423 423
424 424 def badtype(mode):
425 425 kind = _('unknown')
426 426 if stat.S_ISCHR(mode): kind = _('character device')
427 427 elif stat.S_ISBLK(mode): kind = _('block device')
428 428 elif stat.S_ISFIFO(mode): kind = _('fifo')
429 429 elif stat.S_ISSOCK(mode): kind = _('socket')
430 430 elif stat.S_ISDIR(mode): kind = _('directory')
431 431 return _('unsupported file type (type is %s)') % kind
432 432
433 433 ignore = self._ignore
434 434 dirignore = self._dirignore
435 435 if ignored:
436 436 ignore = util.never
437 437 dirignore = util.never
438 438 elif not unknown:
439 439 # if unknown and ignored are False, skip step 2
440 440 ignore = util.always
441 441 dirignore = util.always
442 442
443 443 matchfn = match.matchfn
444 444 badfn = match.bad
445 445 dmap = self._map
446 446 normpath = util.normpath
447 447 listdir = osutil.listdir
448 448 lstat = os.lstat
449 449 getkind = stat.S_IFMT
450 450 dirkind = stat.S_IFDIR
451 451 regkind = stat.S_IFREG
452 452 lnkkind = stat.S_IFLNK
453 453 join = self._join
454 454 work = []
455 455 wadd = work.append
456 456
457 457 if self._checkcase:
458 458 normalize = self._normalize
459 459 else:
460 460 normalize = lambda x, y: x
461 461
462 462 exact = skipstep3 = False
463 463 if matchfn == match.exact: # match.exact
464 464 exact = True
465 465 dirignore = util.always # skip step 2
466 466 elif match.files() and not match.anypats(): # match.match, no patterns
467 467 skipstep3 = True
468 468
469 469 files = set(match.files())
470 470 if not files or '.' in files:
471 471 files = ['']
472 472 results = {'.hg': None}
473 473
474 474 # step 1: find all explicit files
475 475 for ff in sorted(files):
476 476 nf = normalize(normpath(ff), False)
477 477 if nf in results:
478 478 continue
479 479
480 480 try:
481 481 st = lstat(join(nf))
482 482 kind = getkind(st.st_mode)
483 483 if kind == dirkind:
484 484 skipstep3 = False
485 485 if nf in dmap:
486 486 #file deleted on disk but still in dirstate
487 487 results[nf] = None
488 488 match.dir(nf)
489 489 if not dirignore(nf):
490 490 wadd(nf)
491 491 elif kind == regkind or kind == lnkkind:
492 492 results[nf] = st
493 493 else:
494 494 badfn(ff, badtype(kind))
495 495 if nf in dmap:
496 496 results[nf] = None
497 497 except OSError, inst:
498 498 if nf in dmap: # does it exactly match a file?
499 499 results[nf] = None
500 500 else: # does it match a directory?
501 501 prefix = nf + "/"
502 502 for fn in dmap:
503 503 if fn.startswith(prefix):
504 504 match.dir(nf)
505 505 skipstep3 = False
506 506 break
507 507 else:
508 508 badfn(ff, inst.strerror)
509 509
510 510 # step 2: visit subdirectories
511 511 while work:
512 512 nd = work.pop()
513 513 skip = None
514 514 if nd == '.':
515 515 nd = ''
516 516 else:
517 517 skip = '.hg'
518 518 try:
519 519 entries = listdir(join(nd), stat=True, skip=skip)
520 520 except OSError, inst:
521 521 if inst.errno == errno.EACCES:
522 522 fwarn(nd, inst.strerror)
523 523 continue
524 524 raise
525 525 for f, kind, st in entries:
526 526 nf = normalize(nd and (nd + "/" + f) or f, True)
527 527 if nf not in results:
528 528 if kind == dirkind:
529 529 if not ignore(nf):
530 530 match.dir(nf)
531 531 wadd(nf)
532 532 if nf in dmap and matchfn(nf):
533 533 results[nf] = None
534 534 elif kind == regkind or kind == lnkkind:
535 535 if nf in dmap:
536 536 if matchfn(nf):
537 537 results[nf] = st
538 538 elif matchfn(nf) and not ignore(nf):
539 539 results[nf] = st
540 540 elif nf in dmap and matchfn(nf):
541 541 results[nf] = None
542 542
543 543 # step 3: report unseen items in the dmap hash
544 544 if not skipstep3 and not exact:
545 545 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
546 546 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
547 547 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
548 548 st = None
549 549 results[nf] = st
550 550
551 551 del results['.hg']
552 552 return results
553 553
554 554 def status(self, match, ignored, clean, unknown):
555 555 listignored, listclean, listunknown = ignored, clean, unknown
556 556 lookup, modified, added, unknown, ignored = [], [], [], [], []
557 557 removed, deleted, clean = [], [], []
558 558
559 559 dmap = self._map
560 560 ladd = lookup.append
561 561 madd = modified.append
562 562 aadd = added.append
563 563 uadd = unknown.append
564 564 iadd = ignored.append
565 565 radd = removed.append
566 566 dadd = deleted.append
567 567 cadd = clean.append
568 568
569 569 for fn, st in self.walk(match, listunknown, listignored).iteritems():
570 570 if fn not in dmap:
571 571 if (listignored or match.exact(fn)) and self._dirignore(fn):
572 572 if listignored:
573 573 iadd(fn)
574 574 elif listunknown:
575 575 uadd(fn)
576 576 continue
577 577
578 578 state, mode, size, time = dmap[fn]
579 579
580 580 if not st and state in "nma":
581 581 dadd(fn)
582 582 elif state == 'n':
583 583 if (size >= 0 and
584 584 (size != st.st_size
585 585 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
586 586 or size == -2
587 587 or fn in self._copymap):
588 588 madd(fn)
589 589 elif time != int(st.st_mtime):
590 590 ladd(fn)
591 591 elif listclean:
592 592 cadd(fn)
593 593 elif state == 'm':
594 594 madd(fn)
595 595 elif state == 'a':
596 596 aadd(fn)
597 597 elif state == 'r':
598 598 radd(fn)
599 599
600 600 return (lookup, modified, added, removed, deleted, unknown, ignored,
601 601 clean)
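The dirstate change above (in _foldmap and _normalize) keys the case-folding map by util.realpath of the joined path rather than by os.path.normcase of the bare name, so lookups agree with the actual on-disk spelling on case-insensitive filesystems. A rough self-contained illustration of the folding idea, with os.path.normcase standing in for the realpath-based key used in the real code:

    import os

    # Map a folded form of each tracked name back to its canonical dirstate
    # spelling, so 'README' and 'readme' resolve to the same entry on
    # case-insensitive filesystems. normcase stands in for the realpath key.
    tracked = ['ReadMe.txt', 'src/Main.py']
    foldmap = dict((os.path.normcase(name), name) for name in tracked)

    def normalize(path):
        """Return the tracked spelling for path, or path itself if unknown."""
        return foldmap.get(os.path.normcase(path), path)

    print(normalize('readme.TXT'))    # 'ReadMe.txt' where normcase lowercases
    print(normalize('untracked.txt')) # unchanged: not in the fold map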
@@ -1,363 +1,363 @@ mercurial/hg.py
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from lock import release
11 11 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 12 import lock, util, extensions, error
13 13 import merge as _merge
14 14 import verify as _verify
15 15 import errno, os, shutil
16 16
17 17 def _local(path):
18 18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 19 bundlerepo or localrepo)
20 20
21 21 def parseurl(url, revs=[]):
22 22 '''parse url#branch, returning url, branch + revs'''
23 23
24 24 if '#' not in url:
25 25 return url, (revs or None), revs and revs[-1] or None
26 26
27 27 url, branch = url.split('#', 1)
28 28 checkout = revs and revs[-1] or branch
29 29 return url, (revs or []) + [branch], checkout
30 30
31 31 schemes = {
32 32 'bundle': bundlerepo,
33 33 'file': _local,
34 34 'http': httprepo,
35 35 'https': httprepo,
36 36 'ssh': sshrepo,
37 37 'static-http': statichttprepo,
38 38 }
39 39
40 40 def _lookup(path):
41 41 scheme = 'file'
42 42 if path:
43 43 c = path.find(':')
44 44 if c > 0:
45 45 scheme = path[:c]
46 46 thing = schemes.get(scheme) or schemes['file']
47 47 try:
48 48 return thing(path)
49 49 except TypeError:
50 50 return thing
51 51
52 52 def islocal(repo):
53 53 '''return true if repo or path is local'''
54 54 if isinstance(repo, str):
55 55 try:
56 56 return _lookup(repo).islocal(repo)
57 57 except AttributeError:
58 58 return False
59 59 return repo.local()
60 60
61 61 def repository(ui, path='', create=False):
62 62 """return a repository object for the specified path"""
63 63 repo = _lookup(path).instance(ui, path, create)
64 64 ui = getattr(repo, "ui", ui)
65 65 for name, module in extensions.extensions():
66 66 hook = getattr(module, 'reposetup', None)
67 67 if hook:
68 68 hook(ui, repo)
69 69 return repo
70 70
71 71 def defaultdest(source):
72 72 '''return default destination of clone if none is given'''
73 73 return os.path.basename(os.path.normpath(source))
74 74
75 75 def localpath(path):
76 76 if path.startswith('file://localhost/'):
77 77 return path[16:]
78 78 if path.startswith('file://'):
79 79 return path[7:]
80 80 if path.startswith('file:'):
81 81 return path[5:]
82 82 return path
83 83
84 84 def share(ui, source, dest=None, update=True):
85 85 '''create a shared repository'''
86 86
87 87 if not islocal(source):
88 88 raise util.Abort(_('can only share local repositories'))
89 89
90 90 if not dest:
91 91 dest = os.path.basename(source)
92 92
93 93 if isinstance(source, str):
94 94 origsource = ui.expandpath(source)
95 95 source, rev, checkout = parseurl(origsource, '')
96 96 srcrepo = repository(ui, source)
97 97 else:
98 98 srcrepo = source
99 99 origsource = source = srcrepo.url()
100 100 checkout = None
101 101
102 102 sharedpath = srcrepo.sharedpath # if our source is already sharing
103 103
104 104 root = os.path.realpath(dest)
105 105 roothg = os.path.join(root, '.hg')
106 106
107 107 if os.path.exists(roothg):
108 108 raise util.Abort(_('destination already exists'))
109 109
110 110 if not os.path.isdir(root):
111 111 os.mkdir(root)
112 112 os.mkdir(roothg)
113 113
114 114 requirements = ''
115 115 try:
116 116 requirements = srcrepo.opener('requires').read()
117 117 except IOError, inst:
118 118 if inst.errno != errno.ENOENT:
119 119 raise
120 120
121 121 requirements += 'shared\n'
122 122 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
123 123 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
124 124
125 125 default = srcrepo.ui.config('paths', 'default')
126 126 if default:
127 127 f = file(os.path.join(roothg, 'hgrc'), 'w')
128 128 f.write('[paths]\ndefault = %s\n' % default)
129 129 f.close()
130 130
131 131 r = repository(ui, root)
132 132
133 133 if update:
134 134 r.ui.status(_("updating working directory\n"))
135 135 if update is not True:
136 136 checkout = update
137 137 for test in (checkout, 'default', 'tip'):
138 138 try:
139 139 uprev = r.lookup(test)
140 140 break
141 except:
141 except LookupError:
142 142 continue
143 143 _update(r, uprev)
144 144
145 145 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
146 146 stream=False):
147 147 """Make a copy of an existing repository.
148 148
149 149 Create a copy of an existing repository in a new directory. The
150 150 source and destination are URLs, as passed to the repository
151 151 function. Returns a pair of repository objects, the source and
152 152 newly created destination.
153 153
154 154 The location of the source is added to the new repository's
155 155 .hg/hgrc file, as the default to be used for future pulls and
156 156 pushes.
157 157
158 158 If an exception is raised, the partly cloned/updated destination
159 159 repository will be deleted.
160 160
161 161 Arguments:
162 162
163 163 source: repository object or URL
164 164
165 165 dest: URL of destination repository to create (defaults to base
166 166 name of source repository)
167 167
168 168 pull: always pull from source repository, even in local case
169 169
170 170 stream: stream raw data uncompressed from repository (fast over
171 171 LAN, slow over WAN)
172 172
173 173 rev: revision to clone up to (implies pull=True)
174 174
175 175 update: update working directory after clone completes, if
176 176 destination is local repository (True means update to default rev,
177 177 anything else is treated as a revision)
178 178 """
179 179
180 180 if isinstance(source, str):
181 181 origsource = ui.expandpath(source)
182 182 source, rev, checkout = parseurl(origsource, rev)
183 183 src_repo = repository(ui, source)
184 184 else:
185 185 src_repo = source
186 186 origsource = source = src_repo.url()
187 187 checkout = rev and rev[-1] or None
188 188
189 189 if dest is None:
190 190 dest = defaultdest(source)
191 191 ui.status(_("destination directory: %s\n") % dest)
192 192
193 193 dest = localpath(dest)
194 194 source = localpath(source)
195 195
196 196 if os.path.exists(dest):
197 197 if not os.path.isdir(dest):
198 198 raise util.Abort(_("destination '%s' already exists") % dest)
199 199 elif os.listdir(dest):
200 200 raise util.Abort(_("destination '%s' is not empty") % dest)
201 201
202 202 class DirCleanup(object):
203 203 def __init__(self, dir_):
204 204 self.rmtree = shutil.rmtree
205 205 self.dir_ = dir_
206 206 def close(self):
207 207 self.dir_ = None
208 208 def cleanup(self):
209 209 if self.dir_:
210 210 self.rmtree(self.dir_, True)
211 211
212 212 src_lock = dest_lock = dir_cleanup = None
213 213 try:
214 214 if islocal(dest):
215 215 dir_cleanup = DirCleanup(dest)
216 216
217 217 abspath = origsource
218 218 copy = False
219 219 if src_repo.cancopy() and islocal(dest):
220 220 abspath = os.path.abspath(util.drop_scheme('file', origsource))
221 221 copy = not pull and not rev
222 222
223 223 if copy:
224 224 try:
225 225 # we use a lock here because if we race with commit, we
226 226 # can end up with extra data in the cloned revlogs that's
227 227 # not pointed to by changesets, thus causing verify to
228 228 # fail
229 229 src_lock = src_repo.lock(wait=False)
230 230 except error.LockError:
231 231 copy = False
232 232
233 233 if copy:
234 234 src_repo.hook('preoutgoing', throw=True, source='clone')
235 235 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
236 236 if not os.path.exists(dest):
237 237 os.mkdir(dest)
238 238 else:
239 239 # only clean up directories we create ourselves
240 240 dir_cleanup.dir_ = hgdir
241 241 try:
242 242 dest_path = hgdir
243 243 os.mkdir(dest_path)
244 244 except OSError, inst:
245 245 if inst.errno == errno.EEXIST:
246 246 dir_cleanup.close()
247 247 raise util.Abort(_("destination '%s' already exists")
248 248 % dest)
249 249 raise
250 250
251 251 for f in src_repo.store.copylist():
252 252 src = os.path.join(src_repo.path, f)
253 253 dst = os.path.join(dest_path, f)
254 254 dstbase = os.path.dirname(dst)
255 255 if dstbase and not os.path.exists(dstbase):
256 256 os.mkdir(dstbase)
257 257 if os.path.exists(src):
258 258 if dst.endswith('data'):
259 259 # lock to avoid premature writing to the target
260 260 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
261 261 util.copyfiles(src, dst)
262 262
263 263 # we need to re-init the repo after manually copying the data
264 264 # into it
265 265 dest_repo = repository(ui, dest)
266 266 src_repo.hook('outgoing', source='clone', node='0'*40)
267 267 else:
268 268 try:
269 269 dest_repo = repository(ui, dest, create=True)
270 270 except OSError, inst:
271 271 if inst.errno == errno.EEXIST:
272 272 dir_cleanup.close()
273 273 raise util.Abort(_("destination '%s' already exists")
274 274 % dest)
275 275 raise
276 276
277 277 revs = None
278 278 if rev:
279 279 if 'lookup' not in src_repo.capabilities:
280 280 raise util.Abort(_("src repository does not support "
281 281 "revision lookup and so doesn't "
282 282 "support clone by revision"))
283 283 revs = [src_repo.lookup(r) for r in rev]
284 284 checkout = revs[0]
285 285 if dest_repo.local():
286 286 dest_repo.clone(src_repo, heads=revs, stream=stream)
287 287 elif src_repo.local():
288 288 src_repo.push(dest_repo, revs=revs)
289 289 else:
290 290 raise util.Abort(_("clone from remote to remote not supported"))
291 291
292 292 if dir_cleanup:
293 293 dir_cleanup.close()
294 294
295 295 if dest_repo.local():
296 296 fp = dest_repo.opener("hgrc", "w", text=True)
297 297 fp.write("[paths]\n")
298 298 fp.write("default = %s\n" % abspath)
299 299 fp.close()
300 300
301 301 dest_repo.ui.setconfig('paths', 'default', abspath)
302 302
303 303 if update:
304 304 dest_repo.ui.status(_("updating working directory\n"))
305 305 if update is not True:
306 306 checkout = update
307 307 for test in (checkout, 'default', 'tip'):
308 308 try:
309 309 uprev = dest_repo.lookup(test)
310 310 break
311 311 except:
312 312 continue
313 313 _update(dest_repo, uprev)
314 314
315 315 return src_repo, dest_repo
316 316 finally:
317 317 release(src_lock, dest_lock)
318 318 if dir_cleanup is not None:
319 319 dir_cleanup.cleanup()
320 320
321 321 def _showstats(repo, stats):
322 322 stats = ((stats[0], _("updated")),
323 323 (stats[1], _("merged")),
324 324 (stats[2], _("removed")),
325 325 (stats[3], _("unresolved")))
326 326 note = ", ".join([_("%d files %s") % s for s in stats])
327 327 repo.ui.status("%s\n" % note)
328 328
329 329 def update(repo, node):
330 330 """update the working directory to node, merging linear changes"""
331 331 stats = _merge.update(repo, node, False, False, None)
332 332 _showstats(repo, stats)
333 333 if stats[3]:
334 334 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
335 335 return stats[3] > 0
336 336
337 337 # naming conflict in clone()
338 338 _update = update
339 339
340 340 def clean(repo, node, show_stats=True):
341 341 """forcibly switch the working directory to node, clobbering changes"""
342 342 stats = _merge.update(repo, node, False, True, None)
343 343 if show_stats: _showstats(repo, stats)
344 344 return stats[3] > 0
345 345
346 346 def merge(repo, node, force=None, remind=True):
347 347 """branch merge with node, resolving changes"""
348 348 stats = _merge.update(repo, node, True, force, False)
349 349 _showstats(repo, stats)
350 350 if stats[3]:
351 351 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
352 352 "or 'hg up --clean' to abandon\n"))
353 353 elif remind:
354 354 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
355 355 return stats[3] > 0
356 356
357 357 def revert(repo, node, choose):
358 358 """revert changes to revision in node without updating dirstate"""
359 359 return _merge.update(repo, node, False, True, choose)[3] > 0
360 360
361 361 def verify(repo):
362 362 """verify the consistency of a repository"""
363 363 return _verify.verify(repo)
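Nothing changes in hg.py above except the bare except in share() becoming except LookupError. Since parseurl() is easy to misread, here is a quick behaviour sketch of its documented return values (the function body is copied from above; the URLs are made-up examples):

    # parseurl(url, revs) as defined above: split an optional '#branch'
    # fragment off the URL and fold it into the revision list.
    def parseurl(url, revs=[]):
        if '#' not in url:
            return url, (revs or None), revs and revs[-1] or None
        url, branch = url.split('#', 1)
        checkout = revs and revs[-1] or branch
        return url, (revs or []) + [branch], checkout

    print(parseurl('http://example.com/repo'))
    # ('http://example.com/repo', None, None)
    print(parseurl('http://example.com/repo#stable'))
    # ('http://example.com/repo', ['stable'], 'stable')
    print(parseurl('http://example.com/repo#stable', ['1.0']))
    # ('http://example.com/repo', ['1.0', 'stable'], '1.0')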
@@ -1,187 +1,190 @@ mercurial/mail.py
1 1 # mail.py - mail sending bits for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import util, encoding
10 10 import os, smtplib, socket, quopri
11 11 import email.Header, email.MIMEText, email.Utils
12 12
13 13 def _smtp(ui):
14 14 '''build an smtp connection and return a function to send mail'''
15 15 local_hostname = ui.config('smtp', 'local_hostname')
16 16 s = smtplib.SMTP(local_hostname=local_hostname)
17 17 mailhost = ui.config('smtp', 'host')
18 18 if not mailhost:
19 19 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
20 20 mailport = int(ui.config('smtp', 'port', 25))
21 21 ui.note(_('sending mail: smtp host %s, port %s\n') %
22 22 (mailhost, mailport))
23 23 s.connect(host=mailhost, port=mailport)
24 24 if ui.configbool('smtp', 'tls'):
25 25 if not hasattr(socket, 'ssl'):
26 26 raise util.Abort(_("can't use TLS: Python SSL support "
27 27 "not installed"))
28 28 ui.note(_('(using tls)\n'))
29 29 s.ehlo()
30 30 s.starttls()
31 31 s.ehlo()
32 32 username = ui.config('smtp', 'username')
33 33 password = ui.config('smtp', 'password')
34 34 if username and not password:
35 35 password = ui.getpass()
36 36 if username and password:
37 37 ui.note(_('(authenticating to mail server as %s)\n') %
38 38 (username))
39 try:
39 40 s.login(username, password)
41 except smtplib.SMTPException, inst:
42 raise util.Abort(inst)
40 43
41 44 def send(sender, recipients, msg):
42 45 try:
43 46 return s.sendmail(sender, recipients, msg)
44 47 except smtplib.SMTPRecipientsRefused, inst:
45 48 recipients = [r[1] for r in inst.recipients.values()]
46 49 raise util.Abort('\n' + '\n'.join(recipients))
47 50 except smtplib.SMTPException, inst:
48 51 raise util.Abort(inst)
49 52
50 53 return send
51 54
52 55 def _sendmail(ui, sender, recipients, msg):
53 56 '''send mail using sendmail.'''
54 57 program = ui.config('email', 'method')
55 58 cmdline = '%s -f %s %s' % (program, util.email(sender),
56 59 ' '.join(map(util.email, recipients)))
57 60 ui.note(_('sending mail: %s\n') % cmdline)
58 61 fp = util.popen(cmdline, 'w')
59 62 fp.write(msg)
60 63 ret = fp.close()
61 64 if ret:
62 65 raise util.Abort('%s %s' % (
63 66 os.path.basename(program.split(None, 1)[0]),
64 67 util.explain_exit(ret)[0]))
65 68
66 69 def connect(ui):
67 70 '''make a mail connection. return a function to send mail.
68 71 call as sendmail(sender, list-of-recipients, msg).'''
69 72 if ui.config('email', 'method', 'smtp') == 'smtp':
70 73 return _smtp(ui)
71 74 return lambda s, r, m: _sendmail(ui, s, r, m)
72 75
73 76 def sendmail(ui, sender, recipients, msg):
74 77 send = connect(ui)
75 78 return send(sender, recipients, msg)
76 79
77 80 def validateconfig(ui):
78 81 '''determine if we have enough config data to try sending email.'''
79 82 method = ui.config('email', 'method', 'smtp')
80 83 if method == 'smtp':
81 84 if not ui.config('smtp', 'host'):
82 85 raise util.Abort(_('smtp specified as email transport, '
83 86 'but no smtp host configured'))
84 87 else:
85 88 if not util.find_exe(method):
86 89 raise util.Abort(_('%r specified as email transport, '
87 90 'but not in PATH') % method)
88 91
89 92 def mimetextpatch(s, subtype='plain', display=False):
90 93 '''If the patch is in utf-8, transfer-encode it.'''
91 94
92 95 enc = None
93 96 for line in s.splitlines():
94 97 if len(line) > 950:
95 98 s = quopri.encodestring(s)
96 99 enc = "quoted-printable"
97 100 break
98 101
99 102 cs = 'us-ascii'
100 103 if not display:
101 104 try:
102 105 s.decode('us-ascii')
103 106 except UnicodeDecodeError:
104 107 try:
105 108 s.decode('utf-8')
106 109 cs = 'utf-8'
107 110 except UnicodeDecodeError:
108 111 # We'll go with us-ascii as a fallback.
109 112 pass
110 113
111 114 msg = email.MIMEText.MIMEText(s, subtype, cs)
112 115 if enc:
113 116 del msg['Content-Transfer-Encoding']
114 117 msg['Content-Transfer-Encoding'] = enc
115 118 return msg
116 119
117 120 def _charsets(ui):
118 121 '''Obtains charsets to send mail parts not containing patches.'''
119 122 charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
120 123 fallbacks = [encoding.fallbackencoding.lower(),
121 124 encoding.encoding.lower(), 'utf-8']
122 125 for cs in fallbacks: # find unique charsets while keeping order
123 126 if cs not in charsets:
124 127 charsets.append(cs)
125 128 return [cs for cs in charsets if not cs.endswith('ascii')]
126 129
127 130 def _encode(ui, s, charsets):
128 131 '''Returns (converted) string, charset tuple.
129 132 Finds out best charset by cycling through sendcharsets in descending
130 133 order. Tries both encoding and fallbackencoding for input. Only as
131 134 last resort send as is in fake ascii.
132 135 Caveat: Do not use for mail parts containing patches!'''
133 136 try:
134 137 s.decode('ascii')
135 138 except UnicodeDecodeError:
136 139 sendcharsets = charsets or _charsets(ui)
137 140 for ics in (encoding.encoding, encoding.fallbackencoding):
138 141 try:
139 142 u = s.decode(ics)
140 143 except UnicodeDecodeError:
141 144 continue
142 145 for ocs in sendcharsets:
143 146 try:
144 147 return u.encode(ocs), ocs
145 148 except UnicodeEncodeError:
146 149 pass
147 150 except LookupError:
148 151 ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
149 152 # if ascii, or all conversion attempts fail, send (broken) ascii
150 153 return s, 'us-ascii'
151 154
152 155 def headencode(ui, s, charsets=None, display=False):
153 156 '''Returns RFC-2047 compliant header from given string.'''
154 157 if not display:
155 158 # split into words?
156 159 s, cs = _encode(ui, s, charsets)
157 160 return str(email.Header.Header(s, cs))
158 161 return s
159 162
160 163 def addressencode(ui, address, charsets=None, display=False):
161 164 '''Turns address into RFC-2047 compliant header.'''
162 165 if display or not address:
163 166 return address or ''
164 167 name, addr = email.Utils.parseaddr(address)
165 168 name = headencode(ui, name, charsets)
166 169 try:
167 170 acc, dom = addr.split('@')
168 171 acc = acc.encode('ascii')
169 172 dom = dom.encode('idna')
170 173 addr = '%s@%s' % (acc, dom)
171 174 except UnicodeDecodeError:
172 175 raise util.Abort(_('invalid email address: %s') % addr)
173 176 except ValueError:
174 177 try:
175 178 # too strict?
176 179 addr = addr.encode('ascii')
177 180 except UnicodeDecodeError:
178 181 raise util.Abort(_('invalid local address: %s') % addr)
179 182 return email.Utils.formataddr((name, addr))
180 183
181 184 def mimeencode(ui, s, charsets=None, display=False):
182 185 '''creates mime text object, encodes it if needed, and sets
183 186 charset and transfer-encoding accordingly.'''
184 187 cs = 'us-ascii'
185 188 if not display:
186 189 s, cs = _encode(ui, s, charsets)
187 190 return email.MIMEText.MIMEText(s, 'plain', cs)
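The mail.py change above wraps s.login() so that an SMTP authentication failure is reported through util.Abort instead of an unhandled smtplib traceback. A minimal sketch of the same pattern outside Mercurial (RuntimeError stands in for util.Abort; server is any smtplib.SMTP connection):

    import smtplib

    # Convert an SMTP login failure into a clean, user-facing error,
    # mirroring the try/except added around s.login() above.
    # RuntimeError stands in here for mercurial.util.Abort.
    def login_or_abort(server, username, password):
        try:
            server.login(username, password)
        except smtplib.SMTPException as inst:
            raise RuntimeError(inst)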
@@ -1,1439 +1,1440 @@ mercurial/patch.py
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import hex, nullid, short
11 11 import base85, cmdutil, mdiff, util, diffhelpers, copies
12 12 import cStringIO, email.Parser, os, re, math
13 13 import sys, tempfile, zlib
14 14
15 15 gitre = re.compile('diff --git a/(.*) b/(.*)')
16 16
17 17 class PatchError(Exception):
18 18 pass
19 19
20 20 class NoHunks(PatchError):
21 21 pass
22 22
23 23 # helper functions
24 24
25 25 def copyfile(src, dst, basedir):
26 26 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
27 27 if os.path.exists(absdst):
28 28 raise util.Abort(_("cannot create %s: destination already exists") %
29 29 dst)
30 30
31 31 dstdir = os.path.dirname(absdst)
32 32 if dstdir and not os.path.isdir(dstdir):
33 33 try:
34 34 os.makedirs(dstdir)
35 35 except IOError:
36 36 raise util.Abort(
37 37 _("cannot create %s: unable to create destination directory")
38 38 % dst)
39 39
40 40 util.copyfile(abssrc, absdst)
41 41
42 42 # public functions
43 43
44 44 def extract(ui, fileobj):
45 45 '''extract patch from data read from fileobj.
46 46
47 47 patch can be a normal patch or contained in an email message.
48 48
49 49 return tuple (filename, message, user, date, node, p1, p2).
50 50 Any item in the returned tuple can be None. If filename is None,
51 51 fileobj did not contain a patch. Caller must unlink filename when done.'''
52 52
53 53 # attempt to detect the start of a patch
54 54 # (this heuristic is borrowed from quilt)
55 55 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
56 56 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
57 57 r'(---|\*\*\*)[ \t])', re.MULTILINE)
58 58
59 59 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
60 60 tmpfp = os.fdopen(fd, 'w')
61 61 try:
62 62 msg = email.Parser.Parser().parse(fileobj)
63 63
64 64 subject = msg['Subject']
65 65 user = msg['From']
66 66 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
67 67 # should try to parse msg['Date']
68 68 date = None
69 69 nodeid = None
70 70 branch = None
71 71 parents = []
72 72
73 73 if subject:
74 74 if subject.startswith('[PATCH'):
75 75 pend = subject.find(']')
76 76 if pend >= 0:
77 77 subject = subject[pend+1:].lstrip()
78 78 subject = subject.replace('\n\t', ' ')
79 79 ui.debug('Subject: %s\n' % subject)
80 80 if user:
81 81 ui.debug('From: %s\n' % user)
82 82 diffs_seen = 0
83 83 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
84 84 message = ''
85 85 for part in msg.walk():
86 86 content_type = part.get_content_type()
87 87 ui.debug('Content-Type: %s\n' % content_type)
88 88 if content_type not in ok_types:
89 89 continue
90 90 payload = part.get_payload(decode=True)
91 91 m = diffre.search(payload)
92 92 if m:
93 93 hgpatch = False
94 94 ignoretext = False
95 95
96 96 ui.debug(_('found patch at byte %d\n') % m.start(0))
97 97 diffs_seen += 1
98 98 cfp = cStringIO.StringIO()
99 99 for line in payload[:m.start(0)].splitlines():
100 100 if line.startswith('# HG changeset patch'):
101 101 ui.debug(_('patch generated by hg export\n'))
102 102 hgpatch = True
103 103 # drop earlier commit message content
104 104 cfp.seek(0)
105 105 cfp.truncate()
106 106 subject = None
107 107 elif hgpatch:
108 108 if line.startswith('# User '):
109 109 user = line[7:]
110 110 ui.debug('From: %s\n' % user)
111 111 elif line.startswith("# Date "):
112 112 date = line[7:]
113 113 elif line.startswith("# Branch "):
114 114 branch = line[9:]
115 115 elif line.startswith("# Node ID "):
116 116 nodeid = line[10:]
117 117 elif line.startswith("# Parent "):
118 118 parents.append(line[10:])
119 119 elif line == '---' and gitsendmail:
120 120 ignoretext = True
121 121 if not line.startswith('# ') and not ignoretext:
122 122 cfp.write(line)
123 123 cfp.write('\n')
124 124 message = cfp.getvalue()
125 125 if tmpfp:
126 126 tmpfp.write(payload)
127 127 if not payload.endswith('\n'):
128 128 tmpfp.write('\n')
129 129 elif not diffs_seen and message and content_type == 'text/plain':
130 130 message += '\n' + payload
131 131 except:
132 132 tmpfp.close()
133 133 os.unlink(tmpname)
134 134 raise
135 135
136 136 if subject and not message.startswith(subject):
137 137 message = '%s\n%s' % (subject, message)
138 138 tmpfp.close()
139 139 if not diffs_seen:
140 140 os.unlink(tmpname)
141 141 return None, message, user, date, branch, None, None, None
142 142 p1 = parents and parents.pop(0) or None
143 143 p2 = parents and parents.pop(0) or None
144 144 return tmpname, message, user, date, branch, nodeid, p1, p2
145 145
146 146 GP_PATCH = 1 << 0 # we have to run patch
147 147 GP_FILTER = 1 << 1 # there's some copy/rename operation
148 148 GP_BINARY = 1 << 2 # there's a binary patch
149 149
150 150 class patchmeta(object):
151 151 """Patched file metadata
152 152
153 153 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
154 154 or COPY. 'path' is patched file path. 'oldpath' is set to the
155 155 origin file when 'op' is either COPY or RENAME, None otherwise. If
156 156 file mode is changed, 'mode' is a tuple (islink, isexec) where
157 157 'islink' is True if the file is a symlink and 'isexec' is True if
158 158 the file is executable. Otherwise, 'mode' is None.
159 159 """
160 160 def __init__(self, path):
161 161 self.path = path
162 162 self.oldpath = None
163 163 self.mode = None
164 164 self.op = 'MODIFY'
165 165 self.lineno = 0
166 166 self.binary = False
167 167
168 168 def setmode(self, mode):
169 169 islink = mode & 020000
170 170 isexec = mode & 0100
171 171 self.mode = (islink, isexec)
172 172
173 173 def readgitpatch(lr):
174 174 """extract git-style metadata about patches from <patchname>"""
175 175
176 176 # Filter patch for git information
177 177 gp = None
178 178 gitpatches = []
179 179 # Can have a git patch with only metadata, causing patch to complain
180 180 dopatch = 0
181 181
182 182 lineno = 0
183 183 for line in lr:
184 184 lineno += 1
185 line = line.rstrip(' \r\n')
185 186 if line.startswith('diff --git'):
186 187 m = gitre.match(line)
187 188 if m:
188 189 if gp:
189 190 gitpatches.append(gp)
190 191 src, dst = m.group(1, 2)
191 192 gp = patchmeta(dst)
192 193 gp.lineno = lineno
193 194 elif gp:
194 195 if line.startswith('--- '):
195 196 if gp.op in ('COPY', 'RENAME'):
196 197 dopatch |= GP_FILTER
197 198 gitpatches.append(gp)
198 199 gp = None
199 200 dopatch |= GP_PATCH
200 201 continue
201 202 if line.startswith('rename from '):
202 203 gp.op = 'RENAME'
203 gp.oldpath = line[12:].rstrip()
204 gp.oldpath = line[12:]
204 205 elif line.startswith('rename to '):
205 gp.path = line[10:].rstrip()
206 gp.path = line[10:]
206 207 elif line.startswith('copy from '):
207 208 gp.op = 'COPY'
208 gp.oldpath = line[10:].rstrip()
209 gp.oldpath = line[10:]
209 210 elif line.startswith('copy to '):
210 gp.path = line[8:].rstrip()
211 gp.path = line[8:]
211 212 elif line.startswith('deleted file'):
212 213 gp.op = 'DELETE'
213 214 # is the deleted file a symlink?
214 gp.setmode(int(line.rstrip()[-6:], 8))
215 gp.setmode(int(line[-6:], 8))
215 216 elif line.startswith('new file mode '):
216 217 gp.op = 'ADD'
217 gp.setmode(int(line.rstrip()[-6:], 8))
218 gp.setmode(int(line[-6:], 8))
218 219 elif line.startswith('new mode '):
219 gp.setmode(int(line.rstrip()[-6:], 8))
220 gp.setmode(int(line[-6:], 8))
220 221 elif line.startswith('GIT binary patch'):
221 222 dopatch |= GP_BINARY
222 223 gp.binary = True
223 224 if gp:
224 225 gitpatches.append(gp)
225 226
226 227 if not gitpatches:
227 228 dopatch = GP_PATCH
228 229
229 230 return (dopatch, gitpatches)
230 231
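For illustration, a minimal sketch of how readgitpatch() consumes the extended header of a git-style diff. It assumes gitre (compiled elsewhere in this module) captures the 'a/<path> b/<path>' pair of the 'diff --git' line, and it feeds a plain list of strings, which works because the function only iterates over its argument:

    lines = [
        'diff --git a/old.txt b/new.txt\n',
        'rename from old.txt\n',
        'rename to new.txt\n',
        '--- a/old.txt\n',
        '+++ b/new.txt\n',
    ]
    dopatch, gps = readgitpatch(lines)
    # gps[0].op == 'RENAME', gps[0].oldpath == 'old.txt', gps[0].path == 'new.txt'
    # dopatch has both GP_PATCH and GP_FILTER set: the hunks must be applied
    # and the copy/rename pre-pass must run.
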
231 232 class linereader(object):
232 233 # simple class to allow pushing lines back into the input stream
233 234 def __init__(self, fp, textmode=False):
234 235 self.fp = fp
235 236 self.buf = []
236 237 self.textmode = textmode
237 238
238 239 def push(self, line):
239 240 if line is not None:
240 241 self.buf.append(line)
241 242
242 243 def readline(self):
243 244 if self.buf:
244 245 l = self.buf[0]
245 246 del self.buf[0]
246 247 return l
247 248 l = self.fp.readline()
248 249 if self.textmode and l.endswith('\r\n'):
249 250 l = l[:-2] + '\n'
250 251 return l
251 252
252 253 def __iter__(self):
253 254 while 1:
254 255 l = self.readline()
255 256 if not l:
256 257 break
257 258 yield l
258 259
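A quick sketch of the push-back behaviour, using an in-memory file (cStringIO is already used elsewhere in this module):

    lr = linereader(cStringIO.StringIO('one\ntwo\n'))
    first = lr.readline()          # 'one\n'
    lr.push(first)                 # hand it back to the next reader
    assert list(lr) == ['one\n', 'two\n']
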
259 260 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
260 261 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
261 262 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
262 263
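For reference, a sketch of what the unified hunk header pattern captures:

    m = unidesc.match('@@ -12,5 +14,6 @@')
    # m.group(1, 4) == ('12', '14')   old and new start lines
    # m.group(3, 6) == ('5', '6')     old and new lengths (None when the ',len' part is omitted)
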
263 264 class patchfile(object):
264 265 def __init__(self, ui, fname, opener, missing=False, eol=None):
265 266 self.fname = fname
266 267 self.eol = eol
267 268 self.opener = opener
268 269 self.ui = ui
269 270 self.lines = []
270 271 self.exists = False
271 272 self.missing = missing
272 273 if not missing:
273 274 try:
274 275 self.lines = self.readlines(fname)
275 276 self.exists = True
276 277 except IOError:
277 278 pass
278 279 else:
279 280 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
280 281
281 282 self.hash = {}
282 283 self.dirty = 0
283 284 self.offset = 0
284 285 self.rej = []
285 286 self.fileprinted = False
286 287 self.printfile(False)
287 288 self.hunks = 0
288 289
289 290 def readlines(self, fname):
290 291 fp = self.opener(fname, 'r')
291 292 try:
292 293 return list(linereader(fp, self.eol is not None))
293 294 finally:
294 295 fp.close()
295 296
296 297 def writelines(self, fname, lines):
297 298 fp = self.opener(fname, 'w')
298 299 try:
299 300 if self.eol and self.eol != '\n':
300 301 for l in lines:
301 302 if l and l[-1] == '\n':
302 303 l = l[:-1] + self.eol
303 304 fp.write(l)
304 305 else:
305 306 fp.writelines(lines)
306 307 finally:
307 308 fp.close()
308 309
309 310 def unlink(self, fname):
310 311 os.unlink(fname)
311 312
312 313 def printfile(self, warn):
313 314 if self.fileprinted:
314 315 return
315 316 if warn or self.ui.verbose:
316 317 self.fileprinted = True
317 318 s = _("patching file %s\n") % self.fname
318 319 if warn:
319 320 self.ui.warn(s)
320 321 else:
321 322 self.ui.note(s)
322 323
323 324
324 325 def findlines(self, l, linenum):
325 326 # looks through the hash and finds candidate lines. The
326 327 # result is a list of line numbers sorted based on distance
327 328 # from linenum
328 329
329 330 try:
330 331 cand = self.hash[l]
331 332 except:
332 333 return []
333 334
334 335 if len(cand) > 1:
335 336 # resort our list of potentials forward then back.
336 337 cand.sort(key=lambda x: abs(x - linenum))
337 338 return cand
338 339
339 340 def hashlines(self):
340 341 self.hash = {}
341 342 for x, s in enumerate(self.lines):
342 343 self.hash.setdefault(s, []).append(x)
343 344
344 345 def write_rej(self):
345 346 # our rejects are a little different from patch(1). This always
346 347 # creates rejects in the same form as the original patch. A file
347 348 # header is inserted so that you can run the reject through patch again
348 349 # without having to type the filename.
349 350
350 351 if not self.rej:
351 352 return
352 353
353 354 fname = self.fname + ".rej"
354 355 self.ui.warn(
355 356 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
356 357 (len(self.rej), self.hunks, fname))
357 358
358 359 def rejlines():
359 360 base = os.path.basename(self.fname)
360 361 yield "--- %s\n+++ %s\n" % (base, base)
361 362 for x in self.rej:
362 363 for l in x.hunk:
363 364 yield l
364 365 if l[-1] != '\n':
365 366 yield "\n\ No newline at end of file\n"
366 367
367 368 self.writelines(fname, rejlines())
368 369
369 370 def write(self, dest=None):
370 371 if not self.dirty:
371 372 return
372 373 if not dest:
373 374 dest = self.fname
374 375 self.writelines(dest, self.lines)
375 376
376 377 def close(self):
377 378 self.write()
378 379 self.write_rej()
379 380
380 381 def apply(self, h, reverse):
381 382 if not h.complete():
382 383 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
383 384 (h.number, h.desc, len(h.a), h.lena, len(h.b),
384 385 h.lenb))
385 386
386 387 self.hunks += 1
387 388 if reverse:
388 389 h.reverse()
389 390
390 391 if self.missing:
391 392 self.rej.append(h)
392 393 return -1
393 394
394 395 if self.exists and h.createfile():
395 396 self.ui.warn(_("file %s already exists\n") % self.fname)
396 397 self.rej.append(h)
397 398 return -1
398 399
399 400 if isinstance(h, githunk):
400 401 if h.rmfile():
401 402 self.unlink(self.fname)
402 403 else:
403 404 self.lines[:] = h.new()
404 405 self.offset += len(h.new())
405 406 self.dirty = 1
406 407 return 0
407 408
408 409 # fast case first, no offsets, no fuzz
409 410 old = h.old()
410 411 # patch starts counting at 1 unless we are adding the file
411 412 if h.starta == 0:
412 413 start = 0
413 414 else:
414 415 start = h.starta + self.offset - 1
415 416 orig_start = start
416 417 if diffhelpers.testhunk(old, self.lines, start) == 0:
417 418 if h.rmfile():
418 419 self.unlink(self.fname)
419 420 else:
420 421 self.lines[start : start + h.lena] = h.new()
421 422 self.offset += h.lenb - h.lena
422 423 self.dirty = 1
423 424 return 0
424 425
425 426 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
426 427 self.hashlines()
427 428 if h.hunk[-1][0] != ' ':
428 429 # if the hunk tried to put something at the bottom of the file
429 430 # override the start line and use eof here
430 431 search_start = len(self.lines)
431 432 else:
432 433 search_start = orig_start
433 434
434 435 for fuzzlen in xrange(3):
435 436 for toponly in [ True, False ]:
436 437 old = h.old(fuzzlen, toponly)
437 438
438 439 cand = self.findlines(old[0][1:], search_start)
439 440 for l in cand:
440 441 if diffhelpers.testhunk(old, self.lines, l) == 0:
441 442 newlines = h.new(fuzzlen, toponly)
442 443 self.lines[l : l + len(old)] = newlines
443 444 self.offset += len(newlines) - len(old)
444 445 self.dirty = 1
445 446 if fuzzlen:
446 447 fuzzstr = "with fuzz %d " % fuzzlen
447 448 f = self.ui.warn
448 449 self.printfile(True)
449 450 else:
450 451 fuzzstr = ""
451 452 f = self.ui.note
452 453 offset = l - orig_start - fuzzlen
453 454 if offset == 1:
454 455 msg = _("Hunk #%d succeeded at %d %s"
455 456 "(offset %d line).\n")
456 457 else:
457 458 msg = _("Hunk #%d succeeded at %d %s"
458 459 "(offset %d lines).\n")
459 460 f(msg % (h.number, l+1, fuzzstr, offset))
460 461 return fuzzlen
461 462 self.printfile(True)
462 463 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
463 464 self.rej.append(h)
464 465 return -1
465 466
466 467 class hunk(object):
467 468 def __init__(self, desc, num, lr, context, create=False, remove=False):
468 469 self.number = num
469 470 self.desc = desc
470 471 self.hunk = [ desc ]
471 472 self.a = []
472 473 self.b = []
473 474 if context:
474 475 self.read_context_hunk(lr)
475 476 else:
476 477 self.read_unified_hunk(lr)
477 478 self.create = create
478 479 self.remove = remove and not create
479 480
480 481 def read_unified_hunk(self, lr):
481 482 m = unidesc.match(self.desc)
482 483 if not m:
483 484 raise PatchError(_("bad hunk #%d") % self.number)
484 485 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
485 486 if self.lena is None:
486 487 self.lena = 1
487 488 else:
488 489 self.lena = int(self.lena)
489 490 if self.lenb is None:
490 491 self.lenb = 1
491 492 else:
492 493 self.lenb = int(self.lenb)
493 494 self.starta = int(self.starta)
494 495 self.startb = int(self.startb)
495 496 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
496 497 # if we hit eof before finishing out the hunk, the last line will
497 498 # be zero length. Let's try to fix it up.
498 499 while len(self.hunk[-1]) == 0:
499 500 del self.hunk[-1]
500 501 del self.a[-1]
501 502 del self.b[-1]
502 503 self.lena -= 1
503 504 self.lenb -= 1
504 505
505 506 def read_context_hunk(self, lr):
506 507 self.desc = lr.readline()
507 508 m = contextdesc.match(self.desc)
508 509 if not m:
509 510 raise PatchError(_("bad hunk #%d") % self.number)
510 511 foo, self.starta, foo2, aend, foo3 = m.groups()
511 512 self.starta = int(self.starta)
512 513 if aend is None:
513 514 aend = self.starta
514 515 self.lena = int(aend) - self.starta
515 516 if self.starta:
516 517 self.lena += 1
517 518 for x in xrange(self.lena):
518 519 l = lr.readline()
519 520 if l.startswith('---'):
520 521 lr.push(l)
521 522 break
522 523 s = l[2:]
523 524 if l.startswith('- ') or l.startswith('! '):
524 525 u = '-' + s
525 526 elif l.startswith(' '):
526 527 u = ' ' + s
527 528 else:
528 529 raise PatchError(_("bad hunk #%d old text line %d") %
529 530 (self.number, x))
530 531 self.a.append(u)
531 532 self.hunk.append(u)
532 533
533 534 l = lr.readline()
534 535 if l.startswith('\ '):
535 536 s = self.a[-1][:-1]
536 537 self.a[-1] = s
537 538 self.hunk[-1] = s
538 539 l = lr.readline()
539 540 m = contextdesc.match(l)
540 541 if not m:
541 542 raise PatchError(_("bad hunk #%d") % self.number)
542 543 foo, self.startb, foo2, bend, foo3 = m.groups()
543 544 self.startb = int(self.startb)
544 545 if bend is None:
545 546 bend = self.startb
546 547 self.lenb = int(bend) - self.startb
547 548 if self.startb:
548 549 self.lenb += 1
549 550 hunki = 1
550 551 for x in xrange(self.lenb):
551 552 l = lr.readline()
552 553 if l.startswith('\ '):
553 554 s = self.b[-1][:-1]
554 555 self.b[-1] = s
555 556 self.hunk[hunki-1] = s
556 557 continue
557 558 if not l:
558 559 lr.push(l)
559 560 break
560 561 s = l[2:]
561 562 if l.startswith('+ ') or l.startswith('! '):
562 563 u = '+' + s
563 564 elif l.startswith(' '):
564 565 u = ' ' + s
565 566 elif len(self.b) == 0:
566 567 # this can happen when the hunk does not add any lines
567 568 lr.push(l)
568 569 break
569 570 else:
570 571 raise PatchError(_("bad hunk #%d new text line %d") %
571 572 (self.number, x))
572 573 self.b.append(s)
573 574 while True:
574 575 if hunki >= len(self.hunk):
575 576 h = ""
576 577 else:
577 578 h = self.hunk[hunki]
578 579 hunki += 1
579 580 if h == u:
580 581 break
581 582 elif h.startswith('-'):
582 583 continue
583 584 else:
584 585 self.hunk.insert(hunki-1, u)
585 586 break
586 587
587 588 if not self.a:
588 589 # this happens when lines were only added to the hunk
589 590 for x in self.hunk:
590 591 if x.startswith('-') or x.startswith(' '):
591 592 self.a.append(x)
592 593 if not self.b:
593 594 # this happens when lines were only deleted from the hunk
594 595 for x in self.hunk:
595 596 if x.startswith('+') or x.startswith(' '):
596 597 self.b.append(x[1:])
597 598 # @@ -start,len +start,len @@
598 599 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
599 600 self.startb, self.lenb)
600 601 self.hunk[0] = self.desc
601 602
602 603 def reverse(self):
603 604 self.create, self.remove = self.remove, self.create
604 605 origlena = self.lena
605 606 origstarta = self.starta
606 607 self.lena = self.lenb
607 608 self.starta = self.startb
608 609 self.lenb = origlena
609 610 self.startb = origstarta
610 611 self.a = []
611 612 self.b = []
612 613 # self.hunk[0] is the @@ description
613 614 for x in xrange(1, len(self.hunk)):
614 615 o = self.hunk[x]
615 616 if o.startswith('-'):
616 617 n = '+' + o[1:]
617 618 self.b.append(o[1:])
618 619 elif o.startswith('+'):
619 620 n = '-' + o[1:]
620 621 self.a.append(n)
621 622 else:
622 623 n = o
623 624 self.b.append(o[1:])
624 625 self.a.append(o)
625 626 self.hunk[x] = o
626 627
627 628 def fix_newline(self):
628 629 diffhelpers.fix_newline(self.hunk, self.a, self.b)
629 630
630 631 def complete(self):
631 632 return len(self.a) == self.lena and len(self.b) == self.lenb
632 633
633 634 def createfile(self):
634 635 return self.starta == 0 and self.lena == 0 and self.create
635 636
636 637 def rmfile(self):
637 638 return self.startb == 0 and self.lenb == 0 and self.remove
638 639
639 640 def fuzzit(self, l, fuzz, toponly):
640 641 # this removes context lines from the top and bottom of list 'l'. It
641 642 # checks the hunk to make sure only context lines are removed, and then
642 643 # returns a new shortened list of lines.
643 644 fuzz = min(fuzz, len(l)-1)
644 645 if fuzz:
645 646 top = 0
646 647 bot = 0
647 648 hlen = len(self.hunk)
648 649 for x in xrange(hlen-1):
649 650 # the hunk starts with the @@ line, so use x+1
650 651 if self.hunk[x+1][0] == ' ':
651 652 top += 1
652 653 else:
653 654 break
654 655 if not toponly:
655 656 for x in xrange(hlen-1):
656 657 if self.hunk[hlen-bot-1][0] == ' ':
657 658 bot += 1
658 659 else:
659 660 break
660 661
661 662 # top and bot now count context in the hunk
662 663 # adjust them if either one is short
663 664 context = max(top, bot, 3)
664 665 if bot < context:
665 666 bot = max(0, fuzz - (context - bot))
666 667 else:
667 668 bot = min(fuzz, bot)
668 669 if top < context:
669 670 top = max(0, fuzz - (context - top))
670 671 else:
671 672 top = min(fuzz, top)
672 673
673 674 return l[top:len(l)-bot]
674 675 return l
675 676
676 677 def old(self, fuzz=0, toponly=False):
677 678 return self.fuzzit(self.a, fuzz, toponly)
678 679
679 680 def newctrl(self):
680 681 res = []
681 682 for x in self.hunk:
682 683 c = x[0]
683 684 if c == ' ' or c == '+':
684 685 res.append(x)
685 686 return res
686 687
687 688 def new(self, fuzz=0, toponly=False):
688 689 return self.fuzzit(self.b, fuzz, toponly)
689 690
690 691 class githunk(object):
691 692 """A git hunk"""
692 693 def __init__(self, gitpatch):
693 694 self.gitpatch = gitpatch
694 695 self.text = None
695 696 self.hunk = []
696 697
697 698 def createfile(self):
698 699 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
699 700
700 701 def rmfile(self):
701 702 return self.gitpatch.op == 'DELETE'
702 703
703 704 def complete(self):
704 705 return self.text is not None
705 706
706 707 def new(self):
707 708 return [self.text]
708 709
709 710 class binhunk(githunk):
710 711 'A binary patch file. Only understands literals so far.'
711 712 def __init__(self, gitpatch):
712 713 super(binhunk, self).__init__(gitpatch)
713 714 self.hunk = ['GIT binary patch\n']
714 715
715 716 def extract(self, lr):
716 717 line = lr.readline()
717 718 self.hunk.append(line)
718 719 while line and not line.startswith('literal '):
719 720 line = lr.readline()
720 721 self.hunk.append(line)
721 722 if not line:
722 723 raise PatchError(_('could not extract binary patch'))
723 724 size = int(line[8:].rstrip())
724 725 dec = []
725 726 line = lr.readline()
726 727 self.hunk.append(line)
727 728 while len(line) > 1:
728 729 l = line[0]
729 730 if l <= 'Z' and l >= 'A':
730 731 l = ord(l) - ord('A') + 1
731 732 else:
732 733 l = ord(l) - ord('a') + 27
733 734 dec.append(base85.b85decode(line[1:-1])[:l])
734 735 line = lr.readline()
735 736 self.hunk.append(line)
736 737 text = zlib.decompress(''.join(dec))
737 738 if len(text) != size:
738 739 raise PatchError(_('binary patch is %d bytes, not %d') %
739 740 (len(text), size))
740 741 self.text = text
741 742
742 743 class symlinkhunk(githunk):
743 744 """A git symlink hunk"""
744 745 def __init__(self, gitpatch, hunk):
745 746 super(symlinkhunk, self).__init__(gitpatch)
746 747 self.hunk = hunk
747 748
748 749 def complete(self):
749 750 return True
750 751
751 752 def fix_newline(self):
752 753 return
753 754
754 755 def parsefilename(str):
755 756 # --- filename \t|space stuff
756 757 s = str[4:].rstrip('\r\n')
757 758 i = s.find('\t')
758 759 if i < 0:
759 760 i = s.find(' ')
760 761 if i < 0:
761 762 return s
762 763 return s[:i]
763 764
764 765 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
765 766 def pathstrip(path, count=1):
766 767 pathlen = len(path)
767 768 i = 0
768 769 if count == 0:
769 770 return '', path.rstrip()
770 771 while count > 0:
771 772 i = path.find('/', i)
772 773 if i == -1:
773 774 raise PatchError(_("unable to strip away %d dirs from %s") %
774 775 (count, path))
775 776 i += 1
776 777 # consume '//' in the path
777 778 while i < pathlen - 1 and path[i] == '/':
778 779 i += 1
779 780 count -= 1
780 781 return path[:i].lstrip(), path[i:].rstrip()
781 782
782 783 nulla = afile_orig == "/dev/null"
783 784 nullb = bfile_orig == "/dev/null"
784 785 abase, afile = pathstrip(afile_orig, strip)
785 786 gooda = not nulla and util.lexists(afile)
786 787 bbase, bfile = pathstrip(bfile_orig, strip)
787 788 if afile == bfile:
788 789 goodb = gooda
789 790 else:
790 791 goodb = not nullb and os.path.exists(bfile)
791 792 createfunc = hunk.createfile
792 793 if reverse:
793 794 createfunc = hunk.rmfile
794 795 missing = not goodb and not gooda and not createfunc()
795 796 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
796 797 # diff is between a file and its backup. In this case, the original
797 798 # file should be patched (see original mpatch code).
798 799 isbackup = (abase == bbase and bfile.startswith(afile))
799 800 fname = None
800 801 if not missing:
801 802 if gooda and goodb:
802 803 fname = isbackup and afile or bfile
803 804 elif gooda:
804 805 fname = afile
805 806
806 807 if not fname:
807 808 if not nullb:
808 809 fname = isbackup and afile or bfile
809 810 elif not nulla:
810 811 fname = afile
811 812 else:
812 813 raise PatchError(_("undefined source and destination files"))
813 814
814 815 return fname, missing
815 816
816 817 def scangitpatch(lr, firstline):
817 818 """
818 819 Git patches can emit:
819 820 - rename a to b
820 821 - change b
821 822 - copy a to c
822 823 - change c
823 824
824 825 We cannot apply this sequence as-is: the renamed 'a' could not be
825 826 found, since it would have been renamed already. And we cannot copy
826 827 from 'b' instead, because 'b' would have been changed already. So
827 828 we scan the git patch for copy and rename commands so we can
828 829 perform the copies ahead of time.
829 830 """
830 831 pos = 0
831 832 try:
832 833 pos = lr.fp.tell()
833 834 fp = lr.fp
834 835 except IOError:
835 836 fp = cStringIO.StringIO(lr.fp.read())
836 837 gitlr = linereader(fp, lr.textmode)
837 838 gitlr.push(firstline)
838 839 (dopatch, gitpatches) = readgitpatch(gitlr)
839 840 fp.seek(pos)
840 841 return dopatch, gitpatches
841 842
842 843 def iterhunks(ui, fp, sourcefile=None, textmode=False):
843 844 """Read a patch and yield the following events:
844 845 - ("file", afile, bfile, firsthunk): select a new target file.
845 846 - ("hunk", hunk): a new hunk is ready to be applied, follows a
846 847 "file" event.
847 848 - ("git", gitchanges): current diff is in git format, gitchanges
848 849 maps filenames to gitpatch records. Unique event.
849 850
850 851 If textmode is True, input line-endings are normalized to LF.
851 852 """
852 853 changed = {}
853 854 current_hunk = None
854 855 afile = ""
855 856 bfile = ""
856 857 state = None
857 858 hunknum = 0
858 859 emitfile = False
859 860 git = False
860 861
861 862 # our states
862 863 BFILE = 1
863 864 context = None
864 865 lr = linereader(fp, textmode)
865 866 dopatch = True
866 867 # gitworkdone is True if a git operation (copy, rename, ...) was
867 868 # performed already for the current file. Useful when the file
868 869 # section may have no hunk.
869 870 gitworkdone = False
870 871
871 872 while True:
872 873 newfile = False
873 874 x = lr.readline()
874 875 if not x:
875 876 break
876 877 if current_hunk:
877 878 if x.startswith('\ '):
878 879 current_hunk.fix_newline()
879 880 yield 'hunk', current_hunk
880 881 current_hunk = None
881 882 gitworkdone = False
882 883 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
883 884 ((context is not False) and x.startswith('***************')))):
884 885 try:
885 886 if context is None and x.startswith('***************'):
886 887 context = True
887 888 gpatch = changed.get(bfile)
888 889 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
889 890 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
890 891 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
891 892 if remove:
892 893 gpatch = changed.get(afile[2:])
893 894 if gpatch and gpatch.mode[0]:
894 895 current_hunk = symlinkhunk(gpatch, current_hunk)
895 896 except PatchError, err:
896 897 ui.debug(err)
897 898 current_hunk = None
898 899 continue
899 900 hunknum += 1
900 901 if emitfile:
901 902 emitfile = False
902 903 yield 'file', (afile, bfile, current_hunk)
903 904 elif state == BFILE and x.startswith('GIT binary patch'):
904 905 current_hunk = binhunk(changed[bfile])
905 906 hunknum += 1
906 907 if emitfile:
907 908 emitfile = False
908 909 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
909 910 current_hunk.extract(lr)
910 911 elif x.startswith('diff --git'):
911 912 # check for git diff, scanning the whole patch file if needed
912 913 m = gitre.match(x)
913 914 if m:
914 915 afile, bfile = m.group(1, 2)
915 916 if not git:
916 917 git = True
917 918 dopatch, gitpatches = scangitpatch(lr, x)
918 919 yield 'git', gitpatches
919 920 for gp in gitpatches:
920 921 changed[gp.path] = gp
921 922 # else error?
922 923 # copy/rename + modify should modify target, not source
923 924 gp = changed.get(bfile)
924 925 if gp and gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD'):
925 926 afile = bfile
926 927 gitworkdone = True
927 928 newfile = True
928 929 elif x.startswith('---'):
929 930 # check for a unified diff
930 931 l2 = lr.readline()
931 932 if not l2.startswith('+++'):
932 933 lr.push(l2)
933 934 continue
934 935 newfile = True
935 936 context = False
936 937 afile = parsefilename(x)
937 938 bfile = parsefilename(l2)
938 939 elif x.startswith('***'):
939 940 # check for a context diff
940 941 l2 = lr.readline()
941 942 if not l2.startswith('---'):
942 943 lr.push(l2)
943 944 continue
944 945 l3 = lr.readline()
945 946 lr.push(l3)
946 947 if not l3.startswith("***************"):
947 948 lr.push(l2)
948 949 continue
949 950 newfile = True
950 951 context = True
951 952 afile = parsefilename(x)
952 953 bfile = parsefilename(l2)
953 954
954 955 if newfile:
955 956 emitfile = True
956 957 state = BFILE
957 958 hunknum = 0
958 959 if current_hunk:
959 960 if current_hunk.complete():
960 961 yield 'hunk', current_hunk
961 962 else:
962 963 raise PatchError(_("malformed patch %s %s") % (afile,
963 964 current_hunk.desc))
964 965
965 966 if hunknum == 0 and dopatch and not gitworkdone:
966 967 raise NoHunks
967 968
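A minimal sketch of consuming the event stream, assuming a ui object is available from the caller and using an illustrative one-hunk diff (a git-format input would additionally produce a 'git' event first):

    import cStringIO

    difftext = ('--- a/foo\n'
                '+++ b/foo\n'
                '@@ -1,1 +1,1 @@\n'
                '-old\n'
                '+new\n')
    for state, values in iterhunks(ui, cStringIO.StringIO(difftext)):
        if state == 'file':
            afile, bfile, firsthunk = values   # 'a/foo', 'b/foo', the pending hunk
        elif state == 'hunk':
            print values.desc                  # '@@ -1,1 +1,1 @@\n'
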
968 969 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
969 970 eol=None):
970 971 """
971 972 Reads a patch from fp and tries to apply it.
972 973
973 974 The dict 'changed' is filled in with all of the filenames changed
974 975 by the patch. Returns 0 for a clean patch, -1 if any rejects were
975 976 found and 1 if there was any fuzz.
976 977
977 978 If 'eol' is None, the patch content and patched file are read in
978 979 binary mode. Otherwise, line endings are ignored when patching then
979 980 normalized to 'eol' (usually '\n' or '\r\n').
980 981 """
981 982 rejects = 0
982 983 err = 0
983 984 current_file = None
984 985 gitpatches = None
985 986 opener = util.opener(os.getcwd())
986 987 textmode = eol is not None
987 988
988 989 def closefile():
989 990 if not current_file:
990 991 return 0
991 992 current_file.close()
992 993 return len(current_file.rej)
993 994
994 995 for state, values in iterhunks(ui, fp, sourcefile, textmode):
995 996 if state == 'hunk':
996 997 if not current_file:
997 998 continue
998 999 current_hunk = values
999 1000 ret = current_file.apply(current_hunk, reverse)
1000 1001 if ret >= 0:
1001 1002 changed.setdefault(current_file.fname, None)
1002 1003 if ret > 0:
1003 1004 err = 1
1004 1005 elif state == 'file':
1005 1006 rejects += closefile()
1006 1007 afile, bfile, first_hunk = values
1007 1008 try:
1008 1009 if sourcefile:
1009 1010 current_file = patchfile(ui, sourcefile, opener, eol=eol)
1010 1011 else:
1011 1012 current_file, missing = selectfile(afile, bfile, first_hunk,
1012 1013 strip, reverse)
1013 1014 current_file = patchfile(ui, current_file, opener, missing, eol)
1014 1015 except PatchError, err:
1015 1016 ui.warn(str(err) + '\n')
1016 1017 current_file, current_hunk = None, None
1017 1018 rejects += 1
1018 1019 continue
1019 1020 elif state == 'git':
1020 1021 gitpatches = values
1021 1022 cwd = os.getcwd()
1022 1023 for gp in gitpatches:
1023 1024 if gp.op in ('COPY', 'RENAME'):
1024 1025 copyfile(gp.oldpath, gp.path, cwd)
1025 1026 changed[gp.path] = gp
1026 1027 else:
1027 1028 raise util.Abort(_('unsupported parser state: %s') % state)
1028 1029
1029 1030 rejects += closefile()
1030 1031
1031 1032 if rejects:
1032 1033 return -1
1033 1034 return err
1034 1035
1035 1036 def diffopts(ui, opts={}, untrusted=False):
1036 1037 def get(key, name=None, getter=ui.configbool):
1037 1038 return (opts.get(key) or
1038 1039 getter('diff', name or key, None, untrusted=untrusted))
1039 1040 return mdiff.diffopts(
1040 1041 text=opts.get('text'),
1041 1042 git=get('git'),
1042 1043 nodates=get('nodates'),
1043 1044 showfunc=get('show_function', 'showfunc'),
1044 1045 ignorews=get('ignore_all_space', 'ignorews'),
1045 1046 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1046 1047 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1047 1048 context=get('unified', getter=ui.config))
1048 1049
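The keys read above live in the [diff] section of an hgrc; an illustrative configuration:

    [diff]
    git = True
    showfunc = True
    ignorews = False
    unified = 5
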
1049 1050 def updatedir(ui, repo, patches, similarity=0):
1050 1051 '''Update dirstate after patch application according to metadata'''
1051 1052 if not patches:
1052 1053 return
1053 1054 copies = []
1054 1055 removes = set()
1055 1056 cfiles = patches.keys()
1056 1057 cwd = repo.getcwd()
1057 1058 if cwd:
1058 1059 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1059 1060 for f in patches:
1060 1061 gp = patches[f]
1061 1062 if not gp:
1062 1063 continue
1063 1064 if gp.op == 'RENAME':
1064 1065 copies.append((gp.oldpath, gp.path))
1065 1066 removes.add(gp.oldpath)
1066 1067 elif gp.op == 'COPY':
1067 1068 copies.append((gp.oldpath, gp.path))
1068 1069 elif gp.op == 'DELETE':
1069 1070 removes.add(gp.path)
1070 1071 for src, dst in copies:
1071 1072 repo.copy(src, dst)
1072 1073 if (not similarity) and removes:
1073 1074 repo.remove(sorted(removes), True)
1074 1075 for f in patches:
1075 1076 gp = patches[f]
1076 1077 if gp and gp.mode:
1077 1078 islink, isexec = gp.mode
1078 1079 dst = repo.wjoin(gp.path)
1079 1080 # patch won't create empty files
1080 1081 if gp.op == 'ADD' and not os.path.exists(dst):
1081 1082 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1082 1083 repo.wwrite(gp.path, '', flags)
1083 1084 elif gp.op != 'DELETE':
1084 1085 util.set_flags(dst, islink, isexec)
1085 1086 cmdutil.addremove(repo, cfiles, similarity=similarity)
1086 1087 files = patches.keys()
1087 1088 files.extend([r for r in removes if r not in files])
1088 1089 return sorted(files)
1089 1090
1090 1091 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1091 1092 """use <patcher> to apply <patchname> to the working directory.
1092 1093 returns whether patch was applied with fuzz factor."""
1093 1094
1094 1095 fuzz = False
1095 1096 if cwd:
1096 1097 args.append('-d %s' % util.shellquote(cwd))
1097 1098 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1098 1099 util.shellquote(patchname)))
1099 1100
1100 1101 for line in fp:
1101 1102 line = line.rstrip()
1102 1103 ui.note(line + '\n')
1103 1104 if line.startswith('patching file '):
1104 1105 pf = util.parse_patch_output(line)
1105 1106 printed_file = False
1106 1107 files.setdefault(pf, None)
1107 1108 elif line.find('with fuzz') >= 0:
1108 1109 fuzz = True
1109 1110 if not printed_file:
1110 1111 ui.warn(pf + '\n')
1111 1112 printed_file = True
1112 1113 ui.warn(line + '\n')
1113 1114 elif line.find('saving rejects to file') >= 0:
1114 1115 ui.warn(line + '\n')
1115 1116 elif line.find('FAILED') >= 0:
1116 1117 if not printed_file:
1117 1118 ui.warn(pf + '\n')
1118 1119 printed_file = True
1119 1120 ui.warn(line + '\n')
1120 1121 code = fp.close()
1121 1122 if code:
1122 1123 raise PatchError(_("patch command failed: %s") %
1123 1124 util.explain_exit(code)[0])
1124 1125 return fuzz
1125 1126
1126 1127 def internalpatch(patchobj, ui, strip, cwd, files={}, eolmode='strict'):
1127 1128 """use builtin patch to apply <patchobj> to the working directory.
1128 1129 returns whether patch was applied with fuzz factor."""
1129 1130
1130 1131 if eolmode is None:
1131 1132 eolmode = ui.config('patch', 'eol', 'strict')
1132 1133 try:
1133 1134 eol = {'strict': None, 'crlf': '\r\n', 'lf': '\n'}[eolmode.lower()]
1134 1135 except KeyError:
1135 1136 raise util.Abort(_('Unsupported line endings type: %s') % eolmode)
1136 1137
1137 1138 try:
1138 1139 fp = open(patchobj, 'rb')
1139 1140 except TypeError:
1140 1141 fp = patchobj
1141 1142 if cwd:
1142 1143 curdir = os.getcwd()
1143 1144 os.chdir(cwd)
1144 1145 try:
1145 1146 ret = applydiff(ui, fp, files, strip=strip, eol=eol)
1146 1147 finally:
1147 1148 if cwd:
1148 1149 os.chdir(curdir)
1149 1150 if ret < 0:
1150 1151 raise PatchError
1151 1152 return ret > 0
1152 1153
1153 1154 def patch(patchname, ui, strip=1, cwd=None, files={}, eolmode='strict'):
1154 1155 """Apply <patchname> to the working directory.
1155 1156
1156 1157 'eolmode' specifies how end of lines should be handled. It can be:
1157 1158 - 'strict': inputs are read in binary mode, EOLs are preserved
1158 1159 - 'crlf': EOLs are ignored when patching and reset to CRLF
1159 1160 - 'lf': EOLs are ignored when patching and reset to LF
1160 1161 - None: get it from user settings, default to 'strict'
1161 1162 'eolmode' is ignored when using an external patcher program.
1162 1163
1163 1164 Returns whether patch was applied with fuzz factor.
1164 1165 """
1165 1166 patcher = ui.config('ui', 'patch')
1166 1167 args = []
1167 1168 try:
1168 1169 if patcher:
1169 1170 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1170 1171 files)
1171 1172 else:
1172 1173 try:
1173 1174 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1174 1175 except NoHunks:
1175 1176 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1176 1177 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1177 1178 patcher)
1178 1179 if util.needbinarypatch():
1179 1180 args.append('--binary')
1180 1181 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1181 1182 files)
1182 1183 except PatchError, err:
1183 1184 s = str(err)
1184 1185 if s:
1185 1186 raise util.Abort(s)
1186 1187 else:
1187 1188 raise util.Abort(_('patch failed to apply'))
1188 1189
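Both the choice of an external patcher and the EOL handling described above are configuration-driven; a minimal, illustrative hgrc that forces GNU patch and CRLF normalization would read:

    [ui]
    patch = gpatch

    [patch]
    eol = crlf
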
1189 1190 def b85diff(to, tn):
1190 1191 '''print base85-encoded binary diff'''
1191 1192 def gitindex(text):
1192 1193 if not text:
1193 1194 return '0' * 40
1194 1195 l = len(text)
1195 1196 s = util.sha1('blob %d\0' % l)
1196 1197 s.update(text)
1197 1198 return s.hexdigest()
1198 1199
1199 1200 def fmtline(line):
1200 1201 l = len(line)
1201 1202 if l <= 26:
1202 1203 l = chr(ord('A') + l - 1)
1203 1204 else:
1204 1205 l = chr(l - 26 + ord('a') - 1)
1205 1206 return '%c%s\n' % (l, base85.b85encode(line, True))
1206 1207
1207 1208 def chunk(text, csize=52):
1208 1209 l = len(text)
1209 1210 i = 0
1210 1211 while i < l:
1211 1212 yield text[i:i+csize]
1212 1213 i += csize
1213 1214
1214 1215 tohash = gitindex(to)
1215 1216 tnhash = gitindex(tn)
1216 1217 if tohash == tnhash:
1217 1218 return ""
1218 1219
1219 1220 # TODO: deltas
1220 1221 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1221 1222 (tohash, tnhash, len(tn))]
1222 1223 for l in chunk(zlib.compress(tn)):
1223 1224 ret.append(fmtline(l))
1224 1225 ret.append('\n')
1225 1226 return ''.join(ret)
1226 1227
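The single leading character of each 'GIT binary patch' data line encodes how many decoded bytes the line carries; fmtline() above writes it and binhunk.extract() reverses it. A standalone sketch of the two mappings:

    def lenchar(n):
        # 1..26 -> 'A'..'Z', 27..52 -> 'a'..'z', as in fmtline()
        if n <= 26:
            return chr(ord('A') + n - 1)
        return chr(n - 26 + ord('a') - 1)

    def charlen(c):
        # inverse mapping, as in binhunk.extract()
        if 'A' <= c <= 'Z':
            return ord(c) - ord('A') + 1
        return ord(c) - ord('a') + 27

    assert charlen(lenchar(52)) == 52
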
1227 1228 def _addmodehdr(header, omode, nmode):
1228 1229 if omode != nmode:
1229 1230 header.append('old mode %s\n' % omode)
1230 1231 header.append('new mode %s\n' % nmode)
1231 1232
1232 1233 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1233 1234 '''yields diff of changes to files between two nodes, or node and
1234 1235 working directory.
1235 1236
1236 1237 if node1 is None, use first dirstate parent instead.
1237 1238 if node2 is None, compare node1 with working directory.'''
1238 1239
1239 1240 if opts is None:
1240 1241 opts = mdiff.defaultopts
1241 1242
1242 1243 if not node1:
1243 1244 node1 = repo.dirstate.parents()[0]
1244 1245
1245 1246 def lrugetfilectx():
1246 1247 cache = {}
1247 1248 order = []
1248 1249 def getfilectx(f, ctx):
1249 1250 fctx = ctx.filectx(f, filelog=cache.get(f))
1250 1251 if f not in cache:
1251 1252 if len(cache) > 20:
1252 1253 del cache[order.pop(0)]
1253 1254 cache[f] = fctx._filelog
1254 1255 else:
1255 1256 order.remove(f)
1256 1257 order.append(f)
1257 1258 return fctx
1258 1259 return getfilectx
1259 1260 getfilectx = lrugetfilectx()
1260 1261
1261 1262 ctx1 = repo[node1]
1262 1263 ctx2 = repo[node2]
1263 1264
1264 1265 if not changes:
1265 1266 changes = repo.status(ctx1, ctx2, match=match)
1266 1267 modified, added, removed = changes[:3]
1267 1268
1268 1269 if not modified and not added and not removed:
1269 1270 return
1270 1271
1271 1272 date1 = util.datestr(ctx1.date())
1272 1273 man1 = ctx1.manifest()
1273 1274
1274 1275 if repo.ui.quiet:
1275 1276 r = None
1276 1277 else:
1277 1278 hexfunc = repo.ui.debugflag and hex or short
1278 1279 r = [hexfunc(node) for node in [node1, node2] if node]
1279 1280
1280 1281 if opts.git:
1281 1282 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1282 1283 copy = copy.copy()
1283 1284 for k, v in copy.items():
1284 1285 copy[v] = k
1285 1286
1286 1287 gone = set()
1287 1288 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1288 1289
1289 1290 for f in sorted(modified + added + removed):
1290 1291 to = None
1291 1292 tn = None
1292 1293 dodiff = True
1293 1294 header = []
1294 1295 if f in man1:
1295 1296 to = getfilectx(f, ctx1).data()
1296 1297 if f not in removed:
1297 1298 tn = getfilectx(f, ctx2).data()
1298 1299 a, b = f, f
1299 1300 if opts.git:
1300 1301 if f in added:
1301 1302 mode = gitmode[ctx2.flags(f)]
1302 1303 if f in copy:
1303 1304 a = copy[f]
1304 1305 omode = gitmode[man1.flags(a)]
1305 1306 _addmodehdr(header, omode, mode)
1306 1307 if a in removed and a not in gone:
1307 1308 op = 'rename'
1308 1309 gone.add(a)
1309 1310 else:
1310 1311 op = 'copy'
1311 1312 header.append('%s from %s\n' % (op, a))
1312 1313 header.append('%s to %s\n' % (op, f))
1313 1314 to = getfilectx(a, ctx1).data()
1314 1315 else:
1315 1316 header.append('new file mode %s\n' % mode)
1316 1317 if util.binary(tn):
1317 1318 dodiff = 'binary'
1318 1319 elif f in removed:
1319 1320 # have we already reported a copy above?
1320 1321 if f in copy and copy[f] in added and copy[copy[f]] == f:
1321 1322 dodiff = False
1322 1323 else:
1323 1324 header.append('deleted file mode %s\n' %
1324 1325 gitmode[man1.flags(f)])
1325 1326 else:
1326 1327 omode = gitmode[man1.flags(f)]
1327 1328 nmode = gitmode[ctx2.flags(f)]
1328 1329 _addmodehdr(header, omode, nmode)
1329 1330 if util.binary(to) or util.binary(tn):
1330 1331 dodiff = 'binary'
1331 1332 r = None
1332 1333 header.insert(0, mdiff.diffline(r, a, b, opts))
1333 1334 if dodiff:
1334 1335 if dodiff == 'binary':
1335 1336 text = b85diff(to, tn)
1336 1337 else:
1337 1338 text = mdiff.unidiff(to, date1,
1338 1339 # ctx2 date may be dynamic
1339 1340 tn, util.datestr(ctx2.date()),
1340 1341 a, b, r, opts=opts)
1341 1342 if header and (text or len(header) > 1):
1342 1343 yield ''.join(header)
1343 1344 if text:
1344 1345 yield text
1345 1346
1346 1347 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1347 1348 opts=None):
1348 1349 '''export changesets as hg patches.'''
1349 1350
1350 1351 total = len(revs)
1351 1352 revwidth = max([len(str(rev)) for rev in revs])
1352 1353
1353 1354 def single(rev, seqno, fp):
1354 1355 ctx = repo[rev]
1355 1356 node = ctx.node()
1356 1357 parents = [p.node() for p in ctx.parents() if p]
1357 1358 branch = ctx.branch()
1358 1359 if switch_parent:
1359 1360 parents.reverse()
1360 1361 prev = (parents and parents[0]) or nullid
1361 1362
1362 1363 if not fp:
1363 1364 fp = cmdutil.make_file(repo, template, node, total=total,
1364 1365 seqno=seqno, revwidth=revwidth,
1365 1366 mode='ab')
1366 1367 if fp != sys.stdout and hasattr(fp, 'name'):
1367 1368 repo.ui.note("%s\n" % fp.name)
1368 1369
1369 1370 fp.write("# HG changeset patch\n")
1370 1371 fp.write("# User %s\n" % ctx.user())
1371 1372 fp.write("# Date %d %d\n" % ctx.date())
1372 1373 if branch and (branch != 'default'):
1373 1374 fp.write("# Branch %s\n" % branch)
1374 1375 fp.write("# Node ID %s\n" % hex(node))
1375 1376 fp.write("# Parent %s\n" % hex(prev))
1376 1377 if len(parents) > 1:
1377 1378 fp.write("# Parent %s\n" % hex(parents[1]))
1378 1379 fp.write(ctx.description().rstrip())
1379 1380 fp.write("\n\n")
1380 1381
1381 1382 for chunk in diff(repo, prev, node, opts=opts):
1382 1383 fp.write(chunk)
1383 1384
1384 1385 for seqno, rev in enumerate(revs):
1385 1386 single(rev, seqno+1, fp)
1386 1387
1387 1388 def diffstatdata(lines):
1388 1389 filename, adds, removes = None, 0, 0
1389 1390 for line in lines:
1390 1391 if line.startswith('diff'):
1391 1392 if filename:
1392 1393 yield (filename, adds, removes)
1393 1394 # set numbers to 0 anyway when starting new file
1394 1395 adds, removes = 0, 0
1395 1396 if line.startswith('diff --git'):
1396 1397 filename = gitre.search(line).group(1)
1397 1398 else:
1398 1399 # format: "diff -r ... -r ... filename"
1399 1400 filename = line.split(None, 5)[-1]
1400 1401 elif line.startswith('+') and not line.startswith('+++'):
1401 1402 adds += 1
1402 1403 elif line.startswith('-') and not line.startswith('---'):
1403 1404 removes += 1
1404 1405 if filename:
1405 1406 yield (filename, adds, removes)
1406 1407
1407 1408 def diffstat(lines, width=80):
1408 1409 output = []
1409 1410 stats = list(diffstatdata(lines))
1410 1411
1411 1412 maxtotal, maxname = 0, 0
1412 1413 totaladds, totalremoves = 0, 0
1413 1414 for filename, adds, removes in stats:
1414 1415 totaladds += adds
1415 1416 totalremoves += removes
1416 1417 maxname = max(maxname, len(filename))
1417 1418 maxtotal = max(maxtotal, adds+removes)
1418 1419
1419 1420 countwidth = len(str(maxtotal))
1420 1421 graphwidth = width - countwidth - maxname
1421 1422 if graphwidth < 10:
1422 1423 graphwidth = 10
1423 1424
1424 1425 factor = max(int(math.ceil(float(maxtotal) / graphwidth)), 1)
1425 1426
1426 1427 for filename, adds, removes in stats:
1427 1428 # If diffstat runs out of room it doesn't print anything, which
1428 1429 # isn't very useful, so always print at least one + or - if there
1429 1430 # were at least some changes
1430 1431 pluses = '+' * max(adds // factor, int(bool(adds)))
1431 1432 minuses = '-' * max(removes // factor, int(bool(removes)))
1432 1433 output.append(' %-*s | %*.d %s%s\n' % (maxname, filename, countwidth,
1433 1434 adds+removes, pluses, minuses))
1434 1435
1435 1436 if stats:
1436 1437 output.append(' %d files changed, %d insertions(+), %d deletions(-)\n'
1437 1438 % (len(stats), totaladds, totalremoves))
1438 1439
1439 1440 return ''.join(output)
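
A worked example of the scaling above: with width=80, a longest filename of 20 characters and a largest change of 120 lines, countwidth is 3, graphwidth is 80 - 3 - 20 = 57 and factor becomes ceil(120 / 57) = 3, so a file with 30 added lines is drawn with 30 // 3 = 10 '+' characters, while a file with a single deleted line still gets one '-' thanks to the int(bool(...)) lower bound.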
@@ -1,214 +1,252 b''
1 1 # posix.py - Posix utility function implementations for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import osutil
10 import os, sys, errno, stat, getpass, pwd, grp
10 import os, sys, errno, stat, getpass, pwd, grp, fcntl
11 11
12 12 posixfile = open
13 13 nulldev = '/dev/null'
14 14 normpath = os.path.normpath
15 15 samestat = os.path.samestat
16 16 expandglobs = False
17 17
18 18 umask = os.umask(0)
19 19 os.umask(umask)
20 20
21 21 def openhardlinks():
22 22 '''return true if it is safe to hold open file handles to hardlinks'''
23 23 return True
24 24
25 25 def rcfiles(path):
26 26 rcs = [os.path.join(path, 'hgrc')]
27 27 rcdir = os.path.join(path, 'hgrc.d')
28 28 try:
29 29 rcs.extend([os.path.join(rcdir, f)
30 30 for f, kind in osutil.listdir(rcdir)
31 31 if f.endswith(".rc")])
32 32 except OSError:
33 33 pass
34 34 return rcs
35 35
36 36 def system_rcpath():
37 37 path = []
38 38 # old mod_python does not set sys.argv
39 39 if len(getattr(sys, 'argv', [])) > 0:
40 40 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
41 41 '/../etc/mercurial'))
42 42 path.extend(rcfiles('/etc/mercurial'))
43 43 return path
44 44
45 45 def user_rcpath():
46 46 return [os.path.expanduser('~/.hgrc')]
47 47
48 48 def parse_patch_output(output_line):
49 49 """parses the output produced by patch and returns the filename"""
50 50 pf = output_line[14:]
51 51 if os.sys.platform == 'OpenVMS':
52 52 if pf[0] == '`':
53 53 pf = pf[1:-1] # Remove the quotes
54 54 else:
55 55 if pf.startswith("'") and pf.endswith("'") and " " in pf:
56 56 pf = pf[1:-1] # Remove the quotes
57 57 return pf
58 58
59 59 def sshargs(sshcmd, host, user, port):
60 60 '''Build argument list for ssh'''
61 61 args = user and ("%s@%s" % (user, host)) or host
62 62 return port and ("%s -p %s" % (args, port)) or args
63 63
64 64 def is_exec(f):
65 65 """check whether a file is executable"""
66 66 return (os.lstat(f).st_mode & 0100 != 0)
67 67
68 68 def set_flags(f, l, x):
69 69 s = os.lstat(f).st_mode
70 70 if l:
71 71 if not stat.S_ISLNK(s):
72 72 # switch file to link
73 73 data = open(f).read()
74 74 os.unlink(f)
75 75 try:
76 76 os.symlink(data, f)
77 77 except:
78 78 # failed to make a link, rewrite file
79 79 open(f, "w").write(data)
80 80 # no chmod needed at this point
81 81 return
82 82 if stat.S_ISLNK(s):
83 83 # switch link to file
84 84 data = os.readlink(f)
85 85 os.unlink(f)
86 86 open(f, "w").write(data)
87 87 s = 0666 & ~umask # avoid restatting for chmod
88 88
89 89 sx = s & 0100
90 90 if x and not sx:
91 91 # Turn on +x for every +r bit when making a file executable
92 92 # and obey umask.
93 93 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
94 94 elif not x and sx:
95 95 # Turn off all +x bits
96 96 os.chmod(f, s & 0666)
97 97
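A worked example of the permission arithmetic above, assuming a umask of 022: for a file with mode 0644, (s & 0444) >> 2 is 0111, masking with ~umask leaves 0111, and or-ing that into s gives 0755; with a umask of 027 the same file would end up as 0754, since the world-executable bit is masked off.
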
98 98 def set_binary(fd):
99 99 pass
100 100
101 101 def pconvert(path):
102 102 return path
103 103
104 104 def localpath(path):
105 105 return path
106 106
107 if sys.platform == 'darwin':
108 def realpath(path):
109 '''
110 Returns the true, canonical file system path equivalent to the given
111 path.
112
113 'Equivalent' here means resolving to the same, unique
114 file system entry. Every file system entry, whether a file,
115 directory, hard link, symbolic link or special file, has a single
116 path preferred by the system, but may allow multiple, differing path
117 lookups to reach it.
118
119 Most regular UNIX file systems only allow a file system entry to be
120 looked up by its distinct path. Obviously, this does not apply to case
121 insensitive file systems, whether case preserving or not. The most
122 complex issue to deal with is file systems transparently reencoding the
123 path, such as the non-standard Unicode normalisation required for HFS+
124 and HFSX.
125 '''
126 # Constants copied from /usr/include/sys/fcntl.h
127 F_GETPATH = 50
128 O_SYMLINK = 0x200000
129
130 try:
131 fd = os.open(path, O_SYMLINK)
132 except OSError, err:
133 if err.errno is errno.ENOENT:
134 return path
135 raise
136
137 try:
138 return fcntl.fcntl(fd, F_GETPATH, '\0' * 1024).rstrip('\0')
139 finally:
140 os.close(fd)
141 else:
142 # Fallback to the likely inadequate Python builtin function.
143 realpath = os.path.realpath
144
107 145 def shellquote(s):
108 146 if os.sys.platform == 'OpenVMS':
109 147 return '"%s"' % s
110 148 else:
111 149 return "'%s'" % s.replace("'", "'\\''")
112 150
113 151 def quotecommand(cmd):
114 152 return cmd
115 153
116 154 def popen(command, mode='r'):
117 155 return os.popen(command, mode)
118 156
119 157 def testpid(pid):
120 158 '''return False if pid dead, True if running or not sure'''
121 159 if os.sys.platform == 'OpenVMS':
122 160 return True
123 161 try:
124 162 os.kill(pid, 0)
125 163 return True
126 164 except OSError, inst:
127 165 return inst.errno != errno.ESRCH
128 166
129 167 def explain_exit(code):
130 168 """return a 2-tuple (desc, code) describing a process's status"""
131 169 if os.WIFEXITED(code):
132 170 val = os.WEXITSTATUS(code)
133 171 return _("exited with status %d") % val, val
134 172 elif os.WIFSIGNALED(code):
135 173 val = os.WTERMSIG(code)
136 174 return _("killed by signal %d") % val, val
137 175 elif os.WIFSTOPPED(code):
138 176 val = os.WSTOPSIG(code)
139 177 return _("stopped by signal %d") % val, val
140 178 raise ValueError(_("invalid exit code"))
141 179
142 180 def isowner(st):
143 181 """Return True if the stat object st is from the current user."""
144 182 return st.st_uid == os.getuid()
145 183
146 184 def find_exe(command):
147 185 '''Find executable for command searching like which does.
148 186 If command is a basename then PATH is searched for command.
149 187 PATH isn't searched if command is an absolute or relative path.
150 188 If command isn't found None is returned.'''
151 189 if sys.platform == 'OpenVMS':
152 190 return command
153 191
154 192 def findexisting(executable):
155 193 'Will return executable if existing file'
156 194 if os.path.exists(executable):
157 195 return executable
158 196 return None
159 197
160 198 if os.sep in command:
161 199 return findexisting(command)
162 200
163 201 for path in os.environ.get('PATH', '').split(os.pathsep):
164 202 executable = findexisting(os.path.join(path, command))
165 203 if executable is not None:
166 204 return executable
167 205 return None
168 206
169 207 def set_signal_handler():
170 208 pass
171 209
172 210 def statfiles(files):
173 211 'Stat each file in files and yield stat or None if file does not exist.'
174 212 lstat = os.lstat
175 213 for nf in files:
176 214 try:
177 215 st = lstat(nf)
178 216 except OSError, err:
179 217 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
180 218 raise
181 219 st = None
182 220 yield st
183 221
184 222 def getuser():
185 223 '''return name of current user'''
186 224 return getpass.getuser()
187 225
188 226 def expand_glob(pats):
189 227 '''On Windows, expand the implicit globs in a list of patterns'''
190 228 return list(pats)
191 229
192 230 def username(uid=None):
193 231 """Return the name of the user with the given uid.
194 232
195 233 If uid is None, return the name of the current user."""
196 234
197 235 if uid is None:
198 236 uid = os.getuid()
199 237 try:
200 238 return pwd.getpwuid(uid)[0]
201 239 except KeyError:
202 240 return str(uid)
203 241
204 242 def groupname(gid=None):
205 243 """Return the name of the group with the given gid.
206 244
207 245 If gid is None, return the name of the current group."""
208 246
209 247 if gid is None:
210 248 gid = os.getgid()
211 249 try:
212 250 return grp.getgrgid(gid)[0]
213 251 except KeyError:
214 252 return str(gid)
@@ -1,283 +1,292 b''
1 1 # windows.py - Windows utility function implementations for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import osutil, error
10 10 import errno, msvcrt, os, re, sys
11 11
12 12 nulldev = 'NUL:'
13 13 umask = 002
14 14
15 15 # wrap osutil.posixfile to provide friendlier exceptions
16 16 def posixfile(name, mode='r', buffering=-1):
17 17 try:
18 18 return osutil.posixfile(name, mode, buffering)
19 19 except WindowsError, err:
20 20 raise IOError(err.errno, err.strerror)
21 21 posixfile.__doc__ = osutil.posixfile.__doc__
22 22
23 23 class winstdout(object):
24 24 '''stdout on windows misbehaves if sent through a pipe'''
25 25
26 26 def __init__(self, fp):
27 27 self.fp = fp
28 28
29 29 def __getattr__(self, key):
30 30 return getattr(self.fp, key)
31 31
32 32 def close(self):
33 33 try:
34 34 self.fp.close()
35 35 except: pass
36 36
37 37 def write(self, s):
38 38 try:
39 39 # This is a workaround for the "Not enough space" error that occurs
40 40 # when writing a large amount of data to the console.
41 41 limit = 16000
42 42 l = len(s)
43 43 start = 0
44 44 self.softspace = 0;
45 45 while start < l:
46 46 end = start + limit
47 47 self.fp.write(s[start:end])
48 48 start = end
49 49 except IOError, inst:
50 50 if inst.errno != 0: raise
51 51 self.close()
52 52 raise IOError(errno.EPIPE, 'Broken pipe')
53 53
54 54 def flush(self):
55 55 try:
56 56 return self.fp.flush()
57 57 except IOError, inst:
58 58 if inst.errno != errno.EINVAL: raise
59 59 self.close()
60 60 raise IOError(errno.EPIPE, 'Broken pipe')
61 61
62 62 sys.stdout = winstdout(sys.stdout)
63 63
64 64 def _is_win_9x():
65 65 '''return true if run on windows 95, 98 or me.'''
66 66 try:
67 67 return sys.getwindowsversion()[3] == 1
68 68 except AttributeError:
69 69 return 'command' in os.environ.get('comspec', '')
70 70
71 71 def openhardlinks():
72 72 return not _is_win_9x() and "win32api" in globals()
73 73
74 74 def system_rcpath():
75 75 try:
76 76 return system_rcpath_win32()
77 77 except:
78 78 return [r'c:\mercurial\mercurial.ini']
79 79
80 80 def user_rcpath():
81 81 '''return os-specific hgrc search path to the user dir'''
82 82 try:
83 83 path = user_rcpath_win32()
84 84 except:
85 85 home = os.path.expanduser('~')
86 86 path = [os.path.join(home, 'mercurial.ini'),
87 87 os.path.join(home, '.hgrc')]
88 88 userprofile = os.environ.get('USERPROFILE')
89 89 if userprofile:
90 90 path.append(os.path.join(userprofile, 'mercurial.ini'))
91 91 path.append(os.path.join(userprofile, '.hgrc'))
92 92 return path
93 93
94 94 def parse_patch_output(output_line):
95 95 """parses the output produced by patch and returns the filename"""
96 96 pf = output_line[14:]
97 97 if pf[0] == '`':
98 98 pf = pf[1:-1] # Remove the quotes
99 99 return pf
100 100
101 101 def sshargs(sshcmd, host, user, port):
102 102 '''Build argument list for ssh or Plink'''
103 103 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
104 104 args = user and ("%s@%s" % (user, host)) or host
105 105 return port and ("%s %s %s" % (args, pflag, port)) or args
106 106
107 107 def testpid(pid):
108 108 '''return False if pid dead, True if running or not known'''
109 109 return True
110 110
111 111 def set_flags(f, l, x):
112 112 pass
113 113
114 114 def set_binary(fd):
115 115 # When run without console, pipes may expose invalid
116 116 # fileno(), usually set to -1.
117 117 if hasattr(fd, 'fileno') and fd.fileno() >= 0:
118 118 msvcrt.setmode(fd.fileno(), os.O_BINARY)
119 119
120 120 def pconvert(path):
121 121 return '/'.join(path.split(os.sep))
122 122
123 123 def localpath(path):
124 124 return path.replace('/', '\\')
125 125
126 126 def normpath(path):
127 127 return pconvert(os.path.normpath(path))
128 128
129 def realpath(path):
130 '''
131 Returns the true, canonical file system path equivalent to the given
132 path.
133 '''
134 # TODO: There may be a more clever way to do this that also handles other,
135 # less common file systems.
136 return os.path.normpath(os.path.normcase(os.path.realpath(path)))
137
129 138 def samestat(s1, s2):
130 139 return False
131 140
132 141 # A sequence of backslashes is special iff it precedes a double quote:
133 142 # - if there's an even number of backslashes, the double quote is not
134 143 # quoted (i.e. it ends the quoted region)
135 144 # - if there's an odd number of backslashes, the double quote is quoted
136 145 # - in both cases, every pair of backslashes is unquoted into a single
137 146 # backslash
138 147 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
139 148 # So, to quote a string, we must surround it in double quotes, double
140 149 # the number of backslashes that precede double quotes and add another
141 150 # backslash before every double quote (being careful with the double
142 151 # quote we've appended to the end)
143 152 _quotere = None
144 153 def shellquote(s):
145 154 global _quotere
146 155 if _quotere is None:
147 156 _quotere = re.compile(r'(\\*)("|\\$)')
148 157 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
149 158
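A sketch of what the substitution above produces (return values shown as Python string literals):

    shellquote('say "hi"')     # -> '"say \\"hi\\""'
    shellquote('C:\\dir\\')    # -> '"C:\\dir\\\\"'  (a trailing backslash is doubled)
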
150 159 def quotecommand(cmd):
151 160 """Build a command string suitable for os.popen* calls."""
152 161 # The extra quotes are needed because popen* runs the command
153 162 # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
154 163 return '"' + cmd + '"'
155 164
156 165 def popen(command, mode='r'):
157 166 # Work around "popen spawned process may not write to stdout
158 167 # under windows"
159 168 # http://bugs.python.org/issue1366
160 169 command += " 2> %s" % nulldev
161 170 return os.popen(quotecommand(command), mode)
162 171
163 172 def explain_exit(code):
164 173 return _("exited with status %d") % code, code
165 174
166 175 # if you change this stub into a real check, please try to implement the
167 176 # username and groupname functions above, too.
168 177 def isowner(st):
169 178 return True
170 179
171 180 def find_exe(command):
172 181 '''Find executable for command searching like cmd.exe does.
173 182 If command is a basename then PATH is searched for command.
174 183 PATH isn't searched if command is an absolute or relative path.
175 184 An extension from PATHEXT is found and added if not present.
176 185 If command isn't found None is returned.'''
177 186 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
178 187 pathexts = [ext for ext in pathext.lower().split(os.pathsep)]
179 188 if os.path.splitext(command)[1].lower() in pathexts:
180 189 pathexts = ['']
181 190
182 191 def findexisting(pathcommand):
183 192 'Will append extension (if needed) and return existing file'
184 193 for ext in pathexts:
185 194 executable = pathcommand + ext
186 195 if os.path.exists(executable):
187 196 return executable
188 197 return None
189 198
190 199 if os.sep in command:
191 200 return findexisting(command)
192 201
193 202 for path in os.environ.get('PATH', '').split(os.pathsep):
194 203 executable = findexisting(os.path.join(path, command))
195 204 if executable is not None:
196 205 return executable
197 206 return None
198 207
199 208 def set_signal_handler():
200 209 try:
201 210 set_signal_handler_win32()
202 211 except NameError:
203 212 pass
204 213
205 214 def statfiles(files):
206 215 '''Stat each file in files and yield stat or None if file does not exist.
207 216 Cluster and cache stat per directory to minimize number of OS stat calls.'''
208 217 ncase = os.path.normcase
209 218 sep = os.sep
210 219 dircache = {} # dirname -> filename -> status | None if file does not exist
211 220 for nf in files:
212 221 nf = ncase(nf)
213 222 dir, base = os.path.split(nf)
214 223 if not dir:
215 224 dir = '.'
216 225 cache = dircache.get(dir, None)
217 226 if cache is None:
218 227 try:
219 228 dmap = dict([(ncase(n), s)
220 229 for n, k, s in osutil.listdir(dir, True)])
221 230 except OSError, err:
222 231 # handle directory not found in Python version prior to 2.5
223 232 # Python <= 2.4 returns native Windows code 3 in errno
224 233 # Python >= 2.5 returns ENOENT and adds winerror field
225 234 # EINVAL is raised if dir is not a directory.
226 235 if err.errno not in (3, errno.ENOENT, errno.EINVAL,
227 236 errno.ENOTDIR):
228 237 raise
229 238 dmap = {}
230 239 cache = dircache.setdefault(dir, dmap)
231 240 yield cache.get(base, None)
232 241
233 242 def getuser():
234 243 '''return name of current user'''
235 244 raise error.Abort(_('user name not available - set USERNAME '
236 245 'environment variable'))
237 246
238 247 def username(uid=None):
239 248 """Return the name of the user with the given uid.
240 249
241 250 If uid is None, return the name of the current user."""
242 251 return None
243 252
244 253 def groupname(gid=None):
245 254 """Return the name of the group with the given gid.
246 255
247 256 If gid is None, return the name of the current group."""
248 257 return None
249 258
250 259 def _removedirs(name):
251 260 """special version of os.removedirs that does not remove symlinked
252 261 directories or junction points if they actually contain files"""
253 262 if osutil.listdir(name):
254 263 return
255 264 os.rmdir(name)
256 265 head, tail = os.path.split(name)
257 266 if not tail:
258 267 head, tail = os.path.split(head)
259 268 while head and tail:
260 269 try:
261 270 if osutil.listdir(name):
262 271 return
263 272 os.rmdir(head)
264 273 except:
265 274 break
266 275 head, tail = os.path.split(head)
267 276
268 277 def unlink(f):
269 278 """unlink and remove the directory if it is empty"""
270 279 os.unlink(f)
271 280 # try removing directories that might now be empty
272 281 try:
273 282 _removedirs(os.path.dirname(f))
274 283 except OSError:
275 284 pass
276 285
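A brief sketch of the behaviour described above, assuming the function is reachable as mercurial.util.unlink on Windows; the paths are illustrative.

import os
from mercurial import util

os.makedirs('scratch/sub')
open('scratch/sub/data.txt', 'w').close()
util.unlink('scratch/sub/data.txt')  # removes the file, then prunes the now-empty dirs
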
277 286 try:
278 287 # override functions with win32 versions if possible
279 288 from win32 import *
280 289 except ImportError:
281 290 pass
282 291
283 292 expandglobs = True
@@ -1,272 +1,272 b''
1 1 #!/usr/bin/env python
2 2 """Test the running system for features availability. Exit with zero
3 3 if all features are there, non-zero otherwise. If a feature name is
4 4 prefixed with "no-", the absence of the feature is tested.
5 5 """
6 6 import optparse
7 7 import os
8 8 import re
9 9 import sys
10 10 import tempfile
11 11
12 12 tempprefix = 'hg-hghave-'
13 13
14 14 def matchoutput(cmd, regexp, ignorestatus=False):
15 15 """Return True if cmd executes successfully and its output
16 16 is matched by the supplied regular expression.
17 17 """
18 18 r = re.compile(regexp)
19 19 fh = os.popen(cmd)
20 20 s = fh.read()
21 21 try:
22 22 ret = fh.close()
23 23 except IOError:
24 24 # Happens in the Windows test environment
25 25 ret = 1
26 26 return (ignorestatus or ret is None) and r.search(s)
27 27
28 28 def has_baz():
29 29 return matchoutput('baz --version 2>&1', r'baz Bazaar version')
30 30
31 31 def has_bzr():
32 32 try:
33 33 import bzrlib
34 34 return bzrlib.__doc__ != None
35 35 except ImportError:
36 36 return False
37 37
38 38 def has_bzr114():
39 39 try:
40 40 import bzrlib
41 41 return (bzrlib.__doc__ != None
42 and bzrlib.version_info[:2] == (1, 14))
42 and bzrlib.version_info[:2] >= (1, 14))
43 43 except ImportError:
44 44 return False
45 45
46 46 def has_cvs():
47 47 re = r'Concurrent Versions System.*?server'
48 48 return matchoutput('cvs --version 2>&1', re)
49 49
50 50 def has_cvsps():
51 51 return matchoutput('cvsps -h -q 2>&1', r'cvsps version', True)
52 52
53 53 def has_darcs():
54 54 return matchoutput('darcs', r'darcs version', True)
55 55
56 56 def has_mtn():
57 57 return matchoutput('mtn --version', r'monotone', True) and not matchoutput(
58 58 'mtn --version', r'monotone 0\.(\d|[12]\d|3[01])[^\d]', True)
59 59
60 60 def has_eol_in_paths():
61 61 try:
62 62 fd, path = tempfile.mkstemp(prefix=tempprefix, suffix='\n\r')
63 63 os.close(fd)
64 64 os.remove(path)
65 65 return True
66 66 except:
67 67 return False
68 68
69 69 def has_executablebit():
70 70 fd, path = tempfile.mkstemp(prefix=tempprefix)
71 71 os.close(fd)
72 72 try:
73 73 s = os.lstat(path).st_mode
74 74 os.chmod(path, s | 0100)
75 75 return (os.lstat(path).st_mode & 0100 != 0)
76 76 finally:
77 77 os.remove(path)
78 78
79 79 def has_icasefs():
80 80 # Stolen from mercurial.util
81 81 fd, path = tempfile.mkstemp(prefix=tempprefix)
82 82 os.close(fd)
83 83 try:
84 84 s1 = os.stat(path)
85 85 d, b = os.path.split(path)
86 86 p2 = os.path.join(d, b.upper())
87 87 if path == p2:
88 88 p2 = os.path.join(d, b.lower())
89 89 try:
90 90 s2 = os.stat(p2)
91 91 return s2 == s1
92 92 except:
93 93 return False
94 94 finally:
95 95 os.remove(path)
96 96
97 97 def has_inotify():
98 98 try:
99 99 import hgext.inotify.linux.watcher
100 100 return True
101 101 except ImportError:
102 102 return False
103 103
104 104 def has_fifo():
105 105 return hasattr(os, "mkfifo")
106 106
107 107 def has_hotshot():
108 108 try:
109 109 # hotshot.stats tests hotshot and many problematic dependencies
110 110 # like profile.
111 111 import hotshot.stats
112 112 return True
113 113 except ImportError:
114 114 return False
115 115
116 116 def has_lsprof():
117 117 try:
118 118 import _lsprof
119 119 return True
120 120 except ImportError:
121 121 return False
122 122
123 123 def has_git():
124 124 return matchoutput('git --version 2>&1', r'^git version')
125 125
126 126 def has_svn():
127 127 return matchoutput('svn --version 2>&1', r'^svn, version') and \
128 128 matchoutput('svnadmin --version 2>&1', r'^svnadmin, version')
129 129
130 130 def has_svn_bindings():
131 131 try:
132 132 import svn.core
133 133 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
134 134 if version < (1, 4):
135 135 return False
136 136 return True
137 137 except ImportError:
138 138 return False
139 139
140 140 def has_p4():
141 141 return matchoutput('p4 -V', r'Rev\. P4/') and matchoutput('p4d -V', r'Rev\. P4D/')
142 142
143 143 def has_symlink():
144 144 return hasattr(os, "symlink")
145 145
146 146 def has_tla():
147 147 return matchoutput('tla --version 2>&1', r'The GNU Arch Revision')
148 148
149 149 def has_gpg():
150 150 return matchoutput('gpg --version 2>&1', r'GnuPG')
151 151
152 152 def has_unix_permissions():
153 153 d = tempfile.mkdtemp(prefix=tempprefix, dir=".")
154 154 try:
155 155 fname = os.path.join(d, 'foo')
156 156 for umask in (077, 007, 022):
157 157 os.umask(umask)
158 158 f = open(fname, 'w')
159 159 f.close()
160 160 mode = os.stat(fname).st_mode
161 161 os.unlink(fname)
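# e.g. with umask 022, ~umask & 0666 == 0644, so the new file must be mode 0644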
162 162 if mode & 0777 != ~umask & 0666:
163 163 return False
164 164 return True
165 165 finally:
166 166 os.rmdir(d)
167 167
168 168 def has_pygments():
169 169 try:
170 170 import pygments
171 171 return True
172 172 except ImportError:
173 173 return False
174 174
175 175 def has_outer_repo():
176 176 return matchoutput('hg root 2>&1', r'')
177 177
178 178 checks = {
179 179 "baz": (has_baz, "GNU Arch baz client"),
180 180 "bzr": (has_bzr, "Canonical's Bazaar client"),
181 181 "bzr114": (has_bzr114, "Canonical's Bazaar client >= 1.14"),
182 182 "cvs": (has_cvs, "cvs client/server"),
183 183 "cvsps": (has_cvsps, "cvsps utility"),
184 184 "darcs": (has_darcs, "darcs client"),
185 185 "eol-in-paths": (has_eol_in_paths, "end-of-lines in paths"),
186 186 "execbit": (has_executablebit, "executable bit"),
187 187 "fifo": (has_fifo, "named pipes"),
188 188 "git": (has_git, "git command line client"),
189 189 "gpg": (has_gpg, "gpg client"),
190 190 "hotshot": (has_hotshot, "python hotshot module"),
191 191 "icasefs": (has_icasefs, "case insensitive file system"),
192 192 "inotify": (has_inotify, "inotify extension support"),
193 193 "lsprof": (has_lsprof, "python lsprof module"),
194 194 "mtn": (has_mtn, "monotone client (> 0.31)"),
195 195 "outer-repo": (has_outer_repo, "outer repo"),
196 196 "p4": (has_p4, "Perforce server and client"),
197 197 "pygments": (has_pygments, "Pygments source highlighting library"),
198 198 "svn": (has_svn, "subversion client and admin tools"),
199 199 "svn-bindings": (has_svn_bindings, "subversion python bindings"),
200 200 "symlink": (has_symlink, "symbolic links"),
201 201 "tla": (has_tla, "GNU Arch tla client"),
202 202 "unix-permissions": (has_unix_permissions, "unix-style permissions"),
203 203 }
204 204
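As an illustration of how this table can be extended, a new probe simply pairs a zero-argument checker with a description; the "docutils" entry below is hypothetical and not part of this file.

def has_docutils():
    try:
        import docutils.core
        return True
    except ImportError:
        return False

checks["docutils"] = (has_docutils, "Docutils text processing library")
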
205 205 def list_features():
206 206 for name, feature in checks.iteritems():
207 207 desc = feature[1]
208 208 print name + ':', desc
209 209
210 210 def test_features():
211 211 failed = 0
212 212 for name, feature in checks.iteritems():
213 213 check, _ = feature
214 214 try:
215 215 check()
216 216 except Exception, e:
217 217 print "feature %s failed: %s" % (name, e)
218 218 failed += 1
219 219 return failed
220 220
221 221 parser = optparse.OptionParser("%prog [options] [features]")
222 222 parser.add_option("--test-features", action="store_true",
223 223 help="test available features")
224 224 parser.add_option("--list-features", action="store_true",
225 225 help="list available features")
226 226 parser.add_option("-q", "--quiet", action="store_true",
227 227 help="check features silently")
228 228
229 229 if __name__ == '__main__':
230 230 options, args = parser.parse_args()
231 231 if options.list_features:
232 232 list_features()
233 233 sys.exit(0)
234 234
235 235 if options.test_features:
236 236 sys.exit(test_features())
237 237
238 238 quiet = options.quiet
239 239
240 240 failures = 0
241 241
242 242 def error(msg):
243 243 global failures
244 244 if not quiet:
245 245 sys.stderr.write(msg + '\n')
246 246 failures += 1
247 247
248 248 for feature in args:
249 249 negate = feature.startswith('no-')
250 250 if negate:
251 251 feature = feature[3:]
252 252
253 253 if feature not in checks:
254 254 error('skipped: unknown feature: ' + feature)
255 255 continue
256 256
257 257 check, desc = checks[feature]
258 258 try:
259 259 available = check()
260 260 except Exception, e:
261 261 error('hghave check failed: ' + feature)
262 262 continue
263 263
264 264 if not negate and not available:
265 265 error('skipped: missing feature: ' + desc)
266 266 elif negate and available:
267 267 error('skipped: system supports %s' % desc)
268 268
269 269 if failures != 0:
270 270 sys.exit(1)
271 271
272 272