##// END OF EJS Templates
diff: pass contexts to status...
Matt Mackall -
r7090:7b5c063b default
parent child Browse files
Show More
@@ -1,2090 +1,2101 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui, store
12 import lock, transaction, stat, errno, ui, store
13 import os, revlog, time, util, extensions, hook, inspect
13 import os, revlog, time, util, extensions, hook, inspect
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = util.set(('lookup', 'changegroupsubset'))
18 capabilities = util.set(('lookup', 'changegroupsubset'))
19 supported = ('revlogv1', 'store')
19 supported = ('revlogv1', 'store')
20
20
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create, initialize) the repository at path.

        parentui: ui object the repo-local configuration is layered on.
        Raises repo.RepoError when the repo is missing, already exists
        (with create), or declares an unsupported requirement.
        """
        repo.repository.__init__(self)
        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)      # opens files under .hg
        self.wopener = util.opener(self.root)     # opens working-dir files

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                # create an invalid changelog so pre-"requires" versions
                # of Mercurial fail loudly instead of misreading the repo
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            requirements = []
            try:
                requirements = self.opener("requires").read().splitlines()
                for r in requirements:
                    if r not in self.supported:
                        raise repo.RepoError(_("requirement '%s' not supported") % r)
            except IOError, inst:
                # a missing requires file means an old-style repo: fine
                if inst.errno != errno.ENOENT:
                    raise

        self.store = store.store(requirements, self.path, util.opener)

        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # no hgrc is perfectly normal
            pass

        # lazily-populated caches; None/empty means "not computed yet"
        self.tagscache = None
        self._tagstypecache = None
        self.branchcache = None
        self._ubranchcache = None # UTF-8 version of branchcache
        self._branchcachetip = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        # weakrefs to live transaction/locks (see transaction())
        self._transref = self._lockref = self._wlockref = None
85
85
    def __getattr__(self, name):
        """Lazily create changelog, manifest and dirstate.

        Each is built on first access and stored as an ordinary
        instance attribute, so this hook fires at most once per name.
        """
        if name == 'changelog':
            self.changelog = changelog.changelog(self.sopener)
            self.sopener.defversion = self.changelog.version
            return self.changelog
        if name == 'manifest':
            # touch the changelog first so sopener.defversion is set
            self.changelog
            self.manifest = manifest.manifest(self.sopener)
            return self.manifest
        if name == 'dirstate':
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
            return self.dirstate
        else:
            raise AttributeError(name)
100
100
101 def __getitem__(self, changeid):
101 def __getitem__(self, changeid):
102 if changeid == None:
102 if changeid == None:
103 return context.workingctx(self)
103 return context.workingctx(self)
104 return context.changectx(self, changeid)
104 return context.changectx(self, changeid)
105
105
106 def __nonzero__(self):
106 def __nonzero__(self):
107 return True
107 return True
108
108
109 def __len__(self):
109 def __len__(self):
110 return len(self.changelog)
110 return len(self.changelog)
111
111
112 def __iter__(self):
112 def __iter__(self):
113 for i in xrange(len(self)):
113 for i in xrange(len(self)):
114 yield i
114 yield i
115
115
116 def url(self):
116 def url(self):
117 return 'file:' + self.root
117 return 'file:' + self.root
118
118
119 def hook(self, name, throw=False, **args):
119 def hook(self, name, throw=False, **args):
120 return hook.hook(self.ui, self, name, throw, **args)
120 return hook.hook(self.ui, self, name, throw, **args)
121
121
122 tag_disallowed = ':\r\n'
122 tag_disallowed = ':\r\n'
123
123
    def _tag(self, names, node, message, local, user, date, parent=None,
             extra={}):
        """Low-level tagging: record that names point at node.

        names may be a single string or a sequence of tag names.
        local writes to .hg/localtags; otherwise .hgtags is updated and
        committed.  parent selects the commit parent when not committing
        from the dirstate.  Returns the tag commit's node (None for
        local tags).
        NOTE(review): extra={} is a shared mutable default; safe only as
        long as no caller/callee mutates it.
        """
        use_dirstate = parent is None

        # validate every name against the disallowed character set
        if isinstance(names, str):
            allchars = names
            names = (names,)
        else:
            allchars = ''.join(names)
        for c in self.tag_disallowed:
            if c in allchars:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)

        def writetags(fp, names, munge, prevtags):
            # append tag entries at EOF; munge converts the name's
            # charset (None = write as-is)
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if self._tagstypecache and name in self._tagstypecache:
                    # re-record the old target so the new entry
                    # supercedes it in file order
                    old = self.tagscache.get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError, err:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        if use_dirstate:
            # amend the working copy of .hgtags
            try:
                fp = self.wfile('.hgtags', 'rb+')
            except IOError, err:
                fp = self.wfile('.hgtags', 'ab')
            else:
                prevtags = fp.read()
        else:
            # start from .hgtags as of the chosen parent revision
            try:
                prevtags = self.filectx('.hgtags', parent).data()
            except revlog.LookupError:
                pass
            fp = self.wfile('.hgtags', 'wb')
            if prevtags:
                fp.write(prevtags)

        # committed tags are stored in UTF-8
        writetags(fp, names, util.fromlocal, prevtags)

        if use_dirstate and '.hgtags' not in self.dirstate:
            self.add(['.hgtags'])

        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
                              extra=extra)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
197
197
198 def tag(self, names, node, message, local, user, date):
198 def tag(self, names, node, message, local, user, date):
199 '''tag a revision with one or more symbolic names.
199 '''tag a revision with one or more symbolic names.
200
200
201 names is a list of strings or, when adding a single tag, names may be a
201 names is a list of strings or, when adding a single tag, names may be a
202 string.
202 string.
203
203
204 if local is True, the tags are stored in a per-repository file.
204 if local is True, the tags are stored in a per-repository file.
205 otherwise, they are stored in the .hgtags file, and a new
205 otherwise, they are stored in the .hgtags file, and a new
206 changeset is committed with the change.
206 changeset is committed with the change.
207
207
208 keyword arguments:
208 keyword arguments:
209
209
210 local: whether to store tags in non-version-controlled file
210 local: whether to store tags in non-version-controlled file
211 (default False)
211 (default False)
212
212
213 message: commit message to use if committing
213 message: commit message to use if committing
214
214
215 user: name of user to use if committing
215 user: name of user to use if committing
216
216
217 date: date tuple to use if committing'''
217 date: date tuple to use if committing'''
218
218
219 for x in self.status()[:5]:
219 for x in self.status()[:5]:
220 if '.hgtags' in x:
220 if '.hgtags' in x:
221 raise util.Abort(_('working copy of .hgtags is changed '
221 raise util.Abort(_('working copy of .hgtags is changed '
222 '(please commit .hgtags manually)'))
222 '(please commit .hgtags manually)'))
223
223
224 self._tag(names, node, message, local, user, date)
224 self._tag(names, node, message, local, user, date)
225
225
    def tags(self):
        '''return a mapping of tag to node'''
        # serve from the cache once computed (also fills _tagstypecache)
        if self.tagscache:
            return self.tagscache

        globaltags = {}   # tag name -> (node, list of superceded nodes)
        tagtypes = {}     # tag name -> 'global' or 'local'

        def readtags(lines, fn, tagtype):
            # parse one tags file and merge it into globaltags/tagtypes;
            # fn is only used for warning messages
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # within one file, later entries supercede earlier
                # ones; h collects the superceded nodes
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            # reuse the previous filectx where possible to share state
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            # no localtags file: nothing to merge
            pass

        self.tagscache = {}
        self._tagstypecache = {}
        for k,nh in globaltags.items():
            n = nh[0]
            # a tag pointing at nullid has been deleted; keep its type
            # but drop it from the visible mapping
            if n != nullid:
                self.tagscache[k] = n
            self._tagstypecache[k] = tagtypes[k]
        # 'tip' is implicit and always present
        self.tagscache['tip'] = self.changelog.tip()
        return self.tagscache
309
309
310 def tagtype(self, tagname):
310 def tagtype(self, tagname):
311 '''
311 '''
312 return the type of the given tag. result can be:
312 return the type of the given tag. result can be:
313
313
314 'local' : a local tag
314 'local' : a local tag
315 'global' : a global tag
315 'global' : a global tag
316 None : tag does not exist
316 None : tag does not exist
317 '''
317 '''
318
318
319 self.tags()
319 self.tags()
320
320
321 return self._tagstypecache.get(tagname)
321 return self._tagstypecache.get(tagname)
322
322
    def _hgtagsnodes(self):
        """Return (rev, node, fnode) for each head's .hgtags file.

        Heads are processed in reversed self.heads() order; when two
        heads share the same .hgtags file node, only the later
        occurrence is kept so each file version is read once.
        """
        heads = self.heads()
        heads.reverse()
        last = {}    # fnode -> index of its entry in ret
        ret = []
        for node in heads:
            c = self[node]
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file at all
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # same file content already queued earlier: drop it
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
340
340
341 def tagslist(self):
341 def tagslist(self):
342 '''return a list of tags ordered by revision'''
342 '''return a list of tags ordered by revision'''
343 l = []
343 l = []
344 for t, n in self.tags().items():
344 for t, n in self.tags().items():
345 try:
345 try:
346 r = self.changelog.rev(n)
346 r = self.changelog.rev(n)
347 except:
347 except:
348 r = -2 # sort to the beginning of the list if unknown
348 r = -2 # sort to the beginning of the list if unknown
349 l.append((r, t, n))
349 l.append((r, t, n))
350 return [(t, n) for r, t, n in util.sort(l)]
350 return [(t, n) for r, t, n in util.sort(l)]
351
351
352 def nodetags(self, node):
352 def nodetags(self, node):
353 '''return the tags associated with a node'''
353 '''return the tags associated with a node'''
354 if not self.nodetagscache:
354 if not self.nodetagscache:
355 self.nodetagscache = {}
355 self.nodetagscache = {}
356 for t, n in self.tags().items():
356 for t, n in self.tags().items():
357 self.nodetagscache.setdefault(n, []).append(t)
357 self.nodetagscache.setdefault(n, []).append(t)
358 return self.nodetagscache.get(node, [])
358 return self.nodetagscache.get(node, [])
359
359
360 def _branchtags(self, partial, lrev):
360 def _branchtags(self, partial, lrev):
361 tiprev = len(self) - 1
361 tiprev = len(self) - 1
362 if lrev != tiprev:
362 if lrev != tiprev:
363 self._updatebranchcache(partial, lrev+1, tiprev+1)
363 self._updatebranchcache(partial, lrev+1, tiprev+1)
364 self._writebranchcache(partial, self.changelog.tip(), tiprev)
364 self._writebranchcache(partial, self.changelog.tip(), tiprev)
365
365
366 return partial
366 return partial
367
367
    def branchtags(self):
        """Return a dict mapping branch name to its tip-most node.

        Results are cached and invalidated whenever the changelog tip
        moves; the cache is refreshed incrementally when possible.
        """
        tip = self.changelog.tip()
        if self.branchcache is not None and self._branchcachetip == tip:
            return self.branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if self.branchcache is None:
            self.branchcache = {} # avoid recursion in changectx
        else:
            self.branchcache.clear() # keep using the same dict
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # never computed, or the old tip was stripped: reload the
            # on-disk cache and update from there
            partial, last, lrev = self._readbranchcache()
        else:
            # incremental update from the previously-known tip
            lrev = self.changelog.rev(oldtip)
            partial = self._ubranchcache

        self._branchtags(partial, lrev)

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        self._ubranchcache = partial
        return self.branchcache
393
393
    def _readbranchcache(self):
        """Load the on-disk branch cache.

        Returns (partial, last, lrev): partial maps branch name to
        node, last/lrev identify the tip the cache was valid for.
        Any parse or validity problem falls back to an empty cache
        that will be rebuilt from scratch.
        """
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            # no cache file: start empty
            return {}, nullid, nullrev

        try:
            # first line is "<tip hex> <tip rev>"
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            # never swallow a user interrupt
            raise
        except Exception, inst:
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
420
420
421 def _writebranchcache(self, branches, tip, tiprev):
421 def _writebranchcache(self, branches, tip, tiprev):
422 try:
422 try:
423 f = self.opener("branch.cache", "w", atomictemp=True)
423 f = self.opener("branch.cache", "w", atomictemp=True)
424 f.write("%s %s\n" % (hex(tip), tiprev))
424 f.write("%s %s\n" % (hex(tip), tiprev))
425 for label, node in branches.iteritems():
425 for label, node in branches.iteritems():
426 f.write("%s %s\n" % (hex(node), label))
426 f.write("%s %s\n" % (hex(node), label))
427 f.rename()
427 f.rename()
428 except (IOError, OSError):
428 except (IOError, OSError):
429 pass
429 pass
430
430
431 def _updatebranchcache(self, partial, start, end):
431 def _updatebranchcache(self, partial, start, end):
432 for r in xrange(start, end):
432 for r in xrange(start, end):
433 c = self[r]
433 c = self[r]
434 b = c.branch()
434 b = c.branch()
435 partial[b] = c.node()
435 partial[b] = c.node()
436
436
437 def lookup(self, key):
437 def lookup(self, key):
438 if key == '.':
438 if key == '.':
439 return self.dirstate.parents()[0]
439 return self.dirstate.parents()[0]
440 elif key == 'null':
440 elif key == 'null':
441 return nullid
441 return nullid
442 n = self.changelog._match(key)
442 n = self.changelog._match(key)
443 if n:
443 if n:
444 return n
444 return n
445 if key in self.tags():
445 if key in self.tags():
446 return self.tags()[key]
446 return self.tags()[key]
447 if key in self.branchtags():
447 if key in self.branchtags():
448 return self.branchtags()[key]
448 return self.branchtags()[key]
449 n = self.changelog._partialmatch(key)
449 n = self.changelog._partialmatch(key)
450 if n:
450 if n:
451 return n
451 return n
452 try:
452 try:
453 if len(key) == 20:
453 if len(key) == 20:
454 key = hex(key)
454 key = hex(key)
455 except:
455 except:
456 pass
456 pass
457 raise repo.RepoError(_("unknown revision '%s'") % key)
457 raise repo.RepoError(_("unknown revision '%s'") % key)
458
458
459 def local(self):
459 def local(self):
460 return True
460 return True
461
461
462 def join(self, f):
462 def join(self, f):
463 return os.path.join(self.path, f)
463 return os.path.join(self.path, f)
464
464
465 def wjoin(self, f):
465 def wjoin(self, f):
466 return os.path.join(self.root, f)
466 return os.path.join(self.root, f)
467
467
468 def rjoin(self, f):
468 def rjoin(self, f):
469 return os.path.join(self.root, util.pconvert(f))
469 return os.path.join(self.root, util.pconvert(f))
470
470
471 def file(self, f):
471 def file(self, f):
472 if f[0] == '/':
472 if f[0] == '/':
473 f = f[1:]
473 f = f[1:]
474 return filelog.filelog(self.sopener, f)
474 return filelog.filelog(self.sopener, f)
475
475
476 def changectx(self, changeid):
476 def changectx(self, changeid):
477 return self[changeid]
477 return self[changeid]
478
478
479 def parents(self, changeid=None):
479 def parents(self, changeid=None):
480 '''get list of changectxs for parents of changeid'''
480 '''get list of changectxs for parents of changeid'''
481 return self[changeid].parents()
481 return self[changeid].parents()
482
482
483 def filectx(self, path, changeid=None, fileid=None):
483 def filectx(self, path, changeid=None, fileid=None):
484 """changeid can be a changeset revision, node, or tag.
484 """changeid can be a changeset revision, node, or tag.
485 fileid can be a file revision or node."""
485 fileid can be a file revision or node."""
486 return context.filectx(self, path, changeid, fileid)
486 return context.filectx(self, path, changeid, fileid)
487
487
488 def getcwd(self):
488 def getcwd(self):
489 return self.dirstate.getcwd()
489 return self.dirstate.getcwd()
490
490
491 def pathto(self, f, cwd=None):
491 def pathto(self, f, cwd=None):
492 return self.dirstate.pathto(f, cwd)
492 return self.dirstate.pathto(f, cwd)
493
493
494 def wfile(self, f, mode='r'):
494 def wfile(self, f, mode='r'):
495 return self.wopener(f, mode)
495 return self.wopener(f, mode)
496
496
497 def _link(self, f):
497 def _link(self, f):
498 return os.path.islink(self.wjoin(f))
498 return os.path.islink(self.wjoin(f))
499
499
    def _filter(self, filter, filename, data):
        """Run data through the filters configured for filename.

        filter is the config section name ('encode' or 'decode'); the
        compiled (matcher, fn, params) triples for a section are
        memoized in self.filterpats.  The first matching pattern wins.
        """
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                fn = None
                params = cmd
                # a command starting with a registered filter name uses
                # that in-process filter; the rest of cmd is its params
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # fall back to piping through an external command
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l

        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data
528
528
529 def adddatafilter(self, name, filter):
529 def adddatafilter(self, name, filter):
530 self._datafilters[name] = filter
530 self._datafilters[name] = filter
531
531
532 def wread(self, filename):
532 def wread(self, filename):
533 if self._link(filename):
533 if self._link(filename):
534 data = os.readlink(self.wjoin(filename))
534 data = os.readlink(self.wjoin(filename))
535 else:
535 else:
536 data = self.wopener(filename, 'r').read()
536 data = self.wopener(filename, 'r').read()
537 return self._filter("encode", filename, data)
537 return self._filter("encode", filename, data)
538
538
539 def wwrite(self, filename, data, flags):
539 def wwrite(self, filename, data, flags):
540 data = self._filter("decode", filename, data)
540 data = self._filter("decode", filename, data)
541 try:
541 try:
542 os.unlink(self.wjoin(filename))
542 os.unlink(self.wjoin(filename))
543 except OSError:
543 except OSError:
544 pass
544 pass
545 if 'l' in flags:
545 if 'l' in flags:
546 self.wopener.symlink(data, filename)
546 self.wopener.symlink(data, filename)
547 else:
547 else:
548 self.wopener(filename, 'w').write(data)
548 self.wopener(filename, 'w').write(data)
549 if 'x' in flags:
549 if 'x' in flags:
550 util.set_flags(self.wjoin(filename), False, True)
550 util.set_flags(self.wjoin(filename), False, True)
551
551
552 def wwritedata(self, filename, data):
552 def wwritedata(self, filename, data):
553 return self._filter("decode", filename, data)
553 return self._filter("decode", filename, data)
554
554
    def transaction(self):
        """Open a store transaction, nesting into a live one if any.

        Saves dirstate and branch for rollback, refuses to start when a
        stale journal exists, and arranges for the journal files to be
        renamed to their undo counterparts when the transaction closes.
        """
        if self._transref and self._transref():
            # a transaction is already running; join it
            return self._transref().nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise repo.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        # on successful close, journal files become undo files
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        # keep only a weakref so an abandoned transaction can be
        # collected (and aborted) when the caller drops it
        self._transref = weakref.ref(tr)
        return tr
580
580
581 def recover(self):
581 def recover(self):
582 l = self.lock()
582 l = self.lock()
583 try:
583 try:
584 if os.path.exists(self.sjoin("journal")):
584 if os.path.exists(self.sjoin("journal")):
585 self.ui.status(_("rolling back interrupted transaction\n"))
585 self.ui.status(_("rolling back interrupted transaction\n"))
586 transaction.rollback(self.sopener, self.sjoin("journal"))
586 transaction.rollback(self.sopener, self.sjoin("journal"))
587 self.invalidate()
587 self.invalidate()
588 return True
588 return True
589 else:
589 else:
590 self.ui.warn(_("no interrupted transaction available\n"))
590 self.ui.warn(_("no interrupted transaction available\n"))
591 return False
591 return False
592 finally:
592 finally:
593 del l
593 del l
594
594
    def rollback(self):
        """Undo the last committed transaction.

        Restores the store from the undo journal, then the dirstate and
        branch from their saved copies.  Warns (without raising) when no
        undo information is available.
        """
        wlock = lock = None
        try:
            # take both locks: we touch the store and the dirstate
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    # undo data without a saved branch: keep the current one
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % util.tolocal(self.dirstate.branch()))
                # drop in-memory caches so they reload the restored state
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            del lock, wlock
617
617
618 def invalidate(self):
618 def invalidate(self):
619 for a in "changelog manifest".split():
619 for a in "changelog manifest".split():
620 if a in self.__dict__:
620 if a in self.__dict__:
621 delattr(self, a)
621 delattr(self, a)
622 self.tagscache = None
622 self.tagscache = None
623 self._tagstypecache = None
623 self._tagstypecache = None
624 self.nodetagscache = None
624 self.nodetagscache = None
625 self.branchcache = None
625 self.branchcache = None
626 self._ubranchcache = None
626 self._ubranchcache = None
627 self._branchcachetip = None
627 self._branchcachetip = None
628
628
629 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
629 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
630 try:
630 try:
631 l = lock.lock(lockname, 0, releasefn, desc=desc)
631 l = lock.lock(lockname, 0, releasefn, desc=desc)
632 except lock.LockHeld, inst:
632 except lock.LockHeld, inst:
633 if not wait:
633 if not wait:
634 raise
634 raise
635 self.ui.warn(_("waiting for lock on %s held by %r\n") %
635 self.ui.warn(_("waiting for lock on %s held by %r\n") %
636 (desc, inst.locker))
636 (desc, inst.locker))
637 # default to 600 seconds timeout
637 # default to 600 seconds timeout
638 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
638 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
639 releasefn, desc=desc)
639 releasefn, desc=desc)
640 if acquirefn:
640 if acquirefn:
641 acquirefn()
641 acquirefn()
642 return l
642 return l
643
643
644 def lock(self, wait=True):
644 def lock(self, wait=True):
645 if self._lockref and self._lockref():
645 if self._lockref and self._lockref():
646 return self._lockref()
646 return self._lockref()
647
647
648 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
648 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
649 _('repository %s') % self.origroot)
649 _('repository %s') % self.origroot)
650 self._lockref = weakref.ref(l)
650 self._lockref = weakref.ref(l)
651 return l
651 return l
652
652
653 def wlock(self, wait=True):
653 def wlock(self, wait=True):
654 if self._wlockref and self._wlockref():
654 if self._wlockref and self._wlockref():
655 return self._wlockref()
655 return self._wlockref()
656
656
657 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
657 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
658 self.dirstate.invalidate, _('working directory of %s') %
658 self.dirstate.invalidate, _('working directory of %s') %
659 self.origroot)
659 self.origroot)
660 self._wlockref = weakref.ref(l)
660 self._wlockref = weakref.ref(l)
661 return l
661 return l
662
662
    def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the filelog node for the file's new (or reused) revision.
        Appends the file name to changelist only when a new filelog
        revision is actually created.
        """

        fn = fctx.path()
        t = fctx.data()
        fl = self.file(fn)
        # file nodes in each parent manifest (nullid if absent)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = fctx.renamed()
        if cp and cp[0] != fn:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #

            cf = cp[0]
            cr = manifest1.get(cf)
            nfp = fp2

            if manifest2: # branch merge
                if fp2 == nullid: # copied on remote side
                    if fp1 != nullid or cf in manifest2:
                        cr = manifest2[cf]
                        nfp = fp1

            # find source in nearest ancestor if we've lost track
            if not cr:
                self.ui.debug(_(" %s: searching for copy revision for %s\n") %
                              (fn, cf))
                for a in self['.'].ancestors():
                    if cf in a:
                        cr = a[cf].filenode()
                        break

            self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
            meta["copy"] = cf
            meta["copyrev"] = hex(cr)
            fp1, fp2 = nullid, nfp
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, tr, linkrev, fp1, fp2)
733
733
734 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
734 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
735 if p1 is None:
735 if p1 is None:
736 p1, p2 = self.dirstate.parents()
736 p1, p2 = self.dirstate.parents()
737 return self.commit(files=files, text=text, user=user, date=date,
737 return self.commit(files=files, text=text, user=user, date=date,
738 p1=p1, p2=p2, extra=extra, empty_ok=True)
738 p1=p1, p2=p2, extra=extra, empty_ok=True)
739
739
    def commit(self, files=None, text="", user=None, date=None,
               match=None, force=False, force_editor=False,
               p1=None, p2=None, extra={}, empty_ok=False):
        """Commit changes from the working directory (or, when p1 is
        given, act as a rawcommit with explicit parents).

        Builds a workingctx describing the change and hands it to
        _commitctx; returns the new changeset node, or None when there
        was nothing to commit.  Raises util.Abort on a partial merge
        commit, unresolved merge conflicts, or an empty commit message.
        """
        wlock = lock = None
        if files:
            files = util.unique(files)
        try:
            wlock = self.wlock()
            lock = self.lock()
            use_dirstate = (p1 is None) # not rawcommit

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True

                # a merge may only be committed whole
                if (not force and p2 != nullid and
                    (match and (match.files() or match.anypats()))):
                    raise util.Abort(_('cannot partially commit a merge '
                                       '(do not specify files or patterns)'))

                if files:
                    # explicit file list: classify by dirstate status
                    modified, removed = [], []
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            modified.append(f)
                        elif s == 'r':
                            removed.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                    changes = [modified, [], removed, [], []]
                else:
                    changes = self.status(match=match)
            else:
                # rawcommit path: trust the caller's parents and files
                p1, p2 = p1, p2 or nullid
                update_dirstate = (self.dirstate.parents()[0] == p1)
                changes = [files, [], [], [], []]

            # refuse to commit files still marked unresolved by a merge
            ms = merge_.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg resolve)"))
            wctx = context.workingctx(self, (p1, p2), text, user, date,
                                      extra, changes)
            return self._commitctx(wctx, force, force_editor, empty_ok,
                                   use_dirstate, update_dirstate)
        finally:
            del lock, wlock
789
789
    def commitctx(self, ctx):
        """Add a new revision to current repository.

        Revision information is passed in the context.memctx argument.
        commitctx() does not touch the working directory.
        """
        wlock = lock = None
        try:
            # wlock is taken before lock, matching commit()'s ordering
            wlock = self.wlock()
            lock = self.lock()
            return self._commitctx(ctx, force=True, force_editor=False,
                                   empty_ok=True, use_dirstate=False,
                                   update_dirstate=False)
        finally:
            del lock, wlock
805
805
    def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
                   use_dirstate=True, update_dirstate=True):
        """Shared commit core for commit(), rawcommit() and commitctx().

        Writes file revisions, the manifest and the changeset described
        by wctx inside one transaction, runs the pre/post commit hooks,
        and (when requested) updates the dirstate.  Returns the new
        changeset node, or None when nothing changed.
        """
        tr = None
        valid = 0 # don't save the dirstate if this isn't set
        try:
            commit = util.sort(wctx.modified() + wctx.added())
            remove = wctx.removed()
            extra = wctx.extra().copy()
            branchname = extra['branch']
            user = wctx.user()
            text = wctx.description()

            p1, p2 = [p.node() for p in wctx.parents()]
            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                # nothing to do: no file changes, single parent, same branch
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            tr = self.transaction()
            # hand out only a proxy so helpers cannot keep tr alive
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            changed = []
            linkrev = len(self)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    fctx = wctx.filectx(f)
                    newflags = fctx.flags()
                    new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
                    if ((not changed or changed[-1] != f) and
                        m2.get(f) != new[f]):
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        if m1.flags(f) != newflags:
                            changed.append(f)
                    m1.set(f, newflags)
                    if use_dirstate:
                        self.dirstate.normal(f)

                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        # rawcommit tolerates missing files: treat as removed
                        remove.append(f)

            # split the changed files into updates vs. additions for the
            # editor template below
            updated, added = [], []
            for f in util.sort(changed):
                if f in m1 or f in m2:
                    updated.append(f)
                else:
                    added.append(f)

            # update manifest
            m1.update(new)
            removed = []

            for f in util.sort(remove):
                if f in m1:
                    del m1[f]
                    removed.append(f)
                elif f in m2:
                    removed.append(f)
            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
                                   (new, removed))

            # add changeset
            if (not empty_ok and not text) or force_editor:
                edittext = []
                if text:
                    edittext.append(text)
                edittext.append("")
                edittext.append("") # Empty line between message and comments.
                edittext.append(_("HG: Enter commit message."
                                  " Lines beginning with 'HG:' are removed."))
                edittext.append("HG: --")
                edittext.append("HG: user: %s" % user)
                if p2 != nullid:
                    edittext.append("HG: branch merge")
                if branchname:
                    edittext.append("HG: branch '%s'" % util.tolocal(branchname))
                edittext.extend(["HG: added %s" % f for f in added])
                edittext.extend(["HG: changed %s" % f for f in updated])
                edittext.extend(["HG: removed %s" % f for f in removed])
                if not added and not updated and not removed:
                    edittext.append("HG: no files changed")
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

            # normalize the message: strip trailing whitespace and
            # leading blank lines
            lines = [line.rstrip() for line in text.rstrip().splitlines()]
            while lines and not lines[0]:
                del lines[0]
            if not lines and use_dirstate:
                raise util.Abort(_("empty commit message"))
            text = '\n'.join(lines)

            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                   user, wctx.date(), extra)
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2)
            tr.close()

            # refresh the branch cache if one is loaded
            if self.branchcache:
                self.branchtags()

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
            if use_dirstate:
                for f in removed:
                    self.dirstate.forget(f)
            valid = 1 # our dirstate updates are complete

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            if not valid: # don't save our updated dirstate
                self.dirstate.invalidate()
            del tr
944
944
945 def walk(self, match, node=None):
945 def walk(self, match, node=None):
946 '''
946 '''
947 walk recursively through the directory tree or a given
947 walk recursively through the directory tree or a given
948 changeset, finding all files matched by the match
948 changeset, finding all files matched by the match
949 function
949 function
950 '''
950 '''
951 return self[node].walk(match)
951 return self[node].walk(match)
952
952
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        node1 and node2 may also be changectx objects, which are used
        directly instead of being looked up.  Returns a 7-tuple of
        sorted lists: (modified, added, removed, deleted, unknown,
        ignored, clean); the last three are empty unless the matching
        flag argument is set.
        """

        def mfmatches(ctx):
            # manifest of ctx restricted to files accepted by match
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        # accept pre-built contexts as well as node identifiers
        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2 == self[None]
        parentworking = working and ctx1 == self['.']
        match = match or match_.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                # only complain about files missing from the base revision
                if f not in ctx1:
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
                return False
            match.bad = bad

        if working: # we need to scan the working dir
            s = self.dirstate.status(match, listignored, listclean, listunknown)
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in cmp:
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f].data())):
                        modified.append(f)
                    else:
                        fixup.append(f)

                if listclean:
                    clean += fixup

                # update dirstate for files that are actually clean
                if fixup:
                    wlock = None
                    try:
                        try:
                            # best effort only: skip if the lock is busy
                            wlock = self.wlock(False)
                            for f in fixup:
                                self.dirstate.normal(f)
                        except lock.LockException:
                            pass
                    finally:
                        del wlock

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            # walk the second manifest, consuming matching entries from
            # the first; whatever remains in mf1 was removed
            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)
            removed = mf1.keys()

        r = modified, added, removed, deleted, unknown, ignored, clean
        [l.sort() for l in r]
        return r
1047
1058
1048 def add(self, list):
1059 def add(self, list):
1049 wlock = self.wlock()
1060 wlock = self.wlock()
1050 try:
1061 try:
1051 rejected = []
1062 rejected = []
1052 for f in list:
1063 for f in list:
1053 p = self.wjoin(f)
1064 p = self.wjoin(f)
1054 try:
1065 try:
1055 st = os.lstat(p)
1066 st = os.lstat(p)
1056 except:
1067 except:
1057 self.ui.warn(_("%s does not exist!\n") % f)
1068 self.ui.warn(_("%s does not exist!\n") % f)
1058 rejected.append(f)
1069 rejected.append(f)
1059 continue
1070 continue
1060 if st.st_size > 10000000:
1071 if st.st_size > 10000000:
1061 self.ui.warn(_("%s: files over 10MB may cause memory and"
1072 self.ui.warn(_("%s: files over 10MB may cause memory and"
1062 " performance problems\n"
1073 " performance problems\n"
1063 "(use 'hg revert %s' to unadd the file)\n")
1074 "(use 'hg revert %s' to unadd the file)\n")
1064 % (f, f))
1075 % (f, f))
1065 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1076 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1066 self.ui.warn(_("%s not added: only files and symlinks "
1077 self.ui.warn(_("%s not added: only files and symlinks "
1067 "supported currently\n") % f)
1078 "supported currently\n") % f)
1068 rejected.append(p)
1079 rejected.append(p)
1069 elif self.dirstate[f] in 'amn':
1080 elif self.dirstate[f] in 'amn':
1070 self.ui.warn(_("%s already tracked!\n") % f)
1081 self.ui.warn(_("%s already tracked!\n") % f)
1071 elif self.dirstate[f] == 'r':
1082 elif self.dirstate[f] == 'r':
1072 self.dirstate.normallookup(f)
1083 self.dirstate.normallookup(f)
1073 else:
1084 else:
1074 self.dirstate.add(f)
1085 self.dirstate.add(f)
1075 return rejected
1086 return rejected
1076 finally:
1087 finally:
1077 del wlock
1088 del wlock
1078
1089
1079 def forget(self, list):
1090 def forget(self, list):
1080 wlock = self.wlock()
1091 wlock = self.wlock()
1081 try:
1092 try:
1082 for f in list:
1093 for f in list:
1083 if self.dirstate[f] != 'a':
1094 if self.dirstate[f] != 'a':
1084 self.ui.warn(_("%s not added!\n") % f)
1095 self.ui.warn(_("%s not added!\n") % f)
1085 else:
1096 else:
1086 self.dirstate.forget(f)
1097 self.dirstate.forget(f)
1087 finally:
1098 finally:
1088 del wlock
1099 del wlock
1089
1100
    def remove(self, list, unlink=False):
        """Mark the given files as removed in the dirstate.

        When unlink is true, the files are first deleted from the
        working directory (best effort; already-missing files are
        ignored) and any file that still exists afterwards is warned
        about instead of being marked removed.
        """
        wlock = None
        try:
            if unlink:
                # delete before taking the wlock so user I/O isn't
                # serialized under the lock
                for f in list:
                    try:
                        util.unlink(self.wjoin(f))
                    except OSError, inst:
                        if inst.errno != errno.ENOENT:
                            raise
            wlock = self.wlock()
            for f in list:
                if unlink and os.path.exists(self.wjoin(f)):
                    self.ui.warn(_("%s still exists!\n") % f)
                elif self.dirstate[f] == 'a':
                    # never committed: just forget the pending add
                    self.dirstate.forget(f)
                elif f not in self.dirstate:
                    self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    self.dirstate.remove(f)
        finally:
            del wlock
1112
1123
def undelete(self, list):
    """Restore files in state 'r' from the dirstate parents' manifests.

    For each file marked removed, the file contents are read back from
    whichever parent manifest contains it, written to the working
    directory, and the dirstate entry is set back to normal. Files not
    in state 'r' only produce a warning.
    """
    wlock = None
    try:
        # manifests of the (at most two) non-null dirstate parents
        manifests = [self.manifest.read(self.changelog.read(p)[0])
                     for p in self.dirstate.parents() if p != nullid]
        wlock = self.wlock()
        for f in list:
            if self.dirstate[f] != 'r':
                self.ui.warn(_("%s not removed!\n") % f)
            else:
                # prefer the first parent's copy when both have the file
                m = f in manifests[0] and manifests[0] or manifests[1]
                t = self.file(f).read(m[f])
                self.wwrite(f, t, m.flags(f))
                self.dirstate.normal(f)
    finally:
        del wlock
1129
1140
def copy(self, source, dest):
    """Record dest as a copy of source in the dirstate.

    dest must already exist in the working directory as a regular file
    or symlink; otherwise a warning is printed and nothing is recorded.
    If dest is not yet tracked it is added first.
    """
    wlock = None
    try:
        p = self.wjoin(dest)
        if not (os.path.exists(p) or os.path.islink(p)):
            self.ui.warn(_("%s does not exist!\n") % dest)
        elif not (os.path.isfile(p) or os.path.islink(p)):
            self.ui.warn(_("copy failed: %s is not a file or a "
                           "symbolic link\n") % dest)
        else:
            wlock = self.wlock()
            if dest not in self.dirstate:
                self.dirstate.add(dest)
            self.dirstate.copy(source, dest)
    finally:
        del wlock
1146
1157
def heads(self, start=None):
    """Return changelog heads, sorted by descending revision number.

    If start is given, only heads reachable from start are returned
    (delegated to changelog.heads).
    """
    heads = self.changelog.heads(start)
    # sort the output in rev descending order by keying on negated rev
    heads = [(-self.changelog.rev(h), h) for h in heads]
    return [n for (r, n) in util.sort(heads)]
1152
1163
def branchheads(self, branch=None, start=None):
    """Return the head nodes of the named branch.

    branch defaults to the working directory's branch; start, if given,
    restricts the result to heads reachable from start. Returns [] for
    an unknown branch.
    """
    if branch is None:
        branch = self[None].branch()
    branches = self.branchtags()
    if branch not in branches:
        return []
    # The basic algorithm is this:
    #
    # Start from the branch tip since there are no later revisions that can
    # possibly be in this branch, and the tip is a guaranteed head.
    #
    # Remember the tip's parents as the first ancestors, since these by
    # definition are not heads.
    #
    # Step backwards from the branch tip through all the revisions. We are
    # guaranteed by the rules of Mercurial that we will now be visiting the
    # nodes in reverse topological order (children before parents).
    #
    # If a revision is one of the ancestors of a head then we can toss it
    # out of the ancestors set (we've already found it and won't be
    # visiting it again) and put its parents in the ancestors set.
    #
    # Otherwise, if a revision is in the branch it's another head, since it
    # wasn't in the ancestor list of an existing head. So add it to the
    # head list, and add its parents to the ancestor list.
    #
    # If it is not in the branch ignore it.
    #
    # Once we have a list of heads, use nodesbetween to filter out all the
    # heads that cannot be reached from startrev. There may be a more
    # efficient way to do this as part of the previous algorithm.

    heads = [self.changelog.rev(branches[branch])]
    # Don't care if ancestors contains nullrev or not.
    ancestors = util.set(self.changelog.parentrevs(heads[0]))
    for rev in xrange(heads[0] - 1, nullrev, -1):
        if rev in ancestors:
            ancestors.update(self.changelog.parentrevs(rev))
            ancestors.remove(rev)
        elif self[rev].branch() == branch:
            heads.append(rev)
            ancestors.update(self.changelog.parentrevs(rev))
    heads = [self.changelog.node(rev) for rev in heads]
    if start is not None:
        heads = self.changelog.nodesbetween([start], heads)[2]
    return heads
1200
1211
def branches(self, nodes):
    """Return linear-branch info for each node.

    For each starting node, walk first parents until a merge or a root
    is reached, and yield a tuple (tip, root, p1, p2) describing that
    linear segment. An empty nodes list defaults to the changelog tip.
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    b = []
    for n in nodes:
        t = n
        while True:
            p = self.changelog.parents(n)
            # stop at a merge (two real parents) or at a root
            if p[1] != nullid or p[0] == nullid:
                b.append((t, n, p[0], p[1]))
                break
            n = p[0]
    return b
1214
1225
def between(self, pairs):
    """For each (top, bottom) pair, sample the first-parent chain.

    Walks from top down to bottom along first parents, collecting the
    nodes at exponentially increasing distances 1, 2, 4, ... from top
    (excluding top and bottom themselves). Returns one list per pair.
    Used by the discovery protocol's binary search.
    """
    r = []

    for top, bottom in pairs:
        n, l, i = top, [], 0
        f = 1

        while n != bottom:
            p = self.changelog.parents(n)[0]
            if i == f:
                # node at distance f from top; double the stride
                l.append(n)
                f = f * 2
            n = p
            i += 1

        r.append(l)

    return r
1233
1244
def findincoming(self, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and self.
    Furthermore base will be updated to include the nodes that exists
    in self and remote but no children exists in self and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in self and in remote.
    All the descendants of the list returned are missing in self.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    m = self.changelog.nodemap
    search = []
    fetch = {}
    seen = {}
    seenbranch = {}
    if base is None:
        base = {}

    if not heads:
        heads = remote.heads()

    if self.changelog.tip() == nullid:
        # empty local repo: everything remote has is incoming
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid]
        return []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return []

    req = dict.fromkeys(unknown)
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            elif n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req[p] = 1
            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # query the remote in batches of 10 parents at a time
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            raise repo.RepoError(_("already have changeset ") + short(f[:4]))

    if base.keys() == [nullid]:
        if force:
            self.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    self.ui.debug(_("found new changesets starting at ") +
                  " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
1374
1385
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    if base is None:
        base = {}
        self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    remove = base.keys()
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, updated_heads.keys()
    else:
        return subset
1422
1433
def pull(self, remote, heads=None, force=False):
    """Pull changes from remote into this repository.

    heads, if given, limits the pull to the requested heads (requires
    the remote to support 'changegroupsubset'). Returns the result of
    addchangegroup, or 0 when there is nothing to pull.
    """
    lock = self.lock()
    try:
        fetch = self.findincoming(remote, heads=heads, force=force)
        if fetch == [nullid]:
            # local repo is empty: a full clone is coming
            self.ui.status(_("requesting all changes\n"))

        if not fetch:
            self.ui.status(_("no changes found\n"))
            return 0

        if heads is None:
            cg = remote.changegroup(fetch, 'pull')
        else:
            if 'changegroupsubset' not in remote.capabilities:
                raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
            cg = remote.changegroupsubset(fetch, heads, 'pull')
        return self.addchangegroup(cg, 'pull', remote.url())
    finally:
        del lock
1443
1454
def push(self, remote, force=False, revs=None):
    """Push local changes to remote, choosing the transfer strategy.

    There are two ways to push to a remote repo:

    addchangegroup assumes the local user can lock the remote
    repo (local filesystem, old ssh servers).

    unbundle assumes the local user cannot lock the remote repo (new ssh
    servers, http servers).
    """
    if remote.capable('unbundle'):
        return self.push_unbundle(remote, force, revs)
    return self.push_addchangegroup(remote, force, revs)
1456
1467
def prepush(self, remote, force, revs):
    """Analyse a push and build its changegroup.

    Returns (changegroup, remote_heads) when there is something to
    push, or (None, flag) where flag is 1 for "no changes found" and
    0 for "push refused" (it would create new remote heads without
    force).
    """
    base = {}
    remote_heads = remote.heads()
    inc = self.findincoming(remote, base, remote_heads, force=force)

    update, updated_heads = self.findoutgoing(remote, base, remote_heads)
    if revs is not None:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
    else:
        bases, heads = update, self.changelog.heads()

    if not bases:
        self.ui.status(_("no changes found\n"))
        return None, 1
    elif not force:
        # check if we're creating new remote heads
        # to be a remote head after push, node must be either
        # - unknown locally
        # - a local outgoing head descended from update
        # - a remote head that's known locally and not
        #   ancestral to an outgoing head

        warn = 0

        if remote_heads == [nullid]:
            warn = 0
        elif not revs and len(heads) > len(remote_heads):
            warn = 1
        else:
            newheads = list(heads)
            for r in remote_heads:
                if r in self.changelog.nodemap:
                    desc = self.changelog.heads(r, heads)
                    l = [h for h in heads if h in desc]
                    if not l:
                        # known remote head not covered by an outgoing
                        # head: it survives the push as its own head
                        newheads.append(r)
                else:
                    newheads.append(r)
            if len(newheads) > len(remote_heads):
                warn = 1

        if warn:
            self.ui.warn(_("abort: push creates new remote heads!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return None, 0
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
1512
1523
def push_addchangegroup(self, remote, force, revs):
    """Push by locking the remote and calling its addchangegroup.

    Returns addchangegroup's result, or prepush's status flag when
    there is nothing to push.
    """
    lock = remote.lock()
    try:
        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            return remote.addchangegroup(cg, 'push', self.url())
        return ret[1]
    finally:
        del lock
1523
1534
def push_unbundle(self, remote, force, revs):
    """Push via the remote's unbundle command (no remote lock needed).

    The local repo finds heads on the server and works out what revs it
    must push. Once the revs are transferred, if the server finds it has
    different heads (someone else won a commit/push race), it aborts.
    """
    ret = self.prepush(remote, force, revs)
    if ret[0] is not None:
        cg, remote_heads = ret
        if force:
            # 'force' token tells the server to skip its heads check
            remote_heads = ['force']
        return remote.unbundle(cg, remote_heads, 'push')
    return ret[1]
1536
1547
def changegroupinfo(self, nodes, source):
    """Report how many (and, when debugging, which) changesets are
    being bundled.

    The count is shown in verbose mode or for 'bundle' operations; the
    full node list only with --debug.
    """
    if self.ui.verbose or source == 'bundle':
        self.ui.status(_("%d changesets found\n") % len(nodes))
    if self.ui.debugflag:
        self.ui.debug(_("List of changesets:\n"))
        for node in nodes:
            self.ui.debug("%s\n" % hex(node))
1544
1555
1545 def changegroupsubset(self, bases, heads, source, extranodes=None):
1556 def changegroupsubset(self, bases, heads, source, extranodes=None):
1546 """This function generates a changegroup consisting of all the nodes
1557 """This function generates a changegroup consisting of all the nodes
1547 that are descendents of any of the bases, and ancestors of any of
1558 that are descendents of any of the bases, and ancestors of any of
1548 the heads.
1559 the heads.
1549
1560
1550 It is fairly complex as determining which filenodes and which
1561 It is fairly complex as determining which filenodes and which
1551 manifest nodes need to be included for the changeset to be complete
1562 manifest nodes need to be included for the changeset to be complete
1552 is non-trivial.
1563 is non-trivial.
1553
1564
1554 Another wrinkle is doing the reverse, figuring out which changeset in
1565 Another wrinkle is doing the reverse, figuring out which changeset in
1555 the changegroup a particular filenode or manifestnode belongs to.
1566 the changegroup a particular filenode or manifestnode belongs to.
1556
1567
1557 The caller can specify some nodes that must be included in the
1568 The caller can specify some nodes that must be included in the
1558 changegroup using the extranodes argument. It should be a dict
1569 changegroup using the extranodes argument. It should be a dict
1559 where the keys are the filenames (or 1 for the manifest), and the
1570 where the keys are the filenames (or 1 for the manifest), and the
1560 values are lists of (node, linknode) tuples, where node is a wanted
1571 values are lists of (node, linknode) tuples, where node is a wanted
1561 node and linknode is the changelog node that should be transmitted as
1572 node and linknode is the changelog node that should be transmitted as
1562 the linkrev.
1573 the linkrev.
1563 """
1574 """
1564
1575
1565 self.hook('preoutgoing', throw=True, source=source)
1576 self.hook('preoutgoing', throw=True, source=source)
1566
1577
1567 # Set up some initial variables
1578 # Set up some initial variables
1568 # Make it easy to refer to self.changelog
1579 # Make it easy to refer to self.changelog
1569 cl = self.changelog
1580 cl = self.changelog
1570 # msng is short for missing - compute the list of changesets in this
1581 # msng is short for missing - compute the list of changesets in this
1571 # changegroup.
1582 # changegroup.
1572 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1583 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1573 self.changegroupinfo(msng_cl_lst, source)
1584 self.changegroupinfo(msng_cl_lst, source)
1574 # Some bases may turn out to be superfluous, and some heads may be
1585 # Some bases may turn out to be superfluous, and some heads may be
1575 # too. nodesbetween will return the minimal set of bases and heads
1586 # too. nodesbetween will return the minimal set of bases and heads
1576 # necessary to re-create the changegroup.
1587 # necessary to re-create the changegroup.
1577
1588
1578 # Known heads are the list of heads that it is assumed the recipient
1589 # Known heads are the list of heads that it is assumed the recipient
1579 # of this changegroup will know about.
1590 # of this changegroup will know about.
1580 knownheads = {}
1591 knownheads = {}
1581 # We assume that all parents of bases are known heads.
1592 # We assume that all parents of bases are known heads.
1582 for n in bases:
1593 for n in bases:
1583 for p in cl.parents(n):
1594 for p in cl.parents(n):
1584 if p != nullid:
1595 if p != nullid:
1585 knownheads[p] = 1
1596 knownheads[p] = 1
1586 knownheads = knownheads.keys()
1597 knownheads = knownheads.keys()
1587 if knownheads:
1598 if knownheads:
1588 # Now that we know what heads are known, we can compute which
1599 # Now that we know what heads are known, we can compute which
1589 # changesets are known. The recipient must know about all
1600 # changesets are known. The recipient must know about all
1590 # changesets required to reach the known heads from the null
1601 # changesets required to reach the known heads from the null
1591 # changeset.
1602 # changeset.
1592 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1603 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1593 junk = None
1604 junk = None
1594 # Transform the list into an ersatz set.
1605 # Transform the list into an ersatz set.
1595 has_cl_set = dict.fromkeys(has_cl_set)
1606 has_cl_set = dict.fromkeys(has_cl_set)
1596 else:
1607 else:
1597 # If there were no known heads, the recipient cannot be assumed to
1608 # If there were no known heads, the recipient cannot be assumed to
1598 # know about any changesets.
1609 # know about any changesets.
1599 has_cl_set = {}
1610 has_cl_set = {}
1600
1611
1601 # Make it easy to refer to self.manifest
1612 # Make it easy to refer to self.manifest
1602 mnfst = self.manifest
1613 mnfst = self.manifest
1603 # We don't know which manifests are missing yet
1614 # We don't know which manifests are missing yet
1604 msng_mnfst_set = {}
1615 msng_mnfst_set = {}
1605 # Nor do we know which filenodes are missing.
1616 # Nor do we know which filenodes are missing.
1606 msng_filenode_set = {}
1617 msng_filenode_set = {}
1607
1618
1608 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1619 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1609 junk = None
1620 junk = None
1610
1621
1611 # A changeset always belongs to itself, so the changenode lookup
1622 # A changeset always belongs to itself, so the changenode lookup
1612 # function for a changenode is identity.
1623 # function for a changenode is identity.
1613 def identity(x):
1624 def identity(x):
1614 return x
1625 return x
1615
1626
1616 # A function generating function. Sets up an environment for the
1627 # A function generating function. Sets up an environment for the
1617 # inner function.
1628 # inner function.
1618 def cmp_by_rev_func(revlog):
1629 def cmp_by_rev_func(revlog):
1619 # Compare two nodes by their revision number in the environment's
1630 # Compare two nodes by their revision number in the environment's
1620 # revision history. Since the revision number both represents the
1631 # revision history. Since the revision number both represents the
1621 # most efficient order to read the nodes in, and represents a
1632 # most efficient order to read the nodes in, and represents a
1622 # topological sorting of the nodes, this function is often useful.
1633 # topological sorting of the nodes, this function is often useful.
1623 def cmp_by_rev(a, b):
1634 def cmp_by_rev(a, b):
1624 return cmp(revlog.rev(a), revlog.rev(b))
1635 return cmp(revlog.rev(a), revlog.rev(b))
1625 return cmp_by_rev
1636 return cmp_by_rev
1626
1637
1627 # If we determine that a particular file or manifest node must be a
1638 # If we determine that a particular file or manifest node must be a
1628 # node that the recipient of the changegroup will already have, we can
1639 # node that the recipient of the changegroup will already have, we can
1629 # also assume the recipient will have all the parents. This function
1640 # also assume the recipient will have all the parents. This function
1630 # prunes them from the set of missing nodes.
1641 # prunes them from the set of missing nodes.
1631 def prune_parents(revlog, hasset, msngset):
1642 def prune_parents(revlog, hasset, msngset):
1632 haslst = hasset.keys()
1643 haslst = hasset.keys()
1633 haslst.sort(cmp_by_rev_func(revlog))
1644 haslst.sort(cmp_by_rev_func(revlog))
1634 for node in haslst:
1645 for node in haslst:
1635 parentlst = [p for p in revlog.parents(node) if p != nullid]
1646 parentlst = [p for p in revlog.parents(node) if p != nullid]
1636 while parentlst:
1647 while parentlst:
1637 n = parentlst.pop()
1648 n = parentlst.pop()
1638 if n not in hasset:
1649 if n not in hasset:
1639 hasset[n] = 1
1650 hasset[n] = 1
1640 p = [p for p in revlog.parents(n) if p != nullid]
1651 p = [p for p in revlog.parents(n) if p != nullid]
1641 parentlst.extend(p)
1652 parentlst.extend(p)
1642 for n in hasset:
1653 for n in hasset:
1643 msngset.pop(n, None)
1654 msngset.pop(n, None)
1644
1655
1645 # This is a function generating function used to set up an environment
1656 # This is a function generating function used to set up an environment
1646 # for the inner function to execute in.
1657 # for the inner function to execute in.
1647 def manifest_and_file_collector(changedfileset):
1658 def manifest_and_file_collector(changedfileset):
1648 # This is an information gathering function that gathers
1659 # This is an information gathering function that gathers
1649 # information from each changeset node that goes out as part of
1660 # information from each changeset node that goes out as part of
1650 # the changegroup. The information gathered is a list of which
1661 # the changegroup. The information gathered is a list of which
1651 # manifest nodes are potentially required (the recipient may
1662 # manifest nodes are potentially required (the recipient may
1652 # already have them) and total list of all files which were
1663 # already have them) and total list of all files which were
1653 # changed in any changeset in the changegroup.
1664 # changed in any changeset in the changegroup.
1654 #
1665 #
1655 # We also remember the first changenode we saw any manifest
1666 # We also remember the first changenode we saw any manifest
1656 # referenced by so we can later determine which changenode 'owns'
1667 # referenced by so we can later determine which changenode 'owns'
1657 # the manifest.
1668 # the manifest.
1658 def collect_manifests_and_files(clnode):
1669 def collect_manifests_and_files(clnode):
1659 c = cl.read(clnode)
1670 c = cl.read(clnode)
1660 for f in c[3]:
1671 for f in c[3]:
1661 # This is to make sure we only have one instance of each
1672 # This is to make sure we only have one instance of each
1662 # filename string for each filename.
1673 # filename string for each filename.
1663 changedfileset.setdefault(f, f)
1674 changedfileset.setdefault(f, f)
1664 msng_mnfst_set.setdefault(c[0], clnode)
1675 msng_mnfst_set.setdefault(c[0], clnode)
1665 return collect_manifests_and_files
1676 return collect_manifests_and_files
1666
1677
1667 # Figure out which manifest nodes (of the ones we think might be part
1678 # Figure out which manifest nodes (of the ones we think might be part
1668 # of the changegroup) the recipient must know about and remove them
1679 # of the changegroup) the recipient must know about and remove them
1669 # from the changegroup.
1680 # from the changegroup.
1670 def prune_manifests():
1681 def prune_manifests():
1671 has_mnfst_set = {}
1682 has_mnfst_set = {}
1672 for n in msng_mnfst_set:
1683 for n in msng_mnfst_set:
1673 # If a 'missing' manifest thinks it belongs to a changenode
1684 # If a 'missing' manifest thinks it belongs to a changenode
1674 # the recipient is assumed to have, obviously the recipient
1685 # the recipient is assumed to have, obviously the recipient
1675 # must have that manifest.
1686 # must have that manifest.
1676 linknode = cl.node(mnfst.linkrev(n))
1687 linknode = cl.node(mnfst.linkrev(n))
1677 if linknode in has_cl_set:
1688 if linknode in has_cl_set:
1678 has_mnfst_set[n] = 1
1689 has_mnfst_set[n] = 1
1679 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1690 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1680
1691
1681 # Use the information collected in collect_manifests_and_files to say
1692 # Use the information collected in collect_manifests_and_files to say
1682 # which changenode any manifestnode belongs to.
1693 # which changenode any manifestnode belongs to.
1683 def lookup_manifest_link(mnfstnode):
1694 def lookup_manifest_link(mnfstnode):
1684 return msng_mnfst_set[mnfstnode]
1695 return msng_mnfst_set[mnfstnode]
1685
1696
1686 # A function generating function that sets up the initial environment
1697 # A function generating function that sets up the initial environment
1687 # the inner function.
1698 # the inner function.
1688 def filenode_collector(changedfiles):
1699 def filenode_collector(changedfiles):
1689 next_rev = [0]
1700 next_rev = [0]
1690 # This gathers information from each manifestnode included in the
1701 # This gathers information from each manifestnode included in the
1691 # changegroup about which filenodes the manifest node references
1702 # changegroup about which filenodes the manifest node references
1692 # so we can include those in the changegroup too.
1703 # so we can include those in the changegroup too.
1693 #
1704 #
1694 # It also remembers which changenode each filenode belongs to. It
1705 # It also remembers which changenode each filenode belongs to. It
1695 # does this by assuming the a filenode belongs to the changenode
1706 # does this by assuming the a filenode belongs to the changenode
1696 # the first manifest that references it belongs to.
1707 # the first manifest that references it belongs to.
1697 def collect_msng_filenodes(mnfstnode):
1708 def collect_msng_filenodes(mnfstnode):
1698 r = mnfst.rev(mnfstnode)
1709 r = mnfst.rev(mnfstnode)
1699 if r == next_rev[0]:
1710 if r == next_rev[0]:
1700 # If the last rev we looked at was the one just previous,
1711 # If the last rev we looked at was the one just previous,
1701 # we only need to see a diff.
1712 # we only need to see a diff.
1702 deltamf = mnfst.readdelta(mnfstnode)
1713 deltamf = mnfst.readdelta(mnfstnode)
1703 # For each line in the delta
1714 # For each line in the delta
1704 for f, fnode in deltamf.items():
1715 for f, fnode in deltamf.items():
1705 f = changedfiles.get(f, None)
1716 f = changedfiles.get(f, None)
1706 # And if the file is in the list of files we care
1717 # And if the file is in the list of files we care
1707 # about.
1718 # about.
1708 if f is not None:
1719 if f is not None:
1709 # Get the changenode this manifest belongs to
1720 # Get the changenode this manifest belongs to
1710 clnode = msng_mnfst_set[mnfstnode]
1721 clnode = msng_mnfst_set[mnfstnode]
1711 # Create the set of filenodes for the file if
1722 # Create the set of filenodes for the file if
1712 # there isn't one already.
1723 # there isn't one already.
1713 ndset = msng_filenode_set.setdefault(f, {})
1724 ndset = msng_filenode_set.setdefault(f, {})
1714 # And set the filenode's changelog node to the
1725 # And set the filenode's changelog node to the
1715 # manifest's if it hasn't been set already.
1726 # manifest's if it hasn't been set already.
1716 ndset.setdefault(fnode, clnode)
1727 ndset.setdefault(fnode, clnode)
1717 else:
1728 else:
1718 # Otherwise we need a full manifest.
1729 # Otherwise we need a full manifest.
1719 m = mnfst.read(mnfstnode)
1730 m = mnfst.read(mnfstnode)
1720 # For every file in we care about.
1731 # For every file in we care about.
1721 for f in changedfiles:
1732 for f in changedfiles:
1722 fnode = m.get(f, None)
1733 fnode = m.get(f, None)
1723 # If it's in the manifest
1734 # If it's in the manifest
1724 if fnode is not None:
1735 if fnode is not None:
1725 # See comments above.
1736 # See comments above.
1726 clnode = msng_mnfst_set[mnfstnode]
1737 clnode = msng_mnfst_set[mnfstnode]
1727 ndset = msng_filenode_set.setdefault(f, {})
1738 ndset = msng_filenode_set.setdefault(f, {})
1728 ndset.setdefault(fnode, clnode)
1739 ndset.setdefault(fnode, clnode)
1729 # Remember the revision we hope to see next.
1740 # Remember the revision we hope to see next.
1730 next_rev[0] = r + 1
1741 next_rev[0] = r + 1
1731 return collect_msng_filenodes
1742 return collect_msng_filenodes
1732
1743
1733 # We have a list of filenodes we think we need for a file, lets remove
1744 # We have a list of filenodes we think we need for a file, lets remove
1734 # all those we now the recipient must have.
1745 # all those we now the recipient must have.
1735 def prune_filenodes(f, filerevlog):
1746 def prune_filenodes(f, filerevlog):
1736 msngset = msng_filenode_set[f]
1747 msngset = msng_filenode_set[f]
1737 hasset = {}
1748 hasset = {}
1738 # If a 'missing' filenode thinks it belongs to a changenode we
1749 # If a 'missing' filenode thinks it belongs to a changenode we
1739 # assume the recipient must have, then the recipient must have
1750 # assume the recipient must have, then the recipient must have
1740 # that filenode.
1751 # that filenode.
1741 for n in msngset:
1752 for n in msngset:
1742 clnode = cl.node(filerevlog.linkrev(n))
1753 clnode = cl.node(filerevlog.linkrev(n))
1743 if clnode in has_cl_set:
1754 if clnode in has_cl_set:
1744 hasset[n] = 1
1755 hasset[n] = 1
1745 prune_parents(filerevlog, hasset, msngset)
1756 prune_parents(filerevlog, hasset, msngset)
1746
1757
1747 # A function generator function that sets up the a context for the
1758 # A function generator function that sets up the a context for the
1748 # inner function.
1759 # inner function.
1749 def lookup_filenode_link_func(fname):
1760 def lookup_filenode_link_func(fname):
1750 msngset = msng_filenode_set[fname]
1761 msngset = msng_filenode_set[fname]
1751 # Lookup the changenode the filenode belongs to.
1762 # Lookup the changenode the filenode belongs to.
1752 def lookup_filenode_link(fnode):
1763 def lookup_filenode_link(fnode):
1753 return msngset[fnode]
1764 return msngset[fnode]
1754 return lookup_filenode_link
1765 return lookup_filenode_link
1755
1766
1756 # Add the nodes that were explicitly requested.
1767 # Add the nodes that were explicitly requested.
1757 def add_extra_nodes(name, nodes):
1768 def add_extra_nodes(name, nodes):
1758 if not extranodes or name not in extranodes:
1769 if not extranodes or name not in extranodes:
1759 return
1770 return
1760
1771
1761 for node, linknode in extranodes[name]:
1772 for node, linknode in extranodes[name]:
1762 if node not in nodes:
1773 if node not in nodes:
1763 nodes[node] = linknode
1774 nodes[node] = linknode
1764
1775
1765 # Now that we have all theses utility functions to help out and
1776 # Now that we have all theses utility functions to help out and
1766 # logically divide up the task, generate the group.
1777 # logically divide up the task, generate the group.
1767 def gengroup():
1778 def gengroup():
1768 # The set of changed files starts empty.
1779 # The set of changed files starts empty.
1769 changedfiles = {}
1780 changedfiles = {}
1770 # Create a changenode group generator that will call our functions
1781 # Create a changenode group generator that will call our functions
1771 # back to lookup the owning changenode and collect information.
1782 # back to lookup the owning changenode and collect information.
1772 group = cl.group(msng_cl_lst, identity,
1783 group = cl.group(msng_cl_lst, identity,
1773 manifest_and_file_collector(changedfiles))
1784 manifest_and_file_collector(changedfiles))
1774 for chnk in group:
1785 for chnk in group:
1775 yield chnk
1786 yield chnk
1776
1787
1777 # The list of manifests has been collected by the generator
1788 # The list of manifests has been collected by the generator
1778 # calling our functions back.
1789 # calling our functions back.
1779 prune_manifests()
1790 prune_manifests()
1780 add_extra_nodes(1, msng_mnfst_set)
1791 add_extra_nodes(1, msng_mnfst_set)
1781 msng_mnfst_lst = msng_mnfst_set.keys()
1792 msng_mnfst_lst = msng_mnfst_set.keys()
1782 # Sort the manifestnodes by revision number.
1793 # Sort the manifestnodes by revision number.
1783 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1794 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1784 # Create a generator for the manifestnodes that calls our lookup
1795 # Create a generator for the manifestnodes that calls our lookup
1785 # and data collection functions back.
1796 # and data collection functions back.
1786 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1797 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1787 filenode_collector(changedfiles))
1798 filenode_collector(changedfiles))
1788 for chnk in group:
1799 for chnk in group:
1789 yield chnk
1800 yield chnk
1790
1801
1791 # These are no longer needed, dereference and toss the memory for
1802 # These are no longer needed, dereference and toss the memory for
1792 # them.
1803 # them.
1793 msng_mnfst_lst = None
1804 msng_mnfst_lst = None
1794 msng_mnfst_set.clear()
1805 msng_mnfst_set.clear()
1795
1806
1796 if extranodes:
1807 if extranodes:
1797 for fname in extranodes:
1808 for fname in extranodes:
1798 if isinstance(fname, int):
1809 if isinstance(fname, int):
1799 continue
1810 continue
1800 add_extra_nodes(fname,
1811 add_extra_nodes(fname,
1801 msng_filenode_set.setdefault(fname, {}))
1812 msng_filenode_set.setdefault(fname, {}))
1802 changedfiles[fname] = 1
1813 changedfiles[fname] = 1
1803 # Go through all our files in order sorted by name.
1814 # Go through all our files in order sorted by name.
1804 for fname in util.sort(changedfiles):
1815 for fname in util.sort(changedfiles):
1805 filerevlog = self.file(fname)
1816 filerevlog = self.file(fname)
1806 if not len(filerevlog):
1817 if not len(filerevlog):
1807 raise util.Abort(_("empty or missing revlog for %s") % fname)
1818 raise util.Abort(_("empty or missing revlog for %s") % fname)
1808 # Toss out the filenodes that the recipient isn't really
1819 # Toss out the filenodes that the recipient isn't really
1809 # missing.
1820 # missing.
1810 if fname in msng_filenode_set:
1821 if fname in msng_filenode_set:
1811 prune_filenodes(fname, filerevlog)
1822 prune_filenodes(fname, filerevlog)
1812 msng_filenode_lst = msng_filenode_set[fname].keys()
1823 msng_filenode_lst = msng_filenode_set[fname].keys()
1813 else:
1824 else:
1814 msng_filenode_lst = []
1825 msng_filenode_lst = []
1815 # If any filenodes are left, generate the group for them,
1826 # If any filenodes are left, generate the group for them,
1816 # otherwise don't bother.
1827 # otherwise don't bother.
1817 if len(msng_filenode_lst) > 0:
1828 if len(msng_filenode_lst) > 0:
1818 yield changegroup.chunkheader(len(fname))
1829 yield changegroup.chunkheader(len(fname))
1819 yield fname
1830 yield fname
1820 # Sort the filenodes by their revision #
1831 # Sort the filenodes by their revision #
1821 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1832 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1822 # Create a group generator and only pass in a changenode
1833 # Create a group generator and only pass in a changenode
1823 # lookup function as we need to collect no information
1834 # lookup function as we need to collect no information
1824 # from filenodes.
1835 # from filenodes.
1825 group = filerevlog.group(msng_filenode_lst,
1836 group = filerevlog.group(msng_filenode_lst,
1826 lookup_filenode_link_func(fname))
1837 lookup_filenode_link_func(fname))
1827 for chnk in group:
1838 for chnk in group:
1828 yield chnk
1839 yield chnk
1829 if fname in msng_filenode_set:
1840 if fname in msng_filenode_set:
1830 # Don't need this anymore, toss it to free memory.
1841 # Don't need this anymore, toss it to free memory.
1831 del msng_filenode_set[fname]
1842 del msng_filenode_set[fname]
1832 # Signal that no more groups are left.
1843 # Signal that no more groups are left.
1833 yield changegroup.closechunk()
1844 yield changegroup.closechunk()
1834
1845
1835 if msng_cl_lst:
1846 if msng_cl_lst:
1836 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1847 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1837
1848
1838 return util.chunkbuffer(gengroup())
1849 return util.chunkbuffer(gengroup())
1839
1850
1840 def changegroup(self, basenodes, source):
1851 def changegroup(self, basenodes, source):
1841 """Generate a changegroup of all nodes that we have that a recipient
1852 """Generate a changegroup of all nodes that we have that a recipient
1842 doesn't.
1853 doesn't.
1843
1854
1844 This is much easier than the previous function as we can assume that
1855 This is much easier than the previous function as we can assume that
1845 the recipient has any changenode we aren't sending them."""
1856 the recipient has any changenode we aren't sending them."""
1846
1857
1847 self.hook('preoutgoing', throw=True, source=source)
1858 self.hook('preoutgoing', throw=True, source=source)
1848
1859
1849 cl = self.changelog
1860 cl = self.changelog
1850 nodes = cl.nodesbetween(basenodes, None)[0]
1861 nodes = cl.nodesbetween(basenodes, None)[0]
1851 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1862 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1852 self.changegroupinfo(nodes, source)
1863 self.changegroupinfo(nodes, source)
1853
1864
1854 def identity(x):
1865 def identity(x):
1855 return x
1866 return x
1856
1867
1857 def gennodelst(log):
1868 def gennodelst(log):
1858 for r in log:
1869 for r in log:
1859 n = log.node(r)
1870 n = log.node(r)
1860 if log.linkrev(n) in revset:
1871 if log.linkrev(n) in revset:
1861 yield n
1872 yield n
1862
1873
1863 def changed_file_collector(changedfileset):
1874 def changed_file_collector(changedfileset):
1864 def collect_changed_files(clnode):
1875 def collect_changed_files(clnode):
1865 c = cl.read(clnode)
1876 c = cl.read(clnode)
1866 for fname in c[3]:
1877 for fname in c[3]:
1867 changedfileset[fname] = 1
1878 changedfileset[fname] = 1
1868 return collect_changed_files
1879 return collect_changed_files
1869
1880
1870 def lookuprevlink_func(revlog):
1881 def lookuprevlink_func(revlog):
1871 def lookuprevlink(n):
1882 def lookuprevlink(n):
1872 return cl.node(revlog.linkrev(n))
1883 return cl.node(revlog.linkrev(n))
1873 return lookuprevlink
1884 return lookuprevlink
1874
1885
1875 def gengroup():
1886 def gengroup():
1876 # construct a list of all changed files
1887 # construct a list of all changed files
1877 changedfiles = {}
1888 changedfiles = {}
1878
1889
1879 for chnk in cl.group(nodes, identity,
1890 for chnk in cl.group(nodes, identity,
1880 changed_file_collector(changedfiles)):
1891 changed_file_collector(changedfiles)):
1881 yield chnk
1892 yield chnk
1882
1893
1883 mnfst = self.manifest
1894 mnfst = self.manifest
1884 nodeiter = gennodelst(mnfst)
1895 nodeiter = gennodelst(mnfst)
1885 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1896 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1886 yield chnk
1897 yield chnk
1887
1898
1888 for fname in util.sort(changedfiles):
1899 for fname in util.sort(changedfiles):
1889 filerevlog = self.file(fname)
1900 filerevlog = self.file(fname)
1890 if not len(filerevlog):
1901 if not len(filerevlog):
1891 raise util.Abort(_("empty or missing revlog for %s") % fname)
1902 raise util.Abort(_("empty or missing revlog for %s") % fname)
1892 nodeiter = gennodelst(filerevlog)
1903 nodeiter = gennodelst(filerevlog)
1893 nodeiter = list(nodeiter)
1904 nodeiter = list(nodeiter)
1894 if nodeiter:
1905 if nodeiter:
1895 yield changegroup.chunkheader(len(fname))
1906 yield changegroup.chunkheader(len(fname))
1896 yield fname
1907 yield fname
1897 lookup = lookuprevlink_func(filerevlog)
1908 lookup = lookuprevlink_func(filerevlog)
1898 for chnk in filerevlog.group(nodeiter, lookup):
1909 for chnk in filerevlog.group(nodeiter, lookup):
1899 yield chnk
1910 yield chnk
1900
1911
1901 yield changegroup.closechunk()
1912 yield changegroup.closechunk()
1902
1913
1903 if nodes:
1914 if nodes:
1904 self.hook('outgoing', node=hex(nodes[0]), source=source)
1915 self.hook('outgoing', node=hex(nodes[0]), source=source)
1905
1916
1906 return util.chunkbuffer(gengroup())
1917 return util.chunkbuffer(gengroup())
1907
1918
1908 def addchangegroup(self, source, srctype, url, emptyok=False):
1919 def addchangegroup(self, source, srctype, url, emptyok=False):
1909 """add changegroup to repo.
1920 """add changegroup to repo.
1910
1921
1911 return values:
1922 return values:
1912 - nothing changed or no source: 0
1923 - nothing changed or no source: 0
1913 - more heads than before: 1+added heads (2..n)
1924 - more heads than before: 1+added heads (2..n)
1914 - less heads than before: -1-removed heads (-2..-n)
1925 - less heads than before: -1-removed heads (-2..-n)
1915 - number of heads stays the same: 1
1926 - number of heads stays the same: 1
1916 """
1927 """
1917 def csmap(x):
1928 def csmap(x):
1918 self.ui.debug(_("add changeset %s\n") % short(x))
1929 self.ui.debug(_("add changeset %s\n") % short(x))
1919 return len(cl)
1930 return len(cl)
1920
1931
1921 def revmap(x):
1932 def revmap(x):
1922 return cl.rev(x)
1933 return cl.rev(x)
1923
1934
1924 if not source:
1935 if not source:
1925 return 0
1936 return 0
1926
1937
1927 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1938 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1928
1939
1929 changesets = files = revisions = 0
1940 changesets = files = revisions = 0
1930
1941
1931 # write changelog data to temp files so concurrent readers will not see
1942 # write changelog data to temp files so concurrent readers will not see
1932 # inconsistent view
1943 # inconsistent view
1933 cl = self.changelog
1944 cl = self.changelog
1934 cl.delayupdate()
1945 cl.delayupdate()
1935 oldheads = len(cl.heads())
1946 oldheads = len(cl.heads())
1936
1947
1937 tr = self.transaction()
1948 tr = self.transaction()
1938 try:
1949 try:
1939 trp = weakref.proxy(tr)
1950 trp = weakref.proxy(tr)
1940 # pull off the changeset group
1951 # pull off the changeset group
1941 self.ui.status(_("adding changesets\n"))
1952 self.ui.status(_("adding changesets\n"))
1942 cor = len(cl) - 1
1953 cor = len(cl) - 1
1943 chunkiter = changegroup.chunkiter(source)
1954 chunkiter = changegroup.chunkiter(source)
1944 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1955 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1945 raise util.Abort(_("received changelog group is empty"))
1956 raise util.Abort(_("received changelog group is empty"))
1946 cnr = len(cl) - 1
1957 cnr = len(cl) - 1
1947 changesets = cnr - cor
1958 changesets = cnr - cor
1948
1959
1949 # pull off the manifest group
1960 # pull off the manifest group
1950 self.ui.status(_("adding manifests\n"))
1961 self.ui.status(_("adding manifests\n"))
1951 chunkiter = changegroup.chunkiter(source)
1962 chunkiter = changegroup.chunkiter(source)
1952 # no need to check for empty manifest group here:
1963 # no need to check for empty manifest group here:
1953 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1964 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1954 # no new manifest will be created and the manifest group will
1965 # no new manifest will be created and the manifest group will
1955 # be empty during the pull
1966 # be empty during the pull
1956 self.manifest.addgroup(chunkiter, revmap, trp)
1967 self.manifest.addgroup(chunkiter, revmap, trp)
1957
1968
1958 # process the files
1969 # process the files
1959 self.ui.status(_("adding file changes\n"))
1970 self.ui.status(_("adding file changes\n"))
1960 while 1:
1971 while 1:
1961 f = changegroup.getchunk(source)
1972 f = changegroup.getchunk(source)
1962 if not f:
1973 if not f:
1963 break
1974 break
1964 self.ui.debug(_("adding %s revisions\n") % f)
1975 self.ui.debug(_("adding %s revisions\n") % f)
1965 fl = self.file(f)
1976 fl = self.file(f)
1966 o = len(fl)
1977 o = len(fl)
1967 chunkiter = changegroup.chunkiter(source)
1978 chunkiter = changegroup.chunkiter(source)
1968 if fl.addgroup(chunkiter, revmap, trp) is None:
1979 if fl.addgroup(chunkiter, revmap, trp) is None:
1969 raise util.Abort(_("received file revlog group is empty"))
1980 raise util.Abort(_("received file revlog group is empty"))
1970 revisions += len(fl) - o
1981 revisions += len(fl) - o
1971 files += 1
1982 files += 1
1972
1983
1973 # make changelog see real files again
1984 # make changelog see real files again
1974 cl.finalize(trp)
1985 cl.finalize(trp)
1975
1986
1976 newheads = len(self.changelog.heads())
1987 newheads = len(self.changelog.heads())
1977 heads = ""
1988 heads = ""
1978 if oldheads and newheads != oldheads:
1989 if oldheads and newheads != oldheads:
1979 heads = _(" (%+d heads)") % (newheads - oldheads)
1990 heads = _(" (%+d heads)") % (newheads - oldheads)
1980
1991
1981 self.ui.status(_("added %d changesets"
1992 self.ui.status(_("added %d changesets"
1982 " with %d changes to %d files%s\n")
1993 " with %d changes to %d files%s\n")
1983 % (changesets, revisions, files, heads))
1994 % (changesets, revisions, files, heads))
1984
1995
1985 if changesets > 0:
1996 if changesets > 0:
1986 self.hook('pretxnchangegroup', throw=True,
1997 self.hook('pretxnchangegroup', throw=True,
1987 node=hex(self.changelog.node(cor+1)), source=srctype,
1998 node=hex(self.changelog.node(cor+1)), source=srctype,
1988 url=url)
1999 url=url)
1989
2000
1990 tr.close()
2001 tr.close()
1991 finally:
2002 finally:
1992 del tr
2003 del tr
1993
2004
1994 if changesets > 0:
2005 if changesets > 0:
1995 # forcefully update the on-disk branch cache
2006 # forcefully update the on-disk branch cache
1996 self.ui.debug(_("updating the branch cache\n"))
2007 self.ui.debug(_("updating the branch cache\n"))
1997 self.branchtags()
2008 self.branchtags()
1998 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2009 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1999 source=srctype, url=url)
2010 source=srctype, url=url)
2000
2011
2001 for i in xrange(cor + 1, cnr + 1):
2012 for i in xrange(cor + 1, cnr + 1):
2002 self.hook("incoming", node=hex(self.changelog.node(i)),
2013 self.hook("incoming", node=hex(self.changelog.node(i)),
2003 source=srctype, url=url)
2014 source=srctype, url=url)
2004
2015
2005 # never return 0 here:
2016 # never return 0 here:
2006 if newheads < oldheads:
2017 if newheads < oldheads:
2007 return newheads - oldheads - 1
2018 return newheads - oldheads - 1
2008 else:
2019 else:
2009 return newheads - oldheads + 1
2020 return newheads - oldheads + 1
2010
2021
2011
2022
2012 def stream_in(self, remote):
2023 def stream_in(self, remote):
2013 fp = remote.stream_out()
2024 fp = remote.stream_out()
2014 l = fp.readline()
2025 l = fp.readline()
2015 try:
2026 try:
2016 resp = int(l)
2027 resp = int(l)
2017 except ValueError:
2028 except ValueError:
2018 raise util.UnexpectedOutput(
2029 raise util.UnexpectedOutput(
2019 _('Unexpected response from remote server:'), l)
2030 _('Unexpected response from remote server:'), l)
2020 if resp == 1:
2031 if resp == 1:
2021 raise util.Abort(_('operation forbidden by server'))
2032 raise util.Abort(_('operation forbidden by server'))
2022 elif resp == 2:
2033 elif resp == 2:
2023 raise util.Abort(_('locking the remote repository failed'))
2034 raise util.Abort(_('locking the remote repository failed'))
2024 elif resp != 0:
2035 elif resp != 0:
2025 raise util.Abort(_('the server sent an unknown error code'))
2036 raise util.Abort(_('the server sent an unknown error code'))
2026 self.ui.status(_('streaming all changes\n'))
2037 self.ui.status(_('streaming all changes\n'))
2027 l = fp.readline()
2038 l = fp.readline()
2028 try:
2039 try:
2029 total_files, total_bytes = map(int, l.split(' ', 1))
2040 total_files, total_bytes = map(int, l.split(' ', 1))
2030 except (ValueError, TypeError):
2041 except (ValueError, TypeError):
2031 raise util.UnexpectedOutput(
2042 raise util.UnexpectedOutput(
2032 _('Unexpected response from remote server:'), l)
2043 _('Unexpected response from remote server:'), l)
2033 self.ui.status(_('%d files to transfer, %s of data\n') %
2044 self.ui.status(_('%d files to transfer, %s of data\n') %
2034 (total_files, util.bytecount(total_bytes)))
2045 (total_files, util.bytecount(total_bytes)))
2035 start = time.time()
2046 start = time.time()
2036 for i in xrange(total_files):
2047 for i in xrange(total_files):
2037 # XXX doesn't support '\n' or '\r' in filenames
2048 # XXX doesn't support '\n' or '\r' in filenames
2038 l = fp.readline()
2049 l = fp.readline()
2039 try:
2050 try:
2040 name, size = l.split('\0', 1)
2051 name, size = l.split('\0', 1)
2041 size = int(size)
2052 size = int(size)
2042 except (ValueError, TypeError):
2053 except (ValueError, TypeError):
2043 raise util.UnexpectedOutput(
2054 raise util.UnexpectedOutput(
2044 _('Unexpected response from remote server:'), l)
2055 _('Unexpected response from remote server:'), l)
2045 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2056 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2046 ofp = self.sopener(name, 'w')
2057 ofp = self.sopener(name, 'w')
2047 for chunk in util.filechunkiter(fp, limit=size):
2058 for chunk in util.filechunkiter(fp, limit=size):
2048 ofp.write(chunk)
2059 ofp.write(chunk)
2049 ofp.close()
2060 ofp.close()
2050 elapsed = time.time() - start
2061 elapsed = time.time() - start
2051 if elapsed <= 0:
2062 if elapsed <= 0:
2052 elapsed = 0.001
2063 elapsed = 0.001
2053 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2064 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2054 (util.bytecount(total_bytes), elapsed,
2065 (util.bytecount(total_bytes), elapsed,
2055 util.bytecount(total_bytes / elapsed)))
2066 util.bytecount(total_bytes / elapsed)))
2056 self.invalidate()
2067 self.invalidate()
2057 return len(self.heads()) + 1
2068 return len(self.heads()) + 1
2058
2069
2059 def clone(self, remote, heads=[], stream=False):
2070 def clone(self, remote, heads=[], stream=False):
2060 '''clone remote repository.
2071 '''clone remote repository.
2061
2072
2062 keyword arguments:
2073 keyword arguments:
2063 heads: list of revs to clone (forces use of pull)
2074 heads: list of revs to clone (forces use of pull)
2064 stream: use streaming clone if possible'''
2075 stream: use streaming clone if possible'''
2065
2076
2066 # now, all clients that can request uncompressed clones can
2077 # now, all clients that can request uncompressed clones can
2067 # read repo formats supported by all servers that can serve
2078 # read repo formats supported by all servers that can serve
2068 # them.
2079 # them.
2069
2080
2070 # if revlog format changes, client will have to check version
2081 # if revlog format changes, client will have to check version
2071 # and format flags on "stream" capability, and use
2082 # and format flags on "stream" capability, and use
2072 # uncompressed only if compatible.
2083 # uncompressed only if compatible.
2073
2084
2074 if stream and not heads and remote.capable('stream'):
2085 if stream and not heads and remote.capable('stream'):
2075 return self.stream_in(remote)
2086 return self.stream_in(remote)
2076 return self.pull(remote, heads)
2087 return self.pull(remote, heads)
2077
2088
2078 # used to avoid circular references so destructors work
2089 # used to avoid circular references so destructors work
2079 def aftertrans(files):
2090 def aftertrans(files):
2080 renamefiles = [tuple(t) for t in files]
2091 renamefiles = [tuple(t) for t in files]
2081 def a():
2092 def a():
2082 for src, dest in renamefiles:
2093 for src, dest in renamefiles:
2083 util.rename(src, dest)
2094 util.rename(src, dest)
2084 return a
2095 return a
2085
2096
2086 def instance(ui, path, create):
2097 def instance(ui, path, create):
2087 return localrepository(ui, util.drop_scheme('file', path), create)
2098 return localrepository(ui, util.drop_scheme('file', path), create)
2088
2099
2089 def islocal(path):
2100 def islocal(path):
2090 return True
2101 return True
@@ -1,1330 +1,1327 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import hex, nullid, short
10 from node import hex, nullid, short
11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
12 import cStringIO, email.Parser, os, popen2, re, errno
12 import cStringIO, email.Parser, os, popen2, re, errno
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
15 class PatchError(Exception):
15 class PatchError(Exception):
16 pass
16 pass
17
17
18 class NoHunks(PatchError):
18 class NoHunks(PatchError):
19 pass
19 pass
20
20
21 # helper functions
21 # helper functions
22
22
23 def copyfile(src, dst, basedir=None):
23 def copyfile(src, dst, basedir=None):
24 if not basedir:
24 if not basedir:
25 basedir = os.getcwd()
25 basedir = os.getcwd()
26
26
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
28 if os.path.exists(absdst):
28 if os.path.exists(absdst):
29 raise util.Abort(_("cannot create %s: destination already exists") %
29 raise util.Abort(_("cannot create %s: destination already exists") %
30 dst)
30 dst)
31
31
32 targetdir = os.path.dirname(absdst)
32 targetdir = os.path.dirname(absdst)
33 if not os.path.isdir(targetdir):
33 if not os.path.isdir(targetdir):
34 os.makedirs(targetdir)
34 os.makedirs(targetdir)
35
35
36 util.copyfile(abssrc, absdst)
36 util.copyfile(abssrc, absdst)
37
37
38 # public functions
38 # public functions
39
39
40 def extract(ui, fileobj):
40 def extract(ui, fileobj):
41 '''extract patch from data read from fileobj.
41 '''extract patch from data read from fileobj.
42
42
43 patch can be a normal patch or contained in an email message.
43 patch can be a normal patch or contained in an email message.
44
44
45 return tuple (filename, message, user, date, node, p1, p2).
45 return tuple (filename, message, user, date, node, p1, p2).
46 Any item in the returned tuple can be None. If filename is None,
46 Any item in the returned tuple can be None. If filename is None,
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
47 fileobj did not contain a patch. Caller must unlink filename when done.'''
48
48
49 # attempt to detect the start of a patch
49 # attempt to detect the start of a patch
50 # (this heuristic is borrowed from quilt)
50 # (this heuristic is borrowed from quilt)
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
53 '(---|\*\*\*)[ \t])', re.MULTILINE)
54
54
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
56 tmpfp = os.fdopen(fd, 'w')
56 tmpfp = os.fdopen(fd, 'w')
57 try:
57 try:
58 msg = email.Parser.Parser().parse(fileobj)
58 msg = email.Parser.Parser().parse(fileobj)
59
59
60 subject = msg['Subject']
60 subject = msg['Subject']
61 user = msg['From']
61 user = msg['From']
62 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
62 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
63 # should try to parse msg['Date']
63 # should try to parse msg['Date']
64 date = None
64 date = None
65 nodeid = None
65 nodeid = None
66 branch = None
66 branch = None
67 parents = []
67 parents = []
68
68
69 if subject:
69 if subject:
70 if subject.startswith('[PATCH'):
70 if subject.startswith('[PATCH'):
71 pend = subject.find(']')
71 pend = subject.find(']')
72 if pend >= 0:
72 if pend >= 0:
73 subject = subject[pend+1:].lstrip()
73 subject = subject[pend+1:].lstrip()
74 subject = subject.replace('\n\t', ' ')
74 subject = subject.replace('\n\t', ' ')
75 ui.debug('Subject: %s\n' % subject)
75 ui.debug('Subject: %s\n' % subject)
76 if user:
76 if user:
77 ui.debug('From: %s\n' % user)
77 ui.debug('From: %s\n' % user)
78 diffs_seen = 0
78 diffs_seen = 0
79 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
79 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
80 message = ''
80 message = ''
81 for part in msg.walk():
81 for part in msg.walk():
82 content_type = part.get_content_type()
82 content_type = part.get_content_type()
83 ui.debug('Content-Type: %s\n' % content_type)
83 ui.debug('Content-Type: %s\n' % content_type)
84 if content_type not in ok_types:
84 if content_type not in ok_types:
85 continue
85 continue
86 payload = part.get_payload(decode=True)
86 payload = part.get_payload(decode=True)
87 m = diffre.search(payload)
87 m = diffre.search(payload)
88 if m:
88 if m:
89 hgpatch = False
89 hgpatch = False
90 ignoretext = False
90 ignoretext = False
91
91
92 ui.debug(_('found patch at byte %d\n') % m.start(0))
92 ui.debug(_('found patch at byte %d\n') % m.start(0))
93 diffs_seen += 1
93 diffs_seen += 1
94 cfp = cStringIO.StringIO()
94 cfp = cStringIO.StringIO()
95 for line in payload[:m.start(0)].splitlines():
95 for line in payload[:m.start(0)].splitlines():
96 if line.startswith('# HG changeset patch'):
96 if line.startswith('# HG changeset patch'):
97 ui.debug(_('patch generated by hg export\n'))
97 ui.debug(_('patch generated by hg export\n'))
98 hgpatch = True
98 hgpatch = True
99 # drop earlier commit message content
99 # drop earlier commit message content
100 cfp.seek(0)
100 cfp.seek(0)
101 cfp.truncate()
101 cfp.truncate()
102 subject = None
102 subject = None
103 elif hgpatch:
103 elif hgpatch:
104 if line.startswith('# User '):
104 if line.startswith('# User '):
105 user = line[7:]
105 user = line[7:]
106 ui.debug('From: %s\n' % user)
106 ui.debug('From: %s\n' % user)
107 elif line.startswith("# Date "):
107 elif line.startswith("# Date "):
108 date = line[7:]
108 date = line[7:]
109 elif line.startswith("# Branch "):
109 elif line.startswith("# Branch "):
110 branch = line[9:]
110 branch = line[9:]
111 elif line.startswith("# Node ID "):
111 elif line.startswith("# Node ID "):
112 nodeid = line[10:]
112 nodeid = line[10:]
113 elif line.startswith("# Parent "):
113 elif line.startswith("# Parent "):
114 parents.append(line[10:])
114 parents.append(line[10:])
115 elif line == '---' and gitsendmail:
115 elif line == '---' and gitsendmail:
116 ignoretext = True
116 ignoretext = True
117 if not line.startswith('# ') and not ignoretext:
117 if not line.startswith('# ') and not ignoretext:
118 cfp.write(line)
118 cfp.write(line)
119 cfp.write('\n')
119 cfp.write('\n')
120 message = cfp.getvalue()
120 message = cfp.getvalue()
121 if tmpfp:
121 if tmpfp:
122 tmpfp.write(payload)
122 tmpfp.write(payload)
123 if not payload.endswith('\n'):
123 if not payload.endswith('\n'):
124 tmpfp.write('\n')
124 tmpfp.write('\n')
125 elif not diffs_seen and message and content_type == 'text/plain':
125 elif not diffs_seen and message and content_type == 'text/plain':
126 message += '\n' + payload
126 message += '\n' + payload
127 except:
127 except:
128 tmpfp.close()
128 tmpfp.close()
129 os.unlink(tmpname)
129 os.unlink(tmpname)
130 raise
130 raise
131
131
132 if subject and not message.startswith(subject):
132 if subject and not message.startswith(subject):
133 message = '%s\n%s' % (subject, message)
133 message = '%s\n%s' % (subject, message)
134 tmpfp.close()
134 tmpfp.close()
135 if not diffs_seen:
135 if not diffs_seen:
136 os.unlink(tmpname)
136 os.unlink(tmpname)
137 return None, message, user, date, branch, None, None, None
137 return None, message, user, date, branch, None, None, None
138 p1 = parents and parents.pop(0) or None
138 p1 = parents and parents.pop(0) or None
139 p2 = parents and parents.pop(0) or None
139 p2 = parents and parents.pop(0) or None
140 return tmpname, message, user, date, branch, nodeid, p1, p2
140 return tmpname, message, user, date, branch, nodeid, p1, p2
141
141
142 GP_PATCH = 1 << 0 # we have to run patch
142 GP_PATCH = 1 << 0 # we have to run patch
143 GP_FILTER = 1 << 1 # there's some copy/rename operation
143 GP_FILTER = 1 << 1 # there's some copy/rename operation
144 GP_BINARY = 1 << 2 # there's a binary patch
144 GP_BINARY = 1 << 2 # there's a binary patch
145
145
146 def readgitpatch(fp, firstline=None):
146 def readgitpatch(fp, firstline=None):
147 """extract git-style metadata about patches from <patchname>"""
147 """extract git-style metadata about patches from <patchname>"""
148 class gitpatch:
148 class gitpatch:
149 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
149 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
150 def __init__(self, path):
150 def __init__(self, path):
151 self.path = path
151 self.path = path
152 self.oldpath = None
152 self.oldpath = None
153 self.mode = None
153 self.mode = None
154 self.op = 'MODIFY'
154 self.op = 'MODIFY'
155 self.lineno = 0
155 self.lineno = 0
156 self.binary = False
156 self.binary = False
157
157
158 def reader(fp, firstline):
158 def reader(fp, firstline):
159 if firstline is not None:
159 if firstline is not None:
160 yield firstline
160 yield firstline
161 for line in fp:
161 for line in fp:
162 yield line
162 yield line
163
163
164 # Filter patch for git information
164 # Filter patch for git information
165 gitre = re.compile('diff --git a/(.*) b/(.*)')
165 gitre = re.compile('diff --git a/(.*) b/(.*)')
166 gp = None
166 gp = None
167 gitpatches = []
167 gitpatches = []
168 # Can have a git patch with only metadata, causing patch to complain
168 # Can have a git patch with only metadata, causing patch to complain
169 dopatch = 0
169 dopatch = 0
170
170
171 lineno = 0
171 lineno = 0
172 for line in reader(fp, firstline):
172 for line in reader(fp, firstline):
173 lineno += 1
173 lineno += 1
174 if line.startswith('diff --git'):
174 if line.startswith('diff --git'):
175 m = gitre.match(line)
175 m = gitre.match(line)
176 if m:
176 if m:
177 if gp:
177 if gp:
178 gitpatches.append(gp)
178 gitpatches.append(gp)
179 src, dst = m.group(1, 2)
179 src, dst = m.group(1, 2)
180 gp = gitpatch(dst)
180 gp = gitpatch(dst)
181 gp.lineno = lineno
181 gp.lineno = lineno
182 elif gp:
182 elif gp:
183 if line.startswith('--- '):
183 if line.startswith('--- '):
184 if gp.op in ('COPY', 'RENAME'):
184 if gp.op in ('COPY', 'RENAME'):
185 dopatch |= GP_FILTER
185 dopatch |= GP_FILTER
186 gitpatches.append(gp)
186 gitpatches.append(gp)
187 gp = None
187 gp = None
188 dopatch |= GP_PATCH
188 dopatch |= GP_PATCH
189 continue
189 continue
190 if line.startswith('rename from '):
190 if line.startswith('rename from '):
191 gp.op = 'RENAME'
191 gp.op = 'RENAME'
192 gp.oldpath = line[12:].rstrip()
192 gp.oldpath = line[12:].rstrip()
193 elif line.startswith('rename to '):
193 elif line.startswith('rename to '):
194 gp.path = line[10:].rstrip()
194 gp.path = line[10:].rstrip()
195 elif line.startswith('copy from '):
195 elif line.startswith('copy from '):
196 gp.op = 'COPY'
196 gp.op = 'COPY'
197 gp.oldpath = line[10:].rstrip()
197 gp.oldpath = line[10:].rstrip()
198 elif line.startswith('copy to '):
198 elif line.startswith('copy to '):
199 gp.path = line[8:].rstrip()
199 gp.path = line[8:].rstrip()
200 elif line.startswith('deleted file'):
200 elif line.startswith('deleted file'):
201 gp.op = 'DELETE'
201 gp.op = 'DELETE'
202 elif line.startswith('new file mode '):
202 elif line.startswith('new file mode '):
203 gp.op = 'ADD'
203 gp.op = 'ADD'
204 gp.mode = int(line.rstrip()[-6:], 8)
204 gp.mode = int(line.rstrip()[-6:], 8)
205 elif line.startswith('new mode '):
205 elif line.startswith('new mode '):
206 gp.mode = int(line.rstrip()[-6:], 8)
206 gp.mode = int(line.rstrip()[-6:], 8)
207 elif line.startswith('GIT binary patch'):
207 elif line.startswith('GIT binary patch'):
208 dopatch |= GP_BINARY
208 dopatch |= GP_BINARY
209 gp.binary = True
209 gp.binary = True
210 if gp:
210 if gp:
211 gitpatches.append(gp)
211 gitpatches.append(gp)
212
212
213 if not gitpatches:
213 if not gitpatches:
214 dopatch = GP_PATCH
214 dopatch = GP_PATCH
215
215
216 return (dopatch, gitpatches)
216 return (dopatch, gitpatches)
217
217
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 """apply <patchname> to the working directory.
219 """apply <patchname> to the working directory.
220 returns whether patch was applied with fuzz factor."""
220 returns whether patch was applied with fuzz factor."""
221 patcher = ui.config('ui', 'patch')
221 patcher = ui.config('ui', 'patch')
222 args = []
222 args = []
223 try:
223 try:
224 if patcher:
224 if patcher:
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 files)
226 files)
227 else:
227 else:
228 try:
228 try:
229 return internalpatch(patchname, ui, strip, cwd, files)
229 return internalpatch(patchname, ui, strip, cwd, files)
230 except NoHunks:
230 except NoHunks:
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 ui.debug(_('no valid hunks found; trying with %r instead\n') %
232 ui.debug(_('no valid hunks found; trying with %r instead\n') %
233 patcher)
233 patcher)
234 if util.needbinarypatch():
234 if util.needbinarypatch():
235 args.append('--binary')
235 args.append('--binary')
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 files)
237 files)
238 except PatchError, err:
238 except PatchError, err:
239 s = str(err)
239 s = str(err)
240 if s:
240 if s:
241 raise util.Abort(s)
241 raise util.Abort(s)
242 else:
242 else:
243 raise util.Abort(_('patch failed to apply'))
243 raise util.Abort(_('patch failed to apply'))
244
244
245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
246 """use <patcher> to apply <patchname> to the working directory.
246 """use <patcher> to apply <patchname> to the working directory.
247 returns whether patch was applied with fuzz factor."""
247 returns whether patch was applied with fuzz factor."""
248
248
249 fuzz = False
249 fuzz = False
250 if cwd:
250 if cwd:
251 args.append('-d %s' % util.shellquote(cwd))
251 args.append('-d %s' % util.shellquote(cwd))
252 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
252 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
253 util.shellquote(patchname)))
253 util.shellquote(patchname)))
254
254
255 for line in fp:
255 for line in fp:
256 line = line.rstrip()
256 line = line.rstrip()
257 ui.note(line + '\n')
257 ui.note(line + '\n')
258 if line.startswith('patching file '):
258 if line.startswith('patching file '):
259 pf = util.parse_patch_output(line)
259 pf = util.parse_patch_output(line)
260 printed_file = False
260 printed_file = False
261 files.setdefault(pf, (None, None))
261 files.setdefault(pf, (None, None))
262 elif line.find('with fuzz') >= 0:
262 elif line.find('with fuzz') >= 0:
263 fuzz = True
263 fuzz = True
264 if not printed_file:
264 if not printed_file:
265 ui.warn(pf + '\n')
265 ui.warn(pf + '\n')
266 printed_file = True
266 printed_file = True
267 ui.warn(line + '\n')
267 ui.warn(line + '\n')
268 elif line.find('saving rejects to file') >= 0:
268 elif line.find('saving rejects to file') >= 0:
269 ui.warn(line + '\n')
269 ui.warn(line + '\n')
270 elif line.find('FAILED') >= 0:
270 elif line.find('FAILED') >= 0:
271 if not printed_file:
271 if not printed_file:
272 ui.warn(pf + '\n')
272 ui.warn(pf + '\n')
273 printed_file = True
273 printed_file = True
274 ui.warn(line + '\n')
274 ui.warn(line + '\n')
275 code = fp.close()
275 code = fp.close()
276 if code:
276 if code:
277 raise PatchError(_("patch command failed: %s") %
277 raise PatchError(_("patch command failed: %s") %
278 util.explain_exit(code)[0])
278 util.explain_exit(code)[0])
279 return fuzz
279 return fuzz
280
280
281 def internalpatch(patchobj, ui, strip, cwd, files={}):
281 def internalpatch(patchobj, ui, strip, cwd, files={}):
282 """use builtin patch to apply <patchobj> to the working directory.
282 """use builtin patch to apply <patchobj> to the working directory.
283 returns whether patch was applied with fuzz factor."""
283 returns whether patch was applied with fuzz factor."""
284 try:
284 try:
285 fp = file(patchobj, 'rb')
285 fp = file(patchobj, 'rb')
286 except TypeError:
286 except TypeError:
287 fp = patchobj
287 fp = patchobj
288 if cwd:
288 if cwd:
289 curdir = os.getcwd()
289 curdir = os.getcwd()
290 os.chdir(cwd)
290 os.chdir(cwd)
291 try:
291 try:
292 ret = applydiff(ui, fp, files, strip=strip)
292 ret = applydiff(ui, fp, files, strip=strip)
293 finally:
293 finally:
294 if cwd:
294 if cwd:
295 os.chdir(curdir)
295 os.chdir(curdir)
296 if ret < 0:
296 if ret < 0:
297 raise PatchError
297 raise PatchError
298 return ret > 0
298 return ret > 0
299
299
300 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
300 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
301 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
301 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
302 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
302 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
303
303
304 class patchfile:
304 class patchfile:
305 def __init__(self, ui, fname, missing=False):
305 def __init__(self, ui, fname, missing=False):
306 self.fname = fname
306 self.fname = fname
307 self.ui = ui
307 self.ui = ui
308 self.lines = []
308 self.lines = []
309 self.exists = False
309 self.exists = False
310 self.missing = missing
310 self.missing = missing
311 if not missing:
311 if not missing:
312 try:
312 try:
313 fp = file(fname, 'rb')
313 fp = file(fname, 'rb')
314 self.lines = fp.readlines()
314 self.lines = fp.readlines()
315 self.exists = True
315 self.exists = True
316 except IOError:
316 except IOError:
317 pass
317 pass
318 else:
318 else:
319 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
319 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
320
320
321 if not self.exists:
321 if not self.exists:
322 dirname = os.path.dirname(fname)
322 dirname = os.path.dirname(fname)
323 if dirname and not os.path.isdir(dirname):
323 if dirname and not os.path.isdir(dirname):
324 os.makedirs(dirname)
324 os.makedirs(dirname)
325
325
326 self.hash = {}
326 self.hash = {}
327 self.dirty = 0
327 self.dirty = 0
328 self.offset = 0
328 self.offset = 0
329 self.rej = []
329 self.rej = []
330 self.fileprinted = False
330 self.fileprinted = False
331 self.printfile(False)
331 self.printfile(False)
332 self.hunks = 0
332 self.hunks = 0
333
333
334 def printfile(self, warn):
334 def printfile(self, warn):
335 if self.fileprinted:
335 if self.fileprinted:
336 return
336 return
337 if warn or self.ui.verbose:
337 if warn or self.ui.verbose:
338 self.fileprinted = True
338 self.fileprinted = True
339 s = _("patching file %s\n") % self.fname
339 s = _("patching file %s\n") % self.fname
340 if warn:
340 if warn:
341 self.ui.warn(s)
341 self.ui.warn(s)
342 else:
342 else:
343 self.ui.note(s)
343 self.ui.note(s)
344
344
345
345
346 def findlines(self, l, linenum):
346 def findlines(self, l, linenum):
347 # looks through the hash and finds candidate lines. The
347 # looks through the hash and finds candidate lines. The
348 # result is a list of line numbers sorted based on distance
348 # result is a list of line numbers sorted based on distance
349 # from linenum
349 # from linenum
350 def sorter(a, b):
350 def sorter(a, b):
351 vala = abs(a - linenum)
351 vala = abs(a - linenum)
352 valb = abs(b - linenum)
352 valb = abs(b - linenum)
353 return cmp(vala, valb)
353 return cmp(vala, valb)
354
354
355 try:
355 try:
356 cand = self.hash[l]
356 cand = self.hash[l]
357 except:
357 except:
358 return []
358 return []
359
359
360 if len(cand) > 1:
360 if len(cand) > 1:
361 # resort our list of potentials forward then back.
361 # resort our list of potentials forward then back.
362 cand.sort(sorter)
362 cand.sort(sorter)
363 return cand
363 return cand
364
364
365 def hashlines(self):
365 def hashlines(self):
366 self.hash = {}
366 self.hash = {}
367 for x in xrange(len(self.lines)):
367 for x in xrange(len(self.lines)):
368 s = self.lines[x]
368 s = self.lines[x]
369 self.hash.setdefault(s, []).append(x)
369 self.hash.setdefault(s, []).append(x)
370
370
371 def write_rej(self):
371 def write_rej(self):
372 # our rejects are a little different from patch(1). This always
372 # our rejects are a little different from patch(1). This always
373 # creates rejects in the same form as the original patch. A file
373 # creates rejects in the same form as the original patch. A file
374 # header is inserted so that you can run the reject through patch again
374 # header is inserted so that you can run the reject through patch again
375 # without having to type the filename.
375 # without having to type the filename.
376
376
377 if not self.rej:
377 if not self.rej:
378 return
378 return
379
379
380 fname = self.fname + ".rej"
380 fname = self.fname + ".rej"
381 self.ui.warn(
381 self.ui.warn(
382 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
382 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
383 (len(self.rej), self.hunks, fname))
383 (len(self.rej), self.hunks, fname))
384 try: os.unlink(fname)
384 try: os.unlink(fname)
385 except:
385 except:
386 pass
386 pass
387 fp = file(fname, 'wb')
387 fp = file(fname, 'wb')
388 base = os.path.basename(self.fname)
388 base = os.path.basename(self.fname)
389 fp.write("--- %s\n+++ %s\n" % (base, base))
389 fp.write("--- %s\n+++ %s\n" % (base, base))
390 for x in self.rej:
390 for x in self.rej:
391 for l in x.hunk:
391 for l in x.hunk:
392 fp.write(l)
392 fp.write(l)
393 if l[-1] != '\n':
393 if l[-1] != '\n':
394 fp.write("\n\ No newline at end of file\n")
394 fp.write("\n\ No newline at end of file\n")
395
395
396 def write(self, dest=None):
396 def write(self, dest=None):
397 if self.dirty:
397 if self.dirty:
398 if not dest:
398 if not dest:
399 dest = self.fname
399 dest = self.fname
400 st = None
400 st = None
401 try:
401 try:
402 st = os.lstat(dest)
402 st = os.lstat(dest)
403 except OSError, inst:
403 except OSError, inst:
404 if inst.errno != errno.ENOENT:
404 if inst.errno != errno.ENOENT:
405 raise
405 raise
406 if st and st.st_nlink > 1:
406 if st and st.st_nlink > 1:
407 os.unlink(dest)
407 os.unlink(dest)
408 fp = file(dest, 'wb')
408 fp = file(dest, 'wb')
409 if st and st.st_nlink > 1:
409 if st and st.st_nlink > 1:
410 os.chmod(dest, st.st_mode)
410 os.chmod(dest, st.st_mode)
411 fp.writelines(self.lines)
411 fp.writelines(self.lines)
412 fp.close()
412 fp.close()
413
413
414 def close(self):
414 def close(self):
415 self.write()
415 self.write()
416 self.write_rej()
416 self.write_rej()
417
417
418 def apply(self, h, reverse):
418 def apply(self, h, reverse):
419 if not h.complete():
419 if not h.complete():
420 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
420 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
421 (h.number, h.desc, len(h.a), h.lena, len(h.b),
421 (h.number, h.desc, len(h.a), h.lena, len(h.b),
422 h.lenb))
422 h.lenb))
423
423
424 self.hunks += 1
424 self.hunks += 1
425 if reverse:
425 if reverse:
426 h.reverse()
426 h.reverse()
427
427
428 if self.missing:
428 if self.missing:
429 self.rej.append(h)
429 self.rej.append(h)
430 return -1
430 return -1
431
431
432 if self.exists and h.createfile():
432 if self.exists and h.createfile():
433 self.ui.warn(_("file %s already exists\n") % self.fname)
433 self.ui.warn(_("file %s already exists\n") % self.fname)
434 self.rej.append(h)
434 self.rej.append(h)
435 return -1
435 return -1
436
436
437 if isinstance(h, binhunk):
437 if isinstance(h, binhunk):
438 if h.rmfile():
438 if h.rmfile():
439 os.unlink(self.fname)
439 os.unlink(self.fname)
440 else:
440 else:
441 self.lines[:] = h.new()
441 self.lines[:] = h.new()
442 self.offset += len(h.new())
442 self.offset += len(h.new())
443 self.dirty = 1
443 self.dirty = 1
444 return 0
444 return 0
445
445
446 # fast case first, no offsets, no fuzz
446 # fast case first, no offsets, no fuzz
447 old = h.old()
447 old = h.old()
448 # patch starts counting at 1 unless we are adding the file
448 # patch starts counting at 1 unless we are adding the file
449 if h.starta == 0:
449 if h.starta == 0:
450 start = 0
450 start = 0
451 else:
451 else:
452 start = h.starta + self.offset - 1
452 start = h.starta + self.offset - 1
453 orig_start = start
453 orig_start = start
454 if diffhelpers.testhunk(old, self.lines, start) == 0:
454 if diffhelpers.testhunk(old, self.lines, start) == 0:
455 if h.rmfile():
455 if h.rmfile():
456 os.unlink(self.fname)
456 os.unlink(self.fname)
457 else:
457 else:
458 self.lines[start : start + h.lena] = h.new()
458 self.lines[start : start + h.lena] = h.new()
459 self.offset += h.lenb - h.lena
459 self.offset += h.lenb - h.lena
460 self.dirty = 1
460 self.dirty = 1
461 return 0
461 return 0
462
462
463 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
463 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
464 self.hashlines()
464 self.hashlines()
465 if h.hunk[-1][0] != ' ':
465 if h.hunk[-1][0] != ' ':
466 # if the hunk tried to put something at the bottom of the file
466 # if the hunk tried to put something at the bottom of the file
467 # override the start line and use eof here
467 # override the start line and use eof here
468 search_start = len(self.lines)
468 search_start = len(self.lines)
469 else:
469 else:
470 search_start = orig_start
470 search_start = orig_start
471
471
472 for fuzzlen in xrange(3):
472 for fuzzlen in xrange(3):
473 for toponly in [ True, False ]:
473 for toponly in [ True, False ]:
474 old = h.old(fuzzlen, toponly)
474 old = h.old(fuzzlen, toponly)
475
475
476 cand = self.findlines(old[0][1:], search_start)
476 cand = self.findlines(old[0][1:], search_start)
477 for l in cand:
477 for l in cand:
478 if diffhelpers.testhunk(old, self.lines, l) == 0:
478 if diffhelpers.testhunk(old, self.lines, l) == 0:
479 newlines = h.new(fuzzlen, toponly)
479 newlines = h.new(fuzzlen, toponly)
480 self.lines[l : l + len(old)] = newlines
480 self.lines[l : l + len(old)] = newlines
481 self.offset += len(newlines) - len(old)
481 self.offset += len(newlines) - len(old)
482 self.dirty = 1
482 self.dirty = 1
483 if fuzzlen:
483 if fuzzlen:
484 fuzzstr = "with fuzz %d " % fuzzlen
484 fuzzstr = "with fuzz %d " % fuzzlen
485 f = self.ui.warn
485 f = self.ui.warn
486 self.printfile(True)
486 self.printfile(True)
487 else:
487 else:
488 fuzzstr = ""
488 fuzzstr = ""
489 f = self.ui.note
489 f = self.ui.note
490 offset = l - orig_start - fuzzlen
490 offset = l - orig_start - fuzzlen
491 if offset == 1:
491 if offset == 1:
492 linestr = "line"
492 linestr = "line"
493 else:
493 else:
494 linestr = "lines"
494 linestr = "lines"
495 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
495 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
496 (h.number, l+1, fuzzstr, offset, linestr))
496 (h.number, l+1, fuzzstr, offset, linestr))
497 return fuzzlen
497 return fuzzlen
498 self.printfile(True)
498 self.printfile(True)
499 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
499 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
500 self.rej.append(h)
500 self.rej.append(h)
501 return -1
501 return -1
502
502
503 class hunk:
503 class hunk:
504 def __init__(self, desc, num, lr, context, create=False, remove=False):
504 def __init__(self, desc, num, lr, context, create=False, remove=False):
505 self.number = num
505 self.number = num
506 self.desc = desc
506 self.desc = desc
507 self.hunk = [ desc ]
507 self.hunk = [ desc ]
508 self.a = []
508 self.a = []
509 self.b = []
509 self.b = []
510 if context:
510 if context:
511 self.read_context_hunk(lr)
511 self.read_context_hunk(lr)
512 else:
512 else:
513 self.read_unified_hunk(lr)
513 self.read_unified_hunk(lr)
514 self.create = create
514 self.create = create
515 self.remove = remove and not create
515 self.remove = remove and not create
516
516
517 def read_unified_hunk(self, lr):
517 def read_unified_hunk(self, lr):
518 m = unidesc.match(self.desc)
518 m = unidesc.match(self.desc)
519 if not m:
519 if not m:
520 raise PatchError(_("bad hunk #%d") % self.number)
520 raise PatchError(_("bad hunk #%d") % self.number)
521 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
521 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
522 if self.lena == None:
522 if self.lena == None:
523 self.lena = 1
523 self.lena = 1
524 else:
524 else:
525 self.lena = int(self.lena)
525 self.lena = int(self.lena)
526 if self.lenb == None:
526 if self.lenb == None:
527 self.lenb = 1
527 self.lenb = 1
528 else:
528 else:
529 self.lenb = int(self.lenb)
529 self.lenb = int(self.lenb)
530 self.starta = int(self.starta)
530 self.starta = int(self.starta)
531 self.startb = int(self.startb)
531 self.startb = int(self.startb)
532 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
532 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
533 # if we hit eof before finishing out the hunk, the last line will
533 # if we hit eof before finishing out the hunk, the last line will
534 # be zero length. Lets try to fix it up.
534 # be zero length. Lets try to fix it up.
535 while len(self.hunk[-1]) == 0:
535 while len(self.hunk[-1]) == 0:
536 del self.hunk[-1]
536 del self.hunk[-1]
537 del self.a[-1]
537 del self.a[-1]
538 del self.b[-1]
538 del self.b[-1]
539 self.lena -= 1
539 self.lena -= 1
540 self.lenb -= 1
540 self.lenb -= 1
541
541
def read_context_hunk(self, lr):
    """Parse one context-diff hunk from linereader lr.

    Fills in self.starta/self.lena (old side) and self.startb/self.lenb
    (new side), the old-side lines in self.a (with control chars), the
    new-side lines in self.b (without them), and a unified-diff
    rendering of the whole hunk in self.hunk; self.desc is rewritten
    as a '@@' header at the end.

    Raises PatchError on a malformed header or body line.
    """
    # old-side header: "*** start[,end] ****"
    self.desc = lr.readline()
    m = contextdesc.match(self.desc)
    if not m:
        raise PatchError(_("bad hunk #%d") % self.number)
    foo, self.starta, foo2, aend, foo3 = m.groups()
    self.starta = int(self.starta)
    if aend == None:
        # single-line form "*** start ****": range is just 'start'
        aend = self.starta
    self.lena = int(aend) - self.starta
    if self.starta:
        # ranges are inclusive, so a non-empty range spans end-start+1 lines
        self.lena += 1
    # read the old-side body, translating context-diff prefixes
    # ("- ", "! ", "  ") into unified-diff ones ("-", " ")
    for x in xrange(self.lena):
        l = lr.readline()
        if l.startswith('---'):
            # hit the new-side header early: push it back and stop
            lr.push(l)
            break
        s = l[2:]
        if l.startswith('- ') or l.startswith('! '):
            u = '-' + s
        elif l.startswith(' '):
            u = ' ' + s
        else:
            raise PatchError(_("bad hunk #%d old text line %d") %
                             (self.number, x))
        self.a.append(u)
        self.hunk.append(u)

    l = lr.readline()
    if l.startswith('\ '):
        # "\ No newline at end of file": drop the stored newline
        s = self.a[-1][:-1]
        self.a[-1] = s
        self.hunk[-1] = s
        l = lr.readline()
    # new-side header: "--- start[,end] ----"
    m = contextdesc.match(l)
    if not m:
        raise PatchError(_("bad hunk #%d") % self.number)
    foo, self.startb, foo2, bend, foo3 = m.groups()
    self.startb = int(self.startb)
    if bend == None:
        bend = self.startb
    self.lenb = int(bend) - self.startb
    if self.startb:
        self.lenb += 1
    # hunki walks self.hunk while the new-side lines are merged into it
    hunki = 1
    for x in xrange(self.lenb):
        l = lr.readline()
        if l.startswith('\ '):
            # no-newline marker for the new side
            s = self.b[-1][:-1]
            self.b[-1] = s
            self.hunk[hunki-1] = s
            continue
        if not l:
            # EOF mid-hunk: push the empty line back and stop
            lr.push(l)
            break
        s = l[2:]
        if l.startswith('+ ') or l.startswith('! '):
            u = '+' + s
        elif l.startswith(' '):
            u = ' ' + s
        elif len(self.b) == 0:
            # this can happen when the hunk does not add any lines
            lr.push(l)
            break
        else:
            # NOTE(review): message says "old text" but this loop scans the
            # new side; looks copied from the loop above — confirm upstream
            raise PatchError(_("bad hunk #%d old text line %d") %
                             (self.number, x))
        self.b.append(s)
        while True:
            # merge this new-side line into self.hunk in order, skipping
            # over lines that were removed from the old side
            if hunki >= len(self.hunk):
                h = ""
            else:
                h = self.hunk[hunki]
            hunki += 1
            if h == u:
                # already present as a context line
                break
            elif h.startswith('-'):
                continue
            else:
                # insert the added line before the current position
                self.hunk.insert(hunki-1, u)
                break

    if not self.a:
        # this happens when lines were only added to the hunk
        for x in self.hunk:
            if x.startswith('-') or x.startswith(' '):
                self.a.append(x)
    if not self.b:
        # this happens when lines were only deleted from the hunk
        for x in self.hunk:
            if x.startswith('+') or x.startswith(' '):
                self.b.append(x[1:])
    # @@ -start,len +start,len @@
    self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                           self.startb, self.lenb)
    self.hunk[0] = self.desc
638
638
def reverse(self):
    """Reverse the hunk in place so that applying it undoes the patch.

    Swaps the create/remove flags, swaps the old/new ranges
    (starta/lena with startb/lenb), flips every '-' body line to '+'
    and vice versa, and rebuilds self.a (old side, with control
    characters) and self.b (new side, without them) for the reversed
    direction.
    """
    self.create, self.remove = self.remove, self.create
    origlena = self.lena
    origstarta = self.starta
    self.lena = self.lenb
    self.starta = self.startb
    self.lenb = origlena
    self.startb = origstarta
    self.a = []
    self.b = []
    # self.hunk[0] is the @@ description; only body lines are flipped
    for x in range(1, len(self.hunk)):
        o = self.hunk[x]
        if o.startswith('-'):
            # removed line becomes an added line of the reversed hunk
            n = '+' + o[1:]
            self.b.append(o[1:])
        elif o.startswith('+'):
            # added line becomes a removed line of the reversed hunk
            n = '-' + o[1:]
            self.a.append(n)
        else:
            # context line: unchanged, present on both sides
            n = o
            self.b.append(o[1:])
            self.a.append(o)
        # bug fix: store the flipped line 'n', not the original 'o';
        # otherwise self.hunk keeps the forward-direction text and
        # disagrees with the rebuilt a/b lists
        self.hunk[x] = n
663
663
def fix_newline(self):
    """Adjust hunk/a/b for a '\\ No newline at end of file' marker.

    Delegates to the C helper in diffhelpers, which mutates the three
    lists in place.
    """
    diffhelpers.fix_newline(self.hunk, self.a, self.b)
666
666
def complete(self):
    """True when both sides hold exactly the line counts the header declared."""
    have_old = len(self.a)
    have_new = len(self.b)
    return have_old == self.lena and have_new == self.lenb
669
669
def createfile(self):
    """Hunk creates a new file: old side starts at 0, is empty, and the
    hunk was flagged as a creation."""
    at_origin = self.starta == 0
    no_old_lines = self.lena == 0
    return at_origin and no_old_lines and self.create
672
672
def rmfile(self):
    """Hunk deletes the file: new side starts at 0, is empty, and the
    hunk was flagged as a removal."""
    at_origin = self.startb == 0
    no_new_lines = self.lenb == 0
    return at_origin and no_new_lines and self.remove
675
675
def fuzzit(self, l, fuzz, toponly):
    # this removes context lines from the top and bottom of list 'l'. It
    # checks the hunk to make sure only context lines are removed, and then
    # returns a new shortened list of lines.
    #
    # 'l' is self.a or self.b; 'fuzz' is the maximum number of context
    # lines to drop from each end; 'toponly' restricts fuzzing to the top.
    # never fuzz away the entire list
    fuzz = min(fuzz, len(l)-1)
    if fuzz:
        top = 0
        bot = 0
        hlen = len(self.hunk)
        # count leading context lines in the hunk
        for x in xrange(hlen-1):
            # the hunk starts with the @@ line, so use x+1
            if self.hunk[x+1][0] == ' ':
                top += 1
            else:
                break
        if not toponly:
            # count trailing context lines in the hunk
            for x in xrange(hlen-1):
                if self.hunk[hlen-bot-1][0] == ' ':
                    bot += 1
                else:
                    break

        # top and bot now count context in the hunk
        # adjust them if either one is short
        context = max(top, bot, 3)
        if bot < context:
            bot = max(0, fuzz - (context - bot))
        else:
            bot = min(fuzz, bot)
        if top < context:
            top = max(0, fuzz - (context - top))
        else:
            top = min(fuzz, top)

        # drop 'top' lines from the front and 'bot' from the back
        return l[top:len(l)-bot]
    return l
712
712
def old(self, fuzz=0, toponly=False):
    """Return the old-side lines (self.a), optionally fuzzed."""
    return self.fuzzit(self.a, fuzz, toponly)
715
715
def newctrl(self):
    """Return the hunk lines belonging to the new file, keeping their
    leading control character (' ' or '+'); the '@@' header and '-'
    lines are skipped."""
    return [line for line in self.hunk if line[0] in (' ', '+')]
723
723
def new(self, fuzz=0, toponly=False):
    """Return the new-side lines (self.b), optionally fuzzed."""
    return self.fuzzit(self.b, fuzz, toponly)
726
726
class binhunk:
    'A binary patch file. Only understands literals so far.'
    def __init__(self, gitpatch):
        self.gitpatch = gitpatch    # gitpatch record describing the file op
        self.text = None            # decoded contents, set by extract()
        self.hunk = ['GIT binary patch\n']

    def createfile(self):
        """True when applying this patch creates the target file."""
        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')

    def rmfile(self):
        """True when applying this patch deletes the target file."""
        return self.gitpatch.op == 'DELETE'

    def complete(self):
        """True once the binary payload has been extracted."""
        return self.text is not None

    def new(self):
        """Return the new file contents as a single-element list."""
        return [self.text]

    def extract(self, fp):
        """Read a git 'literal' binary section from fp into self.text.

        Skips ahead to the 'literal <size>' header, base85-decodes each
        payload line, zlib-decompresses the result, and checks it
        against the declared size. Raises PatchError if no literal
        header is found or the size does not match.
        """
        line = fp.readline()
        self.hunk.append(line)
        while line and not line.startswith('literal '):
            line = fp.readline()
            self.hunk.append(line)
        if not line:
            raise PatchError(_('could not extract binary patch'))
        size = int(line[8:].rstrip())
        dec = []
        line = fp.readline()
        self.hunk.append(line)
        while len(line) > 1:
            # first char encodes the decoded length: A-Z -> 1..26,
            # a-z -> 27..52 (git's base85 line framing)
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:-1])[:l])
            line = fp.readline()
            self.hunk.append(line)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            # bug fix: format arguments must be passed as a tuple; the
            # original '% len(text), size' made 'size' a second PatchError
            # argument and raised TypeError on the '%' instead
            raise PatchError(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        self.text = text
772
772
def parsefilename(str):
    """Extract the filename from a '--- name<TAB or space>stuff' header line.

    A tab separator takes precedence over a space; with neither present
    the whole (newline-stripped) remainder is the name.
    """
    # --- filename \t|space stuff
    name = str[4:].rstrip('\r\n')
    for sep in ('\t', ' '):
        cut = name.find(sep)
        if cut >= 0:
            return name[:cut]
    return name
782
782
def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
    """Choose which on-disk file a hunk should be applied to.

    Strips 'strip' leading path components from both names, then picks
    between the old name (afile) and new name (bfile) based on which
    files exist, the hunk's create/remove flags, and a backup-file
    heuristic. Returns (fname, missing) where 'missing' is True when
    neither side exists and the hunk does not create the file.

    Raises PatchError when stripping fails or both names are /dev/null.
    """
    def pathstrip(path, count=1):
        # split 'path' into (stripped-prefix, remainder) after removing
        # 'count' leading directory components
        pathlen = len(path)
        i = 0
        if count == 0:
            return '', path.rstrip()
        while count > 0:
            i = path.find('/', i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
            while i < pathlen - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[:i].lstrip(), path[i:].rstrip()

    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    abase, afile = pathstrip(afile_orig, strip)
    gooda = not nulla and os.path.exists(afile)
    bbase, bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        # same name on both sides: one existence check suffices
        goodb = gooda
    else:
        goodb = not nullb and os.path.exists(bfile)
    createfunc = hunk.createfile
    if reverse:
        # when applying in reverse, a creation hunk acts as a removal
        createfunc = hunk.rmfile
    missing = not goodb and not gooda and not createfunc()
    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            fname = isbackup and afile or bfile
        elif gooda:
            fname = afile

    if not fname:
        # fall back to whichever side is not /dev/null
        if not nullb:
            fname = isbackup and afile or bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    return fname, missing
834
834
class linereader:
    """Wrap a file object so lines can be pushed back onto the stream."""

    def __init__(self, fp):
        self.fp = fp     # underlying file-like object
        self.buf = []    # pushed-back lines, returned in FIFO order

    def push(self, line):
        """Queue *line* to be returned by an upcoming readline()."""
        self.buf.append(line)

    def readline(self):
        """Return the oldest pushed-back line, else read from the stream."""
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()
850
850
def iterhunks(ui, fp, sourcefile=None):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """

    def scangitpatch(fp, firstline):
        '''git patches can modify a file, then copy that file to
        a new file, but expect the source to be the unmodified form.
        So we scan the patch looking for that case so we can do
        the copies ahead of time.'''

        pos = 0
        try:
            pos = fp.tell()
        except IOError:
            # fp is not seekable (e.g. a pipe): buffer it so we can rewind
            fp = cStringIO.StringIO(fp.read())

        (dopatch, gitpatches) = readgitpatch(fp, firstline)
        fp.seek(pos)

        return fp, dopatch, gitpatches

    changed = {}        # filename -> (op, gitpatch) from git metadata
    current_hunk = None
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = False    # emit a 'file' event before the next hunk

    git = False
    gitre = re.compile('diff --git (a/.*) (b/.*)')

    # our states
    BFILE = 1
    # context: None until the diff flavor is known, then True for a
    # context diff, False for a unified diff
    context = None
    lr = linereader(fp)
    dopatch = True
    # gitworkdone is True if a git operation (copy, rename, ...) was
    # performed already for the current file. Useful when the file
    # section may have no hunk.
    gitworkdone = False

    while True:
        newfile = False
        x = lr.readline()
        if not x:
            break
        if current_hunk:
            if x.startswith('\ '):
                # "\ No newline at end of file" belongs to the open hunk
                current_hunk.fix_newline()
            yield 'hunk', current_hunk
            current_hunk = None
            gitworkdone = False
        # start of a hunk: '@' for unified diffs, a '***...' separator
        # for context diffs (also accepted before the flavor is known)
        if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
            ((context or context == None) and x.startswith('***************')))):
            try:
                if context == None and x.startswith('***************'):
                    context = True
                gpatch = changed.get(bfile[2:], (None, None))[1]
                create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
                remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
                current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
            except PatchError, err:
                ui.debug(err)
                current_hunk = None
                continue
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, current_hunk)
        elif state == BFILE and x.startswith('GIT binary patch'):
            current_hunk = binhunk(changed[bfile[2:]][1])
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, current_hunk)
            current_hunk.extract(fp)
        elif x.startswith('diff --git'):
            # check for git diff, scanning the whole patch file if needed
            m = gitre.match(x)
            if m:
                afile, bfile = m.group(1, 2)
                if not git:
                    git = True
                    fp, dopatch, gitpatches = scangitpatch(fp, x)
                    yield 'git', gitpatches
                    for gp in gitpatches:
                        changed[gp.path] = (gp.op, gp)
            # else error?
            # copy/rename + modify should modify target, not source
            gitop = changed.get(bfile[2:], (None, None))[0]
            if gitop in ('COPY', 'DELETE', 'RENAME'):
                afile = bfile
                gitworkdone = True
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            emitfile = True
            state = BFILE
            hunknum = 0
    # flush the last open hunk, which must be complete at EOF
    if current_hunk:
        if current_hunk.complete():
            yield 'hunk', current_hunk
        else:
            raise PatchError(_("malformed patch %s %s") % (afile,
                             current_hunk.desc))

    if hunknum == 0 and dopatch and not gitworkdone:
        raise NoHunks
990
990
def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
              rejmerge=None, updatedir=None):
    """reads a patch from fp and tries to apply it. The dict 'changed' is
    filled in with all of the filenames changed by the patch. Returns 0
    for a clean patch, -1 if any rejects were found and 1 if there was
    any fuzz."""

    rejects = 0
    err = 0                 # 1 once any hunk applies with fuzz
    current_file = None     # patchfile instance currently being patched
    gitpatches = None

    def closefile():
        # flush the current patchfile and count its rejected hunks
        if not current_file:
            return 0
        current_file.close()
        if rejmerge:
            rejmerge(current_file)
        return len(current_file.rej)

    for state, values in iterhunks(ui, fp, sourcefile):
        if state == 'hunk':
            if not current_file:
                # file selection failed earlier; skip its hunks
                continue
            current_hunk = values
            ret = current_file.apply(current_hunk, reverse)
            if ret >= 0:
                changed.setdefault(current_file.fname, (None, None))
                if ret > 0:
                    # hunk applied, but only with fuzz
                    err = 1
        elif state == 'file':
            rejects += closefile()
            afile, bfile, first_hunk = values
            try:
                if sourcefile:
                    current_file = patchfile(ui, sourcefile)
                else:
                    current_file, missing = selectfile(afile, bfile, first_hunk,
                                                       strip, reverse)
                    current_file = patchfile(ui, current_file, missing)
            # NOTE(review): binding the exception to 'err' clobbers the
            # fuzz flag accumulated above — confirm against upstream
            except PatchError, err:
                ui.warn(str(err) + '\n')
                current_file, current_hunk = None, None
                rejects += 1
                continue
        elif state == 'git':
            gitpatches = values
            cwd = os.getcwd()
            for gp in gitpatches:
                if gp.op in ('COPY', 'RENAME'):
                    # perform copies/renames up front so later hunks can
                    # patch the copied target
                    src, dst = [util.canonpath(cwd, cwd, x)
                                for x in [gp.oldpath, gp.path]]
                    copyfile(src, dst)
                changed[gp.path] = (gp.op, gp)
        else:
            raise util.Abort(_('unsupported parser state: %s') % state)

    rejects += closefile()

    if updatedir and gitpatches:
        updatedir(gitpatches)
    if rejects:
        return -1
    return err
1055
1055
def diffopts(ui, opts={}, untrusted=False):
    """Build an mdiff.diffopts from explicit options and [diff] config.

    A truthy value in 'opts' wins; otherwise the [diff] section of the
    ui configuration supplies the value (boolean lookup by default).
    """
    def lookup(key, name=None, getter=ui.configbool):
        explicit = opts.get(key)
        if explicit:
            return explicit
        return getter('diff', name or key, None, untrusted=untrusted)

    return mdiff.diffopts(
        text=opts.get('text'),
        git=lookup('git'),
        nodates=lookup('nodates'),
        showfunc=lookup('show_function', 'showfunc'),
        ignorews=lookup('ignore_all_space', 'ignorews'),
        ignorewsamount=lookup('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=lookup('ignore_blank_lines', 'ignoreblanklines'),
        context=lookup('unified', getter=ui.config))
1069
1069
def updatedir(ui, repo, patches):
    '''Update dirstate after patch application according to metadata.

    'patches' maps repo-relative filenames to (ctype, gitpatch) pairs.
    Records copies/renames/removals in the repo, replays file flags
    (exec bit, symlink) from git metadata, runs addremove over the
    touched files, and returns the sorted list of affected filenames.
    '''
    if not patches:
        return
    copies = []
    removes = {}    # used as a set of filenames to remove
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        # addremove wants cwd-relative paths
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    for f in patches:
        ctype, gp = patches[f]
        if ctype == 'RENAME':
            # a rename is a copy plus removal of the source
            copies.append((gp.oldpath, gp.path))
            removes[gp.oldpath] = 1
        elif ctype == 'COPY':
            copies.append((gp.oldpath, gp.path))
        elif ctype == 'DELETE':
            removes[gp.path] = 1
    for src, dst in copies:
        repo.copy(src, dst)
    removes = removes.keys()
    if removes:
        repo.remove(util.sort(removes), True)
    for f in patches:
        ctype, gp = patches[f]
        if gp and gp.mode:
            flags = ''
            # 0100 is the owner-execute permission bit
            if gp.mode & 0100:
                flags = 'x'
            # 020000 marks a symlink in git file modes
            elif gp.mode & 020000:
                flags = 'l'
            dst = os.path.join(repo.root, gp.path)
            # patch won't create empty files
            if ctype == 'ADD' and not os.path.exists(dst):
                repo.wwrite(gp.path, '', flags)
            else:
                util.set_flags(dst, 'l' in flags, 'x' in flags)
    cmdutil.addremove(repo, cfiles)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    return util.sort(files)
1112
1112
def b85diff(to, tn):
    '''print base85-encoded binary diff'''
    def gitindex(text):
        # git blob id: sha1 over "blob <len>\0" followed by the data
        if not text:
            return '0' * 40
        s = util.sha1('blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # git base85 length prefix: A-Z encode 1..26, a-z encode 27..52
        linelen = len(line)
        if linelen <= 26:
            prefix = chr(ord('A') + linelen - 1)
        else:
            prefix = chr(linelen - 26 + ord('a') - 1)
        return '%c%s\n' % (prefix, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # yield csize-byte slices of text
        offset = 0
        total = len(text)
        while offset < total:
            yield text[offset:offset + csize]
            offset += csize

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    if tohash == tnhash:
        # identical contents: nothing to emit
        return ""

    # TODO: deltas
    pieces = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
              (tohash, tnhash, len(tn))]
    for piece in chunk(zlib.compress(tn)):
        pieces.append(fmtline(piece))
    pieces.append('\n')
    return ''.join(pieces)
1150
1150
def diff(repo, node1=None, node2=None, match=None,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if not match:
        match = cmdutil.matchall(repo)

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # cache filelogs so each one is opened at most once per diff
    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    if not changes:
        # pass the contexts themselves so status can reuse them
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return

    date1 = util.datestr(ctx1.date())
    man1 = ctx1.manifest()

    if repo.ui.quiet:
        r = None
    else:
        # revision labels shown in the plain (non-git) diff header
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # find renames/copies; make the mapping symmetric so either
        # endpoint of a copy can be looked up below
        copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
        for k, v in copy.items():
            copy[v] = k

    # copy sources already reported as renames, to avoid duplicates
    gone = {}
    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    for f in util.sort(modified + added + removed):
        to = None  # old contents (None if f did not exist in ctx1)
        tn = None  # new contents (None if f was removed)
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        a, b = f, f
        if opts.git:
            def addmodehdr(header, omode, nmode):
                # emit old/new mode lines only when the mode changed
                if omode != nmode:
                    header.append('old mode %s\n' % omode)
                    header.append('new mode %s\n' % nmode)

            if f in added:
                mode = gitmode[ctx2.flags(f)]
                if f in copy:
                    # added via copy or rename from a
                    a = copy[f]
                    omode = gitmode[man1.flags(a)]
                    addmodehdr(header, omode, mode)
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    # diff against the copy source, not an empty file
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                # have we already reported a copy above?
                if f in copy and copy[f] in added and copy[copy[f]] == f:
                    dodiff = False
                else:
                    header.append('deleted file mode %s\n' %
                                  gitmode[man1.flags(f)])
            else:
                # modified in place; there may also be a mode change
                omode = gitmode[man1.flags(f)]
                nmode = gitmode[ctx2.flags(f)]
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            # git-style diffs carry no revision labels
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     a, b, r, opts=opts)
            # emit only if there is hunk text, or the git header itself
            # carries information (mode change, rename, copy, ...)
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)
1267
1264
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)

    def single(rev, seqno, fp):
        # write one changeset, header plus diff, to fp (or to a file
        # derived from template when fp is not given)
        ctx = repo[rev]
        node = ctx.node()
        branch = ctx.branch()
        parents = [p.node() for p in ctx.parents() if p]
        if switch_parent:
            parents.reverse()
        if parents:
            prev = parents[0]
        else:
            prev = nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth)
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and branch != 'default':
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        diff(repo, prev, node, fp=fp, opts=opts)
        if fp not in (sys.stdout, repo.ui):
            fp.close()

    for i, rev in enumerate(revs):
        # patch sequence numbers are 1-based
        single(rev, i + 1, fp)
1308
1305
def diffstat(patchlines):
    '''Run the external diffstat tool over patchlines (a sequence of
    lines without trailing newlines) and return its output as a single
    string with the trailing totals line moved to the front.

    Returns None if diffstat is not installed or exits non-zero.'''
    if not util.find_exe('diffstat'):
        return
    # mkstemp hands back an open fd; we are responsible for closing it
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    fp = None
    try:
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        for line in patchlines:
            p.tochild.write(line + "\n")
        p.tochild.close()
        if p.wait():
            # diffstat failed; the finally clause still closes fd
            # (the original leaked the descriptor on this path)
            return
        fp = os.fdopen(fd, 'r')
        fd = None  # ownership passed to fp; closing fp closes the fd
        stat = [l.lstrip() for l in fp]
        # diffstat prints the summary/totals line last; show it first
        last = stat.pop()
        stat.insert(0, last)
        return ''.join(stat)
    finally:
        # close whichever handle is still open, then remove the
        # temporary file (best effort, as before)
        if fp is not None:
            fp.close()
        elif fd is not None:
            os.close(fd)
        try:
            os.unlink(name)
        except OSError:
            pass
General Comments 0
You need to be logged in to leave comments. Login now