##// END OF EJS Templates
exec: add execfunc to simplify exec flag support on non-exec filesystems
Matt Mackall -
r3996:c190df14 default
parent child Browse files
Show More
@@ -1,516 +1,517 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import ancestor, bdiff, repo, revlog, util, os, errno
10 import ancestor, bdiff, repo, revlog, util, os, errno
11
11
12 class changectx(object):
12 class changectx(object):
13 """A changecontext object makes access to data related to a particular
13 """A changecontext object makes access to data related to a particular
14 changeset convenient."""
14 changeset convenient."""
15 def __init__(self, repo, changeid=None):
15 def __init__(self, repo, changeid=None):
16 """changeid is a revision number, node, or tag"""
16 """changeid is a revision number, node, or tag"""
17 self._repo = repo
17 self._repo = repo
18
18
19 if not changeid and changeid != 0:
19 if not changeid and changeid != 0:
20 p1, p2 = self._repo.dirstate.parents()
20 p1, p2 = self._repo.dirstate.parents()
21 self._rev = self._repo.changelog.rev(p1)
21 self._rev = self._repo.changelog.rev(p1)
22 if self._rev == -1:
22 if self._rev == -1:
23 changeid = 'tip'
23 changeid = 'tip'
24 else:
24 else:
25 self._node = p1
25 self._node = p1
26 return
26 return
27
27
28 self._node = self._repo.lookup(changeid)
28 self._node = self._repo.lookup(changeid)
29 self._rev = self._repo.changelog.rev(self._node)
29 self._rev = self._repo.changelog.rev(self._node)
30
30
31 def __str__(self):
31 def __str__(self):
32 return short(self.node())
32 return short(self.node())
33
33
34 def __repr__(self):
34 def __repr__(self):
35 return "<changectx %s>" % str(self)
35 return "<changectx %s>" % str(self)
36
36
37 def __eq__(self, other):
37 def __eq__(self, other):
38 try:
38 try:
39 return self._rev == other._rev
39 return self._rev == other._rev
40 except AttributeError:
40 except AttributeError:
41 return False
41 return False
42
42
43 def __nonzero__(self):
43 def __nonzero__(self):
44 return self._rev != nullrev
44 return self._rev != nullrev
45
45
46 def __getattr__(self, name):
46 def __getattr__(self, name):
47 if name == '_changeset':
47 if name == '_changeset':
48 self._changeset = self._repo.changelog.read(self.node())
48 self._changeset = self._repo.changelog.read(self.node())
49 return self._changeset
49 return self._changeset
50 elif name == '_manifest':
50 elif name == '_manifest':
51 self._manifest = self._repo.manifest.read(self._changeset[0])
51 self._manifest = self._repo.manifest.read(self._changeset[0])
52 return self._manifest
52 return self._manifest
53 elif name == '_manifestdelta':
53 elif name == '_manifestdelta':
54 md = self._repo.manifest.readdelta(self._changeset[0])
54 md = self._repo.manifest.readdelta(self._changeset[0])
55 self._manifestdelta = md
55 self._manifestdelta = md
56 return self._manifestdelta
56 return self._manifestdelta
57 else:
57 else:
58 raise AttributeError, name
58 raise AttributeError, name
59
59
60 def changeset(self): return self._changeset
60 def changeset(self): return self._changeset
61 def manifest(self): return self._manifest
61 def manifest(self): return self._manifest
62
62
63 def rev(self): return self._rev
63 def rev(self): return self._rev
64 def node(self): return self._node
64 def node(self): return self._node
65 def user(self): return self._changeset[1]
65 def user(self): return self._changeset[1]
66 def date(self): return self._changeset[2]
66 def date(self): return self._changeset[2]
67 def files(self): return self._changeset[3]
67 def files(self): return self._changeset[3]
68 def description(self): return self._changeset[4]
68 def description(self): return self._changeset[4]
69 def branch(self): return self._changeset[5].get("branch", "")
69 def branch(self): return self._changeset[5].get("branch", "")
70
70
71 def parents(self):
71 def parents(self):
72 """return contexts for each parent changeset"""
72 """return contexts for each parent changeset"""
73 p = self._repo.changelog.parents(self._node)
73 p = self._repo.changelog.parents(self._node)
74 return [changectx(self._repo, x) for x in p]
74 return [changectx(self._repo, x) for x in p]
75
75
76 def children(self):
76 def children(self):
77 """return contexts for each child changeset"""
77 """return contexts for each child changeset"""
78 c = self._repo.changelog.children(self._node)
78 c = self._repo.changelog.children(self._node)
79 return [changectx(self._repo, x) for x in c]
79 return [changectx(self._repo, x) for x in c]
80
80
81 def filenode(self, path):
81 def filenode(self, path):
82 if '_manifest' in self.__dict__:
82 if '_manifest' in self.__dict__:
83 try:
83 try:
84 return self._manifest[path]
84 return self._manifest[path]
85 except KeyError:
85 except KeyError:
86 raise revlog.LookupError(_("'%s' not found in manifest") % path)
86 raise revlog.LookupError(_("'%s' not found in manifest") % path)
87 if '_manifestdelta' in self.__dict__ or path in self.files():
87 if '_manifestdelta' in self.__dict__ or path in self.files():
88 if path in self._manifestdelta:
88 if path in self._manifestdelta:
89 return self._manifestdelta[path]
89 return self._manifestdelta[path]
90 node, flag = self._repo.manifest.find(self._changeset[0], path)
90 node, flag = self._repo.manifest.find(self._changeset[0], path)
91 if not node:
91 if not node:
92 raise revlog.LookupError(_("'%s' not found in manifest") % path)
92 raise revlog.LookupError(_("'%s' not found in manifest") % path)
93
93
94 return node
94 return node
95
95
96 def filectx(self, path, fileid=None, filelog=None):
96 def filectx(self, path, fileid=None, filelog=None):
97 """get a file context from this changeset"""
97 """get a file context from this changeset"""
98 if fileid is None:
98 if fileid is None:
99 fileid = self.filenode(path)
99 fileid = self.filenode(path)
100 return filectx(self._repo, path, fileid=fileid,
100 return filectx(self._repo, path, fileid=fileid,
101 changectx=self, filelog=filelog)
101 changectx=self, filelog=filelog)
102
102
103 def filectxs(self):
103 def filectxs(self):
104 """generate a file context for each file in this changeset's
104 """generate a file context for each file in this changeset's
105 manifest"""
105 manifest"""
106 mf = self.manifest()
106 mf = self.manifest()
107 m = mf.keys()
107 m = mf.keys()
108 m.sort()
108 m.sort()
109 for f in m:
109 for f in m:
110 yield self.filectx(f, fileid=mf[f])
110 yield self.filectx(f, fileid=mf[f])
111
111
112 def ancestor(self, c2):
112 def ancestor(self, c2):
113 """
113 """
114 return the ancestor context of self and c2
114 return the ancestor context of self and c2
115 """
115 """
116 n = self._repo.changelog.ancestor(self._node, c2._node)
116 n = self._repo.changelog.ancestor(self._node, c2._node)
117 return changectx(self._repo, n)
117 return changectx(self._repo, n)
118
118
119 class filectx(object):
119 class filectx(object):
120 """A filecontext object makes access to data related to a particular
120 """A filecontext object makes access to data related to a particular
121 filerevision convenient."""
121 filerevision convenient."""
122 def __init__(self, repo, path, changeid=None, fileid=None,
122 def __init__(self, repo, path, changeid=None, fileid=None,
123 filelog=None, changectx=None):
123 filelog=None, changectx=None):
124 """changeid can be a changeset revision, node, or tag.
124 """changeid can be a changeset revision, node, or tag.
125 fileid can be a file revision or node."""
125 fileid can be a file revision or node."""
126 self._repo = repo
126 self._repo = repo
127 self._path = path
127 self._path = path
128
128
129 assert (changeid is not None
129 assert (changeid is not None
130 or fileid is not None
130 or fileid is not None
131 or changectx is not None)
131 or changectx is not None)
132
132
133 if filelog:
133 if filelog:
134 self._filelog = filelog
134 self._filelog = filelog
135
135
136 if fileid is None:
136 if fileid is None:
137 if changectx is None:
137 if changectx is None:
138 self._changeid = changeid
138 self._changeid = changeid
139 else:
139 else:
140 self._changectx = changectx
140 self._changectx = changectx
141 else:
141 else:
142 self._fileid = fileid
142 self._fileid = fileid
143
143
144 def __getattr__(self, name):
144 def __getattr__(self, name):
145 if name == '_changectx':
145 if name == '_changectx':
146 self._changectx = changectx(self._repo, self._changeid)
146 self._changectx = changectx(self._repo, self._changeid)
147 return self._changectx
147 return self._changectx
148 elif name == '_filelog':
148 elif name == '_filelog':
149 self._filelog = self._repo.file(self._path)
149 self._filelog = self._repo.file(self._path)
150 return self._filelog
150 return self._filelog
151 elif name == '_changeid':
151 elif name == '_changeid':
152 self._changeid = self._filelog.linkrev(self._filenode)
152 self._changeid = self._filelog.linkrev(self._filenode)
153 return self._changeid
153 return self._changeid
154 elif name == '_filenode':
154 elif name == '_filenode':
155 if '_fileid' in self.__dict__:
155 if '_fileid' in self.__dict__:
156 self._filenode = self._filelog.lookup(self._fileid)
156 self._filenode = self._filelog.lookup(self._fileid)
157 else:
157 else:
158 self._filenode = self._changectx.filenode(self._path)
158 self._filenode = self._changectx.filenode(self._path)
159 return self._filenode
159 return self._filenode
160 elif name == '_filerev':
160 elif name == '_filerev':
161 self._filerev = self._filelog.rev(self._filenode)
161 self._filerev = self._filelog.rev(self._filenode)
162 return self._filerev
162 return self._filerev
163 else:
163 else:
164 raise AttributeError, name
164 raise AttributeError, name
165
165
166 def __nonzero__(self):
166 def __nonzero__(self):
167 try:
167 try:
168 n = self._filenode
168 n = self._filenode
169 return True
169 return True
170 except revlog.LookupError:
170 except revlog.LookupError:
171 # file is missing
171 # file is missing
172 return False
172 return False
173
173
174 def __str__(self):
174 def __str__(self):
175 return "%s@%s" % (self.path(), short(self.node()))
175 return "%s@%s" % (self.path(), short(self.node()))
176
176
177 def __repr__(self):
177 def __repr__(self):
178 return "<filectx %s>" % str(self)
178 return "<filectx %s>" % str(self)
179
179
180 def __eq__(self, other):
180 def __eq__(self, other):
181 try:
181 try:
182 return (self._path == other._path
182 return (self._path == other._path
183 and self._changeid == other._changeid)
183 and self._changeid == other._changeid)
184 except AttributeError:
184 except AttributeError:
185 return False
185 return False
186
186
187 def filectx(self, fileid):
187 def filectx(self, fileid):
188 '''opens an arbitrary revision of the file without
188 '''opens an arbitrary revision of the file without
189 opening a new filelog'''
189 opening a new filelog'''
190 return filectx(self._repo, self._path, fileid=fileid,
190 return filectx(self._repo, self._path, fileid=fileid,
191 filelog=self._filelog)
191 filelog=self._filelog)
192
192
193 def filerev(self): return self._filerev
193 def filerev(self): return self._filerev
194 def filenode(self): return self._filenode
194 def filenode(self): return self._filenode
195 def filelog(self): return self._filelog
195 def filelog(self): return self._filelog
196
196
197 def rev(self):
197 def rev(self):
198 if '_changectx' in self.__dict__:
198 if '_changectx' in self.__dict__:
199 return self._changectx.rev()
199 return self._changectx.rev()
200 return self._filelog.linkrev(self._filenode)
200 return self._filelog.linkrev(self._filenode)
201
201
202 def node(self): return self._changectx.node()
202 def node(self): return self._changectx.node()
203 def user(self): return self._changectx.user()
203 def user(self): return self._changectx.user()
204 def date(self): return self._changectx.date()
204 def date(self): return self._changectx.date()
205 def files(self): return self._changectx.files()
205 def files(self): return self._changectx.files()
206 def description(self): return self._changectx.description()
206 def description(self): return self._changectx.description()
207 def branch(self): return self._changectx.branch()
207 def branch(self): return self._changectx.branch()
208 def manifest(self): return self._changectx.manifest()
208 def manifest(self): return self._changectx.manifest()
209 def changectx(self): return self._changectx
209 def changectx(self): return self._changectx
210
210
211 def data(self): return self._filelog.read(self._filenode)
211 def data(self): return self._filelog.read(self._filenode)
212 def renamed(self): return self._filelog.renamed(self._filenode)
212 def renamed(self): return self._filelog.renamed(self._filenode)
213 def path(self): return self._path
213 def path(self): return self._path
214 def size(self): return self._filelog.size(self._filerev)
214 def size(self): return self._filelog.size(self._filerev)
215
215
216 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
216 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
217
217
218 def parents(self):
218 def parents(self):
219 p = self._path
219 p = self._path
220 fl = self._filelog
220 fl = self._filelog
221 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
221 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
222
222
223 r = self.renamed()
223 r = self.renamed()
224 if r:
224 if r:
225 pl[0] = (r[0], r[1], None)
225 pl[0] = (r[0], r[1], None)
226
226
227 return [filectx(self._repo, p, fileid=n, filelog=l)
227 return [filectx(self._repo, p, fileid=n, filelog=l)
228 for p,n,l in pl if n != nullid]
228 for p,n,l in pl if n != nullid]
229
229
230 def children(self):
230 def children(self):
231 # hard for renames
231 # hard for renames
232 c = self._filelog.children(self._filenode)
232 c = self._filelog.children(self._filenode)
233 return [filectx(self._repo, self._path, fileid=x,
233 return [filectx(self._repo, self._path, fileid=x,
234 filelog=self._filelog) for x in c]
234 filelog=self._filelog) for x in c]
235
235
236 def annotate(self, follow=False):
236 def annotate(self, follow=False):
237 '''returns a list of tuples of (ctx, line) for each line
237 '''returns a list of tuples of (ctx, line) for each line
238 in the file, where ctx is the filectx of the node where
238 in the file, where ctx is the filectx of the node where
239 that line was last changed'''
239 that line was last changed'''
240
240
241 def decorate(text, rev):
241 def decorate(text, rev):
242 return ([rev] * len(text.splitlines()), text)
242 return ([rev] * len(text.splitlines()), text)
243
243
244 def pair(parent, child):
244 def pair(parent, child):
245 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
245 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
246 child[0][b1:b2] = parent[0][a1:a2]
246 child[0][b1:b2] = parent[0][a1:a2]
247 return child
247 return child
248
248
249 getlog = util.cachefunc(lambda x: self._repo.file(x))
249 getlog = util.cachefunc(lambda x: self._repo.file(x))
250 def getctx(path, fileid):
250 def getctx(path, fileid):
251 log = path == self._path and self._filelog or getlog(path)
251 log = path == self._path and self._filelog or getlog(path)
252 return filectx(self._repo, path, fileid=fileid, filelog=log)
252 return filectx(self._repo, path, fileid=fileid, filelog=log)
253 getctx = util.cachefunc(getctx)
253 getctx = util.cachefunc(getctx)
254
254
255 def parents(f):
255 def parents(f):
256 # we want to reuse filectx objects as much as possible
256 # we want to reuse filectx objects as much as possible
257 p = f._path
257 p = f._path
258 if f._filerev is None: # working dir
258 if f._filerev is None: # working dir
259 pl = [(n.path(), n.filerev()) for n in f.parents()]
259 pl = [(n.path(), n.filerev()) for n in f.parents()]
260 else:
260 else:
261 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
261 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
262
262
263 if follow:
263 if follow:
264 r = f.renamed()
264 r = f.renamed()
265 if r:
265 if r:
266 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
266 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
267
267
268 return [getctx(p, n) for p, n in pl if n != nullrev]
268 return [getctx(p, n) for p, n in pl if n != nullrev]
269
269
270 # use linkrev to find the first changeset where self appeared
270 # use linkrev to find the first changeset where self appeared
271 if self.rev() != self._filelog.linkrev(self._filenode):
271 if self.rev() != self._filelog.linkrev(self._filenode):
272 base = self.filectx(self.filerev())
272 base = self.filectx(self.filerev())
273 else:
273 else:
274 base = self
274 base = self
275
275
276 # find all ancestors
276 # find all ancestors
277 needed = {base: 1}
277 needed = {base: 1}
278 visit = [base]
278 visit = [base]
279 files = [base._path]
279 files = [base._path]
280 while visit:
280 while visit:
281 f = visit.pop(0)
281 f = visit.pop(0)
282 for p in parents(f):
282 for p in parents(f):
283 if p not in needed:
283 if p not in needed:
284 needed[p] = 1
284 needed[p] = 1
285 visit.append(p)
285 visit.append(p)
286 if p._path not in files:
286 if p._path not in files:
287 files.append(p._path)
287 files.append(p._path)
288 else:
288 else:
289 # count how many times we'll use this
289 # count how many times we'll use this
290 needed[p] += 1
290 needed[p] += 1
291
291
292 # sort by revision (per file) which is a topological order
292 # sort by revision (per file) which is a topological order
293 visit = []
293 visit = []
294 files.reverse()
294 files.reverse()
295 for f in files:
295 for f in files:
296 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
296 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
297 fn.sort()
297 fn.sort()
298 visit.extend(fn)
298 visit.extend(fn)
299 hist = {}
299 hist = {}
300
300
301 for r, f in visit:
301 for r, f in visit:
302 curr = decorate(f.data(), f)
302 curr = decorate(f.data(), f)
303 for p in parents(f):
303 for p in parents(f):
304 if p != nullid:
304 if p != nullid:
305 curr = pair(hist[p], curr)
305 curr = pair(hist[p], curr)
306 # trim the history of unneeded revs
306 # trim the history of unneeded revs
307 needed[p] -= 1
307 needed[p] -= 1
308 if not needed[p]:
308 if not needed[p]:
309 del hist[p]
309 del hist[p]
310 hist[f] = curr
310 hist[f] = curr
311
311
312 return zip(hist[f][0], hist[f][1].splitlines(1))
312 return zip(hist[f][0], hist[f][1].splitlines(1))
313
313
314 def ancestor(self, fc2):
314 def ancestor(self, fc2):
315 """
315 """
316 find the common ancestor file context, if any, of self, and fc2
316 find the common ancestor file context, if any, of self, and fc2
317 """
317 """
318
318
319 acache = {}
319 acache = {}
320
320
321 # prime the ancestor cache for the working directory
321 # prime the ancestor cache for the working directory
322 for c in (self, fc2):
322 for c in (self, fc2):
323 if c._filerev == None:
323 if c._filerev == None:
324 pl = [(n.path(), n.filenode()) for n in c.parents()]
324 pl = [(n.path(), n.filenode()) for n in c.parents()]
325 acache[(c._path, None)] = pl
325 acache[(c._path, None)] = pl
326
326
327 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
327 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
328 def parents(vertex):
328 def parents(vertex):
329 if vertex in acache:
329 if vertex in acache:
330 return acache[vertex]
330 return acache[vertex]
331 f, n = vertex
331 f, n = vertex
332 if f not in flcache:
332 if f not in flcache:
333 flcache[f] = self._repo.file(f)
333 flcache[f] = self._repo.file(f)
334 fl = flcache[f]
334 fl = flcache[f]
335 pl = [(f, p) for p in fl.parents(n) if p != nullid]
335 pl = [(f, p) for p in fl.parents(n) if p != nullid]
336 re = fl.renamed(n)
336 re = fl.renamed(n)
337 if re:
337 if re:
338 pl.append(re)
338 pl.append(re)
339 acache[vertex] = pl
339 acache[vertex] = pl
340 return pl
340 return pl
341
341
342 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
342 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
343 v = ancestor.ancestor(a, b, parents)
343 v = ancestor.ancestor(a, b, parents)
344 if v:
344 if v:
345 f, n = v
345 f, n = v
346 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
346 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
347
347
348 return None
348 return None
349
349
350 class workingctx(changectx):
350 class workingctx(changectx):
351 """A workingctx object makes access to data related to
351 """A workingctx object makes access to data related to
352 the current working directory convenient."""
352 the current working directory convenient."""
353 def __init__(self, repo):
353 def __init__(self, repo):
354 self._repo = repo
354 self._repo = repo
355 self._rev = None
355 self._rev = None
356 self._node = None
356 self._node = None
357
357
358 def __str__(self):
358 def __str__(self):
359 return str(self._parents[0]) + "+"
359 return str(self._parents[0]) + "+"
360
360
361 def __nonzero__(self):
361 def __nonzero__(self):
362 return True
362 return True
363
363
364 def __getattr__(self, name):
364 def __getattr__(self, name):
365 if name == '_parents':
365 if name == '_parents':
366 self._parents = self._repo.parents()
366 self._parents = self._repo.parents()
367 return self._parents
367 return self._parents
368 if name == '_status':
368 if name == '_status':
369 self._status = self._repo.status()
369 self._status = self._repo.status()
370 return self._status
370 return self._status
371 if name == '_manifest':
371 if name == '_manifest':
372 self._buildmanifest()
372 self._buildmanifest()
373 return self._manifest
373 return self._manifest
374 else:
374 else:
375 raise AttributeError, name
375 raise AttributeError, name
376
376
377 def _buildmanifest(self):
377 def _buildmanifest(self):
378 """generate a manifest corresponding to the working directory"""
378 """generate a manifest corresponding to the working directory"""
379
379
380 man = self._parents[0].manifest().copy()
380 man = self._parents[0].manifest().copy()
381 is_exec = util.execfunc(self._repo.root, man.execf)
381 copied = self._repo.dirstate.copies()
382 copied = self._repo.dirstate.copies()
382 modified, added, removed, deleted, unknown = self._status[:5]
383 modified, added, removed, deleted, unknown = self._status[:5]
383 for i, l in (("a", added), ("m", modified), ("u", unknown)):
384 for i, l in (("a", added), ("m", modified), ("u", unknown)):
384 for f in l:
385 for f in l:
385 man[f] = man.get(copied.get(f, f), nullid) + i
386 man[f] = man.get(copied.get(f, f), nullid) + i
386 try:
387 try:
387 man.set(f, util.is_exec(self._repo.wjoin(f), man.execf(f)))
388 man.set(f, is_exec(f))
388 except OSError:
389 except OSError:
389 pass
390 pass
390
391
391 for f in deleted + removed:
392 for f in deleted + removed:
392 if f in man:
393 if f in man:
393 del man[f]
394 del man[f]
394
395
395 self._manifest = man
396 self._manifest = man
396
397
397 def manifest(self): return self._manifest
398 def manifest(self): return self._manifest
398
399
399 def user(self): return self._repo.ui.username()
400 def user(self): return self._repo.ui.username()
400 def date(self): return util.makedate()
401 def date(self): return util.makedate()
401 def description(self): return ""
402 def description(self): return ""
402 def files(self):
403 def files(self):
403 f = self.modified() + self.added() + self.removed()
404 f = self.modified() + self.added() + self.removed()
404 f.sort()
405 f.sort()
405 return f
406 return f
406
407
407 def modified(self): return self._status[0]
408 def modified(self): return self._status[0]
408 def added(self): return self._status[1]
409 def added(self): return self._status[1]
409 def removed(self): return self._status[2]
410 def removed(self): return self._status[2]
410 def deleted(self): return self._status[3]
411 def deleted(self): return self._status[3]
411 def unknown(self): return self._status[4]
412 def unknown(self): return self._status[4]
412 def clean(self): return self._status[5]
413 def clean(self): return self._status[5]
413 def branch(self):
414 def branch(self):
414 try:
415 try:
415 return self._repo.opener("branch").read().strip()
416 return self._repo.opener("branch").read().strip()
416 except IOError:
417 except IOError:
417 return ""
418 return ""
418
419
419 def parents(self):
420 def parents(self):
420 """return contexts for each parent changeset"""
421 """return contexts for each parent changeset"""
421 return self._parents
422 return self._parents
422
423
423 def children(self):
424 def children(self):
424 return []
425 return []
425
426
426 def filectx(self, path, filelog=None):
427 def filectx(self, path, filelog=None):
427 """get a file context from the working directory"""
428 """get a file context from the working directory"""
428 return workingfilectx(self._repo, path, workingctx=self,
429 return workingfilectx(self._repo, path, workingctx=self,
429 filelog=filelog)
430 filelog=filelog)
430
431
431 def ancestor(self, c2):
432 def ancestor(self, c2):
432 """return the ancestor context of self and c2"""
433 """return the ancestor context of self and c2"""
433 return self._parents[0].ancestor(c2) # punt on two parents for now
434 return self._parents[0].ancestor(c2) # punt on two parents for now
434
435
435 class workingfilectx(filectx):
436 class workingfilectx(filectx):
436 """A workingfilectx object makes access to data related to a particular
437 """A workingfilectx object makes access to data related to a particular
437 file in the working directory convenient."""
438 file in the working directory convenient."""
438 def __init__(self, repo, path, filelog=None, workingctx=None):
439 def __init__(self, repo, path, filelog=None, workingctx=None):
439 """changeid can be a changeset revision, node, or tag.
440 """changeid can be a changeset revision, node, or tag.
440 fileid can be a file revision or node."""
441 fileid can be a file revision or node."""
441 self._repo = repo
442 self._repo = repo
442 self._path = path
443 self._path = path
443 self._changeid = None
444 self._changeid = None
444 self._filerev = self._filenode = None
445 self._filerev = self._filenode = None
445
446
446 if filelog:
447 if filelog:
447 self._filelog = filelog
448 self._filelog = filelog
448 if workingctx:
449 if workingctx:
449 self._changectx = workingctx
450 self._changectx = workingctx
450
451
451 def __getattr__(self, name):
452 def __getattr__(self, name):
452 if name == '_changectx':
453 if name == '_changectx':
453 self._changectx = workingctx(repo)
454 self._changectx = workingctx(repo)
454 return self._changectx
455 return self._changectx
455 elif name == '_repopath':
456 elif name == '_repopath':
456 self._repopath = (self._repo.dirstate.copied(self._path)
457 self._repopath = (self._repo.dirstate.copied(self._path)
457 or self._path)
458 or self._path)
458 return self._repopath
459 return self._repopath
459 elif name == '_filelog':
460 elif name == '_filelog':
460 self._filelog = self._repo.file(self._repopath)
461 self._filelog = self._repo.file(self._repopath)
461 return self._filelog
462 return self._filelog
462 else:
463 else:
463 raise AttributeError, name
464 raise AttributeError, name
464
465
465 def __nonzero__(self):
466 def __nonzero__(self):
466 return True
467 return True
467
468
468 def __str__(self):
469 def __str__(self):
469 return "%s@%s" % (self.path(), self._changectx)
470 return "%s@%s" % (self.path(), self._changectx)
470
471
471 def filectx(self, fileid):
472 def filectx(self, fileid):
472 '''opens an arbitrary revision of the file without
473 '''opens an arbitrary revision of the file without
473 opening a new filelog'''
474 opening a new filelog'''
474 return filectx(self._repo, self._repopath, fileid=fileid,
475 return filectx(self._repo, self._repopath, fileid=fileid,
475 filelog=self._filelog)
476 filelog=self._filelog)
476
477
477 def rev(self):
478 def rev(self):
478 if '_changectx' in self.__dict__:
479 if '_changectx' in self.__dict__:
479 return self._changectx.rev()
480 return self._changectx.rev()
480 return self._filelog.linkrev(self._filenode)
481 return self._filelog.linkrev(self._filenode)
481
482
482 def data(self): return self._repo.wread(self._path)
483 def data(self): return self._repo.wread(self._path)
483 def renamed(self):
484 def renamed(self):
484 rp = self._repopath
485 rp = self._repopath
485 if rp == self._path:
486 if rp == self._path:
486 return None
487 return None
487 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
488 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
488
489
489 def parents(self):
490 def parents(self):
490 '''return parent filectxs, following copies if necessary'''
491 '''return parent filectxs, following copies if necessary'''
491 p = self._path
492 p = self._path
492 rp = self._repopath
493 rp = self._repopath
493 pcl = self._changectx._parents
494 pcl = self._changectx._parents
494 fl = self._filelog
495 fl = self._filelog
495 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
496 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
496 if len(pcl) > 1:
497 if len(pcl) > 1:
497 if rp != p:
498 if rp != p:
498 fl = None
499 fl = None
499 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
500 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
500
501
501 return [filectx(self._repo, p, fileid=n, filelog=l)
502 return [filectx(self._repo, p, fileid=n, filelog=l)
502 for p,n,l in pl if n != nullid]
503 for p,n,l in pl if n != nullid]
503
504
504 def children(self):
505 def children(self):
505 return []
506 return []
506
507
507 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
508 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
508 def date(self):
509 def date(self):
509 t, tz = self._changectx.date()
510 t, tz = self._changectx.date()
510 try:
511 try:
511 return (os.lstat(repo.wjoin(self._path)).st_mtime, tz)
512 return (os.lstat(repo.wjoin(self._path)).st_mtime, tz)
512 except OSError, err:
513 except OSError, err:
513 if err.errno != errno.ENOENT: raise
514 if err.errno != errno.ENOENT: raise
514 return (t, tz)
515 return (t, tz)
515
516
516 def cmp(self, text): return self._repo.wread(self._path) == text
517 def cmp(self, text): return self._repo.wread(self._path) == text
@@ -1,1865 +1,1867 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, appendfile, changegroup
10 import repo, appendfile, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util
13 import os, revlog, time, util
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 if not path:
23 if not path:
24 p = os.getcwd()
24 p = os.getcwd()
25 while not os.path.isdir(os.path.join(p, ".hg")):
25 while not os.path.isdir(os.path.join(p, ".hg")):
26 oldp = p
26 oldp = p
27 p = os.path.dirname(p)
27 p = os.path.dirname(p)
28 if p == oldp:
28 if p == oldp:
29 raise repo.RepoError(_("There is no Mercurial repository"
29 raise repo.RepoError(_("There is no Mercurial repository"
30 " here (.hg not found)"))
30 " here (.hg not found)"))
31 path = p
31 path = p
32
32
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34 self.root = os.path.realpath(path)
34 self.root = os.path.realpath(path)
35 self.origroot = path
35 self.origroot = path
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38
38
39 if not os.path.isdir(self.path):
39 if not os.path.isdir(self.path):
40 if create:
40 if create:
41 if not os.path.exists(path):
41 if not os.path.exists(path):
42 os.mkdir(path)
42 os.mkdir(path)
43 os.mkdir(self.path)
43 os.mkdir(self.path)
44 os.mkdir(os.path.join(self.path, "store"))
44 os.mkdir(os.path.join(self.path, "store"))
45 requirements = ("revlogv1", "store")
45 requirements = ("revlogv1", "store")
46 reqfile = self.opener("requires", "w")
46 reqfile = self.opener("requires", "w")
47 for r in requirements:
47 for r in requirements:
48 reqfile.write("%s\n" % r)
48 reqfile.write("%s\n" % r)
49 reqfile.close()
49 reqfile.close()
50 # create an invalid changelog
50 # create an invalid changelog
51 self.opener("00changelog.i", "a").write(
51 self.opener("00changelog.i", "a").write(
52 '\0\0\0\2' # represents revlogv2
52 '\0\0\0\2' # represents revlogv2
53 ' dummy changelog to prevent using the old repo layout'
53 ' dummy changelog to prevent using the old repo layout'
54 )
54 )
55 else:
55 else:
56 raise repo.RepoError(_("repository %s not found") % path)
56 raise repo.RepoError(_("repository %s not found") % path)
57 elif create:
57 elif create:
58 raise repo.RepoError(_("repository %s already exists") % path)
58 raise repo.RepoError(_("repository %s already exists") % path)
59 else:
59 else:
60 # find requirements
60 # find requirements
61 try:
61 try:
62 requirements = self.opener("requires").read().splitlines()
62 requirements = self.opener("requires").read().splitlines()
63 except IOError, inst:
63 except IOError, inst:
64 if inst.errno != errno.ENOENT:
64 if inst.errno != errno.ENOENT:
65 raise
65 raise
66 requirements = []
66 requirements = []
67 # check them
67 # check them
68 for r in requirements:
68 for r in requirements:
69 if r not in self.supported:
69 if r not in self.supported:
70 raise repo.RepoError(_("requirement '%s' not supported") % r)
70 raise repo.RepoError(_("requirement '%s' not supported") % r)
71
71
72 # setup store
72 # setup store
73 if "store" in requirements:
73 if "store" in requirements:
74 self.encodefn = util.encodefilename
74 self.encodefn = util.encodefilename
75 self.decodefn = util.decodefilename
75 self.decodefn = util.decodefilename
76 self.spath = os.path.join(self.path, "store")
76 self.spath = os.path.join(self.path, "store")
77 else:
77 else:
78 self.encodefn = lambda x: x
78 self.encodefn = lambda x: x
79 self.decodefn = lambda x: x
79 self.decodefn = lambda x: x
80 self.spath = self.path
80 self.spath = self.path
81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
82
82
83 self.ui = ui.ui(parentui=parentui)
83 self.ui = ui.ui(parentui=parentui)
84 try:
84 try:
85 self.ui.readconfig(self.join("hgrc"), self.root)
85 self.ui.readconfig(self.join("hgrc"), self.root)
86 except IOError:
86 except IOError:
87 pass
87 pass
88
88
89 v = self.ui.configrevlog()
89 v = self.ui.configrevlog()
90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
92 fl = v.get('flags', None)
92 fl = v.get('flags', None)
93 flags = 0
93 flags = 0
94 if fl != None:
94 if fl != None:
95 for x in fl.split():
95 for x in fl.split():
96 flags |= revlog.flagstr(x)
96 flags |= revlog.flagstr(x)
97 elif self.revlogv1:
97 elif self.revlogv1:
98 flags = revlog.REVLOG_DEFAULT_FLAGS
98 flags = revlog.REVLOG_DEFAULT_FLAGS
99
99
100 v = self.revlogversion | flags
100 v = self.revlogversion | flags
101 self.manifest = manifest.manifest(self.sopener, v)
101 self.manifest = manifest.manifest(self.sopener, v)
102 self.changelog = changelog.changelog(self.sopener, v)
102 self.changelog = changelog.changelog(self.sopener, v)
103
103
104 fallback = self.ui.config('ui', 'fallbackencoding')
104 fallback = self.ui.config('ui', 'fallbackencoding')
105 if fallback:
105 if fallback:
106 util._fallbackencoding = fallback
106 util._fallbackencoding = fallback
107
107
108 # the changelog might not have the inline index flag
108 # the changelog might not have the inline index flag
109 # on. If the format of the changelog is the same as found in
109 # on. If the format of the changelog is the same as found in
110 # .hgrc, apply any flags found in the .hgrc as well.
110 # .hgrc, apply any flags found in the .hgrc as well.
111 # Otherwise, just version from the changelog
111 # Otherwise, just version from the changelog
112 v = self.changelog.version
112 v = self.changelog.version
113 if v == self.revlogversion:
113 if v == self.revlogversion:
114 v |= flags
114 v |= flags
115 self.revlogversion = v
115 self.revlogversion = v
116
116
117 self.tagscache = None
117 self.tagscache = None
118 self.branchcache = None
118 self.branchcache = None
119 self.nodetagscache = None
119 self.nodetagscache = None
120 self.encodepats = None
120 self.encodepats = None
121 self.decodepats = None
121 self.decodepats = None
122 self.transhandle = None
122 self.transhandle = None
123
123
124 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
124 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
125
125
126 def url(self):
126 def url(self):
127 return 'file:' + self.root
127 return 'file:' + self.root
128
128
129 def hook(self, name, throw=False, **args):
129 def hook(self, name, throw=False, **args):
130 def callhook(hname, funcname):
130 def callhook(hname, funcname):
131 '''call python hook. hook is callable object, looked up as
131 '''call python hook. hook is callable object, looked up as
132 name in python module. if callable returns "true", hook
132 name in python module. if callable returns "true", hook
133 fails, else passes. if hook raises exception, treated as
133 fails, else passes. if hook raises exception, treated as
134 hook failure. exception propagates if throw is "true".
134 hook failure. exception propagates if throw is "true".
135
135
136 reason for "true" meaning "hook failed" is so that
136 reason for "true" meaning "hook failed" is so that
137 unmodified commands (e.g. mercurial.commands.update) can
137 unmodified commands (e.g. mercurial.commands.update) can
138 be run as hooks without wrappers to convert return values.'''
138 be run as hooks without wrappers to convert return values.'''
139
139
140 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
140 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
141 d = funcname.rfind('.')
141 d = funcname.rfind('.')
142 if d == -1:
142 if d == -1:
143 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
143 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
144 % (hname, funcname))
144 % (hname, funcname))
145 modname = funcname[:d]
145 modname = funcname[:d]
146 try:
146 try:
147 obj = __import__(modname)
147 obj = __import__(modname)
148 except ImportError:
148 except ImportError:
149 try:
149 try:
150 # extensions are loaded with hgext_ prefix
150 # extensions are loaded with hgext_ prefix
151 obj = __import__("hgext_%s" % modname)
151 obj = __import__("hgext_%s" % modname)
152 except ImportError:
152 except ImportError:
153 raise util.Abort(_('%s hook is invalid '
153 raise util.Abort(_('%s hook is invalid '
154 '(import of "%s" failed)') %
154 '(import of "%s" failed)') %
155 (hname, modname))
155 (hname, modname))
156 try:
156 try:
157 for p in funcname.split('.')[1:]:
157 for p in funcname.split('.')[1:]:
158 obj = getattr(obj, p)
158 obj = getattr(obj, p)
159 except AttributeError, err:
159 except AttributeError, err:
160 raise util.Abort(_('%s hook is invalid '
160 raise util.Abort(_('%s hook is invalid '
161 '("%s" is not defined)') %
161 '("%s" is not defined)') %
162 (hname, funcname))
162 (hname, funcname))
163 if not callable(obj):
163 if not callable(obj):
164 raise util.Abort(_('%s hook is invalid '
164 raise util.Abort(_('%s hook is invalid '
165 '("%s" is not callable)') %
165 '("%s" is not callable)') %
166 (hname, funcname))
166 (hname, funcname))
167 try:
167 try:
168 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
168 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
169 except (KeyboardInterrupt, util.SignalInterrupt):
169 except (KeyboardInterrupt, util.SignalInterrupt):
170 raise
170 raise
171 except Exception, exc:
171 except Exception, exc:
172 if isinstance(exc, util.Abort):
172 if isinstance(exc, util.Abort):
173 self.ui.warn(_('error: %s hook failed: %s\n') %
173 self.ui.warn(_('error: %s hook failed: %s\n') %
174 (hname, exc.args[0]))
174 (hname, exc.args[0]))
175 else:
175 else:
176 self.ui.warn(_('error: %s hook raised an exception: '
176 self.ui.warn(_('error: %s hook raised an exception: '
177 '%s\n') % (hname, exc))
177 '%s\n') % (hname, exc))
178 if throw:
178 if throw:
179 raise
179 raise
180 self.ui.print_exc()
180 self.ui.print_exc()
181 return True
181 return True
182 if r:
182 if r:
183 if throw:
183 if throw:
184 raise util.Abort(_('%s hook failed') % hname)
184 raise util.Abort(_('%s hook failed') % hname)
185 self.ui.warn(_('warning: %s hook failed\n') % hname)
185 self.ui.warn(_('warning: %s hook failed\n') % hname)
186 return r
186 return r
187
187
188 def runhook(name, cmd):
188 def runhook(name, cmd):
189 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
189 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
190 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
190 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
191 r = util.system(cmd, environ=env, cwd=self.root)
191 r = util.system(cmd, environ=env, cwd=self.root)
192 if r:
192 if r:
193 desc, r = util.explain_exit(r)
193 desc, r = util.explain_exit(r)
194 if throw:
194 if throw:
195 raise util.Abort(_('%s hook %s') % (name, desc))
195 raise util.Abort(_('%s hook %s') % (name, desc))
196 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
196 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
197 return r
197 return r
198
198
199 r = False
199 r = False
200 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
200 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
201 if hname.split(".", 1)[0] == name and cmd]
201 if hname.split(".", 1)[0] == name and cmd]
202 hooks.sort()
202 hooks.sort()
203 for hname, cmd in hooks:
203 for hname, cmd in hooks:
204 if cmd.startswith('python:'):
204 if cmd.startswith('python:'):
205 r = callhook(hname, cmd[7:].strip()) or r
205 r = callhook(hname, cmd[7:].strip()) or r
206 else:
206 else:
207 r = runhook(hname, cmd) or r
207 r = runhook(hname, cmd) or r
208 return r
208 return r
209
209
210 tag_disallowed = ':\r\n'
210 tag_disallowed = ':\r\n'
211
211
212 def tag(self, name, node, message, local, user, date):
212 def tag(self, name, node, message, local, user, date):
213 '''tag a revision with a symbolic name.
213 '''tag a revision with a symbolic name.
214
214
215 if local is True, the tag is stored in a per-repository file.
215 if local is True, the tag is stored in a per-repository file.
216 otherwise, it is stored in the .hgtags file, and a new
216 otherwise, it is stored in the .hgtags file, and a new
217 changeset is committed with the change.
217 changeset is committed with the change.
218
218
219 keyword arguments:
219 keyword arguments:
220
220
221 local: whether to store tag in non-version-controlled file
221 local: whether to store tag in non-version-controlled file
222 (default False)
222 (default False)
223
223
224 message: commit message to use if committing
224 message: commit message to use if committing
225
225
226 user: name of user to use if committing
226 user: name of user to use if committing
227
227
228 date: date tuple to use if committing'''
228 date: date tuple to use if committing'''
229
229
230 for c in self.tag_disallowed:
230 for c in self.tag_disallowed:
231 if c in name:
231 if c in name:
232 raise util.Abort(_('%r cannot be used in a tag name') % c)
232 raise util.Abort(_('%r cannot be used in a tag name') % c)
233
233
234 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
234 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
235
235
236 if local:
236 if local:
237 # local tags are stored in the current charset
237 # local tags are stored in the current charset
238 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
238 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
239 self.hook('tag', node=hex(node), tag=name, local=local)
239 self.hook('tag', node=hex(node), tag=name, local=local)
240 return
240 return
241
241
242 for x in self.status()[:5]:
242 for x in self.status()[:5]:
243 if '.hgtags' in x:
243 if '.hgtags' in x:
244 raise util.Abort(_('working copy of .hgtags is changed '
244 raise util.Abort(_('working copy of .hgtags is changed '
245 '(please commit .hgtags manually)'))
245 '(please commit .hgtags manually)'))
246
246
247 # committed tags are stored in UTF-8
247 # committed tags are stored in UTF-8
248 line = '%s %s\n' % (hex(node), util.fromlocal(name))
248 line = '%s %s\n' % (hex(node), util.fromlocal(name))
249 self.wfile('.hgtags', 'ab').write(line)
249 self.wfile('.hgtags', 'ab').write(line)
250 if self.dirstate.state('.hgtags') == '?':
250 if self.dirstate.state('.hgtags') == '?':
251 self.add(['.hgtags'])
251 self.add(['.hgtags'])
252
252
253 self.commit(['.hgtags'], message, user, date)
253 self.commit(['.hgtags'], message, user, date)
254 self.hook('tag', node=hex(node), tag=name, local=local)
254 self.hook('tag', node=hex(node), tag=name, local=local)
255
255
256 def tags(self):
256 def tags(self):
257 '''return a mapping of tag to node'''
257 '''return a mapping of tag to node'''
258 if not self.tagscache:
258 if not self.tagscache:
259 self.tagscache = {}
259 self.tagscache = {}
260
260
261 def parsetag(line, context):
261 def parsetag(line, context):
262 if not line:
262 if not line:
263 return
263 return
264 s = l.split(" ", 1)
264 s = l.split(" ", 1)
265 if len(s) != 2:
265 if len(s) != 2:
266 self.ui.warn(_("%s: cannot parse entry\n") % context)
266 self.ui.warn(_("%s: cannot parse entry\n") % context)
267 return
267 return
268 node, key = s
268 node, key = s
269 key = util.tolocal(key.strip()) # stored in UTF-8
269 key = util.tolocal(key.strip()) # stored in UTF-8
270 try:
270 try:
271 bin_n = bin(node)
271 bin_n = bin(node)
272 except TypeError:
272 except TypeError:
273 self.ui.warn(_("%s: node '%s' is not well formed\n") %
273 self.ui.warn(_("%s: node '%s' is not well formed\n") %
274 (context, node))
274 (context, node))
275 return
275 return
276 if bin_n not in self.changelog.nodemap:
276 if bin_n not in self.changelog.nodemap:
277 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
277 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
278 (context, key))
278 (context, key))
279 return
279 return
280 self.tagscache[key] = bin_n
280 self.tagscache[key] = bin_n
281
281
282 # read the tags file from each head, ending with the tip,
282 # read the tags file from each head, ending with the tip,
283 # and add each tag found to the map, with "newer" ones
283 # and add each tag found to the map, with "newer" ones
284 # taking precedence
284 # taking precedence
285 f = None
285 f = None
286 for rev, node, fnode in self._hgtagsnodes():
286 for rev, node, fnode in self._hgtagsnodes():
287 f = (f and f.filectx(fnode) or
287 f = (f and f.filectx(fnode) or
288 self.filectx('.hgtags', fileid=fnode))
288 self.filectx('.hgtags', fileid=fnode))
289 count = 0
289 count = 0
290 for l in f.data().splitlines():
290 for l in f.data().splitlines():
291 count += 1
291 count += 1
292 parsetag(l, _("%s, line %d") % (str(f), count))
292 parsetag(l, _("%s, line %d") % (str(f), count))
293
293
294 try:
294 try:
295 f = self.opener("localtags")
295 f = self.opener("localtags")
296 count = 0
296 count = 0
297 for l in f:
297 for l in f:
298 # localtags are stored in the local character set
298 # localtags are stored in the local character set
299 # while the internal tag table is stored in UTF-8
299 # while the internal tag table is stored in UTF-8
300 l = util.fromlocal(l)
300 l = util.fromlocal(l)
301 count += 1
301 count += 1
302 parsetag(l, _("localtags, line %d") % count)
302 parsetag(l, _("localtags, line %d") % count)
303 except IOError:
303 except IOError:
304 pass
304 pass
305
305
306 self.tagscache['tip'] = self.changelog.tip()
306 self.tagscache['tip'] = self.changelog.tip()
307
307
308 return self.tagscache
308 return self.tagscache
309
309
310 def _hgtagsnodes(self):
310 def _hgtagsnodes(self):
311 heads = self.heads()
311 heads = self.heads()
312 heads.reverse()
312 heads.reverse()
313 last = {}
313 last = {}
314 ret = []
314 ret = []
315 for node in heads:
315 for node in heads:
316 c = self.changectx(node)
316 c = self.changectx(node)
317 rev = c.rev()
317 rev = c.rev()
318 try:
318 try:
319 fnode = c.filenode('.hgtags')
319 fnode = c.filenode('.hgtags')
320 except revlog.LookupError:
320 except revlog.LookupError:
321 continue
321 continue
322 ret.append((rev, node, fnode))
322 ret.append((rev, node, fnode))
323 if fnode in last:
323 if fnode in last:
324 ret[last[fnode]] = None
324 ret[last[fnode]] = None
325 last[fnode] = len(ret) - 1
325 last[fnode] = len(ret) - 1
326 return [item for item in ret if item]
326 return [item for item in ret if item]
327
327
328 def tagslist(self):
328 def tagslist(self):
329 '''return a list of tags ordered by revision'''
329 '''return a list of tags ordered by revision'''
330 l = []
330 l = []
331 for t, n in self.tags().items():
331 for t, n in self.tags().items():
332 try:
332 try:
333 r = self.changelog.rev(n)
333 r = self.changelog.rev(n)
334 except:
334 except:
335 r = -2 # sort to the beginning of the list if unknown
335 r = -2 # sort to the beginning of the list if unknown
336 l.append((r, t, n))
336 l.append((r, t, n))
337 l.sort()
337 l.sort()
338 return [(t, n) for r, t, n in l]
338 return [(t, n) for r, t, n in l]
339
339
340 def nodetags(self, node):
340 def nodetags(self, node):
341 '''return the tags associated with a node'''
341 '''return the tags associated with a node'''
342 if not self.nodetagscache:
342 if not self.nodetagscache:
343 self.nodetagscache = {}
343 self.nodetagscache = {}
344 for t, n in self.tags().items():
344 for t, n in self.tags().items():
345 self.nodetagscache.setdefault(n, []).append(t)
345 self.nodetagscache.setdefault(n, []).append(t)
346 return self.nodetagscache.get(node, [])
346 return self.nodetagscache.get(node, [])
347
347
348 def _branchtags(self):
348 def _branchtags(self):
349 partial, last, lrev = self._readbranchcache()
349 partial, last, lrev = self._readbranchcache()
350
350
351 tiprev = self.changelog.count() - 1
351 tiprev = self.changelog.count() - 1
352 if lrev != tiprev:
352 if lrev != tiprev:
353 self._updatebranchcache(partial, lrev+1, tiprev+1)
353 self._updatebranchcache(partial, lrev+1, tiprev+1)
354 self._writebranchcache(partial, self.changelog.tip(), tiprev)
354 self._writebranchcache(partial, self.changelog.tip(), tiprev)
355
355
356 return partial
356 return partial
357
357
358 def branchtags(self):
358 def branchtags(self):
359 if self.branchcache is not None:
359 if self.branchcache is not None:
360 return self.branchcache
360 return self.branchcache
361
361
362 self.branchcache = {} # avoid recursion in changectx
362 self.branchcache = {} # avoid recursion in changectx
363 partial = self._branchtags()
363 partial = self._branchtags()
364
364
365 # the branch cache is stored on disk as UTF-8, but in the local
365 # the branch cache is stored on disk as UTF-8, but in the local
366 # charset internally
366 # charset internally
367 for k, v in partial.items():
367 for k, v in partial.items():
368 self.branchcache[util.tolocal(k)] = v
368 self.branchcache[util.tolocal(k)] = v
369 return self.branchcache
369 return self.branchcache
370
370
371 def _readbranchcache(self):
371 def _readbranchcache(self):
372 partial = {}
372 partial = {}
373 try:
373 try:
374 f = self.opener("branches.cache")
374 f = self.opener("branches.cache")
375 lines = f.read().split('\n')
375 lines = f.read().split('\n')
376 f.close()
376 f.close()
377 last, lrev = lines.pop(0).rstrip().split(" ", 1)
377 last, lrev = lines.pop(0).rstrip().split(" ", 1)
378 last, lrev = bin(last), int(lrev)
378 last, lrev = bin(last), int(lrev)
379 if not (lrev < self.changelog.count() and
379 if not (lrev < self.changelog.count() and
380 self.changelog.node(lrev) == last): # sanity check
380 self.changelog.node(lrev) == last): # sanity check
381 # invalidate the cache
381 # invalidate the cache
382 raise ValueError('Invalid branch cache: unknown tip')
382 raise ValueError('Invalid branch cache: unknown tip')
383 for l in lines:
383 for l in lines:
384 if not l: continue
384 if not l: continue
385 node, label = l.rstrip().split(" ", 1)
385 node, label = l.rstrip().split(" ", 1)
386 partial[label] = bin(node)
386 partial[label] = bin(node)
387 except (KeyboardInterrupt, util.SignalInterrupt):
387 except (KeyboardInterrupt, util.SignalInterrupt):
388 raise
388 raise
389 except Exception, inst:
389 except Exception, inst:
390 if self.ui.debugflag:
390 if self.ui.debugflag:
391 self.ui.warn(str(inst), '\n')
391 self.ui.warn(str(inst), '\n')
392 partial, last, lrev = {}, nullid, nullrev
392 partial, last, lrev = {}, nullid, nullrev
393 return partial, last, lrev
393 return partial, last, lrev
394
394
395 def _writebranchcache(self, branches, tip, tiprev):
395 def _writebranchcache(self, branches, tip, tiprev):
396 try:
396 try:
397 f = self.opener("branches.cache", "w")
397 f = self.opener("branches.cache", "w")
398 f.write("%s %s\n" % (hex(tip), tiprev))
398 f.write("%s %s\n" % (hex(tip), tiprev))
399 for label, node in branches.iteritems():
399 for label, node in branches.iteritems():
400 f.write("%s %s\n" % (hex(node), label))
400 f.write("%s %s\n" % (hex(node), label))
401 except IOError:
401 except IOError:
402 pass
402 pass
403
403
404 def _updatebranchcache(self, partial, start, end):
404 def _updatebranchcache(self, partial, start, end):
405 for r in xrange(start, end):
405 for r in xrange(start, end):
406 c = self.changectx(r)
406 c = self.changectx(r)
407 b = c.branch()
407 b = c.branch()
408 if b:
408 if b:
409 partial[b] = c.node()
409 partial[b] = c.node()
410
410
411 def lookup(self, key):
411 def lookup(self, key):
412 if key == '.':
412 if key == '.':
413 key = self.dirstate.parents()[0]
413 key = self.dirstate.parents()[0]
414 if key == nullid:
414 if key == nullid:
415 raise repo.RepoError(_("no revision checked out"))
415 raise repo.RepoError(_("no revision checked out"))
416 elif key == 'null':
416 elif key == 'null':
417 return nullid
417 return nullid
418 n = self.changelog._match(key)
418 n = self.changelog._match(key)
419 if n:
419 if n:
420 return n
420 return n
421 if key in self.tags():
421 if key in self.tags():
422 return self.tags()[key]
422 return self.tags()[key]
423 if key in self.branchtags():
423 if key in self.branchtags():
424 return self.branchtags()[key]
424 return self.branchtags()[key]
425 n = self.changelog._partialmatch(key)
425 n = self.changelog._partialmatch(key)
426 if n:
426 if n:
427 return n
427 return n
428 raise repo.RepoError(_("unknown revision '%s'") % key)
428 raise repo.RepoError(_("unknown revision '%s'") % key)
429
429
430 def dev(self):
430 def dev(self):
431 return os.lstat(self.path).st_dev
431 return os.lstat(self.path).st_dev
432
432
433 def local(self):
433 def local(self):
434 return True
434 return True
435
435
436 def join(self, f):
436 def join(self, f):
437 return os.path.join(self.path, f)
437 return os.path.join(self.path, f)
438
438
439 def sjoin(self, f):
439 def sjoin(self, f):
440 f = self.encodefn(f)
440 f = self.encodefn(f)
441 return os.path.join(self.spath, f)
441 return os.path.join(self.spath, f)
442
442
443 def wjoin(self, f):
443 def wjoin(self, f):
444 return os.path.join(self.root, f)
444 return os.path.join(self.root, f)
445
445
446 def file(self, f):
446 def file(self, f):
447 if f[0] == '/':
447 if f[0] == '/':
448 f = f[1:]
448 f = f[1:]
449 return filelog.filelog(self.sopener, f, self.revlogversion)
449 return filelog.filelog(self.sopener, f, self.revlogversion)
450
450
451 def changectx(self, changeid=None):
451 def changectx(self, changeid=None):
452 return context.changectx(self, changeid)
452 return context.changectx(self, changeid)
453
453
454 def workingctx(self):
454 def workingctx(self):
455 return context.workingctx(self)
455 return context.workingctx(self)
456
456
457 def parents(self, changeid=None):
457 def parents(self, changeid=None):
458 '''
458 '''
459 get list of changectxs for parents of changeid or working directory
459 get list of changectxs for parents of changeid or working directory
460 '''
460 '''
461 if changeid is None:
461 if changeid is None:
462 pl = self.dirstate.parents()
462 pl = self.dirstate.parents()
463 else:
463 else:
464 n = self.changelog.lookup(changeid)
464 n = self.changelog.lookup(changeid)
465 pl = self.changelog.parents(n)
465 pl = self.changelog.parents(n)
466 if pl[1] == nullid:
466 if pl[1] == nullid:
467 return [self.changectx(pl[0])]
467 return [self.changectx(pl[0])]
468 return [self.changectx(pl[0]), self.changectx(pl[1])]
468 return [self.changectx(pl[0]), self.changectx(pl[1])]
469
469
470 def filectx(self, path, changeid=None, fileid=None):
470 def filectx(self, path, changeid=None, fileid=None):
471 """changeid can be a changeset revision, node, or tag.
471 """changeid can be a changeset revision, node, or tag.
472 fileid can be a file revision or node."""
472 fileid can be a file revision or node."""
473 return context.filectx(self, path, changeid, fileid)
473 return context.filectx(self, path, changeid, fileid)
474
474
475 def getcwd(self):
475 def getcwd(self):
476 return self.dirstate.getcwd()
476 return self.dirstate.getcwd()
477
477
478 def wfile(self, f, mode='r'):
478 def wfile(self, f, mode='r'):
479 return self.wopener(f, mode)
479 return self.wopener(f, mode)
480
480
481 def wread(self, filename):
481 def wread(self, filename):
482 if self.encodepats == None:
482 if self.encodepats == None:
483 l = []
483 l = []
484 for pat, cmd in self.ui.configitems("encode"):
484 for pat, cmd in self.ui.configitems("encode"):
485 mf = util.matcher(self.root, "", [pat], [], [])[1]
485 mf = util.matcher(self.root, "", [pat], [], [])[1]
486 l.append((mf, cmd))
486 l.append((mf, cmd))
487 self.encodepats = l
487 self.encodepats = l
488
488
489 data = self.wopener(filename, 'r').read()
489 data = self.wopener(filename, 'r').read()
490
490
491 for mf, cmd in self.encodepats:
491 for mf, cmd in self.encodepats:
492 if mf(filename):
492 if mf(filename):
493 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
493 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
494 data = util.filter(data, cmd)
494 data = util.filter(data, cmd)
495 break
495 break
496
496
497 return data
497 return data
498
498
499 def wwrite(self, filename, data, fd=None):
499 def wwrite(self, filename, data, fd=None):
500 if self.decodepats == None:
500 if self.decodepats == None:
501 l = []
501 l = []
502 for pat, cmd in self.ui.configitems("decode"):
502 for pat, cmd in self.ui.configitems("decode"):
503 mf = util.matcher(self.root, "", [pat], [], [])[1]
503 mf = util.matcher(self.root, "", [pat], [], [])[1]
504 l.append((mf, cmd))
504 l.append((mf, cmd))
505 self.decodepats = l
505 self.decodepats = l
506
506
507 for mf, cmd in self.decodepats:
507 for mf, cmd in self.decodepats:
508 if mf(filename):
508 if mf(filename):
509 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
509 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
510 data = util.filter(data, cmd)
510 data = util.filter(data, cmd)
511 break
511 break
512
512
513 if fd:
513 if fd:
514 return fd.write(data)
514 return fd.write(data)
515 return self.wopener(filename, 'w').write(data)
515 return self.wopener(filename, 'w').write(data)
516
516
517 def transaction(self):
517 def transaction(self):
518 tr = self.transhandle
518 tr = self.transhandle
519 if tr != None and tr.running():
519 if tr != None and tr.running():
520 return tr.nest()
520 return tr.nest()
521
521
522 # save dirstate for rollback
522 # save dirstate for rollback
523 try:
523 try:
524 ds = self.opener("dirstate").read()
524 ds = self.opener("dirstate").read()
525 except IOError:
525 except IOError:
526 ds = ""
526 ds = ""
527 self.opener("journal.dirstate", "w").write(ds)
527 self.opener("journal.dirstate", "w").write(ds)
528
528
529 renames = [(self.sjoin("journal"), self.sjoin("undo")),
529 renames = [(self.sjoin("journal"), self.sjoin("undo")),
530 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
530 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
531 tr = transaction.transaction(self.ui.warn, self.sopener,
531 tr = transaction.transaction(self.ui.warn, self.sopener,
532 self.sjoin("journal"),
532 self.sjoin("journal"),
533 aftertrans(renames))
533 aftertrans(renames))
534 self.transhandle = tr
534 self.transhandle = tr
535 return tr
535 return tr
536
536
537 def recover(self):
537 def recover(self):
538 l = self.lock()
538 l = self.lock()
539 if os.path.exists(self.sjoin("journal")):
539 if os.path.exists(self.sjoin("journal")):
540 self.ui.status(_("rolling back interrupted transaction\n"))
540 self.ui.status(_("rolling back interrupted transaction\n"))
541 transaction.rollback(self.sopener, self.sjoin("journal"))
541 transaction.rollback(self.sopener, self.sjoin("journal"))
542 self.reload()
542 self.reload()
543 return True
543 return True
544 else:
544 else:
545 self.ui.warn(_("no interrupted transaction available\n"))
545 self.ui.warn(_("no interrupted transaction available\n"))
546 return False
546 return False
547
547
548 def rollback(self, wlock=None):
548 def rollback(self, wlock=None):
549 if not wlock:
549 if not wlock:
550 wlock = self.wlock()
550 wlock = self.wlock()
551 l = self.lock()
551 l = self.lock()
552 if os.path.exists(self.sjoin("undo")):
552 if os.path.exists(self.sjoin("undo")):
553 self.ui.status(_("rolling back last transaction\n"))
553 self.ui.status(_("rolling back last transaction\n"))
554 transaction.rollback(self.sopener, self.sjoin("undo"))
554 transaction.rollback(self.sopener, self.sjoin("undo"))
555 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
555 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
556 self.reload()
556 self.reload()
557 self.wreload()
557 self.wreload()
558 else:
558 else:
559 self.ui.warn(_("no rollback information available\n"))
559 self.ui.warn(_("no rollback information available\n"))
560
560
561 def wreload(self):
561 def wreload(self):
562 self.dirstate.read()
562 self.dirstate.read()
563
563
564 def reload(self):
564 def reload(self):
565 self.changelog.load()
565 self.changelog.load()
566 self.manifest.load()
566 self.manifest.load()
567 self.tagscache = None
567 self.tagscache = None
568 self.nodetagscache = None
568 self.nodetagscache = None
569
569
570 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
570 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
571 desc=None):
571 desc=None):
572 try:
572 try:
573 l = lock.lock(lockname, 0, releasefn, desc=desc)
573 l = lock.lock(lockname, 0, releasefn, desc=desc)
574 except lock.LockHeld, inst:
574 except lock.LockHeld, inst:
575 if not wait:
575 if not wait:
576 raise
576 raise
577 self.ui.warn(_("waiting for lock on %s held by %r\n") %
577 self.ui.warn(_("waiting for lock on %s held by %r\n") %
578 (desc, inst.locker))
578 (desc, inst.locker))
579 # default to 600 seconds timeout
579 # default to 600 seconds timeout
580 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
580 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
581 releasefn, desc=desc)
581 releasefn, desc=desc)
582 if acquirefn:
582 if acquirefn:
583 acquirefn()
583 acquirefn()
584 return l
584 return l
585
585
586 def lock(self, wait=1):
586 def lock(self, wait=1):
587 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
587 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
588 desc=_('repository %s') % self.origroot)
588 desc=_('repository %s') % self.origroot)
589
589
590 def wlock(self, wait=1):
590 def wlock(self, wait=1):
591 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
591 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
592 self.wreload,
592 self.wreload,
593 desc=_('working directory of %s') % self.origroot)
593 desc=_('working directory of %s') % self.origroot)
594
594
595 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
595 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
596 """
596 """
597 commit an individual file as part of a larger transaction
597 commit an individual file as part of a larger transaction
598 """
598 """
599
599
600 t = self.wread(fn)
600 t = self.wread(fn)
601 fl = self.file(fn)
601 fl = self.file(fn)
602 fp1 = manifest1.get(fn, nullid)
602 fp1 = manifest1.get(fn, nullid)
603 fp2 = manifest2.get(fn, nullid)
603 fp2 = manifest2.get(fn, nullid)
604
604
605 meta = {}
605 meta = {}
606 cp = self.dirstate.copied(fn)
606 cp = self.dirstate.copied(fn)
607 if cp:
607 if cp:
608 meta["copy"] = cp
608 meta["copy"] = cp
609 if not manifest2: # not a branch merge
609 if not manifest2: # not a branch merge
610 meta["copyrev"] = hex(manifest1.get(cp, nullid))
610 meta["copyrev"] = hex(manifest1.get(cp, nullid))
611 fp2 = nullid
611 fp2 = nullid
612 elif fp2 != nullid: # copied on remote side
612 elif fp2 != nullid: # copied on remote side
613 meta["copyrev"] = hex(manifest1.get(cp, nullid))
613 meta["copyrev"] = hex(manifest1.get(cp, nullid))
614 elif fp1 != nullid: # copied on local side, reversed
614 elif fp1 != nullid: # copied on local side, reversed
615 meta["copyrev"] = hex(manifest2.get(cp))
615 meta["copyrev"] = hex(manifest2.get(cp))
616 fp2 = nullid
616 fp2 = nullid
617 else: # directory rename
617 else: # directory rename
618 meta["copyrev"] = hex(manifest1.get(cp, nullid))
618 meta["copyrev"] = hex(manifest1.get(cp, nullid))
619 self.ui.debug(_(" %s: copy %s:%s\n") %
619 self.ui.debug(_(" %s: copy %s:%s\n") %
620 (fn, cp, meta["copyrev"]))
620 (fn, cp, meta["copyrev"]))
621 fp1 = nullid
621 fp1 = nullid
622 elif fp2 != nullid:
622 elif fp2 != nullid:
623 # is one parent an ancestor of the other?
623 # is one parent an ancestor of the other?
624 fpa = fl.ancestor(fp1, fp2)
624 fpa = fl.ancestor(fp1, fp2)
625 if fpa == fp1:
625 if fpa == fp1:
626 fp1, fp2 = fp2, nullid
626 fp1, fp2 = fp2, nullid
627 elif fpa == fp2:
627 elif fpa == fp2:
628 fp2 = nullid
628 fp2 = nullid
629
629
630 # is the file unmodified from the parent? report existing entry
630 # is the file unmodified from the parent? report existing entry
631 if fp2 == nullid and not fl.cmp(fp1, t):
631 if fp2 == nullid and not fl.cmp(fp1, t):
632 return fp1
632 return fp1
633
633
634 changelist.append(fn)
634 changelist.append(fn)
635 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
635 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
636
636
637 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
637 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
638 if p1 is None:
638 if p1 is None:
639 p1, p2 = self.dirstate.parents()
639 p1, p2 = self.dirstate.parents()
640 return self.commit(files=files, text=text, user=user, date=date,
640 return self.commit(files=files, text=text, user=user, date=date,
641 p1=p1, p2=p2, wlock=wlock, extra=extra)
641 p1=p1, p2=p2, wlock=wlock, extra=extra)
642
642
643 def commit(self, files=None, text="", user=None, date=None,
643 def commit(self, files=None, text="", user=None, date=None,
644 match=util.always, force=False, lock=None, wlock=None,
644 match=util.always, force=False, lock=None, wlock=None,
645 force_editor=False, p1=None, p2=None, extra={}):
645 force_editor=False, p1=None, p2=None, extra={}):
646
646
647 commit = []
647 commit = []
648 remove = []
648 remove = []
649 changed = []
649 changed = []
650 use_dirstate = (p1 is None) # not rawcommit
650 use_dirstate = (p1 is None) # not rawcommit
651 extra = extra.copy()
651 extra = extra.copy()
652
652
653 if use_dirstate:
653 if use_dirstate:
654 if files:
654 if files:
655 for f in files:
655 for f in files:
656 s = self.dirstate.state(f)
656 s = self.dirstate.state(f)
657 if s in 'nmai':
657 if s in 'nmai':
658 commit.append(f)
658 commit.append(f)
659 elif s == 'r':
659 elif s == 'r':
660 remove.append(f)
660 remove.append(f)
661 else:
661 else:
662 self.ui.warn(_("%s not tracked!\n") % f)
662 self.ui.warn(_("%s not tracked!\n") % f)
663 else:
663 else:
664 changes = self.status(match=match)[:5]
664 changes = self.status(match=match)[:5]
665 modified, added, removed, deleted, unknown = changes
665 modified, added, removed, deleted, unknown = changes
666 commit = modified + added
666 commit = modified + added
667 remove = removed
667 remove = removed
668 else:
668 else:
669 commit = files
669 commit = files
670
670
671 if use_dirstate:
671 if use_dirstate:
672 p1, p2 = self.dirstate.parents()
672 p1, p2 = self.dirstate.parents()
673 update_dirstate = True
673 update_dirstate = True
674 else:
674 else:
675 p1, p2 = p1, p2 or nullid
675 p1, p2 = p1, p2 or nullid
676 update_dirstate = (self.dirstate.parents()[0] == p1)
676 update_dirstate = (self.dirstate.parents()[0] == p1)
677
677
678 c1 = self.changelog.read(p1)
678 c1 = self.changelog.read(p1)
679 c2 = self.changelog.read(p2)
679 c2 = self.changelog.read(p2)
680 m1 = self.manifest.read(c1[0]).copy()
680 m1 = self.manifest.read(c1[0]).copy()
681 m2 = self.manifest.read(c2[0])
681 m2 = self.manifest.read(c2[0])
682
682
683 if use_dirstate:
683 if use_dirstate:
684 branchname = self.workingctx().branch()
684 branchname = self.workingctx().branch()
685 try:
685 try:
686 branchname = branchname.decode('UTF-8').encode('UTF-8')
686 branchname = branchname.decode('UTF-8').encode('UTF-8')
687 except UnicodeDecodeError:
687 except UnicodeDecodeError:
688 raise util.Abort(_('branch name not in UTF-8!'))
688 raise util.Abort(_('branch name not in UTF-8!'))
689 else:
689 else:
690 branchname = ""
690 branchname = ""
691
691
692 if use_dirstate:
692 if use_dirstate:
693 oldname = c1[5].get("branch", "") # stored in UTF-8
693 oldname = c1[5].get("branch", "") # stored in UTF-8
694 if not commit and not remove and not force and p2 == nullid and \
694 if not commit and not remove and not force and p2 == nullid and \
695 branchname == oldname:
695 branchname == oldname:
696 self.ui.status(_("nothing changed\n"))
696 self.ui.status(_("nothing changed\n"))
697 return None
697 return None
698
698
699 xp1 = hex(p1)
699 xp1 = hex(p1)
700 if p2 == nullid: xp2 = ''
700 if p2 == nullid: xp2 = ''
701 else: xp2 = hex(p2)
701 else: xp2 = hex(p2)
702
702
703 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
703 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
704
704
705 if not wlock:
705 if not wlock:
706 wlock = self.wlock()
706 wlock = self.wlock()
707 if not lock:
707 if not lock:
708 lock = self.lock()
708 lock = self.lock()
709 tr = self.transaction()
709 tr = self.transaction()
710
710
711 # check in files
711 # check in files
712 new = {}
712 new = {}
713 linkrev = self.changelog.count()
713 linkrev = self.changelog.count()
714 commit.sort()
714 commit.sort()
715 is_exec = util.execfunc(self.root, m1.execf)
715 for f in commit:
716 for f in commit:
716 self.ui.note(f + "\n")
717 self.ui.note(f + "\n")
717 try:
718 try:
718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
719 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
719 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
720 m1.set(f, is_exec(f))
720 except IOError:
721 except IOError:
721 if use_dirstate:
722 if use_dirstate:
722 self.ui.warn(_("trouble committing %s!\n") % f)
723 self.ui.warn(_("trouble committing %s!\n") % f)
723 raise
724 raise
724 else:
725 else:
725 remove.append(f)
726 remove.append(f)
726
727
727 # update manifest
728 # update manifest
728 m1.update(new)
729 m1.update(new)
729 remove.sort()
730 remove.sort()
730 removed = []
731 removed = []
731
732
732 for f in remove:
733 for f in remove:
733 if f in m1:
734 if f in m1:
734 del m1[f]
735 del m1[f]
735 removed.append(f)
736 removed.append(f)
736 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
737 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
737
738
738 # add changeset
739 # add changeset
739 new = new.keys()
740 new = new.keys()
740 new.sort()
741 new.sort()
741
742
742 user = user or self.ui.username()
743 user = user or self.ui.username()
743 if not text or force_editor:
744 if not text or force_editor:
744 edittext = []
745 edittext = []
745 if text:
746 if text:
746 edittext.append(text)
747 edittext.append(text)
747 edittext.append("")
748 edittext.append("")
748 edittext.append("HG: user: %s" % user)
749 edittext.append("HG: user: %s" % user)
749 if p2 != nullid:
750 if p2 != nullid:
750 edittext.append("HG: branch merge")
751 edittext.append("HG: branch merge")
751 edittext.extend(["HG: changed %s" % f for f in changed])
752 edittext.extend(["HG: changed %s" % f for f in changed])
752 edittext.extend(["HG: removed %s" % f for f in removed])
753 edittext.extend(["HG: removed %s" % f for f in removed])
753 if not changed and not remove:
754 if not changed and not remove:
754 edittext.append("HG: no files changed")
755 edittext.append("HG: no files changed")
755 edittext.append("")
756 edittext.append("")
756 # run editor in the repository root
757 # run editor in the repository root
757 olddir = os.getcwd()
758 olddir = os.getcwd()
758 os.chdir(self.root)
759 os.chdir(self.root)
759 text = self.ui.edit("\n".join(edittext), user)
760 text = self.ui.edit("\n".join(edittext), user)
760 os.chdir(olddir)
761 os.chdir(olddir)
761
762
762 lines = [line.rstrip() for line in text.rstrip().splitlines()]
763 lines = [line.rstrip() for line in text.rstrip().splitlines()]
763 while lines and not lines[0]:
764 while lines and not lines[0]:
764 del lines[0]
765 del lines[0]
765 if not lines:
766 if not lines:
766 return None
767 return None
767 text = '\n'.join(lines)
768 text = '\n'.join(lines)
768 if branchname:
769 if branchname:
769 extra["branch"] = branchname
770 extra["branch"] = branchname
770 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
771 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
771 user, date, extra)
772 user, date, extra)
772 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
773 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
773 parent2=xp2)
774 parent2=xp2)
774 tr.close()
775 tr.close()
775
776
776 if use_dirstate or update_dirstate:
777 if use_dirstate or update_dirstate:
777 self.dirstate.setparents(n)
778 self.dirstate.setparents(n)
778 if use_dirstate:
779 if use_dirstate:
779 self.dirstate.update(new, "n")
780 self.dirstate.update(new, "n")
780 self.dirstate.forget(removed)
781 self.dirstate.forget(removed)
781
782
782 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
783 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
783 return n
784 return n
784
785
785 def walk(self, node=None, files=[], match=util.always, badmatch=None):
786 def walk(self, node=None, files=[], match=util.always, badmatch=None):
786 '''
787 '''
787 walk recursively through the directory tree or a given
788 walk recursively through the directory tree or a given
788 changeset, finding all files matched by the match
789 changeset, finding all files matched by the match
789 function
790 function
790
791
791 results are yielded in a tuple (src, filename), where src
792 results are yielded in a tuple (src, filename), where src
792 is one of:
793 is one of:
793 'f' the file was found in the directory tree
794 'f' the file was found in the directory tree
794 'm' the file was only in the dirstate and not in the tree
795 'm' the file was only in the dirstate and not in the tree
795 'b' file was not found and matched badmatch
796 'b' file was not found and matched badmatch
796 '''
797 '''
797
798
798 if node:
799 if node:
799 fdict = dict.fromkeys(files)
800 fdict = dict.fromkeys(files)
800 for fn in self.manifest.read(self.changelog.read(node)[0]):
801 for fn in self.manifest.read(self.changelog.read(node)[0]):
801 for ffn in fdict:
802 for ffn in fdict:
802 # match if the file is the exact name or a directory
803 # match if the file is the exact name or a directory
803 if ffn == fn or fn.startswith("%s/" % ffn):
804 if ffn == fn or fn.startswith("%s/" % ffn):
804 del fdict[ffn]
805 del fdict[ffn]
805 break
806 break
806 if match(fn):
807 if match(fn):
807 yield 'm', fn
808 yield 'm', fn
808 for fn in fdict:
809 for fn in fdict:
809 if badmatch and badmatch(fn):
810 if badmatch and badmatch(fn):
810 if match(fn):
811 if match(fn):
811 yield 'b', fn
812 yield 'b', fn
812 else:
813 else:
813 self.ui.warn(_('%s: No such file in rev %s\n') % (
814 self.ui.warn(_('%s: No such file in rev %s\n') % (
814 util.pathto(self.getcwd(), fn), short(node)))
815 util.pathto(self.getcwd(), fn), short(node)))
815 else:
816 else:
816 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
817 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
817 yield src, fn
818 yield src, fn
818
819
819 def status(self, node1=None, node2=None, files=[], match=util.always,
820 def status(self, node1=None, node2=None, files=[], match=util.always,
820 wlock=None, list_ignored=False, list_clean=False):
821 wlock=None, list_ignored=False, list_clean=False):
821 """return status of files between two nodes or node and working directory
822 """return status of files between two nodes or node and working directory
822
823
823 If node1 is None, use the first dirstate parent instead.
824 If node1 is None, use the first dirstate parent instead.
824 If node2 is None, compare node1 with working directory.
825 If node2 is None, compare node1 with working directory.
825 """
826 """
826
827
827 def fcmp(fn, mf):
828 def fcmp(fn, mf):
828 t1 = self.wread(fn)
829 t1 = self.wread(fn)
829 return self.file(fn).cmp(mf.get(fn, nullid), t1)
830 return self.file(fn).cmp(mf.get(fn, nullid), t1)
830
831
831 def mfmatches(node):
832 def mfmatches(node):
832 change = self.changelog.read(node)
833 change = self.changelog.read(node)
833 mf = self.manifest.read(change[0]).copy()
834 mf = self.manifest.read(change[0]).copy()
834 for fn in mf.keys():
835 for fn in mf.keys():
835 if not match(fn):
836 if not match(fn):
836 del mf[fn]
837 del mf[fn]
837 return mf
838 return mf
838
839
839 modified, added, removed, deleted, unknown = [], [], [], [], []
840 modified, added, removed, deleted, unknown = [], [], [], [], []
840 ignored, clean = [], []
841 ignored, clean = [], []
841
842
842 compareworking = False
843 compareworking = False
843 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
844 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
844 compareworking = True
845 compareworking = True
845
846
846 if not compareworking:
847 if not compareworking:
847 # read the manifest from node1 before the manifest from node2,
848 # read the manifest from node1 before the manifest from node2,
848 # so that we'll hit the manifest cache if we're going through
849 # so that we'll hit the manifest cache if we're going through
849 # all the revisions in parent->child order.
850 # all the revisions in parent->child order.
850 mf1 = mfmatches(node1)
851 mf1 = mfmatches(node1)
851
852
852 # are we comparing the working directory?
853 # are we comparing the working directory?
853 if not node2:
854 if not node2:
854 if not wlock:
855 if not wlock:
855 try:
856 try:
856 wlock = self.wlock(wait=0)
857 wlock = self.wlock(wait=0)
857 except lock.LockException:
858 except lock.LockException:
858 wlock = None
859 wlock = None
859 (lookup, modified, added, removed, deleted, unknown,
860 (lookup, modified, added, removed, deleted, unknown,
860 ignored, clean) = self.dirstate.status(files, match,
861 ignored, clean) = self.dirstate.status(files, match,
861 list_ignored, list_clean)
862 list_ignored, list_clean)
862
863
863 # are we comparing working dir against its parent?
864 # are we comparing working dir against its parent?
864 if compareworking:
865 if compareworking:
865 if lookup:
866 if lookup:
866 # do a full compare of any files that might have changed
867 # do a full compare of any files that might have changed
867 mf2 = mfmatches(self.dirstate.parents()[0])
868 mf2 = mfmatches(self.dirstate.parents()[0])
868 for f in lookup:
869 for f in lookup:
869 if fcmp(f, mf2):
870 if fcmp(f, mf2):
870 modified.append(f)
871 modified.append(f)
871 else:
872 else:
872 clean.append(f)
873 clean.append(f)
873 if wlock is not None:
874 if wlock is not None:
874 self.dirstate.update([f], "n")
875 self.dirstate.update([f], "n")
875 else:
876 else:
876 # we are comparing working dir against non-parent
877 # we are comparing working dir against non-parent
877 # generate a pseudo-manifest for the working dir
878 # generate a pseudo-manifest for the working dir
878 # XXX: create it in dirstate.py ?
879 # XXX: create it in dirstate.py ?
879 mf2 = mfmatches(self.dirstate.parents()[0])
880 mf2 = mfmatches(self.dirstate.parents()[0])
881 is_exec = util.execfunc(self.root, mf2.execf)
880 for f in lookup + modified + added:
882 for f in lookup + modified + added:
881 mf2[f] = ""
883 mf2[f] = ""
882 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
884 mf2.set(f, is_exec(f))
883 for f in removed:
885 for f in removed:
884 if f in mf2:
886 if f in mf2:
885 del mf2[f]
887 del mf2[f]
886 else:
888 else:
887 # we are comparing two revisions
889 # we are comparing two revisions
888 mf2 = mfmatches(node2)
890 mf2 = mfmatches(node2)
889
891
890 if not compareworking:
892 if not compareworking:
891 # flush lists from dirstate before comparing manifests
893 # flush lists from dirstate before comparing manifests
892 modified, added, clean = [], [], []
894 modified, added, clean = [], [], []
893
895
894 # make sure to sort the files so we talk to the disk in a
896 # make sure to sort the files so we talk to the disk in a
895 # reasonable order
897 # reasonable order
896 mf2keys = mf2.keys()
898 mf2keys = mf2.keys()
897 mf2keys.sort()
899 mf2keys.sort()
898 for fn in mf2keys:
900 for fn in mf2keys:
899 if mf1.has_key(fn):
901 if mf1.has_key(fn):
900 if mf1.flags(fn) != mf2.flags(fn) or \
902 if mf1.flags(fn) != mf2.flags(fn) or \
901 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
903 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
902 modified.append(fn)
904 modified.append(fn)
903 elif list_clean:
905 elif list_clean:
904 clean.append(fn)
906 clean.append(fn)
905 del mf1[fn]
907 del mf1[fn]
906 else:
908 else:
907 added.append(fn)
909 added.append(fn)
908
910
909 removed = mf1.keys()
911 removed = mf1.keys()
910
912
911 # sort and return results:
913 # sort and return results:
912 for l in modified, added, removed, deleted, unknown, ignored, clean:
914 for l in modified, added, removed, deleted, unknown, ignored, clean:
913 l.sort()
915 l.sort()
914 return (modified, added, removed, deleted, unknown, ignored, clean)
916 return (modified, added, removed, deleted, unknown, ignored, clean)
915
917
916 def add(self, list, wlock=None):
918 def add(self, list, wlock=None):
917 if not wlock:
919 if not wlock:
918 wlock = self.wlock()
920 wlock = self.wlock()
919 for f in list:
921 for f in list:
920 p = self.wjoin(f)
922 p = self.wjoin(f)
921 if not os.path.exists(p):
923 if not os.path.exists(p):
922 self.ui.warn(_("%s does not exist!\n") % f)
924 self.ui.warn(_("%s does not exist!\n") % f)
923 elif not os.path.isfile(p):
925 elif not os.path.isfile(p):
924 self.ui.warn(_("%s not added: only files supported currently\n")
926 self.ui.warn(_("%s not added: only files supported currently\n")
925 % f)
927 % f)
926 elif self.dirstate.state(f) in 'an':
928 elif self.dirstate.state(f) in 'an':
927 self.ui.warn(_("%s already tracked!\n") % f)
929 self.ui.warn(_("%s already tracked!\n") % f)
928 else:
930 else:
929 self.dirstate.update([f], "a")
931 self.dirstate.update([f], "a")
930
932
931 def forget(self, list, wlock=None):
933 def forget(self, list, wlock=None):
932 if not wlock:
934 if not wlock:
933 wlock = self.wlock()
935 wlock = self.wlock()
934 for f in list:
936 for f in list:
935 if self.dirstate.state(f) not in 'ai':
937 if self.dirstate.state(f) not in 'ai':
936 self.ui.warn(_("%s not added!\n") % f)
938 self.ui.warn(_("%s not added!\n") % f)
937 else:
939 else:
938 self.dirstate.forget([f])
940 self.dirstate.forget([f])
939
941
940 def remove(self, list, unlink=False, wlock=None):
942 def remove(self, list, unlink=False, wlock=None):
941 if unlink:
943 if unlink:
942 for f in list:
944 for f in list:
943 try:
945 try:
944 util.unlink(self.wjoin(f))
946 util.unlink(self.wjoin(f))
945 except OSError, inst:
947 except OSError, inst:
946 if inst.errno != errno.ENOENT:
948 if inst.errno != errno.ENOENT:
947 raise
949 raise
948 if not wlock:
950 if not wlock:
949 wlock = self.wlock()
951 wlock = self.wlock()
950 for f in list:
952 for f in list:
951 p = self.wjoin(f)
953 p = self.wjoin(f)
952 if os.path.exists(p):
954 if os.path.exists(p):
953 self.ui.warn(_("%s still exists!\n") % f)
955 self.ui.warn(_("%s still exists!\n") % f)
954 elif self.dirstate.state(f) == 'a':
956 elif self.dirstate.state(f) == 'a':
955 self.dirstate.forget([f])
957 self.dirstate.forget([f])
956 elif f not in self.dirstate:
958 elif f not in self.dirstate:
957 self.ui.warn(_("%s not tracked!\n") % f)
959 self.ui.warn(_("%s not tracked!\n") % f)
958 else:
960 else:
959 self.dirstate.update([f], "r")
961 self.dirstate.update([f], "r")
960
962
961 def undelete(self, list, wlock=None):
963 def undelete(self, list, wlock=None):
962 p = self.dirstate.parents()[0]
964 p = self.dirstate.parents()[0]
963 mn = self.changelog.read(p)[0]
965 mn = self.changelog.read(p)[0]
964 m = self.manifest.read(mn)
966 m = self.manifest.read(mn)
965 if not wlock:
967 if not wlock:
966 wlock = self.wlock()
968 wlock = self.wlock()
967 for f in list:
969 for f in list:
968 if self.dirstate.state(f) not in "r":
970 if self.dirstate.state(f) not in "r":
969 self.ui.warn("%s not removed!\n" % f)
971 self.ui.warn("%s not removed!\n" % f)
970 else:
972 else:
971 t = self.file(f).read(m[f])
973 t = self.file(f).read(m[f])
972 self.wwrite(f, t)
974 self.wwrite(f, t)
973 util.set_exec(self.wjoin(f), m.execf(f))
975 util.set_exec(self.wjoin(f), m.execf(f))
974 self.dirstate.update([f], "n")
976 self.dirstate.update([f], "n")
975
977
976 def copy(self, source, dest, wlock=None):
978 def copy(self, source, dest, wlock=None):
977 p = self.wjoin(dest)
979 p = self.wjoin(dest)
978 if not os.path.exists(p):
980 if not os.path.exists(p):
979 self.ui.warn(_("%s does not exist!\n") % dest)
981 self.ui.warn(_("%s does not exist!\n") % dest)
980 elif not os.path.isfile(p):
982 elif not os.path.isfile(p):
981 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
983 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
982 else:
984 else:
983 if not wlock:
985 if not wlock:
984 wlock = self.wlock()
986 wlock = self.wlock()
985 if self.dirstate.state(dest) == '?':
987 if self.dirstate.state(dest) == '?':
986 self.dirstate.update([dest], "a")
988 self.dirstate.update([dest], "a")
987 self.dirstate.copy(source, dest)
989 self.dirstate.copy(source, dest)
988
990
989 def heads(self, start=None):
991 def heads(self, start=None):
990 heads = self.changelog.heads(start)
992 heads = self.changelog.heads(start)
991 # sort the output in rev descending order
993 # sort the output in rev descending order
992 heads = [(-self.changelog.rev(h), h) for h in heads]
994 heads = [(-self.changelog.rev(h), h) for h in heads]
993 heads.sort()
995 heads.sort()
994 return [n for (r, n) in heads]
996 return [n for (r, n) in heads]
995
997
996 def branches(self, nodes):
998 def branches(self, nodes):
997 if not nodes:
999 if not nodes:
998 nodes = [self.changelog.tip()]
1000 nodes = [self.changelog.tip()]
999 b = []
1001 b = []
1000 for n in nodes:
1002 for n in nodes:
1001 t = n
1003 t = n
1002 while 1:
1004 while 1:
1003 p = self.changelog.parents(n)
1005 p = self.changelog.parents(n)
1004 if p[1] != nullid or p[0] == nullid:
1006 if p[1] != nullid or p[0] == nullid:
1005 b.append((t, n, p[0], p[1]))
1007 b.append((t, n, p[0], p[1]))
1006 break
1008 break
1007 n = p[0]
1009 n = p[0]
1008 return b
1010 return b
1009
1011
1010 def between(self, pairs):
1012 def between(self, pairs):
1011 r = []
1013 r = []
1012
1014
1013 for top, bottom in pairs:
1015 for top, bottom in pairs:
1014 n, l, i = top, [], 0
1016 n, l, i = top, [], 0
1015 f = 1
1017 f = 1
1016
1018
1017 while n != bottom:
1019 while n != bottom:
1018 p = self.changelog.parents(n)[0]
1020 p = self.changelog.parents(n)[0]
1019 if i == f:
1021 if i == f:
1020 l.append(n)
1022 l.append(n)
1021 f = f * 2
1023 f = f * 2
1022 n = p
1024 n = p
1023 i += 1
1025 i += 1
1024
1026
1025 r.append(l)
1027 r.append(l)
1026
1028
1027 return r
1029 return r
1028
1030
1029 def findincoming(self, remote, base=None, heads=None, force=False):
1031 def findincoming(self, remote, base=None, heads=None, force=False):
1030 """Return list of roots of the subsets of missing nodes from remote
1032 """Return list of roots of the subsets of missing nodes from remote
1031
1033
1032 If base dict is specified, assume that these nodes and their parents
1034 If base dict is specified, assume that these nodes and their parents
1033 exist on the remote side and that no child of a node of base exists
1035 exist on the remote side and that no child of a node of base exists
1034 in both remote and self.
1036 in both remote and self.
1035 Furthermore base will be updated to include the nodes that exists
1037 Furthermore base will be updated to include the nodes that exists
1036 in self and remote but no children exists in self and remote.
1038 in self and remote but no children exists in self and remote.
1037 If a list of heads is specified, return only nodes which are heads
1039 If a list of heads is specified, return only nodes which are heads
1038 or ancestors of these heads.
1040 or ancestors of these heads.
1039
1041
1040 All the ancestors of base are in self and in remote.
1042 All the ancestors of base are in self and in remote.
1041 All the descendants of the list returned are missing in self.
1043 All the descendants of the list returned are missing in self.
1042 (and so we know that the rest of the nodes are missing in remote, see
1044 (and so we know that the rest of the nodes are missing in remote, see
1043 outgoing)
1045 outgoing)
1044 """
1046 """
1045 m = self.changelog.nodemap
1047 m = self.changelog.nodemap
1046 search = []
1048 search = []
1047 fetch = {}
1049 fetch = {}
1048 seen = {}
1050 seen = {}
1049 seenbranch = {}
1051 seenbranch = {}
1050 if base == None:
1052 if base == None:
1051 base = {}
1053 base = {}
1052
1054
1053 if not heads:
1055 if not heads:
1054 heads = remote.heads()
1056 heads = remote.heads()
1055
1057
1056 if self.changelog.tip() == nullid:
1058 if self.changelog.tip() == nullid:
1057 base[nullid] = 1
1059 base[nullid] = 1
1058 if heads != [nullid]:
1060 if heads != [nullid]:
1059 return [nullid]
1061 return [nullid]
1060 return []
1062 return []
1061
1063
1062 # assume we're closer to the tip than the root
1064 # assume we're closer to the tip than the root
1063 # and start by examining the heads
1065 # and start by examining the heads
1064 self.ui.status(_("searching for changes\n"))
1066 self.ui.status(_("searching for changes\n"))
1065
1067
1066 unknown = []
1068 unknown = []
1067 for h in heads:
1069 for h in heads:
1068 if h not in m:
1070 if h not in m:
1069 unknown.append(h)
1071 unknown.append(h)
1070 else:
1072 else:
1071 base[h] = 1
1073 base[h] = 1
1072
1074
1073 if not unknown:
1075 if not unknown:
1074 return []
1076 return []
1075
1077
1076 req = dict.fromkeys(unknown)
1078 req = dict.fromkeys(unknown)
1077 reqcnt = 0
1079 reqcnt = 0
1078
1080
1079 # search through remote branches
1081 # search through remote branches
1080 # a 'branch' here is a linear segment of history, with four parts:
1082 # a 'branch' here is a linear segment of history, with four parts:
1081 # head, root, first parent, second parent
1083 # head, root, first parent, second parent
1082 # (a branch always has two parents (or none) by definition)
1084 # (a branch always has two parents (or none) by definition)
1083 unknown = remote.branches(unknown)
1085 unknown = remote.branches(unknown)
1084 while unknown:
1086 while unknown:
1085 r = []
1087 r = []
1086 while unknown:
1088 while unknown:
1087 n = unknown.pop(0)
1089 n = unknown.pop(0)
1088 if n[0] in seen:
1090 if n[0] in seen:
1089 continue
1091 continue
1090
1092
1091 self.ui.debug(_("examining %s:%s\n")
1093 self.ui.debug(_("examining %s:%s\n")
1092 % (short(n[0]), short(n[1])))
1094 % (short(n[0]), short(n[1])))
1093 if n[0] == nullid: # found the end of the branch
1095 if n[0] == nullid: # found the end of the branch
1094 pass
1096 pass
1095 elif n in seenbranch:
1097 elif n in seenbranch:
1096 self.ui.debug(_("branch already found\n"))
1098 self.ui.debug(_("branch already found\n"))
1097 continue
1099 continue
1098 elif n[1] and n[1] in m: # do we know the base?
1100 elif n[1] and n[1] in m: # do we know the base?
1099 self.ui.debug(_("found incomplete branch %s:%s\n")
1101 self.ui.debug(_("found incomplete branch %s:%s\n")
1100 % (short(n[0]), short(n[1])))
1102 % (short(n[0]), short(n[1])))
1101 search.append(n) # schedule branch range for scanning
1103 search.append(n) # schedule branch range for scanning
1102 seenbranch[n] = 1
1104 seenbranch[n] = 1
1103 else:
1105 else:
1104 if n[1] not in seen and n[1] not in fetch:
1106 if n[1] not in seen and n[1] not in fetch:
1105 if n[2] in m and n[3] in m:
1107 if n[2] in m and n[3] in m:
1106 self.ui.debug(_("found new changeset %s\n") %
1108 self.ui.debug(_("found new changeset %s\n") %
1107 short(n[1]))
1109 short(n[1]))
1108 fetch[n[1]] = 1 # earliest unknown
1110 fetch[n[1]] = 1 # earliest unknown
1109 for p in n[2:4]:
1111 for p in n[2:4]:
1110 if p in m:
1112 if p in m:
1111 base[p] = 1 # latest known
1113 base[p] = 1 # latest known
1112
1114
1113 for p in n[2:4]:
1115 for p in n[2:4]:
1114 if p not in req and p not in m:
1116 if p not in req and p not in m:
1115 r.append(p)
1117 r.append(p)
1116 req[p] = 1
1118 req[p] = 1
1117 seen[n[0]] = 1
1119 seen[n[0]] = 1
1118
1120
1119 if r:
1121 if r:
1120 reqcnt += 1
1122 reqcnt += 1
1121 self.ui.debug(_("request %d: %s\n") %
1123 self.ui.debug(_("request %d: %s\n") %
1122 (reqcnt, " ".join(map(short, r))))
1124 (reqcnt, " ".join(map(short, r))))
1123 for p in xrange(0, len(r), 10):
1125 for p in xrange(0, len(r), 10):
1124 for b in remote.branches(r[p:p+10]):
1126 for b in remote.branches(r[p:p+10]):
1125 self.ui.debug(_("received %s:%s\n") %
1127 self.ui.debug(_("received %s:%s\n") %
1126 (short(b[0]), short(b[1])))
1128 (short(b[0]), short(b[1])))
1127 unknown.append(b)
1129 unknown.append(b)
1128
1130
1129 # do binary search on the branches we found
1131 # do binary search on the branches we found
1130 while search:
1132 while search:
1131 n = search.pop(0)
1133 n = search.pop(0)
1132 reqcnt += 1
1134 reqcnt += 1
1133 l = remote.between([(n[0], n[1])])[0]
1135 l = remote.between([(n[0], n[1])])[0]
1134 l.append(n[1])
1136 l.append(n[1])
1135 p = n[0]
1137 p = n[0]
1136 f = 1
1138 f = 1
1137 for i in l:
1139 for i in l:
1138 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1140 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1139 if i in m:
1141 if i in m:
1140 if f <= 2:
1142 if f <= 2:
1141 self.ui.debug(_("found new branch changeset %s\n") %
1143 self.ui.debug(_("found new branch changeset %s\n") %
1142 short(p))
1144 short(p))
1143 fetch[p] = 1
1145 fetch[p] = 1
1144 base[i] = 1
1146 base[i] = 1
1145 else:
1147 else:
1146 self.ui.debug(_("narrowed branch search to %s:%s\n")
1148 self.ui.debug(_("narrowed branch search to %s:%s\n")
1147 % (short(p), short(i)))
1149 % (short(p), short(i)))
1148 search.append((p, i))
1150 search.append((p, i))
1149 break
1151 break
1150 p, f = i, f * 2
1152 p, f = i, f * 2
1151
1153
1152 # sanity check our fetch list
1154 # sanity check our fetch list
1153 for f in fetch.keys():
1155 for f in fetch.keys():
1154 if f in m:
1156 if f in m:
1155 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1157 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1156
1158
1157 if base.keys() == [nullid]:
1159 if base.keys() == [nullid]:
1158 if force:
1160 if force:
1159 self.ui.warn(_("warning: repository is unrelated\n"))
1161 self.ui.warn(_("warning: repository is unrelated\n"))
1160 else:
1162 else:
1161 raise util.Abort(_("repository is unrelated"))
1163 raise util.Abort(_("repository is unrelated"))
1162
1164
1163 self.ui.debug(_("found new changesets starting at ") +
1165 self.ui.debug(_("found new changesets starting at ") +
1164 " ".join([short(f) for f in fetch]) + "\n")
1166 " ".join([short(f) for f in fetch]) + "\n")
1165
1167
1166 self.ui.debug(_("%d total queries\n") % reqcnt)
1168 self.ui.debug(_("%d total queries\n") % reqcnt)
1167
1169
1168 return fetch.keys()
1170 return fetch.keys()
1169
1171
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    # fix: compare to None with 'is' (was 'base == None', PEP 8 E711)
    if base is None:
        base = {}
    # findincoming fills 'base' with nodes known to both repositories
    self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    # start from every node we have, then prune what the remote has
    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    # fix: take an explicit list copy before the destructive pop(0)
    # below (identical behavior on Python 2, where keys() is a list)
    remove = list(base.keys())
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, updated_heads.keys()
    else:
        return subset
1219
def pull(self, remote, heads=None, force=False, lock=None):
    """Pull changes from remote into this repository.

    Takes the repository lock unless the caller passed one in; the
    lock is released on exit only when it was acquired here.
    """
    ownlock = not lock
    if ownlock:
        lock = self.lock()
    try:
        fetch = self.findincoming(remote, force=force)
        if fetch == [nullid]:
            self.ui.status(_("requesting all changes\n"))

        if not fetch:
            self.ui.status(_("no changes found\n"))
            return 0

        if heads is None:
            cg = remote.changegroup(fetch, 'pull')
        elif 'changegroupsubset' not in remote.capabilities:
            # partial pulls need server-side support
            raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
        else:
            cg = remote.changegroupsubset(fetch, heads, 'pull')
        return self.addchangegroup(cg, 'pull', remote.url())
    finally:
        if ownlock:
            lock.release()
1243
1245
def push(self, remote, force=False, revs=None):
    """Push outgoing changesets to remote.

    Two transports exist:

    - addchangegroup assumes the local user can lock the remote
      repo (local filesystem, old ssh servers);
    - unbundle assumes the local user cannot lock the remote repo
      (new ssh servers, http servers).

    Dispatch on the remote's advertised capabilities.
    """
    if not remote.capable('unbundle'):
        return self.push_addchangegroup(remote, force, revs)
    return self.push_unbundle(remote, force, revs)
1256
1258
def prepush(self, remote, force, revs):
    """Compute the changegroup to push and sanity-check the push.

    Returns (changegroup, remote_heads) when there is something to
    send, or (None, 1) when nothing needs pushing or the push would
    create new remote heads and force is not set.
    """
    common = {}
    remote_heads = remote.heads()
    inc = self.findincoming(remote, common, remote_heads, force=force)

    update, updated_heads = self.findoutgoing(remote, common, remote_heads)
    if revs is not None:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
    else:
        bases, heads = update, self.changelog.heads()

    if not bases:
        self.ui.status(_("no changes found\n"))
        return None, 1

    if not force:
        # check if we're creating new remote heads
        # to be a remote head after push, node must be either
        # - unknown locally
        # - a local outgoing head descended from update
        # - a remote head that's known locally and not
        #   ancestral to an outgoing head
        warn = False
        if remote_heads == [nullid]:
            # pushing into an empty repo can never add heads
            warn = False
        elif not revs and len(heads) > len(remote_heads):
            warn = True
        else:
            newheads = list(heads)
            for rh in remote_heads:
                if rh not in self.changelog.nodemap:
                    # head unknown locally: it survives the push
                    newheads.append(rh)
                    continue
                desc = self.changelog.heads(rh, heads)
                if not [h for h in heads if h in desc]:
                    # no outgoing head descends from rh, so it stays a head
                    newheads.append(rh)
            warn = len(newheads) > len(remote_heads)

        if warn:
            self.ui.warn(_("abort: push creates new remote branches!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return None, 1
        if inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
1312
1314
def push_addchangegroup(self, remote, force, revs):
    """Push by locking the remote repo and handing it a changegroup.

    NOTE(review): the remote lock is only bound to a local name and is
    never explicitly released here — it appears to rely on the lock
    object being garbage-collected on return; confirm before changing.
    """
    lock = remote.lock()

    cg, extra = self.prepush(remote, force, revs)
    if cg is None:
        # nothing to push (or push refused): extra is the error code
        return extra
    return remote.addchangegroup(cg, 'push', self.url())
1321
1323
def push_unbundle(self, remote, force, revs):
    """Push via the remote's unbundle capability.

    The local repo finds the heads on the server and works out what
    revs it must push. Once the revs are transferred, if the server
    finds it has different heads (someone else won a commit/push
    race), the server aborts.
    """
    cg, extra = self.prepush(remote, force, revs)
    if cg is None:
        # nothing to push (or push refused): extra is the error code
        return extra
    remote_heads = extra
    if force:
        remote_heads = ['force']
    return remote.unbundle(cg, remote_heads, 'push')
1334
1336
def changegroupinfo(self, nodes):
    """Report how many changesets are outgoing; under --debug, also
    list each one by hash."""
    ui = self.ui
    ui.note(_("%d changesets found\n") % len(nodes))
    if not ui.debugflag:
        return
    ui.debug(_("List of changesets:\n"))
    for n in nodes:
        ui.debug("%s\n" % hex(n))
1341
1343
1342 def changegroupsubset(self, bases, heads, source):
1344 def changegroupsubset(self, bases, heads, source):
1343 """This function generates a changegroup consisting of all the nodes
1345 """This function generates a changegroup consisting of all the nodes
1344 that are descendents of any of the bases, and ancestors of any of
1346 that are descendents of any of the bases, and ancestors of any of
1345 the heads.
1347 the heads.
1346
1348
1347 It is fairly complex as determining which filenodes and which
1349 It is fairly complex as determining which filenodes and which
1348 manifest nodes need to be included for the changeset to be complete
1350 manifest nodes need to be included for the changeset to be complete
1349 is non-trivial.
1351 is non-trivial.
1350
1352
1351 Another wrinkle is doing the reverse, figuring out which changeset in
1353 Another wrinkle is doing the reverse, figuring out which changeset in
1352 the changegroup a particular filenode or manifestnode belongs to."""
1354 the changegroup a particular filenode or manifestnode belongs to."""
1353
1355
1354 self.hook('preoutgoing', throw=True, source=source)
1356 self.hook('preoutgoing', throw=True, source=source)
1355
1357
1356 # Set up some initial variables
1358 # Set up some initial variables
1357 # Make it easy to refer to self.changelog
1359 # Make it easy to refer to self.changelog
1358 cl = self.changelog
1360 cl = self.changelog
1359 # msng is short for missing - compute the list of changesets in this
1361 # msng is short for missing - compute the list of changesets in this
1360 # changegroup.
1362 # changegroup.
1361 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1363 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1362 self.changegroupinfo(msng_cl_lst)
1364 self.changegroupinfo(msng_cl_lst)
1363 # Some bases may turn out to be superfluous, and some heads may be
1365 # Some bases may turn out to be superfluous, and some heads may be
1364 # too. nodesbetween will return the minimal set of bases and heads
1366 # too. nodesbetween will return the minimal set of bases and heads
1365 # necessary to re-create the changegroup.
1367 # necessary to re-create the changegroup.
1366
1368
1367 # Known heads are the list of heads that it is assumed the recipient
1369 # Known heads are the list of heads that it is assumed the recipient
1368 # of this changegroup will know about.
1370 # of this changegroup will know about.
1369 knownheads = {}
1371 knownheads = {}
1370 # We assume that all parents of bases are known heads.
1372 # We assume that all parents of bases are known heads.
1371 for n in bases:
1373 for n in bases:
1372 for p in cl.parents(n):
1374 for p in cl.parents(n):
1373 if p != nullid:
1375 if p != nullid:
1374 knownheads[p] = 1
1376 knownheads[p] = 1
1375 knownheads = knownheads.keys()
1377 knownheads = knownheads.keys()
1376 if knownheads:
1378 if knownheads:
1377 # Now that we know what heads are known, we can compute which
1379 # Now that we know what heads are known, we can compute which
1378 # changesets are known. The recipient must know about all
1380 # changesets are known. The recipient must know about all
1379 # changesets required to reach the known heads from the null
1381 # changesets required to reach the known heads from the null
1380 # changeset.
1382 # changeset.
1381 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1383 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1382 junk = None
1384 junk = None
1383 # Transform the list into an ersatz set.
1385 # Transform the list into an ersatz set.
1384 has_cl_set = dict.fromkeys(has_cl_set)
1386 has_cl_set = dict.fromkeys(has_cl_set)
1385 else:
1387 else:
1386 # If there were no known heads, the recipient cannot be assumed to
1388 # If there were no known heads, the recipient cannot be assumed to
1387 # know about any changesets.
1389 # know about any changesets.
1388 has_cl_set = {}
1390 has_cl_set = {}
1389
1391
1390 # Make it easy to refer to self.manifest
1392 # Make it easy to refer to self.manifest
1391 mnfst = self.manifest
1393 mnfst = self.manifest
1392 # We don't know which manifests are missing yet
1394 # We don't know which manifests are missing yet
1393 msng_mnfst_set = {}
1395 msng_mnfst_set = {}
1394 # Nor do we know which filenodes are missing.
1396 # Nor do we know which filenodes are missing.
1395 msng_filenode_set = {}
1397 msng_filenode_set = {}
1396
1398
1397 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1399 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1398 junk = None
1400 junk = None
1399
1401
1400 # A changeset always belongs to itself, so the changenode lookup
1402 # A changeset always belongs to itself, so the changenode lookup
1401 # function for a changenode is identity.
1403 # function for a changenode is identity.
1402 def identity(x):
1404 def identity(x):
1403 return x
1405 return x
1404
1406
1405 # A function generating function. Sets up an environment for the
1407 # A function generating function. Sets up an environment for the
1406 # inner function.
1408 # inner function.
1407 def cmp_by_rev_func(revlog):
1409 def cmp_by_rev_func(revlog):
1408 # Compare two nodes by their revision number in the environment's
1410 # Compare two nodes by their revision number in the environment's
1409 # revision history. Since the revision number both represents the
1411 # revision history. Since the revision number both represents the
1410 # most efficient order to read the nodes in, and represents a
1412 # most efficient order to read the nodes in, and represents a
1411 # topological sorting of the nodes, this function is often useful.
1413 # topological sorting of the nodes, this function is often useful.
1412 def cmp_by_rev(a, b):
1414 def cmp_by_rev(a, b):
1413 return cmp(revlog.rev(a), revlog.rev(b))
1415 return cmp(revlog.rev(a), revlog.rev(b))
1414 return cmp_by_rev
1416 return cmp_by_rev
1415
1417
1416 # If we determine that a particular file or manifest node must be a
1418 # If we determine that a particular file or manifest node must be a
1417 # node that the recipient of the changegroup will already have, we can
1419 # node that the recipient of the changegroup will already have, we can
1418 # also assume the recipient will have all the parents. This function
1420 # also assume the recipient will have all the parents. This function
1419 # prunes them from the set of missing nodes.
1421 # prunes them from the set of missing nodes.
1420 def prune_parents(revlog, hasset, msngset):
1422 def prune_parents(revlog, hasset, msngset):
1421 haslst = hasset.keys()
1423 haslst = hasset.keys()
1422 haslst.sort(cmp_by_rev_func(revlog))
1424 haslst.sort(cmp_by_rev_func(revlog))
1423 for node in haslst:
1425 for node in haslst:
1424 parentlst = [p for p in revlog.parents(node) if p != nullid]
1426 parentlst = [p for p in revlog.parents(node) if p != nullid]
1425 while parentlst:
1427 while parentlst:
1426 n = parentlst.pop()
1428 n = parentlst.pop()
1427 if n not in hasset:
1429 if n not in hasset:
1428 hasset[n] = 1
1430 hasset[n] = 1
1429 p = [p for p in revlog.parents(n) if p != nullid]
1431 p = [p for p in revlog.parents(n) if p != nullid]
1430 parentlst.extend(p)
1432 parentlst.extend(p)
1431 for n in hasset:
1433 for n in hasset:
1432 msngset.pop(n, None)
1434 msngset.pop(n, None)
1433
1435
1434 # This is a function generating function used to set up an environment
1436 # This is a function generating function used to set up an environment
1435 # for the inner function to execute in.
1437 # for the inner function to execute in.
1436 def manifest_and_file_collector(changedfileset):
1438 def manifest_and_file_collector(changedfileset):
1437 # This is an information gathering function that gathers
1439 # This is an information gathering function that gathers
1438 # information from each changeset node that goes out as part of
1440 # information from each changeset node that goes out as part of
1439 # the changegroup. The information gathered is a list of which
1441 # the changegroup. The information gathered is a list of which
1440 # manifest nodes are potentially required (the recipient may
1442 # manifest nodes are potentially required (the recipient may
1441 # already have them) and total list of all files which were
1443 # already have them) and total list of all files which were
1442 # changed in any changeset in the changegroup.
1444 # changed in any changeset in the changegroup.
1443 #
1445 #
1444 # We also remember the first changenode we saw any manifest
1446 # We also remember the first changenode we saw any manifest
1445 # referenced by so we can later determine which changenode 'owns'
1447 # referenced by so we can later determine which changenode 'owns'
1446 # the manifest.
1448 # the manifest.
1447 def collect_manifests_and_files(clnode):
1449 def collect_manifests_and_files(clnode):
1448 c = cl.read(clnode)
1450 c = cl.read(clnode)
1449 for f in c[3]:
1451 for f in c[3]:
1450 # This is to make sure we only have one instance of each
1452 # This is to make sure we only have one instance of each
1451 # filename string for each filename.
1453 # filename string for each filename.
1452 changedfileset.setdefault(f, f)
1454 changedfileset.setdefault(f, f)
1453 msng_mnfst_set.setdefault(c[0], clnode)
1455 msng_mnfst_set.setdefault(c[0], clnode)
1454 return collect_manifests_and_files
1456 return collect_manifests_and_files
1455
1457
1456 # Figure out which manifest nodes (of the ones we think might be part
1458 # Figure out which manifest nodes (of the ones we think might be part
1457 # of the changegroup) the recipient must know about and remove them
1459 # of the changegroup) the recipient must know about and remove them
1458 # from the changegroup.
1460 # from the changegroup.
1459 def prune_manifests():
1461 def prune_manifests():
1460 has_mnfst_set = {}
1462 has_mnfst_set = {}
1461 for n in msng_mnfst_set:
1463 for n in msng_mnfst_set:
1462 # If a 'missing' manifest thinks it belongs to a changenode
1464 # If a 'missing' manifest thinks it belongs to a changenode
1463 # the recipient is assumed to have, obviously the recipient
1465 # the recipient is assumed to have, obviously the recipient
1464 # must have that manifest.
1466 # must have that manifest.
1465 linknode = cl.node(mnfst.linkrev(n))
1467 linknode = cl.node(mnfst.linkrev(n))
1466 if linknode in has_cl_set:
1468 if linknode in has_cl_set:
1467 has_mnfst_set[n] = 1
1469 has_mnfst_set[n] = 1
1468 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1470 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1469
1471
1470 # Use the information collected in collect_manifests_and_files to say
1472 # Use the information collected in collect_manifests_and_files to say
1471 # which changenode any manifestnode belongs to.
1473 # which changenode any manifestnode belongs to.
1472 def lookup_manifest_link(mnfstnode):
1474 def lookup_manifest_link(mnfstnode):
1473 return msng_mnfst_set[mnfstnode]
1475 return msng_mnfst_set[mnfstnode]
1474
1476
1475 # A function generating function that sets up the initial environment
1477 # A function generating function that sets up the initial environment
1476 # the inner function.
1478 # the inner function.
1477 def filenode_collector(changedfiles):
1479 def filenode_collector(changedfiles):
1478 next_rev = [0]
1480 next_rev = [0]
1479 # This gathers information from each manifestnode included in the
1481 # This gathers information from each manifestnode included in the
1480 # changegroup about which filenodes the manifest node references
1482 # changegroup about which filenodes the manifest node references
1481 # so we can include those in the changegroup too.
1483 # so we can include those in the changegroup too.
1482 #
1484 #
1483 # It also remembers which changenode each filenode belongs to. It
1485 # It also remembers which changenode each filenode belongs to. It
1484 # does this by assuming the a filenode belongs to the changenode
1486 # does this by assuming the a filenode belongs to the changenode
1485 # the first manifest that references it belongs to.
1487 # the first manifest that references it belongs to.
1486 def collect_msng_filenodes(mnfstnode):
1488 def collect_msng_filenodes(mnfstnode):
1487 r = mnfst.rev(mnfstnode)
1489 r = mnfst.rev(mnfstnode)
1488 if r == next_rev[0]:
1490 if r == next_rev[0]:
1489 # If the last rev we looked at was the one just previous,
1491 # If the last rev we looked at was the one just previous,
1490 # we only need to see a diff.
1492 # we only need to see a diff.
1491 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1493 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1492 # For each line in the delta
1494 # For each line in the delta
1493 for dline in delta.splitlines():
1495 for dline in delta.splitlines():
1494 # get the filename and filenode for that line
1496 # get the filename and filenode for that line
1495 f, fnode = dline.split('\0')
1497 f, fnode = dline.split('\0')
1496 fnode = bin(fnode[:40])
1498 fnode = bin(fnode[:40])
1497 f = changedfiles.get(f, None)
1499 f = changedfiles.get(f, None)
1498 # And if the file is in the list of files we care
1500 # And if the file is in the list of files we care
1499 # about.
1501 # about.
1500 if f is not None:
1502 if f is not None:
1501 # Get the changenode this manifest belongs to
1503 # Get the changenode this manifest belongs to
1502 clnode = msng_mnfst_set[mnfstnode]
1504 clnode = msng_mnfst_set[mnfstnode]
1503 # Create the set of filenodes for the file if
1505 # Create the set of filenodes for the file if
1504 # there isn't one already.
1506 # there isn't one already.
1505 ndset = msng_filenode_set.setdefault(f, {})
1507 ndset = msng_filenode_set.setdefault(f, {})
1506 # And set the filenode's changelog node to the
1508 # And set the filenode's changelog node to the
1507 # manifest's if it hasn't been set already.
1509 # manifest's if it hasn't been set already.
1508 ndset.setdefault(fnode, clnode)
1510 ndset.setdefault(fnode, clnode)
1509 else:
1511 else:
1510 # Otherwise we need a full manifest.
1512 # Otherwise we need a full manifest.
1511 m = mnfst.read(mnfstnode)
1513 m = mnfst.read(mnfstnode)
1512 # For every file in we care about.
1514 # For every file in we care about.
1513 for f in changedfiles:
1515 for f in changedfiles:
1514 fnode = m.get(f, None)
1516 fnode = m.get(f, None)
1515 # If it's in the manifest
1517 # If it's in the manifest
1516 if fnode is not None:
1518 if fnode is not None:
1517 # See comments above.
1519 # See comments above.
1518 clnode = msng_mnfst_set[mnfstnode]
1520 clnode = msng_mnfst_set[mnfstnode]
1519 ndset = msng_filenode_set.setdefault(f, {})
1521 ndset = msng_filenode_set.setdefault(f, {})
1520 ndset.setdefault(fnode, clnode)
1522 ndset.setdefault(fnode, clnode)
1521 # Remember the revision we hope to see next.
1523 # Remember the revision we hope to see next.
1522 next_rev[0] = r + 1
1524 next_rev[0] = r + 1
1523 return collect_msng_filenodes
1525 return collect_msng_filenodes
1524
1526
1525 # We have a list of filenodes we think we need for a file, lets remove
1527 # We have a list of filenodes we think we need for a file, lets remove
1526 # all those we now the recipient must have.
1528 # all those we now the recipient must have.
1527 def prune_filenodes(f, filerevlog):
1529 def prune_filenodes(f, filerevlog):
1528 msngset = msng_filenode_set[f]
1530 msngset = msng_filenode_set[f]
1529 hasset = {}
1531 hasset = {}
1530 # If a 'missing' filenode thinks it belongs to a changenode we
1532 # If a 'missing' filenode thinks it belongs to a changenode we
1531 # assume the recipient must have, then the recipient must have
1533 # assume the recipient must have, then the recipient must have
1532 # that filenode.
1534 # that filenode.
1533 for n in msngset:
1535 for n in msngset:
1534 clnode = cl.node(filerevlog.linkrev(n))
1536 clnode = cl.node(filerevlog.linkrev(n))
1535 if clnode in has_cl_set:
1537 if clnode in has_cl_set:
1536 hasset[n] = 1
1538 hasset[n] = 1
1537 prune_parents(filerevlog, hasset, msngset)
1539 prune_parents(filerevlog, hasset, msngset)
1538
1540
1539 # A function generator function that sets up the a context for the
1541 # A function generator function that sets up the a context for the
1540 # inner function.
1542 # inner function.
1541 def lookup_filenode_link_func(fname):
1543 def lookup_filenode_link_func(fname):
1542 msngset = msng_filenode_set[fname]
1544 msngset = msng_filenode_set[fname]
1543 # Lookup the changenode the filenode belongs to.
1545 # Lookup the changenode the filenode belongs to.
1544 def lookup_filenode_link(fnode):
1546 def lookup_filenode_link(fnode):
1545 return msngset[fnode]
1547 return msngset[fnode]
1546 return lookup_filenode_link
1548 return lookup_filenode_link
1547
1549
1548 # Now that we have all theses utility functions to help out and
1550 # Now that we have all theses utility functions to help out and
1549 # logically divide up the task, generate the group.
1551 # logically divide up the task, generate the group.
1550 def gengroup():
1552 def gengroup():
1551 # The set of changed files starts empty.
1553 # The set of changed files starts empty.
1552 changedfiles = {}
1554 changedfiles = {}
1553 # Create a changenode group generator that will call our functions
1555 # Create a changenode group generator that will call our functions
1554 # back to lookup the owning changenode and collect information.
1556 # back to lookup the owning changenode and collect information.
1555 group = cl.group(msng_cl_lst, identity,
1557 group = cl.group(msng_cl_lst, identity,
1556 manifest_and_file_collector(changedfiles))
1558 manifest_and_file_collector(changedfiles))
1557 for chnk in group:
1559 for chnk in group:
1558 yield chnk
1560 yield chnk
1559
1561
1560 # The list of manifests has been collected by the generator
1562 # The list of manifests has been collected by the generator
1561 # calling our functions back.
1563 # calling our functions back.
1562 prune_manifests()
1564 prune_manifests()
1563 msng_mnfst_lst = msng_mnfst_set.keys()
1565 msng_mnfst_lst = msng_mnfst_set.keys()
1564 # Sort the manifestnodes by revision number.
1566 # Sort the manifestnodes by revision number.
1565 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1567 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1566 # Create a generator for the manifestnodes that calls our lookup
1568 # Create a generator for the manifestnodes that calls our lookup
1567 # and data collection functions back.
1569 # and data collection functions back.
1568 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1570 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1569 filenode_collector(changedfiles))
1571 filenode_collector(changedfiles))
1570 for chnk in group:
1572 for chnk in group:
1571 yield chnk
1573 yield chnk
1572
1574
1573 # These are no longer needed, dereference and toss the memory for
1575 # These are no longer needed, dereference and toss the memory for
1574 # them.
1576 # them.
1575 msng_mnfst_lst = None
1577 msng_mnfst_lst = None
1576 msng_mnfst_set.clear()
1578 msng_mnfst_set.clear()
1577
1579
1578 changedfiles = changedfiles.keys()
1580 changedfiles = changedfiles.keys()
1579 changedfiles.sort()
1581 changedfiles.sort()
1580 # Go through all our files in order sorted by name.
1582 # Go through all our files in order sorted by name.
1581 for fname in changedfiles:
1583 for fname in changedfiles:
1582 filerevlog = self.file(fname)
1584 filerevlog = self.file(fname)
1583 # Toss out the filenodes that the recipient isn't really
1585 # Toss out the filenodes that the recipient isn't really
1584 # missing.
1586 # missing.
1585 if msng_filenode_set.has_key(fname):
1587 if msng_filenode_set.has_key(fname):
1586 prune_filenodes(fname, filerevlog)
1588 prune_filenodes(fname, filerevlog)
1587 msng_filenode_lst = msng_filenode_set[fname].keys()
1589 msng_filenode_lst = msng_filenode_set[fname].keys()
1588 else:
1590 else:
1589 msng_filenode_lst = []
1591 msng_filenode_lst = []
1590 # If any filenodes are left, generate the group for them,
1592 # If any filenodes are left, generate the group for them,
1591 # otherwise don't bother.
1593 # otherwise don't bother.
1592 if len(msng_filenode_lst) > 0:
1594 if len(msng_filenode_lst) > 0:
1593 yield changegroup.genchunk(fname)
1595 yield changegroup.genchunk(fname)
1594 # Sort the filenodes by their revision #
1596 # Sort the filenodes by their revision #
1595 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1597 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1596 # Create a group generator and only pass in a changenode
1598 # Create a group generator and only pass in a changenode
1597 # lookup function as we need to collect no information
1599 # lookup function as we need to collect no information
1598 # from filenodes.
1600 # from filenodes.
1599 group = filerevlog.group(msng_filenode_lst,
1601 group = filerevlog.group(msng_filenode_lst,
1600 lookup_filenode_link_func(fname))
1602 lookup_filenode_link_func(fname))
1601 for chnk in group:
1603 for chnk in group:
1602 yield chnk
1604 yield chnk
1603 if msng_filenode_set.has_key(fname):
1605 if msng_filenode_set.has_key(fname):
1604 # Don't need this anymore, toss it to free memory.
1606 # Don't need this anymore, toss it to free memory.
1605 del msng_filenode_set[fname]
1607 del msng_filenode_set[fname]
1606 # Signal that no more groups are left.
1608 # Signal that no more groups are left.
1607 yield changegroup.closechunk()
1609 yield changegroup.closechunk()
1608
1610
1609 if msng_cl_lst:
1611 if msng_cl_lst:
1610 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1612 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1611
1613
1612 return util.chunkbuffer(gengroup())
1614 return util.chunkbuffer(gengroup())
1613
1615
1614 def changegroup(self, basenodes, source):
1616 def changegroup(self, basenodes, source):
1615 """Generate a changegroup of all nodes that we have that a recipient
1617 """Generate a changegroup of all nodes that we have that a recipient
1616 doesn't.
1618 doesn't.
1617
1619
1618 This is much easier than the previous function as we can assume that
1620 This is much easier than the previous function as we can assume that
1619 the recipient has any changenode we aren't sending them."""
1621 the recipient has any changenode we aren't sending them."""
1620
1622
1621 self.hook('preoutgoing', throw=True, source=source)
1623 self.hook('preoutgoing', throw=True, source=source)
1622
1624
1623 cl = self.changelog
1625 cl = self.changelog
1624 nodes = cl.nodesbetween(basenodes, None)[0]
1626 nodes = cl.nodesbetween(basenodes, None)[0]
1625 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1627 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1626 self.changegroupinfo(nodes)
1628 self.changegroupinfo(nodes)
1627
1629
1628 def identity(x):
1630 def identity(x):
1629 return x
1631 return x
1630
1632
1631 def gennodelst(revlog):
1633 def gennodelst(revlog):
1632 for r in xrange(0, revlog.count()):
1634 for r in xrange(0, revlog.count()):
1633 n = revlog.node(r)
1635 n = revlog.node(r)
1634 if revlog.linkrev(n) in revset:
1636 if revlog.linkrev(n) in revset:
1635 yield n
1637 yield n
1636
1638
1637 def changed_file_collector(changedfileset):
1639 def changed_file_collector(changedfileset):
1638 def collect_changed_files(clnode):
1640 def collect_changed_files(clnode):
1639 c = cl.read(clnode)
1641 c = cl.read(clnode)
1640 for fname in c[3]:
1642 for fname in c[3]:
1641 changedfileset[fname] = 1
1643 changedfileset[fname] = 1
1642 return collect_changed_files
1644 return collect_changed_files
1643
1645
1644 def lookuprevlink_func(revlog):
1646 def lookuprevlink_func(revlog):
1645 def lookuprevlink(n):
1647 def lookuprevlink(n):
1646 return cl.node(revlog.linkrev(n))
1648 return cl.node(revlog.linkrev(n))
1647 return lookuprevlink
1649 return lookuprevlink
1648
1650
1649 def gengroup():
1651 def gengroup():
1650 # construct a list of all changed files
1652 # construct a list of all changed files
1651 changedfiles = {}
1653 changedfiles = {}
1652
1654
1653 for chnk in cl.group(nodes, identity,
1655 for chnk in cl.group(nodes, identity,
1654 changed_file_collector(changedfiles)):
1656 changed_file_collector(changedfiles)):
1655 yield chnk
1657 yield chnk
1656 changedfiles = changedfiles.keys()
1658 changedfiles = changedfiles.keys()
1657 changedfiles.sort()
1659 changedfiles.sort()
1658
1660
1659 mnfst = self.manifest
1661 mnfst = self.manifest
1660 nodeiter = gennodelst(mnfst)
1662 nodeiter = gennodelst(mnfst)
1661 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1663 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1662 yield chnk
1664 yield chnk
1663
1665
1664 for fname in changedfiles:
1666 for fname in changedfiles:
1665 filerevlog = self.file(fname)
1667 filerevlog = self.file(fname)
1666 nodeiter = gennodelst(filerevlog)
1668 nodeiter = gennodelst(filerevlog)
1667 nodeiter = list(nodeiter)
1669 nodeiter = list(nodeiter)
1668 if nodeiter:
1670 if nodeiter:
1669 yield changegroup.genchunk(fname)
1671 yield changegroup.genchunk(fname)
1670 lookup = lookuprevlink_func(filerevlog)
1672 lookup = lookuprevlink_func(filerevlog)
1671 for chnk in filerevlog.group(nodeiter, lookup):
1673 for chnk in filerevlog.group(nodeiter, lookup):
1672 yield chnk
1674 yield chnk
1673
1675
1674 yield changegroup.closechunk()
1676 yield changegroup.closechunk()
1675
1677
1676 if nodes:
1678 if nodes:
1677 self.hook('outgoing', node=hex(nodes[0]), source=source)
1679 self.hook('outgoing', node=hex(nodes[0]), source=source)
1678
1680
1679 return util.chunkbuffer(gengroup())
1681 return util.chunkbuffer(gengroup())
1680
1682
1681 def addchangegroup(self, source, srctype, url):
1683 def addchangegroup(self, source, srctype, url):
1682 """add changegroup to repo.
1684 """add changegroup to repo.
1683
1685
1684 return values:
1686 return values:
1685 - nothing changed or no source: 0
1687 - nothing changed or no source: 0
1686 - more heads than before: 1+added heads (2..n)
1688 - more heads than before: 1+added heads (2..n)
1687 - less heads than before: -1-removed heads (-2..-n)
1689 - less heads than before: -1-removed heads (-2..-n)
1688 - number of heads stays the same: 1
1690 - number of heads stays the same: 1
1689 """
1691 """
1690 def csmap(x):
1692 def csmap(x):
1691 self.ui.debug(_("add changeset %s\n") % short(x))
1693 self.ui.debug(_("add changeset %s\n") % short(x))
1692 return cl.count()
1694 return cl.count()
1693
1695
1694 def revmap(x):
1696 def revmap(x):
1695 return cl.rev(x)
1697 return cl.rev(x)
1696
1698
1697 if not source:
1699 if not source:
1698 return 0
1700 return 0
1699
1701
1700 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1702 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1701
1703
1702 changesets = files = revisions = 0
1704 changesets = files = revisions = 0
1703
1705
1704 tr = self.transaction()
1706 tr = self.transaction()
1705
1707
1706 # write changelog data to temp files so concurrent readers will not see
1708 # write changelog data to temp files so concurrent readers will not see
1707 # inconsistent view
1709 # inconsistent view
1708 cl = None
1710 cl = None
1709 try:
1711 try:
1710 cl = appendfile.appendchangelog(self.sopener,
1712 cl = appendfile.appendchangelog(self.sopener,
1711 self.changelog.version)
1713 self.changelog.version)
1712
1714
1713 oldheads = len(cl.heads())
1715 oldheads = len(cl.heads())
1714
1716
1715 # pull off the changeset group
1717 # pull off the changeset group
1716 self.ui.status(_("adding changesets\n"))
1718 self.ui.status(_("adding changesets\n"))
1717 cor = cl.count() - 1
1719 cor = cl.count() - 1
1718 chunkiter = changegroup.chunkiter(source)
1720 chunkiter = changegroup.chunkiter(source)
1719 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1721 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1720 raise util.Abort(_("received changelog group is empty"))
1722 raise util.Abort(_("received changelog group is empty"))
1721 cnr = cl.count() - 1
1723 cnr = cl.count() - 1
1722 changesets = cnr - cor
1724 changesets = cnr - cor
1723
1725
1724 # pull off the manifest group
1726 # pull off the manifest group
1725 self.ui.status(_("adding manifests\n"))
1727 self.ui.status(_("adding manifests\n"))
1726 chunkiter = changegroup.chunkiter(source)
1728 chunkiter = changegroup.chunkiter(source)
1727 # no need to check for empty manifest group here:
1729 # no need to check for empty manifest group here:
1728 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1730 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1729 # no new manifest will be created and the manifest group will
1731 # no new manifest will be created and the manifest group will
1730 # be empty during the pull
1732 # be empty during the pull
1731 self.manifest.addgroup(chunkiter, revmap, tr)
1733 self.manifest.addgroup(chunkiter, revmap, tr)
1732
1734
1733 # process the files
1735 # process the files
1734 self.ui.status(_("adding file changes\n"))
1736 self.ui.status(_("adding file changes\n"))
1735 while 1:
1737 while 1:
1736 f = changegroup.getchunk(source)
1738 f = changegroup.getchunk(source)
1737 if not f:
1739 if not f:
1738 break
1740 break
1739 self.ui.debug(_("adding %s revisions\n") % f)
1741 self.ui.debug(_("adding %s revisions\n") % f)
1740 fl = self.file(f)
1742 fl = self.file(f)
1741 o = fl.count()
1743 o = fl.count()
1742 chunkiter = changegroup.chunkiter(source)
1744 chunkiter = changegroup.chunkiter(source)
1743 if fl.addgroup(chunkiter, revmap, tr) is None:
1745 if fl.addgroup(chunkiter, revmap, tr) is None:
1744 raise util.Abort(_("received file revlog group is empty"))
1746 raise util.Abort(_("received file revlog group is empty"))
1745 revisions += fl.count() - o
1747 revisions += fl.count() - o
1746 files += 1
1748 files += 1
1747
1749
1748 cl.writedata()
1750 cl.writedata()
1749 finally:
1751 finally:
1750 if cl:
1752 if cl:
1751 cl.cleanup()
1753 cl.cleanup()
1752
1754
1753 # make changelog see real files again
1755 # make changelog see real files again
1754 self.changelog = changelog.changelog(self.sopener,
1756 self.changelog = changelog.changelog(self.sopener,
1755 self.changelog.version)
1757 self.changelog.version)
1756 self.changelog.checkinlinesize(tr)
1758 self.changelog.checkinlinesize(tr)
1757
1759
1758 newheads = len(self.changelog.heads())
1760 newheads = len(self.changelog.heads())
1759 heads = ""
1761 heads = ""
1760 if oldheads and newheads != oldheads:
1762 if oldheads and newheads != oldheads:
1761 heads = _(" (%+d heads)") % (newheads - oldheads)
1763 heads = _(" (%+d heads)") % (newheads - oldheads)
1762
1764
1763 self.ui.status(_("added %d changesets"
1765 self.ui.status(_("added %d changesets"
1764 " with %d changes to %d files%s\n")
1766 " with %d changes to %d files%s\n")
1765 % (changesets, revisions, files, heads))
1767 % (changesets, revisions, files, heads))
1766
1768
1767 if changesets > 0:
1769 if changesets > 0:
1768 self.hook('pretxnchangegroup', throw=True,
1770 self.hook('pretxnchangegroup', throw=True,
1769 node=hex(self.changelog.node(cor+1)), source=srctype,
1771 node=hex(self.changelog.node(cor+1)), source=srctype,
1770 url=url)
1772 url=url)
1771
1773
1772 tr.close()
1774 tr.close()
1773
1775
1774 if changesets > 0:
1776 if changesets > 0:
1775 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1777 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1776 source=srctype, url=url)
1778 source=srctype, url=url)
1777
1779
1778 for i in xrange(cor + 1, cnr + 1):
1780 for i in xrange(cor + 1, cnr + 1):
1779 self.hook("incoming", node=hex(self.changelog.node(i)),
1781 self.hook("incoming", node=hex(self.changelog.node(i)),
1780 source=srctype, url=url)
1782 source=srctype, url=url)
1781
1783
1782 # never return 0 here:
1784 # never return 0 here:
1783 if newheads < oldheads:
1785 if newheads < oldheads:
1784 return newheads - oldheads - 1
1786 return newheads - oldheads - 1
1785 else:
1787 else:
1786 return newheads - oldheads + 1
1788 return newheads - oldheads + 1
1787
1789
1788
1790
1789 def stream_in(self, remote):
1791 def stream_in(self, remote):
1790 fp = remote.stream_out()
1792 fp = remote.stream_out()
1791 l = fp.readline()
1793 l = fp.readline()
1792 try:
1794 try:
1793 resp = int(l)
1795 resp = int(l)
1794 except ValueError:
1796 except ValueError:
1795 raise util.UnexpectedOutput(
1797 raise util.UnexpectedOutput(
1796 _('Unexpected response from remote server:'), l)
1798 _('Unexpected response from remote server:'), l)
1797 if resp == 1:
1799 if resp == 1:
1798 raise util.Abort(_('operation forbidden by server'))
1800 raise util.Abort(_('operation forbidden by server'))
1799 elif resp == 2:
1801 elif resp == 2:
1800 raise util.Abort(_('locking the remote repository failed'))
1802 raise util.Abort(_('locking the remote repository failed'))
1801 elif resp != 0:
1803 elif resp != 0:
1802 raise util.Abort(_('the server sent an unknown error code'))
1804 raise util.Abort(_('the server sent an unknown error code'))
1803 self.ui.status(_('streaming all changes\n'))
1805 self.ui.status(_('streaming all changes\n'))
1804 l = fp.readline()
1806 l = fp.readline()
1805 try:
1807 try:
1806 total_files, total_bytes = map(int, l.split(' ', 1))
1808 total_files, total_bytes = map(int, l.split(' ', 1))
1807 except ValueError, TypeError:
1809 except ValueError, TypeError:
1808 raise util.UnexpectedOutput(
1810 raise util.UnexpectedOutput(
1809 _('Unexpected response from remote server:'), l)
1811 _('Unexpected response from remote server:'), l)
1810 self.ui.status(_('%d files to transfer, %s of data\n') %
1812 self.ui.status(_('%d files to transfer, %s of data\n') %
1811 (total_files, util.bytecount(total_bytes)))
1813 (total_files, util.bytecount(total_bytes)))
1812 start = time.time()
1814 start = time.time()
1813 for i in xrange(total_files):
1815 for i in xrange(total_files):
1814 # XXX doesn't support '\n' or '\r' in filenames
1816 # XXX doesn't support '\n' or '\r' in filenames
1815 l = fp.readline()
1817 l = fp.readline()
1816 try:
1818 try:
1817 name, size = l.split('\0', 1)
1819 name, size = l.split('\0', 1)
1818 size = int(size)
1820 size = int(size)
1819 except ValueError, TypeError:
1821 except ValueError, TypeError:
1820 raise util.UnexpectedOutput(
1822 raise util.UnexpectedOutput(
1821 _('Unexpected response from remote server:'), l)
1823 _('Unexpected response from remote server:'), l)
1822 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1824 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1823 ofp = self.sopener(name, 'w')
1825 ofp = self.sopener(name, 'w')
1824 for chunk in util.filechunkiter(fp, limit=size):
1826 for chunk in util.filechunkiter(fp, limit=size):
1825 ofp.write(chunk)
1827 ofp.write(chunk)
1826 ofp.close()
1828 ofp.close()
1827 elapsed = time.time() - start
1829 elapsed = time.time() - start
1828 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1830 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1829 (util.bytecount(total_bytes), elapsed,
1831 (util.bytecount(total_bytes), elapsed,
1830 util.bytecount(total_bytes / elapsed)))
1832 util.bytecount(total_bytes / elapsed)))
1831 self.reload()
1833 self.reload()
1832 return len(self.heads()) + 1
1834 return len(self.heads()) + 1
1833
1835
1834 def clone(self, remote, heads=[], stream=False):
1836 def clone(self, remote, heads=[], stream=False):
1835 '''clone remote repository.
1837 '''clone remote repository.
1836
1838
1837 keyword arguments:
1839 keyword arguments:
1838 heads: list of revs to clone (forces use of pull)
1840 heads: list of revs to clone (forces use of pull)
1839 stream: use streaming clone if possible'''
1841 stream: use streaming clone if possible'''
1840
1842
1841 # now, all clients that can request uncompressed clones can
1843 # now, all clients that can request uncompressed clones can
1842 # read repo formats supported by all servers that can serve
1844 # read repo formats supported by all servers that can serve
1843 # them.
1845 # them.
1844
1846
1845 # if revlog format changes, client will have to check version
1847 # if revlog format changes, client will have to check version
1846 # and format flags on "stream" capability, and use
1848 # and format flags on "stream" capability, and use
1847 # uncompressed only if compatible.
1849 # uncompressed only if compatible.
1848
1850
1849 if stream and not heads and remote.capable('stream'):
1851 if stream and not heads and remote.capable('stream'):
1850 return self.stream_in(remote)
1852 return self.stream_in(remote)
1851 return self.pull(remote, heads)
1853 return self.pull(remote, heads)
1852
1854
1853 # used to avoid circular references so destructors work
1855 # used to avoid circular references so destructors work
1854 def aftertrans(files):
1856 def aftertrans(files):
1855 renamefiles = [tuple(t) for t in files]
1857 renamefiles = [tuple(t) for t in files]
1856 def a():
1858 def a():
1857 for src, dest in renamefiles:
1859 for src, dest in renamefiles:
1858 util.rename(src, dest)
1860 util.rename(src, dest)
1859 return a
1861 return a
1860
1862
1861 def instance(ui, path, create):
1863 def instance(ui, path, create):
1862 return localrepository(ui, util.drop_scheme('file', path), create)
1864 return localrepository(ui, util.drop_scheme('file', path), create)
1863
1865
1864 def islocal(path):
1866 def islocal(path):
1865 return True
1867 return True
@@ -1,646 +1,647 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 from node import *
9 from node import *
10 import base85, cmdutil, mdiff, util, context, revlog
10 import base85, cmdutil, mdiff, util, context, revlog
11 import cStringIO, email.Parser, os, popen2, re, sha
11 import cStringIO, email.Parser, os, popen2, re, sha
12 import sys, tempfile, zlib
12 import sys, tempfile, zlib
13
13
14 # helper functions
14 # helper functions
15
15
16 def copyfile(src, dst, basedir=None):
16 def copyfile(src, dst, basedir=None):
17 if not basedir:
17 if not basedir:
18 basedir = os.getcwd()
18 basedir = os.getcwd()
19
19
20 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
20 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
21 if os.path.exists(absdst):
21 if os.path.exists(absdst):
22 raise util.Abort(_("cannot create %s: destination already exists") %
22 raise util.Abort(_("cannot create %s: destination already exists") %
23 dst)
23 dst)
24
24
25 targetdir = os.path.dirname(absdst)
25 targetdir = os.path.dirname(absdst)
26 if not os.path.isdir(targetdir):
26 if not os.path.isdir(targetdir):
27 os.makedirs(targetdir)
27 os.makedirs(targetdir)
28
28
29 util.copyfile(abssrc, absdst)
29 util.copyfile(abssrc, absdst)
30
30
31 # public functions
31 # public functions
32
32
33 def extract(ui, fileobj):
33 def extract(ui, fileobj):
34 '''extract patch from data read from fileobj.
34 '''extract patch from data read from fileobj.
35
35
36 patch can be normal patch or contained in email message.
36 patch can be normal patch or contained in email message.
37
37
38 return tuple (filename, message, user, date). any item in returned
38 return tuple (filename, message, user, date). any item in returned
39 tuple can be None. if filename is None, fileobj did not contain
39 tuple can be None. if filename is None, fileobj did not contain
40 patch. caller must unlink filename when done.'''
40 patch. caller must unlink filename when done.'''
41
41
42 # attempt to detect the start of a patch
42 # attempt to detect the start of a patch
43 # (this heuristic is borrowed from quilt)
43 # (this heuristic is borrowed from quilt)
44 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
44 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
45 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
45 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
46 '(---|\*\*\*)[ \t])', re.MULTILINE)
46 '(---|\*\*\*)[ \t])', re.MULTILINE)
47
47
48 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
48 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
49 tmpfp = os.fdopen(fd, 'w')
49 tmpfp = os.fdopen(fd, 'w')
50 try:
50 try:
51 hgpatch = False
51 hgpatch = False
52
52
53 msg = email.Parser.Parser().parse(fileobj)
53 msg = email.Parser.Parser().parse(fileobj)
54
54
55 message = msg['Subject']
55 message = msg['Subject']
56 user = msg['From']
56 user = msg['From']
57 # should try to parse msg['Date']
57 # should try to parse msg['Date']
58 date = None
58 date = None
59
59
60 if message:
60 if message:
61 message = message.replace('\n\t', ' ')
61 message = message.replace('\n\t', ' ')
62 ui.debug('Subject: %s\n' % message)
62 ui.debug('Subject: %s\n' % message)
63 if user:
63 if user:
64 ui.debug('From: %s\n' % user)
64 ui.debug('From: %s\n' % user)
65 diffs_seen = 0
65 diffs_seen = 0
66 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
66 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
67
67
68 for part in msg.walk():
68 for part in msg.walk():
69 content_type = part.get_content_type()
69 content_type = part.get_content_type()
70 ui.debug('Content-Type: %s\n' % content_type)
70 ui.debug('Content-Type: %s\n' % content_type)
71 if content_type not in ok_types:
71 if content_type not in ok_types:
72 continue
72 continue
73 payload = part.get_payload(decode=True)
73 payload = part.get_payload(decode=True)
74 m = diffre.search(payload)
74 m = diffre.search(payload)
75 if m:
75 if m:
76 ui.debug(_('found patch at byte %d\n') % m.start(0))
76 ui.debug(_('found patch at byte %d\n') % m.start(0))
77 diffs_seen += 1
77 diffs_seen += 1
78 cfp = cStringIO.StringIO()
78 cfp = cStringIO.StringIO()
79 if message:
79 if message:
80 cfp.write(message)
80 cfp.write(message)
81 cfp.write('\n')
81 cfp.write('\n')
82 for line in payload[:m.start(0)].splitlines():
82 for line in payload[:m.start(0)].splitlines():
83 if line.startswith('# HG changeset patch'):
83 if line.startswith('# HG changeset patch'):
84 ui.debug(_('patch generated by hg export\n'))
84 ui.debug(_('patch generated by hg export\n'))
85 hgpatch = True
85 hgpatch = True
86 # drop earlier commit message content
86 # drop earlier commit message content
87 cfp.seek(0)
87 cfp.seek(0)
88 cfp.truncate()
88 cfp.truncate()
89 elif hgpatch:
89 elif hgpatch:
90 if line.startswith('# User '):
90 if line.startswith('# User '):
91 user = line[7:]
91 user = line[7:]
92 ui.debug('From: %s\n' % user)
92 ui.debug('From: %s\n' % user)
93 elif line.startswith("# Date "):
93 elif line.startswith("# Date "):
94 date = line[7:]
94 date = line[7:]
95 if not line.startswith('# '):
95 if not line.startswith('# '):
96 cfp.write(line)
96 cfp.write(line)
97 cfp.write('\n')
97 cfp.write('\n')
98 message = cfp.getvalue()
98 message = cfp.getvalue()
99 if tmpfp:
99 if tmpfp:
100 tmpfp.write(payload)
100 tmpfp.write(payload)
101 if not payload.endswith('\n'):
101 if not payload.endswith('\n'):
102 tmpfp.write('\n')
102 tmpfp.write('\n')
103 elif not diffs_seen and message and content_type == 'text/plain':
103 elif not diffs_seen and message and content_type == 'text/plain':
104 message += '\n' + payload
104 message += '\n' + payload
105 except:
105 except:
106 tmpfp.close()
106 tmpfp.close()
107 os.unlink(tmpname)
107 os.unlink(tmpname)
108 raise
108 raise
109
109
110 tmpfp.close()
110 tmpfp.close()
111 if not diffs_seen:
111 if not diffs_seen:
112 os.unlink(tmpname)
112 os.unlink(tmpname)
113 return None, message, user, date
113 return None, message, user, date
114 return tmpname, message, user, date
114 return tmpname, message, user, date
115
115
116 GP_PATCH = 1 << 0 # we have to run patch
116 GP_PATCH = 1 << 0 # we have to run patch
117 GP_FILTER = 1 << 1 # there's some copy/rename operation
117 GP_FILTER = 1 << 1 # there's some copy/rename operation
118 GP_BINARY = 1 << 2 # there's a binary patch
118 GP_BINARY = 1 << 2 # there's a binary patch
119
119
120 def readgitpatch(patchname):
120 def readgitpatch(patchname):
121 """extract git-style metadata about patches from <patchname>"""
121 """extract git-style metadata about patches from <patchname>"""
122 class gitpatch:
122 class gitpatch:
123 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
123 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
124 def __init__(self, path):
124 def __init__(self, path):
125 self.path = path
125 self.path = path
126 self.oldpath = None
126 self.oldpath = None
127 self.mode = None
127 self.mode = None
128 self.op = 'MODIFY'
128 self.op = 'MODIFY'
129 self.copymod = False
129 self.copymod = False
130 self.lineno = 0
130 self.lineno = 0
131 self.binary = False
131 self.binary = False
132
132
133 # Filter patch for git information
133 # Filter patch for git information
134 gitre = re.compile('diff --git a/(.*) b/(.*)')
134 gitre = re.compile('diff --git a/(.*) b/(.*)')
135 pf = file(patchname)
135 pf = file(patchname)
136 gp = None
136 gp = None
137 gitpatches = []
137 gitpatches = []
138 # Can have a git patch with only metadata, causing patch to complain
138 # Can have a git patch with only metadata, causing patch to complain
139 dopatch = 0
139 dopatch = 0
140
140
141 lineno = 0
141 lineno = 0
142 for line in pf:
142 for line in pf:
143 lineno += 1
143 lineno += 1
144 if line.startswith('diff --git'):
144 if line.startswith('diff --git'):
145 m = gitre.match(line)
145 m = gitre.match(line)
146 if m:
146 if m:
147 if gp:
147 if gp:
148 gitpatches.append(gp)
148 gitpatches.append(gp)
149 src, dst = m.group(1, 2)
149 src, dst = m.group(1, 2)
150 gp = gitpatch(dst)
150 gp = gitpatch(dst)
151 gp.lineno = lineno
151 gp.lineno = lineno
152 elif gp:
152 elif gp:
153 if line.startswith('--- '):
153 if line.startswith('--- '):
154 if gp.op in ('COPY', 'RENAME'):
154 if gp.op in ('COPY', 'RENAME'):
155 gp.copymod = True
155 gp.copymod = True
156 dopatch |= GP_FILTER
156 dopatch |= GP_FILTER
157 gitpatches.append(gp)
157 gitpatches.append(gp)
158 gp = None
158 gp = None
159 dopatch |= GP_PATCH
159 dopatch |= GP_PATCH
160 continue
160 continue
161 if line.startswith('rename from '):
161 if line.startswith('rename from '):
162 gp.op = 'RENAME'
162 gp.op = 'RENAME'
163 gp.oldpath = line[12:].rstrip()
163 gp.oldpath = line[12:].rstrip()
164 elif line.startswith('rename to '):
164 elif line.startswith('rename to '):
165 gp.path = line[10:].rstrip()
165 gp.path = line[10:].rstrip()
166 elif line.startswith('copy from '):
166 elif line.startswith('copy from '):
167 gp.op = 'COPY'
167 gp.op = 'COPY'
168 gp.oldpath = line[10:].rstrip()
168 gp.oldpath = line[10:].rstrip()
169 elif line.startswith('copy to '):
169 elif line.startswith('copy to '):
170 gp.path = line[8:].rstrip()
170 gp.path = line[8:].rstrip()
171 elif line.startswith('deleted file'):
171 elif line.startswith('deleted file'):
172 gp.op = 'DELETE'
172 gp.op = 'DELETE'
173 elif line.startswith('new file mode '):
173 elif line.startswith('new file mode '):
174 gp.op = 'ADD'
174 gp.op = 'ADD'
175 gp.mode = int(line.rstrip()[-3:], 8)
175 gp.mode = int(line.rstrip()[-3:], 8)
176 elif line.startswith('new mode '):
176 elif line.startswith('new mode '):
177 gp.mode = int(line.rstrip()[-3:], 8)
177 gp.mode = int(line.rstrip()[-3:], 8)
178 elif line.startswith('GIT binary patch'):
178 elif line.startswith('GIT binary patch'):
179 dopatch |= GP_BINARY
179 dopatch |= GP_BINARY
180 gp.binary = True
180 gp.binary = True
181 if gp:
181 if gp:
182 gitpatches.append(gp)
182 gitpatches.append(gp)
183
183
184 if not gitpatches:
184 if not gitpatches:
185 dopatch = GP_PATCH
185 dopatch = GP_PATCH
186
186
187 return (dopatch, gitpatches)
187 return (dopatch, gitpatches)
188
188
def dogitpatch(patchname, gitpatches, cwd=None):
    """Preprocess git patch so that vanilla patch can handle it.

    Rewrites the patch in patchname into a temporary file, applying the
    git-only features (binary hunks, copies-with-modification) directly
    to the working directory and stripping/rewriting the corresponding
    hunks.  Returns the name of the temporary patch file; the caller is
    responsible for unlinking it.
    """
    def extractbin(fp):
        # Decode one 'literal N' base85 binary hunk from fp.
        # Returns (decoded bytes, number of lines consumed); the bytes
        # are None when no 'literal' header is found before EOF.
        i = [0] # yuck - mutable cell so the nested readline can count lines
        def readline():
            i[0] += 1
            return fp.readline().rstrip()
        line = readline()
        while line and not line.startswith('literal '):
            line = readline()
        if not line:
            return None, i[0]
        size = int(line[8:])
        dec = []
        line = readline()
        while line:
            # first char encodes the decoded length of this line:
            # 'A'-'Z' => 1-26, 'a'-'z' => 27-52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:])[:l])
            line = readline()
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise util.Abort(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        return text, i[0]

    pf = file(patchname)
    pfline = 1

    fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')

    try:
        for i in xrange(len(gitpatches)):
            p = gitpatches[i]
            if not p.copymod and not p.binary:
                continue

            # rewrite patch hunk
            while pfline < p.lineno:
                tmpfp.write(pf.readline())
                pfline += 1

            if p.binary:
                text, delta = extractbin(pf)
                # fix: an empty file is a valid binary patch result, so
                # only a missing 'literal' hunk (text is None) is an
                # extraction failure -- 'if not text' also rejected ''
                if text is None:
                    raise util.Abort(_('binary patch extraction failed'))
                pfline += delta
                if not cwd:
                    cwd = os.getcwd()
                absdst = os.path.join(cwd, p.path)
                basedir = os.path.dirname(absdst)
                if not os.path.isdir(basedir):
                    os.makedirs(basedir)
                out = file(absdst, 'wb')
                out.write(text)
                out.close()
            elif p.copymod:
                copyfile(p.oldpath, p.path, basedir=cwd)
                # rewrite the copy hunk as a plain modification of the
                # destination so vanilla patch can apply it
                tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
                line = pf.readline()
                pfline += 1
                while not line.startswith('--- a/'):
                    tmpfp.write(line)
                    line = pf.readline()
                    pfline += 1
                tmpfp.write('--- a/%s\n' % p.path)

        # copy the remainder of the original patch verbatim
        line = pf.readline()
        while line:
            tmpfp.write(line)
            line = pf.readline()
    except:
        # don't leave a stale temporary file behind on failure
        tmpfp.close()
        os.unlink(patchname)
        raise

    tmpfp.close()
    return patchname
271
271
def patch(patchname, ui, strip=1, cwd=None, files=None):
    """apply the patch <patchname> to the working directory.
    a list of patched files is returned"""
    # fix: a mutable default ({}) would be shared across calls and
    # accumulate entries; callers may still pass a dict to collect
    # per-file results (interface unchanged)
    if files is None:
        files = {}

    # helper function
    def __patch(patchname):
        """run the external patch program and update files/fuzz"""
        fuzz = False

        # prefer GNU patch when available under the 'gpatch' name
        patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''),
                                    'patch')
        args = []
        if cwd:
            args.append('-d %s' % util.shellquote(cwd))
        fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                           util.shellquote(patchname)))

        # fix: initialize before the loop -- a fuzz/FAILED line arriving
        # before any 'patching file' line used to raise NameError
        pf = None
        printed_file = False
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parse_patch_output(line)
                printed_file = False
                files.setdefault(pf, (None, None))
            elif line.find('with fuzz') >= 0:
                fuzz = True
                if not printed_file and pf is not None:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file and pf is not None:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
        code = fp.close()
        if code:
            raise util.Abort(_("patch command failed: %s") %
                             util.explain_exit(code)[0])
        return fuzz

    (dopatch, gitpatches) = readgitpatch(patchname)
    for gp in gitpatches:
        files[gp.path] = (gp.op, gp)

    fuzz = False
    if dopatch:
        filterpatch = dopatch & (GP_FILTER | GP_BINARY)
        if filterpatch:
            # rewrite git-specific hunks into a temporary vanilla patch
            patchname = dogitpatch(patchname, gitpatches, cwd=cwd)
        try:
            if dopatch & GP_PATCH:
                fuzz = __patch(patchname)
        finally:
            if filterpatch:
                os.unlink(patchname)

    return fuzz
332
332
def diffopts(ui, opts={}, untrusted=False):
    """Build an mdiff.diffopts object from command options and config."""
    def get(key, name=None):
        # a command-line option wins; otherwise consult the [diff]
        # section of the configuration (under its config-file name)
        return (opts.get(key) or
                ui.configbool('diff', name or key, None, untrusted=untrusted))
    return mdiff.diffopts(text=opts.get('text'),
                          git=get('git'),
                          nodates=get('nodates'),
                          showfunc=get('show_function', 'showfunc'),
                          ignorews=get('ignore_all_space', 'ignorews'),
                          ignorewsamount=get('ignore_space_change',
                                             'ignorewsamount'),
                          ignoreblanklines=get('ignore_blank_lines',
                                               'ignoreblanklines'))
345
345
346 def updatedir(ui, repo, patches, wlock=None):
346 def updatedir(ui, repo, patches, wlock=None):
347 '''Update dirstate after patch application according to metadata'''
347 '''Update dirstate after patch application according to metadata'''
348 if not patches:
348 if not patches:
349 return
349 return
350 copies = []
350 copies = []
351 removes = {}
351 removes = {}
352 cfiles = patches.keys()
352 cfiles = patches.keys()
353 cwd = repo.getcwd()
353 cwd = repo.getcwd()
354 if cwd:
354 if cwd:
355 cfiles = [util.pathto(cwd, f) for f in patches.keys()]
355 cfiles = [util.pathto(cwd, f) for f in patches.keys()]
356 for f in patches:
356 for f in patches:
357 ctype, gp = patches[f]
357 ctype, gp = patches[f]
358 if ctype == 'RENAME':
358 if ctype == 'RENAME':
359 copies.append((gp.oldpath, gp.path, gp.copymod))
359 copies.append((gp.oldpath, gp.path, gp.copymod))
360 removes[gp.oldpath] = 1
360 removes[gp.oldpath] = 1
361 elif ctype == 'COPY':
361 elif ctype == 'COPY':
362 copies.append((gp.oldpath, gp.path, gp.copymod))
362 copies.append((gp.oldpath, gp.path, gp.copymod))
363 elif ctype == 'DELETE':
363 elif ctype == 'DELETE':
364 removes[gp.path] = 1
364 removes[gp.path] = 1
365 for src, dst, after in copies:
365 for src, dst, after in copies:
366 if not after:
366 if not after:
367 copyfile(src, dst, repo.root)
367 copyfile(src, dst, repo.root)
368 repo.copy(src, dst, wlock=wlock)
368 repo.copy(src, dst, wlock=wlock)
369 removes = removes.keys()
369 removes = removes.keys()
370 if removes:
370 if removes:
371 removes.sort()
371 removes.sort()
372 repo.remove(removes, True, wlock=wlock)
372 repo.remove(removes, True, wlock=wlock)
373 for f in patches:
373 for f in patches:
374 ctype, gp = patches[f]
374 ctype, gp = patches[f]
375 if gp and gp.mode:
375 if gp and gp.mode:
376 x = gp.mode & 0100 != 0
376 x = gp.mode & 0100 != 0
377 dst = os.path.join(repo.root, gp.path)
377 dst = os.path.join(repo.root, gp.path)
378 # patch won't create empty files
378 # patch won't create empty files
379 if ctype == 'ADD' and not os.path.exists(dst):
379 if ctype == 'ADD' and not os.path.exists(dst):
380 repo.wwrite(gp.path, '')
380 repo.wwrite(gp.path, '')
381 util.set_exec(dst, x)
381 util.set_exec(dst, x)
382 cmdutil.addremove(repo, cfiles, wlock=wlock)
382 cmdutil.addremove(repo, cfiles, wlock=wlock)
383 files = patches.keys()
383 files = patches.keys()
384 files.extend([r for r in removes if r not in files])
384 files.extend([r for r in removes if r not in files])
385 files.sort()
385 files.sort()
386
386
387 return files
387 return files
388
388
def b85diff(fp, to, tn):
    '''print base85-encoded binary diff'''
    def gitindex(text):
        # git blob id: sha1 of a "blob <len>\0" header plus the contents
        if not text:
            return '0' * 40
        s = sha.new('blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # the prefix char encodes the decoded length of the line:
        # 1-26 => 'A'-'Z', 27-52 => 'a'-'z'
        l = len(line)
        if l <= 26:
            l = chr(ord('A') + l - 1)
        else:
            l = chr(l - 26 + ord('a') - 1)
        return '%c%s\n' % (l, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # slice text into csize-byte pieces
        for i in xrange(0, len(text), csize):
            yield text[i:i + csize]

    # TODO: deltas
    fp.write('index %s..%s\nGIT binary patch\nliteral %s\n' %
             (gitindex(to), gitindex(tn), len(tn)))
    fp.write(''.join([fmtline(l) for l in chunk(zlib.compress(tn))]))
    fp.write('\n')
422
422
def diff(repo, node1=None, node2=None, files=None, match=util.always,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # memoize changectx lookups done while walking rename history
    ccache = {}
    def getctx(r):
        if r not in ccache:
            ccache[r] = context.changectx(repo, r)
        return ccache[r]

    # memoize filelogs so repeated filectx lookups share one revlog
    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    # reading the data for node1 early allows it to play nicely
    # with repo.status and the revlog cache.
    ctx1 = context.changectx(repo, node1)
    # force manifest reading
    man1 = ctx1.manifest()
    date1 = util.datestr(ctx1.date())

    if not changes:
        changes = repo.status(node1, node2, files, match=match)[:5]
    modified, added, removed, deleted, unknown = changes
    if files:
        # restrict the status lists to the requested files, treating a
        # listed directory (trailing-slash match) as matching its contents
        def filterfiles(filters):
            l = [x for x in filters if x in files]

            for t in files:
                if not t.endswith("/"):
                    t += "/"
                l += [x for x in filters if x.startswith(t)]
            return l

        modified, added, removed = map(filterfiles, (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        ctx2 = context.changectx(repo, node2)
    else:
        ctx2 = context.workingctx(repo)
    man2 = ctx2.manifest()

    # returns False if there was no rename between ctx1 and ctx2
    # returns None if the file was created between ctx1 and ctx2
    # returns the (file, node) present in ctx1 that was renamed to f in ctx2
    def renamed(f):
        startrev = ctx1.rev()
        c = ctx2
        crev = c.rev()
        if crev is None:
            # working context: start from the tip of the changelog
            crev = repo.changelog.count()
        orig = f
        # walk first-parent history back to ctx1, following renames
        while crev > startrev:
            if f in c.files():
                try:
                    src = getfilectx(f, c).renamed()
                except revlog.LookupError:
                    return None
                if src:
                    f = src[0]
            crev = c.parents()[0].rev()
            # try to reuse
            c = getctx(crev)
        if f not in man1:
            return None
        if f == orig:
            return False
        return f

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # map each added file to the source it was copied/renamed from
        copied = {}
        for f in added:
            src = renamed(f)
            if src:
                copied[f] = src
        srcs = [x[1] for x in copied.items()]

    all = modified + added + removed
    all.sort()
    # sources already emitted as the 'from' side of a rename
    gone = {}

    for f in all:
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        if opts.git:
            def gitmode(x):
                return x and '100755' or '100644'
            def addmodehdr(header, omode, nmode):
                if omode != nmode:
                    header.append('old mode %s\n' % omode)
                    header.append('new mode %s\n' % nmode)

            a, b = f, f
            if f in added:
                mode = gitmode(man2.execf(f))
                if f in copied:
                    a = copied[f]
                    omode = gitmode(man1.execf(a))
                    addmodehdr(header, omode, mode)
                    # a source that is also removed is a rename, but
                    # only credit it to the first destination
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    # diff against the copy source, not an empty file
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                if f in srcs:
                    # rename source: the destination's hunk covers it
                    dodiff = False
                else:
                    mode = gitmode(man1.execf(f))
                    header.append('deleted file mode %s\n' % mode)
            else:
                omode = gitmode(man1.execf(f))
                nmode = gitmode(man2.execf(f))
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            # git diffs carry no revision annotations on the hunk header
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff == 'binary':
            fp.write(''.join(header))
            b85diff(fp, to, tn)
        elif dodiff:
            text = mdiff.unidiff(to, date1,
                                 # ctx2 date may be dynamic
                                 tn, util.datestr(ctx2.date()),
                                 f, r, opts=opts)
            # a mode-only change has no text but still needs its header
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)
588
589
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        # emit one changeset; open a per-changeset file from the
        # template unless an output file object was supplied
        ctx = repo.changectx(rev)
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth)
        if fp not in (sys.stdout, repo.ui):
            repo.ui.note("%s\n" % fp.name)

        header = ["# HG changeset patch\n",
                  "# User %s\n" % ctx.user(),
                  "# Date %d %d\n" % ctx.date(),
                  "# Node ID %s\n" % hex(node),
                  "# Parent %s\n" % hex(prev)]
        if len(parents) > 1:
            header.append("# Parent %s\n" % hex(parents[1]))
        fp.write(''.join(header))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        diff(repo, prev, node, fp=fp, opts=opts)
        if fp not in (sys.stdout, repo.ui):
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
626
627
def diffstat(patchlines):
    """Run the external diffstat program over a patch.

    Returns the diffstat text with the summary line moved to the top,
    or None if diffstat fails or reports no files.
    """
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    try:
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        try:
            for line in patchlines:
                print >> p.tochild, line
            p.tochild.close()
            if p.wait():
                return
            fp = os.fdopen(fd, 'r')
            stat = []
            for line in fp:
                stat.append(line.lstrip())
            # move the trailing summary line to the top
            last = stat.pop()
            stat.insert(0, last)
            stat = ''.join(stat)
            if stat.startswith('0 files'):
                raise ValueError
            return stat
        except:
            raise
    finally:
        # best-effort cleanup of the temporary file
        try:
            os.unlink(name)
        except:
            pass
@@ -1,1338 +1,1344 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
17 import os, threading, time, calendar, ConfigParser, locale
17 import os, threading, time, calendar, ConfigParser, locale
18
18
# character encoding for all user-visible strings; HGENCODING overrides
# the locale-derived default, with 'ascii' as the final fallback
_encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
            or "ascii"
# codec error-handling mode used when converting ('strict', 'replace'
# or 'ignore'), settable via HGENCODINGMODE
_encodingmode = os.environ.get("HGENCODINGMODE", "strict")
# encoding tried for repository data that is not valid UTF-8
_fallbackencoding = 'ISO-8859-1'
23
23
24 def tolocal(s):
24 def tolocal(s):
25 """
25 """
26 Convert a string from internal UTF-8 to local encoding
26 Convert a string from internal UTF-8 to local encoding
27
27
28 All internal strings should be UTF-8 but some repos before the
28 All internal strings should be UTF-8 but some repos before the
29 implementation of locale support may contain latin1 or possibly
29 implementation of locale support may contain latin1 or possibly
30 other character sets. We attempt to decode everything strictly
30 other character sets. We attempt to decode everything strictly
31 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
31 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
32 replace unknown characters.
32 replace unknown characters.
33 """
33 """
34 for e in ('UTF-8', _fallbackencoding):
34 for e in ('UTF-8', _fallbackencoding):
35 try:
35 try:
36 u = s.decode(e) # attempt strict decoding
36 u = s.decode(e) # attempt strict decoding
37 return u.encode(_encoding, "replace")
37 return u.encode(_encoding, "replace")
38 except LookupError, k:
38 except LookupError, k:
39 raise Abort(_("%s, please check your locale settings") % k)
39 raise Abort(_("%s, please check your locale settings") % k)
40 except UnicodeDecodeError:
40 except UnicodeDecodeError:
41 pass
41 pass
42 u = s.decode("utf-8", "replace") # last ditch
42 u = s.decode("utf-8", "replace") # last ditch
43 return u.encode(_encoding, "replace")
43 return u.encode(_encoding, "replace")
44
44
45 def fromlocal(s):
45 def fromlocal(s):
46 """
46 """
47 Convert a string from the local character encoding to UTF-8
47 Convert a string from the local character encoding to UTF-8
48
48
49 We attempt to decode strings using the encoding mode set by
49 We attempt to decode strings using the encoding mode set by
50 HG_ENCODINGMODE, which defaults to 'strict'. In this mode, unknown
50 HG_ENCODINGMODE, which defaults to 'strict'. In this mode, unknown
51 characters will cause an error message. Other modes include
51 characters will cause an error message. Other modes include
52 'replace', which replaces unknown characters with a special
52 'replace', which replaces unknown characters with a special
53 Unicode character, and 'ignore', which drops the character.
53 Unicode character, and 'ignore', which drops the character.
54 """
54 """
55 try:
55 try:
56 return s.decode(_encoding, _encodingmode).encode("utf-8")
56 return s.decode(_encoding, _encodingmode).encode("utf-8")
57 except UnicodeDecodeError, inst:
57 except UnicodeDecodeError, inst:
58 sub = s[max(0, inst.start-10):inst.start+10]
58 sub = s[max(0, inst.start-10):inst.start+10]
59 raise Abort("decoding near '%s': %s!" % (sub, inst))
59 raise Abort("decoding near '%s': %s!" % (sub, inst))
60 except LookupError, k:
60 except LookupError, k:
61 raise Abort(_("%s, please check your locale settings") % k)
61 raise Abort(_("%s, please check your locale settings") % k)
62
62
def locallen(s):
    """Return the length in characters of a locally-encoded string."""
    # decode with replacement so undecodable bytes still count as one
    # character each instead of raising
    u = s.decode(_encoding, "replace")
    return len(u)
66
66
67 def localsub(s, a, b=None):
67 def localsub(s, a, b=None):
68 try:
68 try:
69 u = s.decode(_encoding, _encodingmode)
69 u = s.decode(_encoding, _encodingmode)
70 if b is not None:
70 if b is not None:
71 u = u[a:b]
71 u = u[a:b]
72 else:
72 else:
73 u = u[:a]
73 u = u[:a]
74 return u.encode(_encoding, _encodingmode)
74 return u.encode(_encoding, _encodingmode)
75 except UnicodeDecodeError, inst:
75 except UnicodeDecodeError, inst:
76 sub = s[max(0, inst.start-10), inst.start+10]
76 sub = s[max(0, inst.start-10), inst.start+10]
77 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
77 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
78
78
# used by parsedate: formats are tried in order until one matches
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    # fixed: was '%I:%M:%SP' (a literal 'P'), which could never match a
    # 12-hour time with an AM/PM suffix like every other %I format here
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )

# formats additionally accepted when a range endpoint is given
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
112
112
class SignalInterrupt(Exception):
    """Raised by the signal handlers on SIGTERM and SIGHUP."""
115
115
# like SafeConfigParser but with case-sensitive keys
class configparser(ConfigParser.SafeConfigParser):
    """SafeConfigParser variant that keeps option names case-sensitive."""
    def optionxform(self, optionstr):
        # identity transform: the base class lowercases option names
        return optionstr
120
120
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    memo = {}
    if func.func_code.co_argcount == 1:
        # fast path for unary functions: use the argument itself as the
        # cache key and skip packing/unpacking an args tuple
        def cached(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def cached(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]
    return cached
139
139
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # binary-mode pipes; pout/pin are the child's stdout/stdin seen
    # from our side
    (pout, pin) = popen2.popen2(cmd, -1, 'b')
    def writer():
        try:
            pin.write(s)
            pin.close()
        except IOError, inst:
            # EPIPE just means the filter exited before reading all of
            # its input; anything else is a real error
            if inst.errno != errno.EPIPE:
                raise

    # feed the child's stdin from a separate thread while we read its
    # stdout here, so large inputs cannot deadlock on pipe buffers.
    # we should use select instead on UNIX, but this will work on most
    # systems, including Windows
    w = threading.Thread(target=writer)
    w.start()
    f = pout.read()
    pout.close()
    w.join()
    return f
159
159
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if code: raise Abort(_("command '%s' failed: %s") %
                             (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        # best-effort cleanup: either temp file may be missing if
        # mkstemp failed part way through, so swallow unlink errors
        try:
            if inname: os.unlink(inname)
        except: pass
        try:
            if outname: os.unlink(outname)
        except: pass
186
186
# maps a filter-command prefix to the function implementing that
# filtering strategy; consulted by filter() below
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
191
191
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # an explicit 'tempfile:' or 'pipe:' prefix selects the strategy;
    # a bare command defaults to pipe filtering
    for prefix in filtertable.keys():
        if cmd.startswith(prefix):
            rest = cmd[len(prefix):].lstrip()
            return filtertable[prefix](s, rest)
    return pipefilter(s, cmd)
198
198
def find_in_path(name, path, default=None):
    '''find name in search path. path can be string (will be split
    with os.pathsep), or iterable thing that returns strings. if name
    found, return path to name. else return default.'''
    if isinstance(path, str):
        path = path.split(os.pathsep)
    for directory in path:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return default
210
210
def binary(s):
    """return true if a string is binary data using diff's heuristic"""
    # diff's rule: a NUL byte within the first 4k marks the data binary
    return bool(s) and '\0' in s[:4096]
216
216
def unique(g):
    """return the uniq elements of iterable g"""
    # preserve first-seen order; a dict gives O(1) membership checks
    seen = {}
    out = []
    for item in g:
        if item in seen:
            continue
        seen[item] = 1
        out.append(item)
    return out
226
226
# NOTE(review): appears to be caught at the top-level command
# dispatcher, which prints the message and exits -- confirm
class Abort(Exception):
    """Raised if a command needs to print an error and exit."""
229
229
# specialization of Abort for surprising output from an external source
class UnexpectedOutput(Abort):
    """Raised to print an error with part of output and exit."""
232
232
def always(fn):
    """constant-true match predicate (accepts any name)"""
    return True

def never(fn):
    """constant-false match predicate (rejects any name)"""
    return False
235
235
def patkind(name, dflt_pat='glob'):
    """Split a string into an optional pattern kind prefix and the
    actual pattern."""
    # a recognized 'kind:' prefix wins; otherwise fall back to dflt_pat
    for kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'):
        if name.startswith(kind + ':'):
            return name.split(':', 1)
    return dflt_pat, name
242
242
def globre(pat, head='^', tail='$'):
    "convert a glob pattern into a regexp"
    # hand-rolled scanner: i walks pat one character at a time, res
    # accumulates the regexp translation
    i, n = 0, len(pat)
    res = ''
    group = False
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                # '**' matches across directory separators
                i += 1
                res += '.*'
            else:
                # plain '*' stops at a slash
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # character class: scan for the closing ']', allowing a
            # literal ']' or '!' as the first member
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat '[' as a literal
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # glob negation '!' becomes regexp negation '^'
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    # escape a literal leading '^'
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            # '{a,b}' alternation maps to a non-capturing group
            group = True
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group = False
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            # backslash escapes the next character, if there is one
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            res += re.escape(c)
    return head + res + tail
294
294
# characters that introduce glob syntax; dict used as a fast membership set
_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
296
296
def pathto(n1, n2):
    '''return the relative path from one place to another.
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.
    '''
    if not n1:
        return localpath(n2)
    comps1 = n1.split(os.sep)
    comps2 = n2.split('/')
    # drop the common leading components
    while comps1 and comps2 and comps1[0] == comps2[0]:
        del comps1[0]
        del comps2[0]
    # climb out of what remains of n1, then descend into n2
    return os.sep.join(['..'] * len(comps1) + comps2)
312
312
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root"""
    # normalize root so it always ends with exactly one separator
    if root == os.sep:
        rootsep = os.sep
    elif root.endswith(os.sep):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        # relative names are interpreted with respect to cwd inside root
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if name != rootsep and name.startswith(rootsep):
        # fast path: name is textually inside root
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

        raise Abort('%s not under root' % myname)
357
357
# convenience wrapper around _matcher with 'glob' as the default pattern
# kind; see _matcher below for argument and return details. the mutable
# default arguments are never mutated, only passed through.
def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
    return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
360
360
def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
    # matcher for command-line arguments. windows shells do not expand
    # globs, so default to glob matching there; elsewhere the shell has
    # already expanded them and bare names are relative paths.
    if os.name != 'nt':
        dflt_pat = 'relpath'
    else:
        dflt_pat = 'glob'
    return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
367
367
def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    head - a regex to prepend to patterns to control whether a match is rooted

    a pattern is one of:
    'glob:<rooted glob>'
    're:<rooted regexp>'
    'path:<rooted path>'
    'relglob:<relative glob>'
    'relpath:<relative path>'
    'relre:<relative regexp>'
    '<rooted path or regexp>'

    returns:
    a 3-tuple containing
    - list of explicit non-pattern names passed in
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in

    todo:
    make head regex a rooted bool
    """

    def contains_glob(name):
        # true if any glob metacharacter appears in name
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if kind == 're':
            return name
        elif kind == 'path':
            # a path matches itself and everything beneath it
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            # relative glob may match at any directory level
            return head + globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return head + re.escape(name) + tail
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return head + globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        matches = []
        for k, p in pats:
            try:
                pat = '(?:%s)' % regex(k, p, tail)
                matches.append(re.compile(pat).match)
            except re.error:
                if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
                else: raise Abort("invalid pattern (%s): %s" % (k, p))

        def buildfn(text):
            # first successful match wins; returns the match object
            # (truthy) or None
            for m in matches:
                r = m(text)
                if r:
                    return r

        return buildfn

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split(os.sep):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root)

    # sort incoming names into literal files and real patterns, while
    # collecting the directory roots a walker would need to visit
    pats = []
    files = []
    roots = []
    for kind, name in [patkind(p, dflt_pat) for p in names]:
        if kind in ('glob', 'relpath'):
            name = canonpath(canonroot, cwd, name)
            if name == '':
                # the root itself: match everything
                kind, name = 'glob', '**'
        if kind in ('glob', 'path', 're'):
            pats.append((kind, name))
            if kind == 'glob':
                root = globprefix(name)
                if root: roots.append(root)
        elif kind == 'relpath':
            files.append((kind, name))
            roots.append(name)

    patmatch = matchfn(pats, '$') or always
    filematch = matchfn(files, '(?:/|$)') or always
    incmatch = always
    if inc:
        inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
        excmatch = matchfn(exckinds, '(?:/|$)')

    return (roots,
            lambda fn: (incmatch(fn) and not excmatch(fn) and
                        (fn.endswith('/') or
                         (not pats and not files) or
                         (pats and patmatch(fn)) or
                         (files and filematch(fn)))),
            (inc or exc or (pats and pats != [('glob', '**')])) and True)
483
483
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    # remember the environment values we are about to override so they
    # can be restored in the finally block
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        # cmd.exe wants the whole command line wrapped in quotes
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                # a ui-like object has warn(): report and fall through
                # to returning the status
                onerr.warn(errmsg + '\n')
            except AttributeError:
                # otherwise onerr is an exception class to raise
                raise onerr(errmsg)
        return rc
    finally:
        # restore the environment and working directory
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
530
530
def rename(src, dst):
    """forcibly rename a file"""
    try:
        os.rename(src, dst)
    except OSError, err:
        # NOTE(review): err is unused -- any OSError triggers the
        # windows fallback below, even on POSIX
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we create
        # temporary file, delete it, rename destination to that name,
        # then delete that. then rename is safe to do.
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
        os.close(fd)
        os.unlink(temp)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)
548
548
def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # prune any parent directories left empty by the removal;
    # removedirs raises OSError as soon as it hits a non-empty one,
    # which we deliberately ignore
    parent = os.path.dirname(f)
    try:
        os.removedirs(parent)
    except OSError:
        pass
557
557
558 def copyfile(src, dest):
558 def copyfile(src, dest):
559 "copy a file, preserving mode"
559 "copy a file, preserving mode"
560 try:
560 try:
561 shutil.copyfile(src, dest)
561 shutil.copyfile(src, dest)
562 shutil.copymode(src, dest)
562 shutil.copymode(src, dest)
563 except shutil.Error, inst:
563 except shutil.Error, inst:
564 raise util.Abort(str(inst))
564 raise util.Abort(str(inst))
565
565
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # only attempt hardlinks when src and dst are on the same device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if not os.path.isdir(src):
        if not hardlink:
            shutil.copy(src, dst)
            return
        try:
            os_link(src, dst)
        except (IOError, OSError):
            # linking failed; fall back to copying for this file
            hardlink = False
            shutil.copy(src, dst)
        return

    # directory: recreate it and recurse, propagating the hardlink choice
    os.mkdir(dst)
    for entry in os.listdir(src):
        copyfiles(os.path.join(src, entry),
                  os.path.join(dst, entry), hardlink)
588
588
def audit_path(path):
    """Abort if path contains dangerous components"""
    parts = os.path.normcase(path).split(os.sep)
    # reject drive-letter prefixes, parent-directory escapes, anything
    # rooted in .hg, and absolute/empty leading components
    drive = os.path.splitdrive(path)[0]
    if drive or os.pardir in parts or parts[0] in ('.hg', ''):
        raise Abort(_("path contains illegal component: %s\n") % path)
595
595
596 def _makelock_file(info, pathname):
596 def _makelock_file(info, pathname):
597 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
597 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
598 os.write(ld, info)
598 os.write(ld, info)
599 os.close(ld)
599 os.close(ld)
600
600
def _readlock_file(pathname):
    # posixfile is this module's platform file constructor (the plain
    # builtin on POSIX); return the raw lock file contents
    return posixfile(pathname).read()
603
603
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    # lstat so we count links of the entry itself, not a symlink target
    st = os.lstat(pathname)
    return st.st_nlink
607
607
# hardlink support is optional: os.link is missing on some platforms;
# substitute a stub that always fails so callers can fall back to copying
if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))
613
613
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    # prefer the file descriptor; objects lacking fileno() are
    # stat'ed by their name attribute instead
    try:
        fileno = fp.fileno
    except AttributeError:
        return os.stat(fp.name)
    return os.fstat(fileno())
620
620
# default file constructor is the builtin file type; NOTE(review):
# platform-specific code elsewhere in the file may rebind this -- confirm
posixfile = file
622
622
def is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        # platform id 1 identifies the 9x product line
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        # non-windows pythons lack getwindowsversion; fall back to a
        # cheap heuristic based on the configured shell
        comspec = os.environ.get('comspec', '')
        return os.name == 'nt' and 'command' in comspec
629
629
# hook for platform code to install a replacement user-name lookup for
# environments where getpass/pwd are unavailable
getuser_fallback = None

def getuser():
    '''return name of current user'''
    try:
        return getpass.getuser()
    except ImportError:
        # import of pwd will fail on windows - try fallback
        if getuser_fallback:
            return getuser_fallback()
        # raised if win32api not available
        raise Abort(_('user name not available - set USERNAME '
                      'environment variable'))
643
643
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    try:
        import pwd
    except ImportError:
        # no pwd module (e.g. windows): name unknown
        return None
    if uid is None:
        uid = os.getuid()
    try:
        return pwd.getpwuid(uid)[0]
    except KeyError:
        # unknown uid: fall back to its numeric representation
        return str(uid)
658
658
def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    try:
        import grp
    except ImportError:
        # no grp module (e.g. windows): name unknown
        return None
    if gid is None:
        gid = os.getgid()
    try:
        return grp.getgrgid(gid)[0]
    except KeyError:
        # unknown gid: fall back to its numeric representation
        return str(gid)
673
673
674 # File system features
674 # File system features
675
675
def checkfolding(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st1 = os.stat(path)
    parent, leaf = os.path.split(path)
    # flip the case of the final component; if stat'ing the flipped
    # name reaches the same entry, the filesystem folds case
    other = os.path.join(parent, leaf.upper())
    if other == path:
        other = os.path.join(parent, leaf.lower())
    try:
        if os.stat(other) == st1:
            return False
    except:
        # the flipped name does not exist: case-sensitive
        pass
    return True
695
695
def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """
    # create a scratch file, flip all of its exec bits (0111), and see
    # whether the mode change sticks; on exec-less filesystems it won't
    fh, fn = tempfile.mkstemp("", "", path)
    os.close(fh)
    m = os.stat(fn).st_mode
    os.chmod(fn, m ^ 0111)
    r = (os.stat(fn).st_mode != m)
    os.unlink(fn)
    return r
709
709
def execfunc(path, fallback):
    '''return an is_exec() function with default to fallback'''
    # on exec-less filesystems the flag cannot be read back, so defer
    # to the supplied fallback instead of probing the file
    if not checkexec(path):
        return fallback
    return lambda x: is_exec(os.path.join(path, x), False)
715
710 # Platform specific variants
716 # Platform specific variants
711 if os.name == 'nt':
717 if os.name == 'nt':
712 import msvcrt
718 import msvcrt
713 nulldev = 'NUL:'
719 nulldev = 'NUL:'
714
720
715 class winstdout:
721 class winstdout:
716 '''stdout on windows misbehaves if sent through a pipe'''
722 '''stdout on windows misbehaves if sent through a pipe'''
717
723
718 def __init__(self, fp):
724 def __init__(self, fp):
719 self.fp = fp
725 self.fp = fp
720
726
721 def __getattr__(self, key):
727 def __getattr__(self, key):
722 return getattr(self.fp, key)
728 return getattr(self.fp, key)
723
729
724 def close(self):
730 def close(self):
725 try:
731 try:
726 self.fp.close()
732 self.fp.close()
727 except: pass
733 except: pass
728
734
729 def write(self, s):
735 def write(self, s):
730 try:
736 try:
731 return self.fp.write(s)
737 return self.fp.write(s)
732 except IOError, inst:
738 except IOError, inst:
733 if inst.errno != 0: raise
739 if inst.errno != 0: raise
734 self.close()
740 self.close()
735 raise IOError(errno.EPIPE, 'Broken pipe')
741 raise IOError(errno.EPIPE, 'Broken pipe')
736
742
737 sys.stdout = winstdout(sys.stdout)
743 sys.stdout = winstdout(sys.stdout)
738
744
739 def system_rcpath():
745 def system_rcpath():
740 try:
746 try:
741 return system_rcpath_win32()
747 return system_rcpath_win32()
742 except:
748 except:
743 return [r'c:\mercurial\mercurial.ini']
749 return [r'c:\mercurial\mercurial.ini']
744
750
745 def os_rcpath():
751 def os_rcpath():
746 '''return default os-specific hgrc search path'''
752 '''return default os-specific hgrc search path'''
747 path = system_rcpath()
753 path = system_rcpath()
748 path.append(user_rcpath())
754 path.append(user_rcpath())
749 userprofile = os.environ.get('USERPROFILE')
755 userprofile = os.environ.get('USERPROFILE')
750 if userprofile:
756 if userprofile:
751 path.append(os.path.join(userprofile, 'mercurial.ini'))
757 path.append(os.path.join(userprofile, 'mercurial.ini'))
752 return path
758 return path
753
759
754 def user_rcpath():
760 def user_rcpath():
755 '''return os-specific hgrc search path to the user dir'''
761 '''return os-specific hgrc search path to the user dir'''
756 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
762 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
757
763
758 def parse_patch_output(output_line):
764 def parse_patch_output(output_line):
759 """parses the output produced by patch and returns the file name"""
765 """parses the output produced by patch and returns the file name"""
760 pf = output_line[14:]
766 pf = output_line[14:]
761 if pf[0] == '`':
767 if pf[0] == '`':
762 pf = pf[1:-1] # Remove the quotes
768 pf = pf[1:-1] # Remove the quotes
763 return pf
769 return pf
764
770
765 def testpid(pid):
771 def testpid(pid):
766 '''return False if pid dead, True if running or not known'''
772 '''return False if pid dead, True if running or not known'''
767 return True
773 return True
768
774
769 def is_exec(f, last):
775 def is_exec(f, last):
770 return last
776 return last
771
777
772 def set_exec(f, mode):
778 def set_exec(f, mode):
773 pass
779 pass
774
780
775 def set_binary(fd):
781 def set_binary(fd):
776 msvcrt.setmode(fd.fileno(), os.O_BINARY)
782 msvcrt.setmode(fd.fileno(), os.O_BINARY)
777
783
778 def pconvert(path):
784 def pconvert(path):
779 return path.replace("\\", "/")
785 return path.replace("\\", "/")
780
786
781 def localpath(path):
787 def localpath(path):
782 return path.replace('/', '\\')
788 return path.replace('/', '\\')
783
789
784 def normpath(path):
790 def normpath(path):
785 return pconvert(os.path.normpath(path))
791 return pconvert(os.path.normpath(path))
786
792
787 makelock = _makelock_file
793 makelock = _makelock_file
788 readlock = _readlock_file
794 readlock = _readlock_file
789
795
790 def samestat(s1, s2):
796 def samestat(s1, s2):
791 return False
797 return False
792
798
793 def shellquote(s):
799 def shellquote(s):
794 return '"%s"' % s.replace('"', '\\"')
800 return '"%s"' % s.replace('"', '\\"')
795
801
796 def explain_exit(code):
802 def explain_exit(code):
797 return _("exited with status %d") % code, code
803 return _("exited with status %d") % code, code
798
804
799 # if you change this stub into a real check, please try to implement the
805 # if you change this stub into a real check, please try to implement the
800 # username and groupname functions above, too.
806 # username and groupname functions above, too.
801 def isowner(fp, st=None):
807 def isowner(fp, st=None):
802 return True
808 return True
803
809
804 try:
810 try:
805 # override functions with win32 versions if possible
811 # override functions with win32 versions if possible
806 from util_win32 import *
812 from util_win32 import *
807 if not is_win_9x():
813 if not is_win_9x():
808 posixfile = posixfile_nt
814 posixfile = posixfile_nt
809 except ImportError:
815 except ImportError:
810 pass
816 pass
811
817
812 else:
818 else:
813 nulldev = '/dev/null'
819 nulldev = '/dev/null'
814 _umask = os.umask(0)
820 _umask = os.umask(0)
815 os.umask(_umask)
821 os.umask(_umask)
816
822
817 def rcfiles(path):
823 def rcfiles(path):
818 rcs = [os.path.join(path, 'hgrc')]
824 rcs = [os.path.join(path, 'hgrc')]
819 rcdir = os.path.join(path, 'hgrc.d')
825 rcdir = os.path.join(path, 'hgrc.d')
820 try:
826 try:
821 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
827 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
822 if f.endswith(".rc")])
828 if f.endswith(".rc")])
823 except OSError:
829 except OSError:
824 pass
830 pass
825 return rcs
831 return rcs
826
832
827 def os_rcpath():
833 def os_rcpath():
828 '''return default os-specific hgrc search path'''
834 '''return default os-specific hgrc search path'''
829 path = []
835 path = []
830 # old mod_python does not set sys.argv
836 # old mod_python does not set sys.argv
831 if len(getattr(sys, 'argv', [])) > 0:
837 if len(getattr(sys, 'argv', [])) > 0:
832 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
838 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
833 '/../etc/mercurial'))
839 '/../etc/mercurial'))
834 path.extend(rcfiles('/etc/mercurial'))
840 path.extend(rcfiles('/etc/mercurial'))
835 path.append(os.path.expanduser('~/.hgrc'))
841 path.append(os.path.expanduser('~/.hgrc'))
836 path = [os.path.normpath(f) for f in path]
842 path = [os.path.normpath(f) for f in path]
837 return path
843 return path
838
844
839 def parse_patch_output(output_line):
845 def parse_patch_output(output_line):
840 """parses the output produced by patch and returns the file name"""
846 """parses the output produced by patch and returns the file name"""
841 pf = output_line[14:]
847 pf = output_line[14:]
842 if pf.startswith("'") and pf.endswith("'") and " " in pf:
848 if pf.startswith("'") and pf.endswith("'") and " " in pf:
843 pf = pf[1:-1] # Remove the quotes
849 pf = pf[1:-1] # Remove the quotes
844 return pf
850 return pf
845
851
846 def is_exec(f, last):
852 def is_exec(f, last):
847 """check whether a file is executable"""
853 """check whether a file is executable"""
848 return (os.lstat(f).st_mode & 0100 != 0)
854 return (os.lstat(f).st_mode & 0100 != 0)
849
855
850 def set_exec(f, mode):
856 def set_exec(f, mode):
851 s = os.lstat(f).st_mode
857 s = os.lstat(f).st_mode
852 if (s & 0100 != 0) == mode:
858 if (s & 0100 != 0) == mode:
853 return
859 return
854 if mode:
860 if mode:
855 # Turn on +x for every +r bit when making a file executable
861 # Turn on +x for every +r bit when making a file executable
856 # and obey umask.
862 # and obey umask.
857 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
863 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
858 else:
864 else:
859 os.chmod(f, s & 0666)
865 os.chmod(f, s & 0666)
860
866
861 def set_binary(fd):
867 def set_binary(fd):
862 pass
868 pass
863
869
864 def pconvert(path):
870 def pconvert(path):
865 return path
871 return path
866
872
867 def localpath(path):
873 def localpath(path):
868 return path
874 return path
869
875
870 normpath = os.path.normpath
876 normpath = os.path.normpath
871 samestat = os.path.samestat
877 samestat = os.path.samestat
872
878
873 def makelock(info, pathname):
879 def makelock(info, pathname):
874 try:
880 try:
875 os.symlink(info, pathname)
881 os.symlink(info, pathname)
876 except OSError, why:
882 except OSError, why:
877 if why.errno == errno.EEXIST:
883 if why.errno == errno.EEXIST:
878 raise
884 raise
879 else:
885 else:
880 _makelock_file(info, pathname)
886 _makelock_file(info, pathname)
881
887
882 def readlock(pathname):
888 def readlock(pathname):
883 try:
889 try:
884 return os.readlink(pathname)
890 return os.readlink(pathname)
885 except OSError, why:
891 except OSError, why:
886 if why.errno == errno.EINVAL:
892 if why.errno == errno.EINVAL:
887 return _readlock_file(pathname)
893 return _readlock_file(pathname)
888 else:
894 else:
889 raise
895 raise
890
896
891 def shellquote(s):
897 def shellquote(s):
892 return "'%s'" % s.replace("'", "'\\''")
898 return "'%s'" % s.replace("'", "'\\''")
893
899
894 def testpid(pid):
900 def testpid(pid):
895 '''return False if pid dead, True if running or not sure'''
901 '''return False if pid dead, True if running or not sure'''
896 try:
902 try:
897 os.kill(pid, 0)
903 os.kill(pid, 0)
898 return True
904 return True
899 except OSError, inst:
905 except OSError, inst:
900 return inst.errno != errno.ESRCH
906 return inst.errno != errno.ESRCH
901
907
902 def explain_exit(code):
908 def explain_exit(code):
903 """return a 2-tuple (desc, code) describing a process's status"""
909 """return a 2-tuple (desc, code) describing a process's status"""
904 if os.WIFEXITED(code):
910 if os.WIFEXITED(code):
905 val = os.WEXITSTATUS(code)
911 val = os.WEXITSTATUS(code)
906 return _("exited with status %d") % val, val
912 return _("exited with status %d") % val, val
907 elif os.WIFSIGNALED(code):
913 elif os.WIFSIGNALED(code):
908 val = os.WTERMSIG(code)
914 val = os.WTERMSIG(code)
909 return _("killed by signal %d") % val, val
915 return _("killed by signal %d") % val, val
910 elif os.WIFSTOPPED(code):
916 elif os.WIFSTOPPED(code):
911 val = os.WSTOPSIG(code)
917 val = os.WSTOPSIG(code)
912 return _("stopped by signal %d") % val, val
918 return _("stopped by signal %d") % val, val
913 raise ValueError(_("invalid exit code"))
919 raise ValueError(_("invalid exit code"))
914
920
915 def isowner(fp, st=None):
921 def isowner(fp, st=None):
916 """Return True if the file object f belongs to the current user.
922 """Return True if the file object f belongs to the current user.
917
923
918 The return value of a util.fstat(f) may be passed as the st argument.
924 The return value of a util.fstat(f) may be passed as the st argument.
919 """
925 """
920 if st is None:
926 if st is None:
921 st = fstat(fp)
927 st = fstat(fp)
922 return st.st_uid == os.getuid()
928 return st.st_uid == os.getuid()
923
929
def _buildencodefun():
    """Build (encode, decode) functions for store filenames.

    Control chars, high-bit chars and Windows-reserved chars become
    '~%02x' escapes; uppercase letters and '_' are escaped with a
    leading '_' so names survive case-folding filesystems.
    """
    e = '_'
    win_reserved = [ord(c) for c in '\\:*?"<>|']
    cmap = {}
    # printable ASCII maps to itself by default
    for x in range(127):
        cmap[chr(x)] = chr(x)
    # escape control chars, DEL/high-bit chars and reserved chars
    for x in list(range(32)) + list(range(126, 256)) + win_reserved:
        cmap[chr(x)] = "~%02x" % x
    # fold uppercase letters (and the escape char itself) to '_x'
    for x in list(range(ord("A"), ord("Z") + 1)) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.items():
        dmap[v] = k

    def decode(s):
        i = 0
        while i < len(s):
            # escapes are 1-3 chars long; try the shortest match first
            for l in range(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError

    def encode(s):
        return "".join([cmap[c] for c in s])

    return encode, lambda s: "".join(list(decode(s)))

encodefilename, decodefilename = _buildencodefun()
951
957
def encodedopener(openerfn, fn):
    """Wrap *openerfn* so every path is passed through *fn* first."""
    def wrapped(path, *args, **kw):
        return openerfn(fn(path), *args, **kw)
    return wrapped
956
962
957 def opener(base, audit=True):
963 def opener(base, audit=True):
958 """
964 """
959 return a function that opens files relative to base
965 return a function that opens files relative to base
960
966
961 this function is used to hide the details of COW semantics and
967 this function is used to hide the details of COW semantics and
962 remote file access from higher level code.
968 remote file access from higher level code.
963 """
969 """
964 p = base
970 p = base
965 audit_p = audit
971 audit_p = audit
966
972
967 def mktempcopy(name):
973 def mktempcopy(name):
968 d, fn = os.path.split(name)
974 d, fn = os.path.split(name)
969 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
975 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
970 os.close(fd)
976 os.close(fd)
971 ofp = posixfile(temp, "wb")
977 ofp = posixfile(temp, "wb")
972 try:
978 try:
973 try:
979 try:
974 ifp = posixfile(name, "rb")
980 ifp = posixfile(name, "rb")
975 except IOError, inst:
981 except IOError, inst:
976 if not getattr(inst, 'filename', None):
982 if not getattr(inst, 'filename', None):
977 inst.filename = name
983 inst.filename = name
978 raise
984 raise
979 for chunk in filechunkiter(ifp):
985 for chunk in filechunkiter(ifp):
980 ofp.write(chunk)
986 ofp.write(chunk)
981 ifp.close()
987 ifp.close()
982 ofp.close()
988 ofp.close()
983 except:
989 except:
984 try: os.unlink(temp)
990 try: os.unlink(temp)
985 except: pass
991 except: pass
986 raise
992 raise
987 st = os.lstat(name)
993 st = os.lstat(name)
988 os.chmod(temp, st.st_mode)
994 os.chmod(temp, st.st_mode)
989 return temp
995 return temp
990
996
991 class atomictempfile(posixfile):
997 class atomictempfile(posixfile):
992 """the file will only be copied when rename is called"""
998 """the file will only be copied when rename is called"""
993 def __init__(self, name, mode):
999 def __init__(self, name, mode):
994 self.__name = name
1000 self.__name = name
995 self.temp = mktempcopy(name)
1001 self.temp = mktempcopy(name)
996 posixfile.__init__(self, self.temp, mode)
1002 posixfile.__init__(self, self.temp, mode)
997 def rename(self):
1003 def rename(self):
998 if not self.closed:
1004 if not self.closed:
999 posixfile.close(self)
1005 posixfile.close(self)
1000 rename(self.temp, localpath(self.__name))
1006 rename(self.temp, localpath(self.__name))
1001 def __del__(self):
1007 def __del__(self):
1002 if not self.closed:
1008 if not self.closed:
1003 try:
1009 try:
1004 os.unlink(self.temp)
1010 os.unlink(self.temp)
1005 except: pass
1011 except: pass
1006 posixfile.close(self)
1012 posixfile.close(self)
1007
1013
1008 class atomicfile(atomictempfile):
1014 class atomicfile(atomictempfile):
1009 """the file will only be copied on close"""
1015 """the file will only be copied on close"""
1010 def __init__(self, name, mode):
1016 def __init__(self, name, mode):
1011 atomictempfile.__init__(self, name, mode)
1017 atomictempfile.__init__(self, name, mode)
1012 def close(self):
1018 def close(self):
1013 self.rename()
1019 self.rename()
1014 def __del__(self):
1020 def __del__(self):
1015 self.rename()
1021 self.rename()
1016
1022
1017 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1023 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1018 if audit_p:
1024 if audit_p:
1019 audit_path(path)
1025 audit_path(path)
1020 f = os.path.join(p, path)
1026 f = os.path.join(p, path)
1021
1027
1022 if not text:
1028 if not text:
1023 mode += "b" # for that other OS
1029 mode += "b" # for that other OS
1024
1030
1025 if mode[0] != "r":
1031 if mode[0] != "r":
1026 try:
1032 try:
1027 nlink = nlinks(f)
1033 nlink = nlinks(f)
1028 except OSError:
1034 except OSError:
1029 d = os.path.dirname(f)
1035 d = os.path.dirname(f)
1030 if not os.path.isdir(d):
1036 if not os.path.isdir(d):
1031 os.makedirs(d)
1037 os.makedirs(d)
1032 else:
1038 else:
1033 if atomic:
1039 if atomic:
1034 return atomicfile(f, mode)
1040 return atomicfile(f, mode)
1035 elif atomictemp:
1041 elif atomictemp:
1036 return atomictempfile(f, mode)
1042 return atomictempfile(f, mode)
1037 if nlink > 1:
1043 if nlink > 1:
1038 rename(mktempcopy(f), f)
1044 rename(mktempcopy(f), f)
1039 return posixfile(f, mode)
1045 return posixfile(f, mode)
1040
1046
1041 return o
1047 return o
1042
1048
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if self.iterempty:
            return
        collector = cStringIO.StringIO()
        collector.write(self.buf)
        for piece in self.in_iter:
            collector.write(piece)
        self.buf = collector.getvalue()
        self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iterempty = True
            self.buf = collector.getvalue()
        # buffer() gives a zero-copy view of the unread tail
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
1087
1093
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        # when nbytes hits 0 the short-circuit yields a falsy value
        chunk = nbytes and f.read(nbytes)
        if not chunk:
            break
        if limit:
            limit -= len(chunk)
        yield chunk
1104
1110
def makedate():
    """Return (unixtime, tzoffset) describing the current local time."""
    now = time.localtime()
    # pick the DST-aware offset when DST is currently in effect
    if now[8] == 1 and time.daylight:
        offset = time.altzone
    else:
        offset = time.timezone
    return time.mktime(now), offset
1112
1118
def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    when, tz = date or makedate()
    # render in the zone the timestamp was recorded in
    text = time.strftime(format, time.gmtime(float(when) - tz))
    if timezone:
        text += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
    return text
1123
1129
def strdate(string, format, defaults):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def tzoffset(text):
        # the trailing token may be a numeric (+0500) or symbolic zone
        tz = text.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            tz = int(tz)
            return - 3600 * (tz / 100) - 60 * (tz % 100)
        if tz in ("GMT", "UTC"):
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset = tzoffset(string)
    date = string
    if offset != None:
        # zone recognized: strip it before strptime sees the string
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%" + p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1158
1164
def parsedate(string, formats=None, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    The date may be a "unixtime offset" string or in one of the specified
    formats."""
    if not string:
        return 0, 0
    if not formats:
        formats = defaultdateformats
    string = string.strip()
    try:
        # fast path: an internal "unixtime offset" pair
        when, offset = map(int, string.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part in defaults:
                continue
            if part[0] in "HMS":
                defaults[part] = "00"
            elif part[0] in "dm":
                defaults[part] = "1"
            else:
                # year-like parts default to "now"
                defaults[part] = datestr(now, "%" + part[0], False)

        for format in formats:
            try:
                when, offset = strdate(string, format, defaults)
            except ValueError:
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % string)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1202
1208
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        # earliest instant the (possibly partial) date can mean
        return parsedate(date, extendeddateformats)[0]

    def upper(date):
        # latest instant: pad with end-of-range values, trying month
        # lengths from longest to shortest until one parses
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    if date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    if date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    if " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    start, stop = lower(date), upper(date)
    return lambda x: x >= start and x <= stop
1250
1256
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop everything from the '@' on, then keep what follows a '<'
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    # finally truncate at the first space or dot
    for sep in (' ', '.'):
        pos = user.find(sep)
        if pos >= 0:
            user = user[:pos]
    return user
1266
1272
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) > maxlength:
        # reserve three characters for the trailing dots
        return "%s..." % text[:maxlength - 3]
    return text
1273
1279
def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        # Only errors on the root path itself are fatal; failures in
        # subdirectories are silently skipped by os.walk.
        if err.filename == path:
            raise err

    for root, dirs, files in os.walk(path, onerror=errhandler):
        if '.hg' in dirs:
            yield root
            # Found a repository: do not descend into it.
            dirs[:] = []
1286
1292
# Lazily-computed cache for rcpath(); None until the first call.
_rcpath = None
1288
1294
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        env = os.environ.get('HGRCPATH')
        if env is None:
            # no override: fall back to the platform default path
            _rcpath = os_rcpath()
        else:
            _rcpath = []
            for entry in env.split(os.pathsep):
                if not entry:
                    continue
                if os.path.isdir(entry):
                    # a directory entry expands to all of its *.rc files
                    _rcpath.extend([os.path.join(entry, name)
                                    for name in os.listdir(entry)
                                    if name.endswith('.rc')])
                else:
                    _rcpath.append(entry)
    return _rcpath
1310
1316
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    # (multiplier, divisor, format) triples, largest threshold first:
    # the first entry whose threshold (multiplier * divisor) the value
    # reaches decides the unit and the number of decimal places.
    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes < multiplier * divisor:
            continue
        return format % (nbytes / float(divisor))
    # below every threshold (e.g. nbytes == 0): plain byte count
    return units[-1][2] % nbytes
1331
1337
def drop_scheme(scheme, path):
    """Strip a leading 'scheme:' (and an immediately following '//')
    from path; return path unchanged if it has no such prefix."""
    prefix = scheme + ':'
    if not path.startswith(prefix):
        return path
    stripped = path[len(prefix):]
    if stripped.startswith('//'):
        stripped = stripped[2:]
    return stripped
General Comments 0
You need to be logged in to leave comments. Login now