##// END OF EJS Templates
symlinks: use is_link wherever is_exec is used
Matt Mackall -
r4002:d7b9ec58 default
parent child Browse files
Show More
@@ -1,517 +1,518 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import ancestor, bdiff, repo, revlog, util, os, errno
10 import ancestor, bdiff, repo, revlog, util, os, errno
11
11
12 class changectx(object):
12 class changectx(object):
13 """A changecontext object makes access to data related to a particular
13 """A changecontext object makes access to data related to a particular
14 changeset convenient."""
14 changeset convenient."""
15 def __init__(self, repo, changeid=None):
15 def __init__(self, repo, changeid=None):
16 """changeid is a revision number, node, or tag"""
16 """changeid is a revision number, node, or tag"""
17 self._repo = repo
17 self._repo = repo
18
18
19 if not changeid and changeid != 0:
19 if not changeid and changeid != 0:
20 p1, p2 = self._repo.dirstate.parents()
20 p1, p2 = self._repo.dirstate.parents()
21 self._rev = self._repo.changelog.rev(p1)
21 self._rev = self._repo.changelog.rev(p1)
22 if self._rev == -1:
22 if self._rev == -1:
23 changeid = 'tip'
23 changeid = 'tip'
24 else:
24 else:
25 self._node = p1
25 self._node = p1
26 return
26 return
27
27
28 self._node = self._repo.lookup(changeid)
28 self._node = self._repo.lookup(changeid)
29 self._rev = self._repo.changelog.rev(self._node)
29 self._rev = self._repo.changelog.rev(self._node)
30
30
31 def __str__(self):
31 def __str__(self):
32 return short(self.node())
32 return short(self.node())
33
33
34 def __repr__(self):
34 def __repr__(self):
35 return "<changectx %s>" % str(self)
35 return "<changectx %s>" % str(self)
36
36
37 def __eq__(self, other):
37 def __eq__(self, other):
38 try:
38 try:
39 return self._rev == other._rev
39 return self._rev == other._rev
40 except AttributeError:
40 except AttributeError:
41 return False
41 return False
42
42
43 def __nonzero__(self):
43 def __nonzero__(self):
44 return self._rev != nullrev
44 return self._rev != nullrev
45
45
46 def __getattr__(self, name):
46 def __getattr__(self, name):
47 if name == '_changeset':
47 if name == '_changeset':
48 self._changeset = self._repo.changelog.read(self.node())
48 self._changeset = self._repo.changelog.read(self.node())
49 return self._changeset
49 return self._changeset
50 elif name == '_manifest':
50 elif name == '_manifest':
51 self._manifest = self._repo.manifest.read(self._changeset[0])
51 self._manifest = self._repo.manifest.read(self._changeset[0])
52 return self._manifest
52 return self._manifest
53 elif name == '_manifestdelta':
53 elif name == '_manifestdelta':
54 md = self._repo.manifest.readdelta(self._changeset[0])
54 md = self._repo.manifest.readdelta(self._changeset[0])
55 self._manifestdelta = md
55 self._manifestdelta = md
56 return self._manifestdelta
56 return self._manifestdelta
57 else:
57 else:
58 raise AttributeError, name
58 raise AttributeError, name
59
59
60 def changeset(self): return self._changeset
60 def changeset(self): return self._changeset
61 def manifest(self): return self._manifest
61 def manifest(self): return self._manifest
62
62
63 def rev(self): return self._rev
63 def rev(self): return self._rev
64 def node(self): return self._node
64 def node(self): return self._node
65 def user(self): return self._changeset[1]
65 def user(self): return self._changeset[1]
66 def date(self): return self._changeset[2]
66 def date(self): return self._changeset[2]
67 def files(self): return self._changeset[3]
67 def files(self): return self._changeset[3]
68 def description(self): return self._changeset[4]
68 def description(self): return self._changeset[4]
69 def branch(self): return self._changeset[5].get("branch", "")
69 def branch(self): return self._changeset[5].get("branch", "")
70
70
71 def parents(self):
71 def parents(self):
72 """return contexts for each parent changeset"""
72 """return contexts for each parent changeset"""
73 p = self._repo.changelog.parents(self._node)
73 p = self._repo.changelog.parents(self._node)
74 return [changectx(self._repo, x) for x in p]
74 return [changectx(self._repo, x) for x in p]
75
75
76 def children(self):
76 def children(self):
77 """return contexts for each child changeset"""
77 """return contexts for each child changeset"""
78 c = self._repo.changelog.children(self._node)
78 c = self._repo.changelog.children(self._node)
79 return [changectx(self._repo, x) for x in c]
79 return [changectx(self._repo, x) for x in c]
80
80
81 def filenode(self, path):
81 def filenode(self, path):
82 if '_manifest' in self.__dict__:
82 if '_manifest' in self.__dict__:
83 try:
83 try:
84 return self._manifest[path]
84 return self._manifest[path]
85 except KeyError:
85 except KeyError:
86 raise revlog.LookupError(_("'%s' not found in manifest") % path)
86 raise revlog.LookupError(_("'%s' not found in manifest") % path)
87 if '_manifestdelta' in self.__dict__ or path in self.files():
87 if '_manifestdelta' in self.__dict__ or path in self.files():
88 if path in self._manifestdelta:
88 if path in self._manifestdelta:
89 return self._manifestdelta[path]
89 return self._manifestdelta[path]
90 node, flag = self._repo.manifest.find(self._changeset[0], path)
90 node, flag = self._repo.manifest.find(self._changeset[0], path)
91 if not node:
91 if not node:
92 raise revlog.LookupError(_("'%s' not found in manifest") % path)
92 raise revlog.LookupError(_("'%s' not found in manifest") % path)
93
93
94 return node
94 return node
95
95
96 def filectx(self, path, fileid=None, filelog=None):
96 def filectx(self, path, fileid=None, filelog=None):
97 """get a file context from this changeset"""
97 """get a file context from this changeset"""
98 if fileid is None:
98 if fileid is None:
99 fileid = self.filenode(path)
99 fileid = self.filenode(path)
100 return filectx(self._repo, path, fileid=fileid,
100 return filectx(self._repo, path, fileid=fileid,
101 changectx=self, filelog=filelog)
101 changectx=self, filelog=filelog)
102
102
103 def filectxs(self):
103 def filectxs(self):
104 """generate a file context for each file in this changeset's
104 """generate a file context for each file in this changeset's
105 manifest"""
105 manifest"""
106 mf = self.manifest()
106 mf = self.manifest()
107 m = mf.keys()
107 m = mf.keys()
108 m.sort()
108 m.sort()
109 for f in m:
109 for f in m:
110 yield self.filectx(f, fileid=mf[f])
110 yield self.filectx(f, fileid=mf[f])
111
111
112 def ancestor(self, c2):
112 def ancestor(self, c2):
113 """
113 """
114 return the ancestor context of self and c2
114 return the ancestor context of self and c2
115 """
115 """
116 n = self._repo.changelog.ancestor(self._node, c2._node)
116 n = self._repo.changelog.ancestor(self._node, c2._node)
117 return changectx(self._repo, n)
117 return changectx(self._repo, n)
118
118
119 class filectx(object):
119 class filectx(object):
120 """A filecontext object makes access to data related to a particular
120 """A filecontext object makes access to data related to a particular
121 filerevision convenient."""
121 filerevision convenient."""
122 def __init__(self, repo, path, changeid=None, fileid=None,
122 def __init__(self, repo, path, changeid=None, fileid=None,
123 filelog=None, changectx=None):
123 filelog=None, changectx=None):
124 """changeid can be a changeset revision, node, or tag.
124 """changeid can be a changeset revision, node, or tag.
125 fileid can be a file revision or node."""
125 fileid can be a file revision or node."""
126 self._repo = repo
126 self._repo = repo
127 self._path = path
127 self._path = path
128
128
129 assert (changeid is not None
129 assert (changeid is not None
130 or fileid is not None
130 or fileid is not None
131 or changectx is not None)
131 or changectx is not None)
132
132
133 if filelog:
133 if filelog:
134 self._filelog = filelog
134 self._filelog = filelog
135
135
136 if fileid is None:
136 if fileid is None:
137 if changectx is None:
137 if changectx is None:
138 self._changeid = changeid
138 self._changeid = changeid
139 else:
139 else:
140 self._changectx = changectx
140 self._changectx = changectx
141 else:
141 else:
142 self._fileid = fileid
142 self._fileid = fileid
143
143
144 def __getattr__(self, name):
144 def __getattr__(self, name):
145 if name == '_changectx':
145 if name == '_changectx':
146 self._changectx = changectx(self._repo, self._changeid)
146 self._changectx = changectx(self._repo, self._changeid)
147 return self._changectx
147 return self._changectx
148 elif name == '_filelog':
148 elif name == '_filelog':
149 self._filelog = self._repo.file(self._path)
149 self._filelog = self._repo.file(self._path)
150 return self._filelog
150 return self._filelog
151 elif name == '_changeid':
151 elif name == '_changeid':
152 self._changeid = self._filelog.linkrev(self._filenode)
152 self._changeid = self._filelog.linkrev(self._filenode)
153 return self._changeid
153 return self._changeid
154 elif name == '_filenode':
154 elif name == '_filenode':
155 if '_fileid' in self.__dict__:
155 if '_fileid' in self.__dict__:
156 self._filenode = self._filelog.lookup(self._fileid)
156 self._filenode = self._filelog.lookup(self._fileid)
157 else:
157 else:
158 self._filenode = self._changectx.filenode(self._path)
158 self._filenode = self._changectx.filenode(self._path)
159 return self._filenode
159 return self._filenode
160 elif name == '_filerev':
160 elif name == '_filerev':
161 self._filerev = self._filelog.rev(self._filenode)
161 self._filerev = self._filelog.rev(self._filenode)
162 return self._filerev
162 return self._filerev
163 else:
163 else:
164 raise AttributeError, name
164 raise AttributeError, name
165
165
166 def __nonzero__(self):
166 def __nonzero__(self):
167 try:
167 try:
168 n = self._filenode
168 n = self._filenode
169 return True
169 return True
170 except revlog.LookupError:
170 except revlog.LookupError:
171 # file is missing
171 # file is missing
172 return False
172 return False
173
173
174 def __str__(self):
174 def __str__(self):
175 return "%s@%s" % (self.path(), short(self.node()))
175 return "%s@%s" % (self.path(), short(self.node()))
176
176
177 def __repr__(self):
177 def __repr__(self):
178 return "<filectx %s>" % str(self)
178 return "<filectx %s>" % str(self)
179
179
180 def __eq__(self, other):
180 def __eq__(self, other):
181 try:
181 try:
182 return (self._path == other._path
182 return (self._path == other._path
183 and self._changeid == other._changeid)
183 and self._changeid == other._changeid)
184 except AttributeError:
184 except AttributeError:
185 return False
185 return False
186
186
187 def filectx(self, fileid):
187 def filectx(self, fileid):
188 '''opens an arbitrary revision of the file without
188 '''opens an arbitrary revision of the file without
189 opening a new filelog'''
189 opening a new filelog'''
190 return filectx(self._repo, self._path, fileid=fileid,
190 return filectx(self._repo, self._path, fileid=fileid,
191 filelog=self._filelog)
191 filelog=self._filelog)
192
192
193 def filerev(self): return self._filerev
193 def filerev(self): return self._filerev
194 def filenode(self): return self._filenode
194 def filenode(self): return self._filenode
195 def filelog(self): return self._filelog
195 def filelog(self): return self._filelog
196
196
197 def rev(self):
197 def rev(self):
198 if '_changectx' in self.__dict__:
198 if '_changectx' in self.__dict__:
199 return self._changectx.rev()
199 return self._changectx.rev()
200 return self._filelog.linkrev(self._filenode)
200 return self._filelog.linkrev(self._filenode)
201
201
202 def node(self): return self._changectx.node()
202 def node(self): return self._changectx.node()
203 def user(self): return self._changectx.user()
203 def user(self): return self._changectx.user()
204 def date(self): return self._changectx.date()
204 def date(self): return self._changectx.date()
205 def files(self): return self._changectx.files()
205 def files(self): return self._changectx.files()
206 def description(self): return self._changectx.description()
206 def description(self): return self._changectx.description()
207 def branch(self): return self._changectx.branch()
207 def branch(self): return self._changectx.branch()
208 def manifest(self): return self._changectx.manifest()
208 def manifest(self): return self._changectx.manifest()
209 def changectx(self): return self._changectx
209 def changectx(self): return self._changectx
210
210
211 def data(self): return self._filelog.read(self._filenode)
211 def data(self): return self._filelog.read(self._filenode)
212 def renamed(self): return self._filelog.renamed(self._filenode)
212 def renamed(self): return self._filelog.renamed(self._filenode)
213 def path(self): return self._path
213 def path(self): return self._path
214 def size(self): return self._filelog.size(self._filerev)
214 def size(self): return self._filelog.size(self._filerev)
215
215
216 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
216 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
217
217
218 def parents(self):
218 def parents(self):
219 p = self._path
219 p = self._path
220 fl = self._filelog
220 fl = self._filelog
221 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
221 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
222
222
223 r = self.renamed()
223 r = self.renamed()
224 if r:
224 if r:
225 pl[0] = (r[0], r[1], None)
225 pl[0] = (r[0], r[1], None)
226
226
227 return [filectx(self._repo, p, fileid=n, filelog=l)
227 return [filectx(self._repo, p, fileid=n, filelog=l)
228 for p,n,l in pl if n != nullid]
228 for p,n,l in pl if n != nullid]
229
229
230 def children(self):
230 def children(self):
231 # hard for renames
231 # hard for renames
232 c = self._filelog.children(self._filenode)
232 c = self._filelog.children(self._filenode)
233 return [filectx(self._repo, self._path, fileid=x,
233 return [filectx(self._repo, self._path, fileid=x,
234 filelog=self._filelog) for x in c]
234 filelog=self._filelog) for x in c]
235
235
236 def annotate(self, follow=False):
236 def annotate(self, follow=False):
237 '''returns a list of tuples of (ctx, line) for each line
237 '''returns a list of tuples of (ctx, line) for each line
238 in the file, where ctx is the filectx of the node where
238 in the file, where ctx is the filectx of the node where
239 that line was last changed'''
239 that line was last changed'''
240
240
241 def decorate(text, rev):
241 def decorate(text, rev):
242 return ([rev] * len(text.splitlines()), text)
242 return ([rev] * len(text.splitlines()), text)
243
243
244 def pair(parent, child):
244 def pair(parent, child):
245 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
245 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
246 child[0][b1:b2] = parent[0][a1:a2]
246 child[0][b1:b2] = parent[0][a1:a2]
247 return child
247 return child
248
248
249 getlog = util.cachefunc(lambda x: self._repo.file(x))
249 getlog = util.cachefunc(lambda x: self._repo.file(x))
250 def getctx(path, fileid):
250 def getctx(path, fileid):
251 log = path == self._path and self._filelog or getlog(path)
251 log = path == self._path and self._filelog or getlog(path)
252 return filectx(self._repo, path, fileid=fileid, filelog=log)
252 return filectx(self._repo, path, fileid=fileid, filelog=log)
253 getctx = util.cachefunc(getctx)
253 getctx = util.cachefunc(getctx)
254
254
255 def parents(f):
255 def parents(f):
256 # we want to reuse filectx objects as much as possible
256 # we want to reuse filectx objects as much as possible
257 p = f._path
257 p = f._path
258 if f._filerev is None: # working dir
258 if f._filerev is None: # working dir
259 pl = [(n.path(), n.filerev()) for n in f.parents()]
259 pl = [(n.path(), n.filerev()) for n in f.parents()]
260 else:
260 else:
261 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
261 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
262
262
263 if follow:
263 if follow:
264 r = f.renamed()
264 r = f.renamed()
265 if r:
265 if r:
266 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
266 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
267
267
268 return [getctx(p, n) for p, n in pl if n != nullrev]
268 return [getctx(p, n) for p, n in pl if n != nullrev]
269
269
270 # use linkrev to find the first changeset where self appeared
270 # use linkrev to find the first changeset where self appeared
271 if self.rev() != self._filelog.linkrev(self._filenode):
271 if self.rev() != self._filelog.linkrev(self._filenode):
272 base = self.filectx(self.filerev())
272 base = self.filectx(self.filerev())
273 else:
273 else:
274 base = self
274 base = self
275
275
276 # find all ancestors
276 # find all ancestors
277 needed = {base: 1}
277 needed = {base: 1}
278 visit = [base]
278 visit = [base]
279 files = [base._path]
279 files = [base._path]
280 while visit:
280 while visit:
281 f = visit.pop(0)
281 f = visit.pop(0)
282 for p in parents(f):
282 for p in parents(f):
283 if p not in needed:
283 if p not in needed:
284 needed[p] = 1
284 needed[p] = 1
285 visit.append(p)
285 visit.append(p)
286 if p._path not in files:
286 if p._path not in files:
287 files.append(p._path)
287 files.append(p._path)
288 else:
288 else:
289 # count how many times we'll use this
289 # count how many times we'll use this
290 needed[p] += 1
290 needed[p] += 1
291
291
292 # sort by revision (per file) which is a topological order
292 # sort by revision (per file) which is a topological order
293 visit = []
293 visit = []
294 files.reverse()
294 files.reverse()
295 for f in files:
295 for f in files:
296 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
296 fn = [(n._filerev, n) for n in needed.keys() if n._path == f]
297 fn.sort()
297 fn.sort()
298 visit.extend(fn)
298 visit.extend(fn)
299 hist = {}
299 hist = {}
300
300
301 for r, f in visit:
301 for r, f in visit:
302 curr = decorate(f.data(), f)
302 curr = decorate(f.data(), f)
303 for p in parents(f):
303 for p in parents(f):
304 if p != nullid:
304 if p != nullid:
305 curr = pair(hist[p], curr)
305 curr = pair(hist[p], curr)
306 # trim the history of unneeded revs
306 # trim the history of unneeded revs
307 needed[p] -= 1
307 needed[p] -= 1
308 if not needed[p]:
308 if not needed[p]:
309 del hist[p]
309 del hist[p]
310 hist[f] = curr
310 hist[f] = curr
311
311
312 return zip(hist[f][0], hist[f][1].splitlines(1))
312 return zip(hist[f][0], hist[f][1].splitlines(1))
313
313
314 def ancestor(self, fc2):
314 def ancestor(self, fc2):
315 """
315 """
316 find the common ancestor file context, if any, of self, and fc2
316 find the common ancestor file context, if any, of self, and fc2
317 """
317 """
318
318
319 acache = {}
319 acache = {}
320
320
321 # prime the ancestor cache for the working directory
321 # prime the ancestor cache for the working directory
322 for c in (self, fc2):
322 for c in (self, fc2):
323 if c._filerev == None:
323 if c._filerev == None:
324 pl = [(n.path(), n.filenode()) for n in c.parents()]
324 pl = [(n.path(), n.filenode()) for n in c.parents()]
325 acache[(c._path, None)] = pl
325 acache[(c._path, None)] = pl
326
326
327 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
327 flcache = {self._path:self._filelog, fc2._path:fc2._filelog}
328 def parents(vertex):
328 def parents(vertex):
329 if vertex in acache:
329 if vertex in acache:
330 return acache[vertex]
330 return acache[vertex]
331 f, n = vertex
331 f, n = vertex
332 if f not in flcache:
332 if f not in flcache:
333 flcache[f] = self._repo.file(f)
333 flcache[f] = self._repo.file(f)
334 fl = flcache[f]
334 fl = flcache[f]
335 pl = [(f, p) for p in fl.parents(n) if p != nullid]
335 pl = [(f, p) for p in fl.parents(n) if p != nullid]
336 re = fl.renamed(n)
336 re = fl.renamed(n)
337 if re:
337 if re:
338 pl.append(re)
338 pl.append(re)
339 acache[vertex] = pl
339 acache[vertex] = pl
340 return pl
340 return pl
341
341
342 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
342 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
343 v = ancestor.ancestor(a, b, parents)
343 v = ancestor.ancestor(a, b, parents)
344 if v:
344 if v:
345 f, n = v
345 f, n = v
346 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
346 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
347
347
348 return None
348 return None
349
349
350 class workingctx(changectx):
350 class workingctx(changectx):
351 """A workingctx object makes access to data related to
351 """A workingctx object makes access to data related to
352 the current working directory convenient."""
352 the current working directory convenient."""
353 def __init__(self, repo):
353 def __init__(self, repo):
354 self._repo = repo
354 self._repo = repo
355 self._rev = None
355 self._rev = None
356 self._node = None
356 self._node = None
357
357
358 def __str__(self):
358 def __str__(self):
359 return str(self._parents[0]) + "+"
359 return str(self._parents[0]) + "+"
360
360
361 def __nonzero__(self):
361 def __nonzero__(self):
362 return True
362 return True
363
363
364 def __getattr__(self, name):
364 def __getattr__(self, name):
365 if name == '_parents':
365 if name == '_parents':
366 self._parents = self._repo.parents()
366 self._parents = self._repo.parents()
367 return self._parents
367 return self._parents
368 if name == '_status':
368 if name == '_status':
369 self._status = self._repo.status()
369 self._status = self._repo.status()
370 return self._status
370 return self._status
371 if name == '_manifest':
371 if name == '_manifest':
372 self._buildmanifest()
372 self._buildmanifest()
373 return self._manifest
373 return self._manifest
374 else:
374 else:
375 raise AttributeError, name
375 raise AttributeError, name
376
376
377 def _buildmanifest(self):
377 def _buildmanifest(self):
378 """generate a manifest corresponding to the working directory"""
378 """generate a manifest corresponding to the working directory"""
379
379
380 man = self._parents[0].manifest().copy()
380 man = self._parents[0].manifest().copy()
381 is_exec = util.execfunc(self._repo.root, man.execf)
381 is_exec = util.execfunc(self._repo.root, man.execf)
382 is_link = util.linkfunc(self._repo.root, man.linkf)
382 copied = self._repo.dirstate.copies()
383 copied = self._repo.dirstate.copies()
383 modified, added, removed, deleted, unknown = self._status[:5]
384 modified, added, removed, deleted, unknown = self._status[:5]
384 for i, l in (("a", added), ("m", modified), ("u", unknown)):
385 for i, l in (("a", added), ("m", modified), ("u", unknown)):
385 for f in l:
386 for f in l:
386 man[f] = man.get(copied.get(f, f), nullid) + i
387 man[f] = man.get(copied.get(f, f), nullid) + i
387 try:
388 try:
388 man.set(f, is_exec(f))
389 man.set(f, is_exec(f), is_link(f))
389 except OSError:
390 except OSError:
390 pass
391 pass
391
392
392 for f in deleted + removed:
393 for f in deleted + removed:
393 if f in man:
394 if f in man:
394 del man[f]
395 del man[f]
395
396
396 self._manifest = man
397 self._manifest = man
397
398
398 def manifest(self): return self._manifest
399 def manifest(self): return self._manifest
399
400
400 def user(self): return self._repo.ui.username()
401 def user(self): return self._repo.ui.username()
401 def date(self): return util.makedate()
402 def date(self): return util.makedate()
402 def description(self): return ""
403 def description(self): return ""
403 def files(self):
404 def files(self):
404 f = self.modified() + self.added() + self.removed()
405 f = self.modified() + self.added() + self.removed()
405 f.sort()
406 f.sort()
406 return f
407 return f
407
408
408 def modified(self): return self._status[0]
409 def modified(self): return self._status[0]
409 def added(self): return self._status[1]
410 def added(self): return self._status[1]
410 def removed(self): return self._status[2]
411 def removed(self): return self._status[2]
411 def deleted(self): return self._status[3]
412 def deleted(self): return self._status[3]
412 def unknown(self): return self._status[4]
413 def unknown(self): return self._status[4]
413 def clean(self): return self._status[5]
414 def clean(self): return self._status[5]
414 def branch(self):
415 def branch(self):
415 try:
416 try:
416 return self._repo.opener("branch").read().strip()
417 return self._repo.opener("branch").read().strip()
417 except IOError:
418 except IOError:
418 return ""
419 return ""
419
420
420 def parents(self):
421 def parents(self):
421 """return contexts for each parent changeset"""
422 """return contexts for each parent changeset"""
422 return self._parents
423 return self._parents
423
424
424 def children(self):
425 def children(self):
425 return []
426 return []
426
427
427 def filectx(self, path, filelog=None):
428 def filectx(self, path, filelog=None):
428 """get a file context from the working directory"""
429 """get a file context from the working directory"""
429 return workingfilectx(self._repo, path, workingctx=self,
430 return workingfilectx(self._repo, path, workingctx=self,
430 filelog=filelog)
431 filelog=filelog)
431
432
432 def ancestor(self, c2):
433 def ancestor(self, c2):
433 """return the ancestor context of self and c2"""
434 """return the ancestor context of self and c2"""
434 return self._parents[0].ancestor(c2) # punt on two parents for now
435 return self._parents[0].ancestor(c2) # punt on two parents for now
435
436
436 class workingfilectx(filectx):
437 class workingfilectx(filectx):
437 """A workingfilectx object makes access to data related to a particular
438 """A workingfilectx object makes access to data related to a particular
438 file in the working directory convenient."""
439 file in the working directory convenient."""
439 def __init__(self, repo, path, filelog=None, workingctx=None):
440 def __init__(self, repo, path, filelog=None, workingctx=None):
440 """changeid can be a changeset revision, node, or tag.
441 """changeid can be a changeset revision, node, or tag.
441 fileid can be a file revision or node."""
442 fileid can be a file revision or node."""
442 self._repo = repo
443 self._repo = repo
443 self._path = path
444 self._path = path
444 self._changeid = None
445 self._changeid = None
445 self._filerev = self._filenode = None
446 self._filerev = self._filenode = None
446
447
447 if filelog:
448 if filelog:
448 self._filelog = filelog
449 self._filelog = filelog
449 if workingctx:
450 if workingctx:
450 self._changectx = workingctx
451 self._changectx = workingctx
451
452
452 def __getattr__(self, name):
453 def __getattr__(self, name):
453 if name == '_changectx':
454 if name == '_changectx':
454 self._changectx = workingctx(repo)
455 self._changectx = workingctx(repo)
455 return self._changectx
456 return self._changectx
456 elif name == '_repopath':
457 elif name == '_repopath':
457 self._repopath = (self._repo.dirstate.copied(self._path)
458 self._repopath = (self._repo.dirstate.copied(self._path)
458 or self._path)
459 or self._path)
459 return self._repopath
460 return self._repopath
460 elif name == '_filelog':
461 elif name == '_filelog':
461 self._filelog = self._repo.file(self._repopath)
462 self._filelog = self._repo.file(self._repopath)
462 return self._filelog
463 return self._filelog
463 else:
464 else:
464 raise AttributeError, name
465 raise AttributeError, name
465
466
466 def __nonzero__(self):
467 def __nonzero__(self):
467 return True
468 return True
468
469
469 def __str__(self):
470 def __str__(self):
470 return "%s@%s" % (self.path(), self._changectx)
471 return "%s@%s" % (self.path(), self._changectx)
471
472
472 def filectx(self, fileid):
473 def filectx(self, fileid):
473 '''opens an arbitrary revision of the file without
474 '''opens an arbitrary revision of the file without
474 opening a new filelog'''
475 opening a new filelog'''
475 return filectx(self._repo, self._repopath, fileid=fileid,
476 return filectx(self._repo, self._repopath, fileid=fileid,
476 filelog=self._filelog)
477 filelog=self._filelog)
477
478
478 def rev(self):
479 def rev(self):
479 if '_changectx' in self.__dict__:
480 if '_changectx' in self.__dict__:
480 return self._changectx.rev()
481 return self._changectx.rev()
481 return self._filelog.linkrev(self._filenode)
482 return self._filelog.linkrev(self._filenode)
482
483
483 def data(self): return self._repo.wread(self._path)
484 def data(self): return self._repo.wread(self._path)
484 def renamed(self):
485 def renamed(self):
485 rp = self._repopath
486 rp = self._repopath
486 if rp == self._path:
487 if rp == self._path:
487 return None
488 return None
488 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
489 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
489
490
490 def parents(self):
491 def parents(self):
491 '''return parent filectxs, following copies if necessary'''
492 '''return parent filectxs, following copies if necessary'''
492 p = self._path
493 p = self._path
493 rp = self._repopath
494 rp = self._repopath
494 pcl = self._changectx._parents
495 pcl = self._changectx._parents
495 fl = self._filelog
496 fl = self._filelog
496 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
497 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
497 if len(pcl) > 1:
498 if len(pcl) > 1:
498 if rp != p:
499 if rp != p:
499 fl = None
500 fl = None
500 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
501 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
501
502
502 return [filectx(self._repo, p, fileid=n, filelog=l)
503 return [filectx(self._repo, p, fileid=n, filelog=l)
503 for p,n,l in pl if n != nullid]
504 for p,n,l in pl if n != nullid]
504
505
505 def children(self):
506 def children(self):
506 return []
507 return []
507
508
508 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
509 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
509 def date(self):
510 def date(self):
510 t, tz = self._changectx.date()
511 t, tz = self._changectx.date()
511 try:
512 try:
512 return (os.lstat(repo.wjoin(self._path)).st_mtime, tz)
513 return (os.lstat(repo.wjoin(self._path)).st_mtime, tz)
513 except OSError, err:
514 except OSError, err:
514 if err.errno != errno.ENOENT: raise
515 if err.errno != errno.ENOENT: raise
515 return (t, tz)
516 return (t, tz)
516
517
517 def cmp(self, text): return self._repo.wread(self._path) == text
518 def cmp(self, text): return self._repo.wread(self._path) == text
@@ -1,1867 +1,1869 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, appendfile, changegroup
10 import repo, appendfile, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util
13 import os, revlog, time, util
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
    # capabilities advertised by this repository implementation
    capabilities = ('lookup', 'changegroupsubset')
    # on-disk format requirements this code can read (cf. the 'requires'
    # file checked in __init__)
    supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create=1, initialize) the repository at path.

        When path is not given, walk upward from the current directory
        looking for a '.hg' directory.  Raises repo.RepoError when no
        repository is found, when create is requested on an existing
        repository, or when the on-disk format is unsupported.
        """
        repo.repository.__init__(self)
        if not path:
            # search upward for the enclosing repository root
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    # reached the filesystem root without finding .hg
                    raise repo.RepoError(_("There is no Mercurial repository"
                                           " here (.hg not found)"))
            path = p

        self.path = os.path.join(path, ".hg")
        self.root = os.path.realpath(path)
        self.origroot = path
        self.opener = util.opener(self.path)      # opens files under .hg
        self.wopener = util.opener(self.root)     # opens working-dir files

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                os.mkdir(os.path.join(self.path, "store"))
                requirements = ("revlogv1", "store")
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # old repositories have no 'requires' file
                requirements = []
            # check them
            for r in requirements:
                if r not in self.supported:
                    raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store: encoded filenames under .hg/store, or the legacy
        # flat layout directly under .hg
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path
        self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            # missing per-repo hgrc is fine
            pass

        # revlog format/flags come from the configuration ...
        v = self.ui.configrevlog()
        self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
        self.revlogv1 = self.revlogversion != revlog.REVLOGV0
        fl = v.get('flags', None)
        flags = 0
        if fl != None:
            for x in fl.split():
                flags |= revlog.flagstr(x)
        elif self.revlogv1:
            flags = revlog.REVLOG_DEFAULT_FLAGS

        v = self.revlogversion | flags
        self.manifest = manifest.manifest(self.sopener, v)
        self.changelog = changelog.changelog(self.sopener, v)

        fallback = self.ui.config('ui', 'fallbackencoding')
        if fallback:
            util._fallbackencoding = fallback

        # the changelog might not have the inline index flag
        # on. If the format of the changelog is the same as found in
        # .hgrc, apply any flags found in the .hgrc as well.
        # Otherwise, just version from the changelog
        v = self.changelog.version
        if v == self.revlogversion:
            v |= flags
        self.revlogversion = v

        # lazily-populated caches
        self.tagscache = None
        self.branchcache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None
        self.transhandle = None

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
125
125
126 def url(self):
126 def url(self):
127 return 'file:' + self.root
127 return 'file:' + self.root
128
128
    def hook(self, name, throw=False, **args):
        """Run all configured [hooks] entries whose key (before any '.')
        equals name.

        'python:mod.func' entries run in-process; anything else runs as a
        shell command with HG_* environment variables built from args.
        Returns the OR of all hook results; when throw is true a failing
        hook raises util.Abort instead of just warning.
        """
        def callhook(hname, funcname):
            '''call python hook. hook is callable object, looked up as
            name in python module. if callable returns "true", hook
            fails, else passes. if hook raises exception, treated as
            hook failure. exception propagates if throw is "true".

            reason for "true" meaning "hook failed" is so that
            unmodified commands (e.g. mercurial.commands.update) can
            be run as hooks without wrappers to convert return values.'''

            self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
            d = funcname.rfind('.')
            if d == -1:
                raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
                                 % (hname, funcname))
            modname = funcname[:d]
            try:
                obj = __import__(modname)
            except ImportError:
                try:
                    # extensions are loaded with hgext_ prefix
                    obj = __import__("hgext_%s" % modname)
                except ImportError:
                    raise util.Abort(_('%s hook is invalid '
                                       '(import of "%s" failed)') %
                                     (hname, modname))
            try:
                # walk the dotted path down to the callable
                for p in funcname.split('.')[1:]:
                    obj = getattr(obj, p)
            except AttributeError, err:
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not defined)') %
                                 (hname, funcname))
            if not callable(obj):
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not callable)') %
                                 (hname, funcname))
            try:
                r = obj(ui=self.ui, repo=self, hooktype=name, **args)
            except (KeyboardInterrupt, util.SignalInterrupt):
                raise
            except Exception, exc:
                if isinstance(exc, util.Abort):
                    self.ui.warn(_('error: %s hook failed: %s\n') %
                                 (hname, exc.args[0]))
                else:
                    self.ui.warn(_('error: %s hook raised an exception: '
                                   '%s\n') % (hname, exc))
                if throw:
                    raise
                # swallow the exception but report it and count as failure
                self.ui.print_exc()
                return True
            if r:
                if throw:
                    raise util.Abort(_('%s hook failed') % hname)
                self.ui.warn(_('warning: %s hook failed\n') % hname)
            return r

        def runhook(name, cmd):
            # external hook: run cmd through the shell with HG_* env vars
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
            return r

        r = False
        # all matching hooks run, in sorted key order
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            if cmd.startswith('python:'):
                r = callhook(hname, cmd[7:].strip()) or r
            else:
                r = runhook(hname, cmd) or r
        return r
209
209
    # characters that may not appear in a tag name (checked by tag())
    tag_disallowed = ':\r\n'
211
211
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # reject names containing any forbidden character
        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        if local:
            # local tags are stored in the current charset
            self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        # refuse to commit on top of uncommitted .hgtags modifications
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        # committed tags are stored in UTF-8
        line = '%s %s\n' % (hex(node), util.fromlocal(name))
        self.wfile('.hgtags', 'ab').write(line)
        if self.dirstate.state('.hgtags') == '?':
            # .hgtags not yet tracked: schedule it for addition
            self.add(['.hgtags'])

        self.commit(['.hgtags'], message, user, date)
        self.hook('tag', node=hex(node), tag=name, local=local)
255
255
256 def tags(self):
256 def tags(self):
257 '''return a mapping of tag to node'''
257 '''return a mapping of tag to node'''
258 if not self.tagscache:
258 if not self.tagscache:
259 self.tagscache = {}
259 self.tagscache = {}
260
260
261 def parsetag(line, context):
261 def parsetag(line, context):
262 if not line:
262 if not line:
263 return
263 return
264 s = l.split(" ", 1)
264 s = l.split(" ", 1)
265 if len(s) != 2:
265 if len(s) != 2:
266 self.ui.warn(_("%s: cannot parse entry\n") % context)
266 self.ui.warn(_("%s: cannot parse entry\n") % context)
267 return
267 return
268 node, key = s
268 node, key = s
269 key = util.tolocal(key.strip()) # stored in UTF-8
269 key = util.tolocal(key.strip()) # stored in UTF-8
270 try:
270 try:
271 bin_n = bin(node)
271 bin_n = bin(node)
272 except TypeError:
272 except TypeError:
273 self.ui.warn(_("%s: node '%s' is not well formed\n") %
273 self.ui.warn(_("%s: node '%s' is not well formed\n") %
274 (context, node))
274 (context, node))
275 return
275 return
276 if bin_n not in self.changelog.nodemap:
276 if bin_n not in self.changelog.nodemap:
277 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
277 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
278 (context, key))
278 (context, key))
279 return
279 return
280 self.tagscache[key] = bin_n
280 self.tagscache[key] = bin_n
281
281
282 # read the tags file from each head, ending with the tip,
282 # read the tags file from each head, ending with the tip,
283 # and add each tag found to the map, with "newer" ones
283 # and add each tag found to the map, with "newer" ones
284 # taking precedence
284 # taking precedence
285 f = None
285 f = None
286 for rev, node, fnode in self._hgtagsnodes():
286 for rev, node, fnode in self._hgtagsnodes():
287 f = (f and f.filectx(fnode) or
287 f = (f and f.filectx(fnode) or
288 self.filectx('.hgtags', fileid=fnode))
288 self.filectx('.hgtags', fileid=fnode))
289 count = 0
289 count = 0
290 for l in f.data().splitlines():
290 for l in f.data().splitlines():
291 count += 1
291 count += 1
292 parsetag(l, _("%s, line %d") % (str(f), count))
292 parsetag(l, _("%s, line %d") % (str(f), count))
293
293
294 try:
294 try:
295 f = self.opener("localtags")
295 f = self.opener("localtags")
296 count = 0
296 count = 0
297 for l in f:
297 for l in f:
298 # localtags are stored in the local character set
298 # localtags are stored in the local character set
299 # while the internal tag table is stored in UTF-8
299 # while the internal tag table is stored in UTF-8
300 l = util.fromlocal(l)
300 l = util.fromlocal(l)
301 count += 1
301 count += 1
302 parsetag(l, _("localtags, line %d") % count)
302 parsetag(l, _("localtags, line %d") % count)
303 except IOError:
303 except IOError:
304 pass
304 pass
305
305
306 self.tagscache['tip'] = self.changelog.tip()
306 self.tagscache['tip'] = self.changelog.tip()
307
307
308 return self.tagscache
308 return self.tagscache
309
309
    def _hgtagsnodes(self):
        """Return [(rev, changelog node, .hgtags filenode)] for the heads
        carrying a .hgtags file, iterating self.heads() in reverse order.

        When several heads share the same .hgtags filenode, only the
        last-seen head is kept.
        """
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # same .hgtags content seen earlier: drop the older entry
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        # compact out the entries nulled above
        return [item for item in ret if item]
327
327
328 def tagslist(self):
328 def tagslist(self):
329 '''return a list of tags ordered by revision'''
329 '''return a list of tags ordered by revision'''
330 l = []
330 l = []
331 for t, n in self.tags().items():
331 for t, n in self.tags().items():
332 try:
332 try:
333 r = self.changelog.rev(n)
333 r = self.changelog.rev(n)
334 except:
334 except:
335 r = -2 # sort to the beginning of the list if unknown
335 r = -2 # sort to the beginning of the list if unknown
336 l.append((r, t, n))
336 l.append((r, t, n))
337 l.sort()
337 l.sort()
338 return [(t, n) for r, t, n in l]
338 return [(t, n) for r, t, n in l]
339
339
340 def nodetags(self, node):
340 def nodetags(self, node):
341 '''return the tags associated with a node'''
341 '''return the tags associated with a node'''
342 if not self.nodetagscache:
342 if not self.nodetagscache:
343 self.nodetagscache = {}
343 self.nodetagscache = {}
344 for t, n in self.tags().items():
344 for t, n in self.tags().items():
345 self.nodetagscache.setdefault(n, []).append(t)
345 self.nodetagscache.setdefault(n, []).append(t)
346 return self.nodetagscache.get(node, [])
346 return self.nodetagscache.get(node, [])
347
347
348 def _branchtags(self):
348 def _branchtags(self):
349 partial, last, lrev = self._readbranchcache()
349 partial, last, lrev = self._readbranchcache()
350
350
351 tiprev = self.changelog.count() - 1
351 tiprev = self.changelog.count() - 1
352 if lrev != tiprev:
352 if lrev != tiprev:
353 self._updatebranchcache(partial, lrev+1, tiprev+1)
353 self._updatebranchcache(partial, lrev+1, tiprev+1)
354 self._writebranchcache(partial, self.changelog.tip(), tiprev)
354 self._writebranchcache(partial, self.changelog.tip(), tiprev)
355
355
356 return partial
356 return partial
357
357
358 def branchtags(self):
358 def branchtags(self):
359 if self.branchcache is not None:
359 if self.branchcache is not None:
360 return self.branchcache
360 return self.branchcache
361
361
362 self.branchcache = {} # avoid recursion in changectx
362 self.branchcache = {} # avoid recursion in changectx
363 partial = self._branchtags()
363 partial = self._branchtags()
364
364
365 # the branch cache is stored on disk as UTF-8, but in the local
365 # the branch cache is stored on disk as UTF-8, but in the local
366 # charset internally
366 # charset internally
367 for k, v in partial.items():
367 for k, v in partial.items():
368 self.branchcache[util.tolocal(k)] = v
368 self.branchcache[util.tolocal(k)] = v
369 return self.branchcache
369 return self.branchcache
370
370
    def _readbranchcache(self):
        """Read branches.cache and return (partial-map, tip-node, tip-rev).

        The first line of the file is '<tip-hex> <tip-rev>'; remaining
        lines are '<node-hex> <branch-label>'.  On any problem (missing
        file, parse error, or a tip that no longer matches the changelog)
        an empty map with (nullid, nullrev) is returned so callers
        rebuild the cache from scratch.
        """
        partial = {}
        try:
            f = self.opener("branches.cache")
            lines = f.read().split('\n')
            f.close()
            last, lrev = lines.pop(0).rstrip().split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('Invalid branch cache: unknown tip')
            for l in lines:
                if not l: continue
                node, label = l.rstrip().split(" ", 1)
                partial[label] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # any other failure just degrades to an empty cache
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
394
394
395 def _writebranchcache(self, branches, tip, tiprev):
395 def _writebranchcache(self, branches, tip, tiprev):
396 try:
396 try:
397 f = self.opener("branches.cache", "w")
397 f = self.opener("branches.cache", "w")
398 f.write("%s %s\n" % (hex(tip), tiprev))
398 f.write("%s %s\n" % (hex(tip), tiprev))
399 for label, node in branches.iteritems():
399 for label, node in branches.iteritems():
400 f.write("%s %s\n" % (hex(node), label))
400 f.write("%s %s\n" % (hex(node), label))
401 except IOError:
401 except IOError:
402 pass
402 pass
403
403
404 def _updatebranchcache(self, partial, start, end):
404 def _updatebranchcache(self, partial, start, end):
405 for r in xrange(start, end):
405 for r in xrange(start, end):
406 c = self.changectx(r)
406 c = self.changectx(r)
407 b = c.branch()
407 b = c.branch()
408 if b:
408 if b:
409 partial[b] = c.node()
409 partial[b] = c.node()
410
410
    def lookup(self, key):
        """Resolve key to a binary changelog node.

        Resolution order: '.' (first dirstate parent), the literal
        'null', an exact changelog match, tags, branch names, then a
        partial (unambiguous prefix) match.  Raises repo.RepoError when
        nothing matches or no revision is checked out for '.'.
        """
        if key == '.':
            key = self.dirstate.parents()[0]
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
        elif key == 'null':
            return nullid
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n
        raise repo.RepoError(_("unknown revision '%s'") % key)
429
429
430 def dev(self):
430 def dev(self):
431 return os.lstat(self.path).st_dev
431 return os.lstat(self.path).st_dev
432
432
433 def local(self):
433 def local(self):
434 return True
434 return True
435
435
436 def join(self, f):
436 def join(self, f):
437 return os.path.join(self.path, f)
437 return os.path.join(self.path, f)
438
438
439 def sjoin(self, f):
439 def sjoin(self, f):
440 f = self.encodefn(f)
440 f = self.encodefn(f)
441 return os.path.join(self.spath, f)
441 return os.path.join(self.spath, f)
442
442
443 def wjoin(self, f):
443 def wjoin(self, f):
444 return os.path.join(self.root, f)
444 return os.path.join(self.root, f)
445
445
446 def file(self, f):
446 def file(self, f):
447 if f[0] == '/':
447 if f[0] == '/':
448 f = f[1:]
448 f = f[1:]
449 return filelog.filelog(self.sopener, f, self.revlogversion)
449 return filelog.filelog(self.sopener, f, self.revlogversion)
450
450
451 def changectx(self, changeid=None):
451 def changectx(self, changeid=None):
452 return context.changectx(self, changeid)
452 return context.changectx(self, changeid)
453
453
454 def workingctx(self):
454 def workingctx(self):
455 return context.workingctx(self)
455 return context.workingctx(self)
456
456
457 def parents(self, changeid=None):
457 def parents(self, changeid=None):
458 '''
458 '''
459 get list of changectxs for parents of changeid or working directory
459 get list of changectxs for parents of changeid or working directory
460 '''
460 '''
461 if changeid is None:
461 if changeid is None:
462 pl = self.dirstate.parents()
462 pl = self.dirstate.parents()
463 else:
463 else:
464 n = self.changelog.lookup(changeid)
464 n = self.changelog.lookup(changeid)
465 pl = self.changelog.parents(n)
465 pl = self.changelog.parents(n)
466 if pl[1] == nullid:
466 if pl[1] == nullid:
467 return [self.changectx(pl[0])]
467 return [self.changectx(pl[0])]
468 return [self.changectx(pl[0]), self.changectx(pl[1])]
468 return [self.changectx(pl[0]), self.changectx(pl[1])]
469
469
470 def filectx(self, path, changeid=None, fileid=None):
470 def filectx(self, path, changeid=None, fileid=None):
471 """changeid can be a changeset revision, node, or tag.
471 """changeid can be a changeset revision, node, or tag.
472 fileid can be a file revision or node."""
472 fileid can be a file revision or node."""
473 return context.filectx(self, path, changeid, fileid)
473 return context.filectx(self, path, changeid, fileid)
474
474
475 def getcwd(self):
475 def getcwd(self):
476 return self.dirstate.getcwd()
476 return self.dirstate.getcwd()
477
477
478 def wfile(self, f, mode='r'):
478 def wfile(self, f, mode='r'):
479 return self.wopener(f, mode)
479 return self.wopener(f, mode)
480
480
    def wread(self, filename):
        """Read filename from the working directory, passing the data
        through the first matching [encode] filter, if any."""
        if self.encodepats == None:
            # build and cache (matcher, command) pairs from the config
            l = []
            for pat, cmd in self.ui.configitems("encode"):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                l.append((mf, cmd))
            self.encodepats = l

        data = self.wopener(filename, 'r').read()

        for mf, cmd in self.encodepats:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = util.filter(data, cmd)
                # only the first matching filter is applied
                break

        return data
498
498
    def wwrite(self, filename, data, fd=None):
        """Write data to filename in the working directory, passing it
        through the first matching [decode] filter, if any.  When fd is
        given, write to it instead of opening the file."""
        if self.decodepats == None:
            # build and cache (matcher, command) pairs from the config
            l = []
            for pat, cmd in self.ui.configitems("decode"):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                l.append((mf, cmd))
            self.decodepats = l

        for mf, cmd in self.decodepats:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = util.filter(data, cmd)
                # only the first matching filter is applied
                break

        if fd:
            return fd.write(data)
        return self.wopener(filename, 'w').write(data)
516
516
    def transaction(self):
        """Return a transaction object; if one is already running, return
        a nested handle onto it instead of starting a new one."""
        tr = self.transhandle
        if tr != None and tr.running():
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate yet (fresh repository)
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        # on close, the journal files are renamed to undo files so the
        # completed transaction can later be rolled back
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames))
        self.transhandle = tr
        return tr
536
536
537 def recover(self):
537 def recover(self):
538 l = self.lock()
538 l = self.lock()
539 if os.path.exists(self.sjoin("journal")):
539 if os.path.exists(self.sjoin("journal")):
540 self.ui.status(_("rolling back interrupted transaction\n"))
540 self.ui.status(_("rolling back interrupted transaction\n"))
541 transaction.rollback(self.sopener, self.sjoin("journal"))
541 transaction.rollback(self.sopener, self.sjoin("journal"))
542 self.reload()
542 self.reload()
543 return True
543 return True
544 else:
544 else:
545 self.ui.warn(_("no interrupted transaction available\n"))
545 self.ui.warn(_("no interrupted transaction available\n"))
546 return False
546 return False
547
547
548 def rollback(self, wlock=None):
548 def rollback(self, wlock=None):
549 if not wlock:
549 if not wlock:
550 wlock = self.wlock()
550 wlock = self.wlock()
551 l = self.lock()
551 l = self.lock()
552 if os.path.exists(self.sjoin("undo")):
552 if os.path.exists(self.sjoin("undo")):
553 self.ui.status(_("rolling back last transaction\n"))
553 self.ui.status(_("rolling back last transaction\n"))
554 transaction.rollback(self.sopener, self.sjoin("undo"))
554 transaction.rollback(self.sopener, self.sjoin("undo"))
555 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
555 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
556 self.reload()
556 self.reload()
557 self.wreload()
557 self.wreload()
558 else:
558 else:
559 self.ui.warn(_("no rollback information available\n"))
559 self.ui.warn(_("no rollback information available\n"))
560
560
561 def wreload(self):
561 def wreload(self):
562 self.dirstate.read()
562 self.dirstate.read()
563
563
564 def reload(self):
564 def reload(self):
565 self.changelog.load()
565 self.changelog.load()
566 self.manifest.load()
566 self.manifest.load()
567 self.tagscache = None
567 self.tagscache = None
568 self.nodetagscache = None
568 self.nodetagscache = None
569
569
570 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
570 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
571 desc=None):
571 desc=None):
572 try:
572 try:
573 l = lock.lock(lockname, 0, releasefn, desc=desc)
573 l = lock.lock(lockname, 0, releasefn, desc=desc)
574 except lock.LockHeld, inst:
574 except lock.LockHeld, inst:
575 if not wait:
575 if not wait:
576 raise
576 raise
577 self.ui.warn(_("waiting for lock on %s held by %r\n") %
577 self.ui.warn(_("waiting for lock on %s held by %r\n") %
578 (desc, inst.locker))
578 (desc, inst.locker))
579 # default to 600 seconds timeout
579 # default to 600 seconds timeout
580 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
580 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
581 releasefn, desc=desc)
581 releasefn, desc=desc)
582 if acquirefn:
582 if acquirefn:
583 acquirefn()
583 acquirefn()
584 return l
584 return l
585
585
586 def lock(self, wait=1):
586 def lock(self, wait=1):
587 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
587 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
588 desc=_('repository %s') % self.origroot)
588 desc=_('repository %s') % self.origroot)
589
589
590 def wlock(self, wait=1):
590 def wlock(self, wait=1):
591 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
591 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
592 self.wreload,
592 self.wreload,
593 desc=_('working directory of %s') % self.origroot)
593 desc=_('working directory of %s') % self.origroot)
594
594
595 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
595 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
596 """
596 """
597 commit an individual file as part of a larger transaction
597 commit an individual file as part of a larger transaction
598 """
598 """
599
599
600 t = self.wread(fn)
600 t = self.wread(fn)
601 fl = self.file(fn)
601 fl = self.file(fn)
602 fp1 = manifest1.get(fn, nullid)
602 fp1 = manifest1.get(fn, nullid)
603 fp2 = manifest2.get(fn, nullid)
603 fp2 = manifest2.get(fn, nullid)
604
604
605 meta = {}
605 meta = {}
606 cp = self.dirstate.copied(fn)
606 cp = self.dirstate.copied(fn)
607 if cp:
607 if cp:
608 meta["copy"] = cp
608 meta["copy"] = cp
609 if not manifest2: # not a branch merge
609 if not manifest2: # not a branch merge
610 meta["copyrev"] = hex(manifest1.get(cp, nullid))
610 meta["copyrev"] = hex(manifest1.get(cp, nullid))
611 fp2 = nullid
611 fp2 = nullid
612 elif fp2 != nullid: # copied on remote side
612 elif fp2 != nullid: # copied on remote side
613 meta["copyrev"] = hex(manifest1.get(cp, nullid))
613 meta["copyrev"] = hex(manifest1.get(cp, nullid))
614 elif fp1 != nullid: # copied on local side, reversed
614 elif fp1 != nullid: # copied on local side, reversed
615 meta["copyrev"] = hex(manifest2.get(cp))
615 meta["copyrev"] = hex(manifest2.get(cp))
616 fp2 = nullid
616 fp2 = nullid
617 else: # directory rename
617 else: # directory rename
618 meta["copyrev"] = hex(manifest1.get(cp, nullid))
618 meta["copyrev"] = hex(manifest1.get(cp, nullid))
619 self.ui.debug(_(" %s: copy %s:%s\n") %
619 self.ui.debug(_(" %s: copy %s:%s\n") %
620 (fn, cp, meta["copyrev"]))
620 (fn, cp, meta["copyrev"]))
621 fp1 = nullid
621 fp1 = nullid
622 elif fp2 != nullid:
622 elif fp2 != nullid:
623 # is one parent an ancestor of the other?
623 # is one parent an ancestor of the other?
624 fpa = fl.ancestor(fp1, fp2)
624 fpa = fl.ancestor(fp1, fp2)
625 if fpa == fp1:
625 if fpa == fp1:
626 fp1, fp2 = fp2, nullid
626 fp1, fp2 = fp2, nullid
627 elif fpa == fp2:
627 elif fpa == fp2:
628 fp2 = nullid
628 fp2 = nullid
629
629
630 # is the file unmodified from the parent? report existing entry
630 # is the file unmodified from the parent? report existing entry
631 if fp2 == nullid and not fl.cmp(fp1, t):
631 if fp2 == nullid and not fl.cmp(fp1, t):
632 return fp1
632 return fp1
633
633
634 changelist.append(fn)
634 changelist.append(fn)
635 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
635 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
636
636
637 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
637 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
638 if p1 is None:
638 if p1 is None:
639 p1, p2 = self.dirstate.parents()
639 p1, p2 = self.dirstate.parents()
640 return self.commit(files=files, text=text, user=user, date=date,
640 return self.commit(files=files, text=text, user=user, date=date,
641 p1=p1, p2=p2, wlock=wlock, extra=extra)
641 p1=p1, p2=p2, wlock=wlock, extra=extra)
642
642
643 def commit(self, files=None, text="", user=None, date=None,
643 def commit(self, files=None, text="", user=None, date=None,
644 match=util.always, force=False, lock=None, wlock=None,
644 match=util.always, force=False, lock=None, wlock=None,
645 force_editor=False, p1=None, p2=None, extra={}):
645 force_editor=False, p1=None, p2=None, extra={}):
646
646
647 commit = []
647 commit = []
648 remove = []
648 remove = []
649 changed = []
649 changed = []
650 use_dirstate = (p1 is None) # not rawcommit
650 use_dirstate = (p1 is None) # not rawcommit
651 extra = extra.copy()
651 extra = extra.copy()
652
652
653 if use_dirstate:
653 if use_dirstate:
654 if files:
654 if files:
655 for f in files:
655 for f in files:
656 s = self.dirstate.state(f)
656 s = self.dirstate.state(f)
657 if s in 'nmai':
657 if s in 'nmai':
658 commit.append(f)
658 commit.append(f)
659 elif s == 'r':
659 elif s == 'r':
660 remove.append(f)
660 remove.append(f)
661 else:
661 else:
662 self.ui.warn(_("%s not tracked!\n") % f)
662 self.ui.warn(_("%s not tracked!\n") % f)
663 else:
663 else:
664 changes = self.status(match=match)[:5]
664 changes = self.status(match=match)[:5]
665 modified, added, removed, deleted, unknown = changes
665 modified, added, removed, deleted, unknown = changes
666 commit = modified + added
666 commit = modified + added
667 remove = removed
667 remove = removed
668 else:
668 else:
669 commit = files
669 commit = files
670
670
671 if use_dirstate:
671 if use_dirstate:
672 p1, p2 = self.dirstate.parents()
672 p1, p2 = self.dirstate.parents()
673 update_dirstate = True
673 update_dirstate = True
674 else:
674 else:
675 p1, p2 = p1, p2 or nullid
675 p1, p2 = p1, p2 or nullid
676 update_dirstate = (self.dirstate.parents()[0] == p1)
676 update_dirstate = (self.dirstate.parents()[0] == p1)
677
677
678 c1 = self.changelog.read(p1)
678 c1 = self.changelog.read(p1)
679 c2 = self.changelog.read(p2)
679 c2 = self.changelog.read(p2)
680 m1 = self.manifest.read(c1[0]).copy()
680 m1 = self.manifest.read(c1[0]).copy()
681 m2 = self.manifest.read(c2[0])
681 m2 = self.manifest.read(c2[0])
682
682
683 if use_dirstate:
683 if use_dirstate:
684 branchname = self.workingctx().branch()
684 branchname = self.workingctx().branch()
685 try:
685 try:
686 branchname = branchname.decode('UTF-8').encode('UTF-8')
686 branchname = branchname.decode('UTF-8').encode('UTF-8')
687 except UnicodeDecodeError:
687 except UnicodeDecodeError:
688 raise util.Abort(_('branch name not in UTF-8!'))
688 raise util.Abort(_('branch name not in UTF-8!'))
689 else:
689 else:
690 branchname = ""
690 branchname = ""
691
691
692 if use_dirstate:
692 if use_dirstate:
693 oldname = c1[5].get("branch", "") # stored in UTF-8
693 oldname = c1[5].get("branch", "") # stored in UTF-8
694 if not commit and not remove and not force and p2 == nullid and \
694 if not commit and not remove and not force and p2 == nullid and \
695 branchname == oldname:
695 branchname == oldname:
696 self.ui.status(_("nothing changed\n"))
696 self.ui.status(_("nothing changed\n"))
697 return None
697 return None
698
698
699 xp1 = hex(p1)
699 xp1 = hex(p1)
700 if p2 == nullid: xp2 = ''
700 if p2 == nullid: xp2 = ''
701 else: xp2 = hex(p2)
701 else: xp2 = hex(p2)
702
702
703 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
703 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
704
704
705 if not wlock:
705 if not wlock:
706 wlock = self.wlock()
706 wlock = self.wlock()
707 if not lock:
707 if not lock:
708 lock = self.lock()
708 lock = self.lock()
709 tr = self.transaction()
709 tr = self.transaction()
710
710
711 # check in files
711 # check in files
712 new = {}
712 new = {}
713 linkrev = self.changelog.count()
713 linkrev = self.changelog.count()
714 commit.sort()
714 commit.sort()
715 is_exec = util.execfunc(self.root, m1.execf)
715 is_exec = util.execfunc(self.root, m1.execf)
716 is_link = util.linkfunc(self.root, m1.linkf)
716 for f in commit:
717 for f in commit:
717 self.ui.note(f + "\n")
718 self.ui.note(f + "\n")
718 try:
719 try:
719 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
720 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
720 m1.set(f, is_exec(f))
721 m1.set(f, is_exec(f), is_link(f))
721 except IOError:
722 except IOError:
722 if use_dirstate:
723 if use_dirstate:
723 self.ui.warn(_("trouble committing %s!\n") % f)
724 self.ui.warn(_("trouble committing %s!\n") % f)
724 raise
725 raise
725 else:
726 else:
726 remove.append(f)
727 remove.append(f)
727
728
728 # update manifest
729 # update manifest
729 m1.update(new)
730 m1.update(new)
730 remove.sort()
731 remove.sort()
731 removed = []
732 removed = []
732
733
733 for f in remove:
734 for f in remove:
734 if f in m1:
735 if f in m1:
735 del m1[f]
736 del m1[f]
736 removed.append(f)
737 removed.append(f)
737 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
738 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
738
739
739 # add changeset
740 # add changeset
740 new = new.keys()
741 new = new.keys()
741 new.sort()
742 new.sort()
742
743
743 user = user or self.ui.username()
744 user = user or self.ui.username()
744 if not text or force_editor:
745 if not text or force_editor:
745 edittext = []
746 edittext = []
746 if text:
747 if text:
747 edittext.append(text)
748 edittext.append(text)
748 edittext.append("")
749 edittext.append("")
749 edittext.append("HG: user: %s" % user)
750 edittext.append("HG: user: %s" % user)
750 if p2 != nullid:
751 if p2 != nullid:
751 edittext.append("HG: branch merge")
752 edittext.append("HG: branch merge")
752 edittext.extend(["HG: changed %s" % f for f in changed])
753 edittext.extend(["HG: changed %s" % f for f in changed])
753 edittext.extend(["HG: removed %s" % f for f in removed])
754 edittext.extend(["HG: removed %s" % f for f in removed])
754 if not changed and not remove:
755 if not changed and not remove:
755 edittext.append("HG: no files changed")
756 edittext.append("HG: no files changed")
756 edittext.append("")
757 edittext.append("")
757 # run editor in the repository root
758 # run editor in the repository root
758 olddir = os.getcwd()
759 olddir = os.getcwd()
759 os.chdir(self.root)
760 os.chdir(self.root)
760 text = self.ui.edit("\n".join(edittext), user)
761 text = self.ui.edit("\n".join(edittext), user)
761 os.chdir(olddir)
762 os.chdir(olddir)
762
763
763 lines = [line.rstrip() for line in text.rstrip().splitlines()]
764 lines = [line.rstrip() for line in text.rstrip().splitlines()]
764 while lines and not lines[0]:
765 while lines and not lines[0]:
765 del lines[0]
766 del lines[0]
766 if not lines:
767 if not lines:
767 return None
768 return None
768 text = '\n'.join(lines)
769 text = '\n'.join(lines)
769 if branchname:
770 if branchname:
770 extra["branch"] = branchname
771 extra["branch"] = branchname
771 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
772 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
772 user, date, extra)
773 user, date, extra)
773 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
774 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
774 parent2=xp2)
775 parent2=xp2)
775 tr.close()
776 tr.close()
776
777
777 if use_dirstate or update_dirstate:
778 if use_dirstate or update_dirstate:
778 self.dirstate.setparents(n)
779 self.dirstate.setparents(n)
779 if use_dirstate:
780 if use_dirstate:
780 self.dirstate.update(new, "n")
781 self.dirstate.update(new, "n")
781 self.dirstate.forget(removed)
782 self.dirstate.forget(removed)
782
783
783 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
784 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
784 return n
785 return n
785
786
786 def walk(self, node=None, files=[], match=util.always, badmatch=None):
787 def walk(self, node=None, files=[], match=util.always, badmatch=None):
787 '''
788 '''
788 walk recursively through the directory tree or a given
789 walk recursively through the directory tree or a given
789 changeset, finding all files matched by the match
790 changeset, finding all files matched by the match
790 function
791 function
791
792
792 results are yielded in a tuple (src, filename), where src
793 results are yielded in a tuple (src, filename), where src
793 is one of:
794 is one of:
794 'f' the file was found in the directory tree
795 'f' the file was found in the directory tree
795 'm' the file was only in the dirstate and not in the tree
796 'm' the file was only in the dirstate and not in the tree
796 'b' file was not found and matched badmatch
797 'b' file was not found and matched badmatch
797 '''
798 '''
798
799
799 if node:
800 if node:
800 fdict = dict.fromkeys(files)
801 fdict = dict.fromkeys(files)
801 for fn in self.manifest.read(self.changelog.read(node)[0]):
802 for fn in self.manifest.read(self.changelog.read(node)[0]):
802 for ffn in fdict:
803 for ffn in fdict:
803 # match if the file is the exact name or a directory
804 # match if the file is the exact name or a directory
804 if ffn == fn or fn.startswith("%s/" % ffn):
805 if ffn == fn or fn.startswith("%s/" % ffn):
805 del fdict[ffn]
806 del fdict[ffn]
806 break
807 break
807 if match(fn):
808 if match(fn):
808 yield 'm', fn
809 yield 'm', fn
809 for fn in fdict:
810 for fn in fdict:
810 if badmatch and badmatch(fn):
811 if badmatch and badmatch(fn):
811 if match(fn):
812 if match(fn):
812 yield 'b', fn
813 yield 'b', fn
813 else:
814 else:
814 self.ui.warn(_('%s: No such file in rev %s\n') % (
815 self.ui.warn(_('%s: No such file in rev %s\n') % (
815 util.pathto(self.getcwd(), fn), short(node)))
816 util.pathto(self.getcwd(), fn), short(node)))
816 else:
817 else:
817 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
818 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
818 yield src, fn
819 yield src, fn
819
820
820 def status(self, node1=None, node2=None, files=[], match=util.always,
821 def status(self, node1=None, node2=None, files=[], match=util.always,
821 wlock=None, list_ignored=False, list_clean=False):
822 wlock=None, list_ignored=False, list_clean=False):
822 """return status of files between two nodes or node and working directory
823 """return status of files between two nodes or node and working directory
823
824
824 If node1 is None, use the first dirstate parent instead.
825 If node1 is None, use the first dirstate parent instead.
825 If node2 is None, compare node1 with working directory.
826 If node2 is None, compare node1 with working directory.
826 """
827 """
827
828
828 def fcmp(fn, mf):
829 def fcmp(fn, mf):
829 t1 = self.wread(fn)
830 t1 = self.wread(fn)
830 return self.file(fn).cmp(mf.get(fn, nullid), t1)
831 return self.file(fn).cmp(mf.get(fn, nullid), t1)
831
832
832 def mfmatches(node):
833 def mfmatches(node):
833 change = self.changelog.read(node)
834 change = self.changelog.read(node)
834 mf = self.manifest.read(change[0]).copy()
835 mf = self.manifest.read(change[0]).copy()
835 for fn in mf.keys():
836 for fn in mf.keys():
836 if not match(fn):
837 if not match(fn):
837 del mf[fn]
838 del mf[fn]
838 return mf
839 return mf
839
840
840 modified, added, removed, deleted, unknown = [], [], [], [], []
841 modified, added, removed, deleted, unknown = [], [], [], [], []
841 ignored, clean = [], []
842 ignored, clean = [], []
842
843
843 compareworking = False
844 compareworking = False
844 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
845 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
845 compareworking = True
846 compareworking = True
846
847
847 if not compareworking:
848 if not compareworking:
848 # read the manifest from node1 before the manifest from node2,
849 # read the manifest from node1 before the manifest from node2,
849 # so that we'll hit the manifest cache if we're going through
850 # so that we'll hit the manifest cache if we're going through
850 # all the revisions in parent->child order.
851 # all the revisions in parent->child order.
851 mf1 = mfmatches(node1)
852 mf1 = mfmatches(node1)
852
853
853 # are we comparing the working directory?
854 # are we comparing the working directory?
854 if not node2:
855 if not node2:
855 if not wlock:
856 if not wlock:
856 try:
857 try:
857 wlock = self.wlock(wait=0)
858 wlock = self.wlock(wait=0)
858 except lock.LockException:
859 except lock.LockException:
859 wlock = None
860 wlock = None
860 (lookup, modified, added, removed, deleted, unknown,
861 (lookup, modified, added, removed, deleted, unknown,
861 ignored, clean) = self.dirstate.status(files, match,
862 ignored, clean) = self.dirstate.status(files, match,
862 list_ignored, list_clean)
863 list_ignored, list_clean)
863
864
864 # are we comparing working dir against its parent?
865 # are we comparing working dir against its parent?
865 if compareworking:
866 if compareworking:
866 if lookup:
867 if lookup:
867 # do a full compare of any files that might have changed
868 # do a full compare of any files that might have changed
868 mf2 = mfmatches(self.dirstate.parents()[0])
869 mf2 = mfmatches(self.dirstate.parents()[0])
869 for f in lookup:
870 for f in lookup:
870 if fcmp(f, mf2):
871 if fcmp(f, mf2):
871 modified.append(f)
872 modified.append(f)
872 else:
873 else:
873 clean.append(f)
874 clean.append(f)
874 if wlock is not None:
875 if wlock is not None:
875 self.dirstate.update([f], "n")
876 self.dirstate.update([f], "n")
876 else:
877 else:
877 # we are comparing working dir against non-parent
878 # we are comparing working dir against non-parent
878 # generate a pseudo-manifest for the working dir
879 # generate a pseudo-manifest for the working dir
879 # XXX: create it in dirstate.py ?
880 # XXX: create it in dirstate.py ?
880 mf2 = mfmatches(self.dirstate.parents()[0])
881 mf2 = mfmatches(self.dirstate.parents()[0])
881 is_exec = util.execfunc(self.root, mf2.execf)
882 is_exec = util.execfunc(self.root, mf2.execf)
883 is_link = util.linkfunc(self.root, mf2.linkf)
882 for f in lookup + modified + added:
884 for f in lookup + modified + added:
883 mf2[f] = ""
885 mf2[f] = ""
884 mf2.set(f, is_exec(f))
886 mf2.set(f, is_exec(f), is_link(f))
885 for f in removed:
887 for f in removed:
886 if f in mf2:
888 if f in mf2:
887 del mf2[f]
889 del mf2[f]
888 else:
890 else:
889 # we are comparing two revisions
891 # we are comparing two revisions
890 mf2 = mfmatches(node2)
892 mf2 = mfmatches(node2)
891
893
892 if not compareworking:
894 if not compareworking:
893 # flush lists from dirstate before comparing manifests
895 # flush lists from dirstate before comparing manifests
894 modified, added, clean = [], [], []
896 modified, added, clean = [], [], []
895
897
896 # make sure to sort the files so we talk to the disk in a
898 # make sure to sort the files so we talk to the disk in a
897 # reasonable order
899 # reasonable order
898 mf2keys = mf2.keys()
900 mf2keys = mf2.keys()
899 mf2keys.sort()
901 mf2keys.sort()
900 for fn in mf2keys:
902 for fn in mf2keys:
901 if mf1.has_key(fn):
903 if mf1.has_key(fn):
902 if mf1.flags(fn) != mf2.flags(fn) or \
904 if mf1.flags(fn) != mf2.flags(fn) or \
903 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
905 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
904 modified.append(fn)
906 modified.append(fn)
905 elif list_clean:
907 elif list_clean:
906 clean.append(fn)
908 clean.append(fn)
907 del mf1[fn]
909 del mf1[fn]
908 else:
910 else:
909 added.append(fn)
911 added.append(fn)
910
912
911 removed = mf1.keys()
913 removed = mf1.keys()
912
914
913 # sort and return results:
915 # sort and return results:
914 for l in modified, added, removed, deleted, unknown, ignored, clean:
916 for l in modified, added, removed, deleted, unknown, ignored, clean:
915 l.sort()
917 l.sort()
916 return (modified, added, removed, deleted, unknown, ignored, clean)
918 return (modified, added, removed, deleted, unknown, ignored, clean)
917
919
918 def add(self, list, wlock=None):
920 def add(self, list, wlock=None):
919 if not wlock:
921 if not wlock:
920 wlock = self.wlock()
922 wlock = self.wlock()
921 for f in list:
923 for f in list:
922 p = self.wjoin(f)
924 p = self.wjoin(f)
923 if not os.path.exists(p):
925 if not os.path.exists(p):
924 self.ui.warn(_("%s does not exist!\n") % f)
926 self.ui.warn(_("%s does not exist!\n") % f)
925 elif not os.path.isfile(p):
927 elif not os.path.isfile(p):
926 self.ui.warn(_("%s not added: only files supported currently\n")
928 self.ui.warn(_("%s not added: only files supported currently\n")
927 % f)
929 % f)
928 elif self.dirstate.state(f) in 'an':
930 elif self.dirstate.state(f) in 'an':
929 self.ui.warn(_("%s already tracked!\n") % f)
931 self.ui.warn(_("%s already tracked!\n") % f)
930 else:
932 else:
931 self.dirstate.update([f], "a")
933 self.dirstate.update([f], "a")
932
934
933 def forget(self, list, wlock=None):
935 def forget(self, list, wlock=None):
934 if not wlock:
936 if not wlock:
935 wlock = self.wlock()
937 wlock = self.wlock()
936 for f in list:
938 for f in list:
937 if self.dirstate.state(f) not in 'ai':
939 if self.dirstate.state(f) not in 'ai':
938 self.ui.warn(_("%s not added!\n") % f)
940 self.ui.warn(_("%s not added!\n") % f)
939 else:
941 else:
940 self.dirstate.forget([f])
942 self.dirstate.forget([f])
941
943
942 def remove(self, list, unlink=False, wlock=None):
944 def remove(self, list, unlink=False, wlock=None):
943 if unlink:
945 if unlink:
944 for f in list:
946 for f in list:
945 try:
947 try:
946 util.unlink(self.wjoin(f))
948 util.unlink(self.wjoin(f))
947 except OSError, inst:
949 except OSError, inst:
948 if inst.errno != errno.ENOENT:
950 if inst.errno != errno.ENOENT:
949 raise
951 raise
950 if not wlock:
952 if not wlock:
951 wlock = self.wlock()
953 wlock = self.wlock()
952 for f in list:
954 for f in list:
953 p = self.wjoin(f)
955 p = self.wjoin(f)
954 if os.path.exists(p):
956 if os.path.exists(p):
955 self.ui.warn(_("%s still exists!\n") % f)
957 self.ui.warn(_("%s still exists!\n") % f)
956 elif self.dirstate.state(f) == 'a':
958 elif self.dirstate.state(f) == 'a':
957 self.dirstate.forget([f])
959 self.dirstate.forget([f])
958 elif f not in self.dirstate:
960 elif f not in self.dirstate:
959 self.ui.warn(_("%s not tracked!\n") % f)
961 self.ui.warn(_("%s not tracked!\n") % f)
960 else:
962 else:
961 self.dirstate.update([f], "r")
963 self.dirstate.update([f], "r")
962
964
963 def undelete(self, list, wlock=None):
965 def undelete(self, list, wlock=None):
964 p = self.dirstate.parents()[0]
966 p = self.dirstate.parents()[0]
965 mn = self.changelog.read(p)[0]
967 mn = self.changelog.read(p)[0]
966 m = self.manifest.read(mn)
968 m = self.manifest.read(mn)
967 if not wlock:
969 if not wlock:
968 wlock = self.wlock()
970 wlock = self.wlock()
969 for f in list:
971 for f in list:
970 if self.dirstate.state(f) not in "r":
972 if self.dirstate.state(f) not in "r":
971 self.ui.warn("%s not removed!\n" % f)
973 self.ui.warn("%s not removed!\n" % f)
972 else:
974 else:
973 t = self.file(f).read(m[f])
975 t = self.file(f).read(m[f])
974 self.wwrite(f, t)
976 self.wwrite(f, t)
975 util.set_exec(self.wjoin(f), m.execf(f))
977 util.set_exec(self.wjoin(f), m.execf(f))
976 self.dirstate.update([f], "n")
978 self.dirstate.update([f], "n")
977
979
978 def copy(self, source, dest, wlock=None):
980 def copy(self, source, dest, wlock=None):
979 p = self.wjoin(dest)
981 p = self.wjoin(dest)
980 if not os.path.exists(p):
982 if not os.path.exists(p):
981 self.ui.warn(_("%s does not exist!\n") % dest)
983 self.ui.warn(_("%s does not exist!\n") % dest)
982 elif not os.path.isfile(p):
984 elif not os.path.isfile(p):
983 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
985 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
984 else:
986 else:
985 if not wlock:
987 if not wlock:
986 wlock = self.wlock()
988 wlock = self.wlock()
987 if self.dirstate.state(dest) == '?':
989 if self.dirstate.state(dest) == '?':
988 self.dirstate.update([dest], "a")
990 self.dirstate.update([dest], "a")
989 self.dirstate.copy(source, dest)
991 self.dirstate.copy(source, dest)
990
992
991 def heads(self, start=None):
993 def heads(self, start=None):
992 heads = self.changelog.heads(start)
994 heads = self.changelog.heads(start)
993 # sort the output in rev descending order
995 # sort the output in rev descending order
994 heads = [(-self.changelog.rev(h), h) for h in heads]
996 heads = [(-self.changelog.rev(h), h) for h in heads]
995 heads.sort()
997 heads.sort()
996 return [n for (r, n) in heads]
998 return [n for (r, n) in heads]
997
999
998 def branches(self, nodes):
1000 def branches(self, nodes):
999 if not nodes:
1001 if not nodes:
1000 nodes = [self.changelog.tip()]
1002 nodes = [self.changelog.tip()]
1001 b = []
1003 b = []
1002 for n in nodes:
1004 for n in nodes:
1003 t = n
1005 t = n
1004 while 1:
1006 while 1:
1005 p = self.changelog.parents(n)
1007 p = self.changelog.parents(n)
1006 if p[1] != nullid or p[0] == nullid:
1008 if p[1] != nullid or p[0] == nullid:
1007 b.append((t, n, p[0], p[1]))
1009 b.append((t, n, p[0], p[1]))
1008 break
1010 break
1009 n = p[0]
1011 n = p[0]
1010 return b
1012 return b
1011
1013
1012 def between(self, pairs):
1014 def between(self, pairs):
1013 r = []
1015 r = []
1014
1016
1015 for top, bottom in pairs:
1017 for top, bottom in pairs:
1016 n, l, i = top, [], 0
1018 n, l, i = top, [], 0
1017 f = 1
1019 f = 1
1018
1020
1019 while n != bottom:
1021 while n != bottom:
1020 p = self.changelog.parents(n)[0]
1022 p = self.changelog.parents(n)[0]
1021 if i == f:
1023 if i == f:
1022 l.append(n)
1024 l.append(n)
1023 f = f * 2
1025 f = f * 2
1024 n = p
1026 n = p
1025 i += 1
1027 i += 1
1026
1028
1027 r.append(l)
1029 r.append(l)
1028
1030
1029 return r
1031 return r
1030
1032
1031 def findincoming(self, remote, base=None, heads=None, force=False):
1033 def findincoming(self, remote, base=None, heads=None, force=False):
1032 """Return list of roots of the subsets of missing nodes from remote
1034 """Return list of roots of the subsets of missing nodes from remote
1033
1035
1034 If base dict is specified, assume that these nodes and their parents
1036 If base dict is specified, assume that these nodes and their parents
1035 exist on the remote side and that no child of a node of base exists
1037 exist on the remote side and that no child of a node of base exists
1036 in both remote and self.
1038 in both remote and self.
1037 Furthermore base will be updated to include the nodes that exists
1039 Furthermore base will be updated to include the nodes that exists
1038 in self and remote but no children exists in self and remote.
1040 in self and remote but no children exists in self and remote.
1039 If a list of heads is specified, return only nodes which are heads
1041 If a list of heads is specified, return only nodes which are heads
1040 or ancestors of these heads.
1042 or ancestors of these heads.
1041
1043
1042 All the ancestors of base are in self and in remote.
1044 All the ancestors of base are in self and in remote.
1043 All the descendants of the list returned are missing in self.
1045 All the descendants of the list returned are missing in self.
1044 (and so we know that the rest of the nodes are missing in remote, see
1046 (and so we know that the rest of the nodes are missing in remote, see
1045 outgoing)
1047 outgoing)
1046 """
1048 """
1047 m = self.changelog.nodemap
1049 m = self.changelog.nodemap
1048 search = []
1050 search = []
1049 fetch = {}
1051 fetch = {}
1050 seen = {}
1052 seen = {}
1051 seenbranch = {}
1053 seenbranch = {}
1052 if base == None:
1054 if base == None:
1053 base = {}
1055 base = {}
1054
1056
1055 if not heads:
1057 if not heads:
1056 heads = remote.heads()
1058 heads = remote.heads()
1057
1059
1058 if self.changelog.tip() == nullid:
1060 if self.changelog.tip() == nullid:
1059 base[nullid] = 1
1061 base[nullid] = 1
1060 if heads != [nullid]:
1062 if heads != [nullid]:
1061 return [nullid]
1063 return [nullid]
1062 return []
1064 return []
1063
1065
1064 # assume we're closer to the tip than the root
1066 # assume we're closer to the tip than the root
1065 # and start by examining the heads
1067 # and start by examining the heads
1066 self.ui.status(_("searching for changes\n"))
1068 self.ui.status(_("searching for changes\n"))
1067
1069
1068 unknown = []
1070 unknown = []
1069 for h in heads:
1071 for h in heads:
1070 if h not in m:
1072 if h not in m:
1071 unknown.append(h)
1073 unknown.append(h)
1072 else:
1074 else:
1073 base[h] = 1
1075 base[h] = 1
1074
1076
1075 if not unknown:
1077 if not unknown:
1076 return []
1078 return []
1077
1079
1078 req = dict.fromkeys(unknown)
1080 req = dict.fromkeys(unknown)
1079 reqcnt = 0
1081 reqcnt = 0
1080
1082
1081 # search through remote branches
1083 # search through remote branches
1082 # a 'branch' here is a linear segment of history, with four parts:
1084 # a 'branch' here is a linear segment of history, with four parts:
1083 # head, root, first parent, second parent
1085 # head, root, first parent, second parent
1084 # (a branch always has two parents (or none) by definition)
1086 # (a branch always has two parents (or none) by definition)
1085 unknown = remote.branches(unknown)
1087 unknown = remote.branches(unknown)
1086 while unknown:
1088 while unknown:
1087 r = []
1089 r = []
1088 while unknown:
1090 while unknown:
1089 n = unknown.pop(0)
1091 n = unknown.pop(0)
1090 if n[0] in seen:
1092 if n[0] in seen:
1091 continue
1093 continue
1092
1094
1093 self.ui.debug(_("examining %s:%s\n")
1095 self.ui.debug(_("examining %s:%s\n")
1094 % (short(n[0]), short(n[1])))
1096 % (short(n[0]), short(n[1])))
1095 if n[0] == nullid: # found the end of the branch
1097 if n[0] == nullid: # found the end of the branch
1096 pass
1098 pass
1097 elif n in seenbranch:
1099 elif n in seenbranch:
1098 self.ui.debug(_("branch already found\n"))
1100 self.ui.debug(_("branch already found\n"))
1099 continue
1101 continue
1100 elif n[1] and n[1] in m: # do we know the base?
1102 elif n[1] and n[1] in m: # do we know the base?
1101 self.ui.debug(_("found incomplete branch %s:%s\n")
1103 self.ui.debug(_("found incomplete branch %s:%s\n")
1102 % (short(n[0]), short(n[1])))
1104 % (short(n[0]), short(n[1])))
1103 search.append(n) # schedule branch range for scanning
1105 search.append(n) # schedule branch range for scanning
1104 seenbranch[n] = 1
1106 seenbranch[n] = 1
1105 else:
1107 else:
1106 if n[1] not in seen and n[1] not in fetch:
1108 if n[1] not in seen and n[1] not in fetch:
1107 if n[2] in m and n[3] in m:
1109 if n[2] in m and n[3] in m:
1108 self.ui.debug(_("found new changeset %s\n") %
1110 self.ui.debug(_("found new changeset %s\n") %
1109 short(n[1]))
1111 short(n[1]))
1110 fetch[n[1]] = 1 # earliest unknown
1112 fetch[n[1]] = 1 # earliest unknown
1111 for p in n[2:4]:
1113 for p in n[2:4]:
1112 if p in m:
1114 if p in m:
1113 base[p] = 1 # latest known
1115 base[p] = 1 # latest known
1114
1116
1115 for p in n[2:4]:
1117 for p in n[2:4]:
1116 if p not in req and p not in m:
1118 if p not in req and p not in m:
1117 r.append(p)
1119 r.append(p)
1118 req[p] = 1
1120 req[p] = 1
1119 seen[n[0]] = 1
1121 seen[n[0]] = 1
1120
1122
1121 if r:
1123 if r:
1122 reqcnt += 1
1124 reqcnt += 1
1123 self.ui.debug(_("request %d: %s\n") %
1125 self.ui.debug(_("request %d: %s\n") %
1124 (reqcnt, " ".join(map(short, r))))
1126 (reqcnt, " ".join(map(short, r))))
1125 for p in xrange(0, len(r), 10):
1127 for p in xrange(0, len(r), 10):
1126 for b in remote.branches(r[p:p+10]):
1128 for b in remote.branches(r[p:p+10]):
1127 self.ui.debug(_("received %s:%s\n") %
1129 self.ui.debug(_("received %s:%s\n") %
1128 (short(b[0]), short(b[1])))
1130 (short(b[0]), short(b[1])))
1129 unknown.append(b)
1131 unknown.append(b)
1130
1132
1131 # do binary search on the branches we found
1133 # do binary search on the branches we found
1132 while search:
1134 while search:
1133 n = search.pop(0)
1135 n = search.pop(0)
1134 reqcnt += 1
1136 reqcnt += 1
1135 l = remote.between([(n[0], n[1])])[0]
1137 l = remote.between([(n[0], n[1])])[0]
1136 l.append(n[1])
1138 l.append(n[1])
1137 p = n[0]
1139 p = n[0]
1138 f = 1
1140 f = 1
1139 for i in l:
1141 for i in l:
1140 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1142 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1141 if i in m:
1143 if i in m:
1142 if f <= 2:
1144 if f <= 2:
1143 self.ui.debug(_("found new branch changeset %s\n") %
1145 self.ui.debug(_("found new branch changeset %s\n") %
1144 short(p))
1146 short(p))
1145 fetch[p] = 1
1147 fetch[p] = 1
1146 base[i] = 1
1148 base[i] = 1
1147 else:
1149 else:
1148 self.ui.debug(_("narrowed branch search to %s:%s\n")
1150 self.ui.debug(_("narrowed branch search to %s:%s\n")
1149 % (short(p), short(i)))
1151 % (short(p), short(i)))
1150 search.append((p, i))
1152 search.append((p, i))
1151 break
1153 break
1152 p, f = i, f * 2
1154 p, f = i, f * 2
1153
1155
1154 # sanity check our fetch list
1156 # sanity check our fetch list
1155 for f in fetch.keys():
1157 for f in fetch.keys():
1156 if f in m:
1158 if f in m:
1157 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1159 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1158
1160
1159 if base.keys() == [nullid]:
1161 if base.keys() == [nullid]:
1160 if force:
1162 if force:
1161 self.ui.warn(_("warning: repository is unrelated\n"))
1163 self.ui.warn(_("warning: repository is unrelated\n"))
1162 else:
1164 else:
1163 raise util.Abort(_("repository is unrelated"))
1165 raise util.Abort(_("repository is unrelated"))
1164
1166
1165 self.ui.debug(_("found new changesets starting at ") +
1167 self.ui.debug(_("found new changesets starting at ") +
1166 " ".join([short(f) for f in fetch]) + "\n")
1168 " ".join([short(f) for f in fetch]) + "\n")
1167
1169
1168 self.ui.debug(_("%d total queries\n") % reqcnt)
1170 self.ui.debug(_("%d total queries\n") % reqcnt)
1169
1171
1170 return fetch.keys()
1172 return fetch.keys()
1171
1173
1172 def findoutgoing(self, remote, base=None, heads=None, force=False):
1174 def findoutgoing(self, remote, base=None, heads=None, force=False):
1173 """Return list of nodes that are roots of subsets not in remote
1175 """Return list of nodes that are roots of subsets not in remote
1174
1176
1175 If base dict is specified, assume that these nodes and their parents
1177 If base dict is specified, assume that these nodes and their parents
1176 exist on the remote side.
1178 exist on the remote side.
1177 If a list of heads is specified, return only nodes which are heads
1179 If a list of heads is specified, return only nodes which are heads
1178 or ancestors of these heads, and return a second element which
1180 or ancestors of these heads, and return a second element which
1179 contains all remote heads which get new children.
1181 contains all remote heads which get new children.
1180 """
1182 """
1181 if base == None:
1183 if base == None:
1182 base = {}
1184 base = {}
1183 self.findincoming(remote, base, heads, force=force)
1185 self.findincoming(remote, base, heads, force=force)
1184
1186
1185 self.ui.debug(_("common changesets up to ")
1187 self.ui.debug(_("common changesets up to ")
1186 + " ".join(map(short, base.keys())) + "\n")
1188 + " ".join(map(short, base.keys())) + "\n")
1187
1189
1188 remain = dict.fromkeys(self.changelog.nodemap)
1190 remain = dict.fromkeys(self.changelog.nodemap)
1189
1191
1190 # prune everything remote has from the tree
1192 # prune everything remote has from the tree
1191 del remain[nullid]
1193 del remain[nullid]
1192 remove = base.keys()
1194 remove = base.keys()
1193 while remove:
1195 while remove:
1194 n = remove.pop(0)
1196 n = remove.pop(0)
1195 if n in remain:
1197 if n in remain:
1196 del remain[n]
1198 del remain[n]
1197 for p in self.changelog.parents(n):
1199 for p in self.changelog.parents(n):
1198 remove.append(p)
1200 remove.append(p)
1199
1201
1200 # find every node whose parents have been pruned
1202 # find every node whose parents have been pruned
1201 subset = []
1203 subset = []
1202 # find every remote head that will get new children
1204 # find every remote head that will get new children
1203 updated_heads = {}
1205 updated_heads = {}
1204 for n in remain:
1206 for n in remain:
1205 p1, p2 = self.changelog.parents(n)
1207 p1, p2 = self.changelog.parents(n)
1206 if p1 not in remain and p2 not in remain:
1208 if p1 not in remain and p2 not in remain:
1207 subset.append(n)
1209 subset.append(n)
1208 if heads:
1210 if heads:
1209 if p1 in heads:
1211 if p1 in heads:
1210 updated_heads[p1] = True
1212 updated_heads[p1] = True
1211 if p2 in heads:
1213 if p2 in heads:
1212 updated_heads[p2] = True
1214 updated_heads[p2] = True
1213
1215
1214 # this is the set of all roots we have to push
1216 # this is the set of all roots we have to push
1215 if heads:
1217 if heads:
1216 return subset, updated_heads.keys()
1218 return subset, updated_heads.keys()
1217 else:
1219 else:
1218 return subset
1220 return subset
1219
1221
1220 def pull(self, remote, heads=None, force=False, lock=None):
1222 def pull(self, remote, heads=None, force=False, lock=None):
1221 mylock = False
1223 mylock = False
1222 if not lock:
1224 if not lock:
1223 lock = self.lock()
1225 lock = self.lock()
1224 mylock = True
1226 mylock = True
1225
1227
1226 try:
1228 try:
1227 fetch = self.findincoming(remote, force=force)
1229 fetch = self.findincoming(remote, force=force)
1228 if fetch == [nullid]:
1230 if fetch == [nullid]:
1229 self.ui.status(_("requesting all changes\n"))
1231 self.ui.status(_("requesting all changes\n"))
1230
1232
1231 if not fetch:
1233 if not fetch:
1232 self.ui.status(_("no changes found\n"))
1234 self.ui.status(_("no changes found\n"))
1233 return 0
1235 return 0
1234
1236
1235 if heads is None:
1237 if heads is None:
1236 cg = remote.changegroup(fetch, 'pull')
1238 cg = remote.changegroup(fetch, 'pull')
1237 else:
1239 else:
1238 if 'changegroupsubset' not in remote.capabilities:
1240 if 'changegroupsubset' not in remote.capabilities:
1239 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1241 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1240 cg = remote.changegroupsubset(fetch, heads, 'pull')
1242 cg = remote.changegroupsubset(fetch, heads, 'pull')
1241 return self.addchangegroup(cg, 'pull', remote.url())
1243 return self.addchangegroup(cg, 'pull', remote.url())
1242 finally:
1244 finally:
1243 if mylock:
1245 if mylock:
1244 lock.release()
1246 lock.release()
1245
1247
1246 def push(self, remote, force=False, revs=None):
1248 def push(self, remote, force=False, revs=None):
1247 # there are two ways to push to remote repo:
1249 # there are two ways to push to remote repo:
1248 #
1250 #
1249 # addchangegroup assumes local user can lock remote
1251 # addchangegroup assumes local user can lock remote
1250 # repo (local filesystem, old ssh servers).
1252 # repo (local filesystem, old ssh servers).
1251 #
1253 #
1252 # unbundle assumes local user cannot lock remote repo (new ssh
1254 # unbundle assumes local user cannot lock remote repo (new ssh
1253 # servers, http servers).
1255 # servers, http servers).
1254
1256
1255 if remote.capable('unbundle'):
1257 if remote.capable('unbundle'):
1256 return self.push_unbundle(remote, force, revs)
1258 return self.push_unbundle(remote, force, revs)
1257 return self.push_addchangegroup(remote, force, revs)
1259 return self.push_addchangegroup(remote, force, revs)
1258
1260
1259 def prepush(self, remote, force, revs):
1261 def prepush(self, remote, force, revs):
1260 base = {}
1262 base = {}
1261 remote_heads = remote.heads()
1263 remote_heads = remote.heads()
1262 inc = self.findincoming(remote, base, remote_heads, force=force)
1264 inc = self.findincoming(remote, base, remote_heads, force=force)
1263
1265
1264 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1266 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1265 if revs is not None:
1267 if revs is not None:
1266 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1268 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1267 else:
1269 else:
1268 bases, heads = update, self.changelog.heads()
1270 bases, heads = update, self.changelog.heads()
1269
1271
1270 if not bases:
1272 if not bases:
1271 self.ui.status(_("no changes found\n"))
1273 self.ui.status(_("no changes found\n"))
1272 return None, 1
1274 return None, 1
1273 elif not force:
1275 elif not force:
1274 # check if we're creating new remote heads
1276 # check if we're creating new remote heads
1275 # to be a remote head after push, node must be either
1277 # to be a remote head after push, node must be either
1276 # - unknown locally
1278 # - unknown locally
1277 # - a local outgoing head descended from update
1279 # - a local outgoing head descended from update
1278 # - a remote head that's known locally and not
1280 # - a remote head that's known locally and not
1279 # ancestral to an outgoing head
1281 # ancestral to an outgoing head
1280
1282
1281 warn = 0
1283 warn = 0
1282
1284
1283 if remote_heads == [nullid]:
1285 if remote_heads == [nullid]:
1284 warn = 0
1286 warn = 0
1285 elif not revs and len(heads) > len(remote_heads):
1287 elif not revs and len(heads) > len(remote_heads):
1286 warn = 1
1288 warn = 1
1287 else:
1289 else:
1288 newheads = list(heads)
1290 newheads = list(heads)
1289 for r in remote_heads:
1291 for r in remote_heads:
1290 if r in self.changelog.nodemap:
1292 if r in self.changelog.nodemap:
1291 desc = self.changelog.heads(r, heads)
1293 desc = self.changelog.heads(r, heads)
1292 l = [h for h in heads if h in desc]
1294 l = [h for h in heads if h in desc]
1293 if not l:
1295 if not l:
1294 newheads.append(r)
1296 newheads.append(r)
1295 else:
1297 else:
1296 newheads.append(r)
1298 newheads.append(r)
1297 if len(newheads) > len(remote_heads):
1299 if len(newheads) > len(remote_heads):
1298 warn = 1
1300 warn = 1
1299
1301
1300 if warn:
1302 if warn:
1301 self.ui.warn(_("abort: push creates new remote branches!\n"))
1303 self.ui.warn(_("abort: push creates new remote branches!\n"))
1302 self.ui.status(_("(did you forget to merge?"
1304 self.ui.status(_("(did you forget to merge?"
1303 " use push -f to force)\n"))
1305 " use push -f to force)\n"))
1304 return None, 1
1306 return None, 1
1305 elif inc:
1307 elif inc:
1306 self.ui.warn(_("note: unsynced remote changes!\n"))
1308 self.ui.warn(_("note: unsynced remote changes!\n"))
1307
1309
1308
1310
1309 if revs is None:
1311 if revs is None:
1310 cg = self.changegroup(update, 'push')
1312 cg = self.changegroup(update, 'push')
1311 else:
1313 else:
1312 cg = self.changegroupsubset(update, revs, 'push')
1314 cg = self.changegroupsubset(update, revs, 'push')
1313 return cg, remote_heads
1315 return cg, remote_heads
1314
1316
1315 def push_addchangegroup(self, remote, force, revs):
1317 def push_addchangegroup(self, remote, force, revs):
1316 lock = remote.lock()
1318 lock = remote.lock()
1317
1319
1318 ret = self.prepush(remote, force, revs)
1320 ret = self.prepush(remote, force, revs)
1319 if ret[0] is not None:
1321 if ret[0] is not None:
1320 cg, remote_heads = ret
1322 cg, remote_heads = ret
1321 return remote.addchangegroup(cg, 'push', self.url())
1323 return remote.addchangegroup(cg, 'push', self.url())
1322 return ret[1]
1324 return ret[1]
1323
1325
1324 def push_unbundle(self, remote, force, revs):
1326 def push_unbundle(self, remote, force, revs):
1325 # local repo finds heads on server, finds out what revs it
1327 # local repo finds heads on server, finds out what revs it
1326 # must push. once revs transferred, if server finds it has
1328 # must push. once revs transferred, if server finds it has
1327 # different heads (someone else won commit/push race), server
1329 # different heads (someone else won commit/push race), server
1328 # aborts.
1330 # aborts.
1329
1331
1330 ret = self.prepush(remote, force, revs)
1332 ret = self.prepush(remote, force, revs)
1331 if ret[0] is not None:
1333 if ret[0] is not None:
1332 cg, remote_heads = ret
1334 cg, remote_heads = ret
1333 if force: remote_heads = ['force']
1335 if force: remote_heads = ['force']
1334 return remote.unbundle(cg, remote_heads, 'push')
1336 return remote.unbundle(cg, remote_heads, 'push')
1335 return ret[1]
1337 return ret[1]
1336
1338
1337 def changegroupinfo(self, nodes):
1339 def changegroupinfo(self, nodes):
1338 self.ui.note(_("%d changesets found\n") % len(nodes))
1340 self.ui.note(_("%d changesets found\n") % len(nodes))
1339 if self.ui.debugflag:
1341 if self.ui.debugflag:
1340 self.ui.debug(_("List of changesets:\n"))
1342 self.ui.debug(_("List of changesets:\n"))
1341 for node in nodes:
1343 for node in nodes:
1342 self.ui.debug("%s\n" % hex(node))
1344 self.ui.debug("%s\n" % hex(node))
1343
1345
1344 def changegroupsubset(self, bases, heads, source):
1346 def changegroupsubset(self, bases, heads, source):
1345 """This function generates a changegroup consisting of all the nodes
1347 """This function generates a changegroup consisting of all the nodes
1346 that are descendents of any of the bases, and ancestors of any of
1348 that are descendents of any of the bases, and ancestors of any of
1347 the heads.
1349 the heads.
1348
1350
1349 It is fairly complex as determining which filenodes and which
1351 It is fairly complex as determining which filenodes and which
1350 manifest nodes need to be included for the changeset to be complete
1352 manifest nodes need to be included for the changeset to be complete
1351 is non-trivial.
1353 is non-trivial.
1352
1354
1353 Another wrinkle is doing the reverse, figuring out which changeset in
1355 Another wrinkle is doing the reverse, figuring out which changeset in
1354 the changegroup a particular filenode or manifestnode belongs to."""
1356 the changegroup a particular filenode or manifestnode belongs to."""
1355
1357
1356 self.hook('preoutgoing', throw=True, source=source)
1358 self.hook('preoutgoing', throw=True, source=source)
1357
1359
1358 # Set up some initial variables
1360 # Set up some initial variables
1359 # Make it easy to refer to self.changelog
1361 # Make it easy to refer to self.changelog
1360 cl = self.changelog
1362 cl = self.changelog
1361 # msng is short for missing - compute the list of changesets in this
1363 # msng is short for missing - compute the list of changesets in this
1362 # changegroup.
1364 # changegroup.
1363 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1365 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1364 self.changegroupinfo(msng_cl_lst)
1366 self.changegroupinfo(msng_cl_lst)
1365 # Some bases may turn out to be superfluous, and some heads may be
1367 # Some bases may turn out to be superfluous, and some heads may be
1366 # too. nodesbetween will return the minimal set of bases and heads
1368 # too. nodesbetween will return the minimal set of bases and heads
1367 # necessary to re-create the changegroup.
1369 # necessary to re-create the changegroup.
1368
1370
1369 # Known heads are the list of heads that it is assumed the recipient
1371 # Known heads are the list of heads that it is assumed the recipient
1370 # of this changegroup will know about.
1372 # of this changegroup will know about.
1371 knownheads = {}
1373 knownheads = {}
1372 # We assume that all parents of bases are known heads.
1374 # We assume that all parents of bases are known heads.
1373 for n in bases:
1375 for n in bases:
1374 for p in cl.parents(n):
1376 for p in cl.parents(n):
1375 if p != nullid:
1377 if p != nullid:
1376 knownheads[p] = 1
1378 knownheads[p] = 1
1377 knownheads = knownheads.keys()
1379 knownheads = knownheads.keys()
1378 if knownheads:
1380 if knownheads:
1379 # Now that we know what heads are known, we can compute which
1381 # Now that we know what heads are known, we can compute which
1380 # changesets are known. The recipient must know about all
1382 # changesets are known. The recipient must know about all
1381 # changesets required to reach the known heads from the null
1383 # changesets required to reach the known heads from the null
1382 # changeset.
1384 # changeset.
1383 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1385 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1384 junk = None
1386 junk = None
1385 # Transform the list into an ersatz set.
1387 # Transform the list into an ersatz set.
1386 has_cl_set = dict.fromkeys(has_cl_set)
1388 has_cl_set = dict.fromkeys(has_cl_set)
1387 else:
1389 else:
1388 # If there were no known heads, the recipient cannot be assumed to
1390 # If there were no known heads, the recipient cannot be assumed to
1389 # know about any changesets.
1391 # know about any changesets.
1390 has_cl_set = {}
1392 has_cl_set = {}
1391
1393
1392 # Make it easy to refer to self.manifest
1394 # Make it easy to refer to self.manifest
1393 mnfst = self.manifest
1395 mnfst = self.manifest
1394 # We don't know which manifests are missing yet
1396 # We don't know which manifests are missing yet
1395 msng_mnfst_set = {}
1397 msng_mnfst_set = {}
1396 # Nor do we know which filenodes are missing.
1398 # Nor do we know which filenodes are missing.
1397 msng_filenode_set = {}
1399 msng_filenode_set = {}
1398
1400
1399 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1401 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1400 junk = None
1402 junk = None
1401
1403
1402 # A changeset always belongs to itself, so the changenode lookup
1404 # A changeset always belongs to itself, so the changenode lookup
1403 # function for a changenode is identity.
1405 # function for a changenode is identity.
1404 def identity(x):
1406 def identity(x):
1405 return x
1407 return x
1406
1408
1407 # A function generating function. Sets up an environment for the
1409 # A function generating function. Sets up an environment for the
1408 # inner function.
1410 # inner function.
1409 def cmp_by_rev_func(revlog):
1411 def cmp_by_rev_func(revlog):
1410 # Compare two nodes by their revision number in the environment's
1412 # Compare two nodes by their revision number in the environment's
1411 # revision history. Since the revision number both represents the
1413 # revision history. Since the revision number both represents the
1412 # most efficient order to read the nodes in, and represents a
1414 # most efficient order to read the nodes in, and represents a
1413 # topological sorting of the nodes, this function is often useful.
1415 # topological sorting of the nodes, this function is often useful.
1414 def cmp_by_rev(a, b):
1416 def cmp_by_rev(a, b):
1415 return cmp(revlog.rev(a), revlog.rev(b))
1417 return cmp(revlog.rev(a), revlog.rev(b))
1416 return cmp_by_rev
1418 return cmp_by_rev
1417
1419
1418 # If we determine that a particular file or manifest node must be a
1420 # If we determine that a particular file or manifest node must be a
1419 # node that the recipient of the changegroup will already have, we can
1421 # node that the recipient of the changegroup will already have, we can
1420 # also assume the recipient will have all the parents. This function
1422 # also assume the recipient will have all the parents. This function
1421 # prunes them from the set of missing nodes.
1423 # prunes them from the set of missing nodes.
1422 def prune_parents(revlog, hasset, msngset):
1424 def prune_parents(revlog, hasset, msngset):
1423 haslst = hasset.keys()
1425 haslst = hasset.keys()
1424 haslst.sort(cmp_by_rev_func(revlog))
1426 haslst.sort(cmp_by_rev_func(revlog))
1425 for node in haslst:
1427 for node in haslst:
1426 parentlst = [p for p in revlog.parents(node) if p != nullid]
1428 parentlst = [p for p in revlog.parents(node) if p != nullid]
1427 while parentlst:
1429 while parentlst:
1428 n = parentlst.pop()
1430 n = parentlst.pop()
1429 if n not in hasset:
1431 if n not in hasset:
1430 hasset[n] = 1
1432 hasset[n] = 1
1431 p = [p for p in revlog.parents(n) if p != nullid]
1433 p = [p for p in revlog.parents(n) if p != nullid]
1432 parentlst.extend(p)
1434 parentlst.extend(p)
1433 for n in hasset:
1435 for n in hasset:
1434 msngset.pop(n, None)
1436 msngset.pop(n, None)
1435
1437
1436 # This is a function generating function used to set up an environment
1438 # This is a function generating function used to set up an environment
1437 # for the inner function to execute in.
1439 # for the inner function to execute in.
1438 def manifest_and_file_collector(changedfileset):
1440 def manifest_and_file_collector(changedfileset):
1439 # This is an information gathering function that gathers
1441 # This is an information gathering function that gathers
1440 # information from each changeset node that goes out as part of
1442 # information from each changeset node that goes out as part of
1441 # the changegroup. The information gathered is a list of which
1443 # the changegroup. The information gathered is a list of which
1442 # manifest nodes are potentially required (the recipient may
1444 # manifest nodes are potentially required (the recipient may
1443 # already have them) and total list of all files which were
1445 # already have them) and total list of all files which were
1444 # changed in any changeset in the changegroup.
1446 # changed in any changeset in the changegroup.
1445 #
1447 #
1446 # We also remember the first changenode we saw any manifest
1448 # We also remember the first changenode we saw any manifest
1447 # referenced by so we can later determine which changenode 'owns'
1449 # referenced by so we can later determine which changenode 'owns'
1448 # the manifest.
1450 # the manifest.
1449 def collect_manifests_and_files(clnode):
1451 def collect_manifests_and_files(clnode):
1450 c = cl.read(clnode)
1452 c = cl.read(clnode)
1451 for f in c[3]:
1453 for f in c[3]:
1452 # This is to make sure we only have one instance of each
1454 # This is to make sure we only have one instance of each
1453 # filename string for each filename.
1455 # filename string for each filename.
1454 changedfileset.setdefault(f, f)
1456 changedfileset.setdefault(f, f)
1455 msng_mnfst_set.setdefault(c[0], clnode)
1457 msng_mnfst_set.setdefault(c[0], clnode)
1456 return collect_manifests_and_files
1458 return collect_manifests_and_files
1457
1459
1458 # Figure out which manifest nodes (of the ones we think might be part
1460 # Figure out which manifest nodes (of the ones we think might be part
1459 # of the changegroup) the recipient must know about and remove them
1461 # of the changegroup) the recipient must know about and remove them
1460 # from the changegroup.
1462 # from the changegroup.
1461 def prune_manifests():
1463 def prune_manifests():
1462 has_mnfst_set = {}
1464 has_mnfst_set = {}
1463 for n in msng_mnfst_set:
1465 for n in msng_mnfst_set:
1464 # If a 'missing' manifest thinks it belongs to a changenode
1466 # If a 'missing' manifest thinks it belongs to a changenode
1465 # the recipient is assumed to have, obviously the recipient
1467 # the recipient is assumed to have, obviously the recipient
1466 # must have that manifest.
1468 # must have that manifest.
1467 linknode = cl.node(mnfst.linkrev(n))
1469 linknode = cl.node(mnfst.linkrev(n))
1468 if linknode in has_cl_set:
1470 if linknode in has_cl_set:
1469 has_mnfst_set[n] = 1
1471 has_mnfst_set[n] = 1
1470 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1472 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1471
1473
1472 # Use the information collected in collect_manifests_and_files to say
1474 # Use the information collected in collect_manifests_and_files to say
1473 # which changenode any manifestnode belongs to.
1475 # which changenode any manifestnode belongs to.
1474 def lookup_manifest_link(mnfstnode):
1476 def lookup_manifest_link(mnfstnode):
1475 return msng_mnfst_set[mnfstnode]
1477 return msng_mnfst_set[mnfstnode]
1476
1478
1477 # A function generating function that sets up the initial environment
1479 # A function generating function that sets up the initial environment
1478 # the inner function.
1480 # the inner function.
1479 def filenode_collector(changedfiles):
1481 def filenode_collector(changedfiles):
1480 next_rev = [0]
1482 next_rev = [0]
1481 # This gathers information from each manifestnode included in the
1483 # This gathers information from each manifestnode included in the
1482 # changegroup about which filenodes the manifest node references
1484 # changegroup about which filenodes the manifest node references
1483 # so we can include those in the changegroup too.
1485 # so we can include those in the changegroup too.
1484 #
1486 #
1485 # It also remembers which changenode each filenode belongs to. It
1487 # It also remembers which changenode each filenode belongs to. It
1486 # does this by assuming the a filenode belongs to the changenode
1488 # does this by assuming the a filenode belongs to the changenode
1487 # the first manifest that references it belongs to.
1489 # the first manifest that references it belongs to.
1488 def collect_msng_filenodes(mnfstnode):
1490 def collect_msng_filenodes(mnfstnode):
1489 r = mnfst.rev(mnfstnode)
1491 r = mnfst.rev(mnfstnode)
1490 if r == next_rev[0]:
1492 if r == next_rev[0]:
1491 # If the last rev we looked at was the one just previous,
1493 # If the last rev we looked at was the one just previous,
1492 # we only need to see a diff.
1494 # we only need to see a diff.
1493 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1495 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1494 # For each line in the delta
1496 # For each line in the delta
1495 for dline in delta.splitlines():
1497 for dline in delta.splitlines():
1496 # get the filename and filenode for that line
1498 # get the filename and filenode for that line
1497 f, fnode = dline.split('\0')
1499 f, fnode = dline.split('\0')
1498 fnode = bin(fnode[:40])
1500 fnode = bin(fnode[:40])
1499 f = changedfiles.get(f, None)
1501 f = changedfiles.get(f, None)
1500 # And if the file is in the list of files we care
1502 # And if the file is in the list of files we care
1501 # about.
1503 # about.
1502 if f is not None:
1504 if f is not None:
1503 # Get the changenode this manifest belongs to
1505 # Get the changenode this manifest belongs to
1504 clnode = msng_mnfst_set[mnfstnode]
1506 clnode = msng_mnfst_set[mnfstnode]
1505 # Create the set of filenodes for the file if
1507 # Create the set of filenodes for the file if
1506 # there isn't one already.
1508 # there isn't one already.
1507 ndset = msng_filenode_set.setdefault(f, {})
1509 ndset = msng_filenode_set.setdefault(f, {})
1508 # And set the filenode's changelog node to the
1510 # And set the filenode's changelog node to the
1509 # manifest's if it hasn't been set already.
1511 # manifest's if it hasn't been set already.
1510 ndset.setdefault(fnode, clnode)
1512 ndset.setdefault(fnode, clnode)
1511 else:
1513 else:
1512 # Otherwise we need a full manifest.
1514 # Otherwise we need a full manifest.
1513 m = mnfst.read(mnfstnode)
1515 m = mnfst.read(mnfstnode)
1514 # For every file in we care about.
1516 # For every file in we care about.
1515 for f in changedfiles:
1517 for f in changedfiles:
1516 fnode = m.get(f, None)
1518 fnode = m.get(f, None)
1517 # If it's in the manifest
1519 # If it's in the manifest
1518 if fnode is not None:
1520 if fnode is not None:
1519 # See comments above.
1521 # See comments above.
1520 clnode = msng_mnfst_set[mnfstnode]
1522 clnode = msng_mnfst_set[mnfstnode]
1521 ndset = msng_filenode_set.setdefault(f, {})
1523 ndset = msng_filenode_set.setdefault(f, {})
1522 ndset.setdefault(fnode, clnode)
1524 ndset.setdefault(fnode, clnode)
1523 # Remember the revision we hope to see next.
1525 # Remember the revision we hope to see next.
1524 next_rev[0] = r + 1
1526 next_rev[0] = r + 1
1525 return collect_msng_filenodes
1527 return collect_msng_filenodes
1526
1528
1527 # We have a list of filenodes we think we need for a file, lets remove
1529 # We have a list of filenodes we think we need for a file, lets remove
1528 # all those we now the recipient must have.
1530 # all those we now the recipient must have.
1529 def prune_filenodes(f, filerevlog):
1531 def prune_filenodes(f, filerevlog):
1530 msngset = msng_filenode_set[f]
1532 msngset = msng_filenode_set[f]
1531 hasset = {}
1533 hasset = {}
1532 # If a 'missing' filenode thinks it belongs to a changenode we
1534 # If a 'missing' filenode thinks it belongs to a changenode we
1533 # assume the recipient must have, then the recipient must have
1535 # assume the recipient must have, then the recipient must have
1534 # that filenode.
1536 # that filenode.
1535 for n in msngset:
1537 for n in msngset:
1536 clnode = cl.node(filerevlog.linkrev(n))
1538 clnode = cl.node(filerevlog.linkrev(n))
1537 if clnode in has_cl_set:
1539 if clnode in has_cl_set:
1538 hasset[n] = 1
1540 hasset[n] = 1
1539 prune_parents(filerevlog, hasset, msngset)
1541 prune_parents(filerevlog, hasset, msngset)
1540
1542
1541 # A function generator function that sets up the a context for the
1543 # A function generator function that sets up the a context for the
1542 # inner function.
1544 # inner function.
1543 def lookup_filenode_link_func(fname):
1545 def lookup_filenode_link_func(fname):
1544 msngset = msng_filenode_set[fname]
1546 msngset = msng_filenode_set[fname]
1545 # Lookup the changenode the filenode belongs to.
1547 # Lookup the changenode the filenode belongs to.
1546 def lookup_filenode_link(fnode):
1548 def lookup_filenode_link(fnode):
1547 return msngset[fnode]
1549 return msngset[fnode]
1548 return lookup_filenode_link
1550 return lookup_filenode_link
1549
1551
1550 # Now that we have all theses utility functions to help out and
1552 # Now that we have all theses utility functions to help out and
1551 # logically divide up the task, generate the group.
1553 # logically divide up the task, generate the group.
1552 def gengroup():
1554 def gengroup():
1553 # The set of changed files starts empty.
1555 # The set of changed files starts empty.
1554 changedfiles = {}
1556 changedfiles = {}
1555 # Create a changenode group generator that will call our functions
1557 # Create a changenode group generator that will call our functions
1556 # back to lookup the owning changenode and collect information.
1558 # back to lookup the owning changenode and collect information.
1557 group = cl.group(msng_cl_lst, identity,
1559 group = cl.group(msng_cl_lst, identity,
1558 manifest_and_file_collector(changedfiles))
1560 manifest_and_file_collector(changedfiles))
1559 for chnk in group:
1561 for chnk in group:
1560 yield chnk
1562 yield chnk
1561
1563
1562 # The list of manifests has been collected by the generator
1564 # The list of manifests has been collected by the generator
1563 # calling our functions back.
1565 # calling our functions back.
1564 prune_manifests()
1566 prune_manifests()
1565 msng_mnfst_lst = msng_mnfst_set.keys()
1567 msng_mnfst_lst = msng_mnfst_set.keys()
1566 # Sort the manifestnodes by revision number.
1568 # Sort the manifestnodes by revision number.
1567 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1569 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1568 # Create a generator for the manifestnodes that calls our lookup
1570 # Create a generator for the manifestnodes that calls our lookup
1569 # and data collection functions back.
1571 # and data collection functions back.
1570 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1572 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1571 filenode_collector(changedfiles))
1573 filenode_collector(changedfiles))
1572 for chnk in group:
1574 for chnk in group:
1573 yield chnk
1575 yield chnk
1574
1576
1575 # These are no longer needed, dereference and toss the memory for
1577 # These are no longer needed, dereference and toss the memory for
1576 # them.
1578 # them.
1577 msng_mnfst_lst = None
1579 msng_mnfst_lst = None
1578 msng_mnfst_set.clear()
1580 msng_mnfst_set.clear()
1579
1581
1580 changedfiles = changedfiles.keys()
1582 changedfiles = changedfiles.keys()
1581 changedfiles.sort()
1583 changedfiles.sort()
1582 # Go through all our files in order sorted by name.
1584 # Go through all our files in order sorted by name.
1583 for fname in changedfiles:
1585 for fname in changedfiles:
1584 filerevlog = self.file(fname)
1586 filerevlog = self.file(fname)
1585 # Toss out the filenodes that the recipient isn't really
1587 # Toss out the filenodes that the recipient isn't really
1586 # missing.
1588 # missing.
1587 if msng_filenode_set.has_key(fname):
1589 if msng_filenode_set.has_key(fname):
1588 prune_filenodes(fname, filerevlog)
1590 prune_filenodes(fname, filerevlog)
1589 msng_filenode_lst = msng_filenode_set[fname].keys()
1591 msng_filenode_lst = msng_filenode_set[fname].keys()
1590 else:
1592 else:
1591 msng_filenode_lst = []
1593 msng_filenode_lst = []
1592 # If any filenodes are left, generate the group for them,
1594 # If any filenodes are left, generate the group for them,
1593 # otherwise don't bother.
1595 # otherwise don't bother.
1594 if len(msng_filenode_lst) > 0:
1596 if len(msng_filenode_lst) > 0:
1595 yield changegroup.genchunk(fname)
1597 yield changegroup.genchunk(fname)
1596 # Sort the filenodes by their revision #
1598 # Sort the filenodes by their revision #
1597 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1599 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1598 # Create a group generator and only pass in a changenode
1600 # Create a group generator and only pass in a changenode
1599 # lookup function as we need to collect no information
1601 # lookup function as we need to collect no information
1600 # from filenodes.
1602 # from filenodes.
1601 group = filerevlog.group(msng_filenode_lst,
1603 group = filerevlog.group(msng_filenode_lst,
1602 lookup_filenode_link_func(fname))
1604 lookup_filenode_link_func(fname))
1603 for chnk in group:
1605 for chnk in group:
1604 yield chnk
1606 yield chnk
1605 if msng_filenode_set.has_key(fname):
1607 if msng_filenode_set.has_key(fname):
1606 # Don't need this anymore, toss it to free memory.
1608 # Don't need this anymore, toss it to free memory.
1607 del msng_filenode_set[fname]
1609 del msng_filenode_set[fname]
1608 # Signal that no more groups are left.
1610 # Signal that no more groups are left.
1609 yield changegroup.closechunk()
1611 yield changegroup.closechunk()
1610
1612
1611 if msng_cl_lst:
1613 if msng_cl_lst:
1612 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1614 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1613
1615
1614 return util.chunkbuffer(gengroup())
1616 return util.chunkbuffer(gengroup())
1615
1617
1616 def changegroup(self, basenodes, source):
1618 def changegroup(self, basenodes, source):
1617 """Generate a changegroup of all nodes that we have that a recipient
1619 """Generate a changegroup of all nodes that we have that a recipient
1618 doesn't.
1620 doesn't.
1619
1621
1620 This is much easier than the previous function as we can assume that
1622 This is much easier than the previous function as we can assume that
1621 the recipient has any changenode we aren't sending them."""
1623 the recipient has any changenode we aren't sending them."""
1622
1624
1623 self.hook('preoutgoing', throw=True, source=source)
1625 self.hook('preoutgoing', throw=True, source=source)
1624
1626
1625 cl = self.changelog
1627 cl = self.changelog
1626 nodes = cl.nodesbetween(basenodes, None)[0]
1628 nodes = cl.nodesbetween(basenodes, None)[0]
1627 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1629 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1628 self.changegroupinfo(nodes)
1630 self.changegroupinfo(nodes)
1629
1631
1630 def identity(x):
1632 def identity(x):
1631 return x
1633 return x
1632
1634
1633 def gennodelst(revlog):
1635 def gennodelst(revlog):
1634 for r in xrange(0, revlog.count()):
1636 for r in xrange(0, revlog.count()):
1635 n = revlog.node(r)
1637 n = revlog.node(r)
1636 if revlog.linkrev(n) in revset:
1638 if revlog.linkrev(n) in revset:
1637 yield n
1639 yield n
1638
1640
1639 def changed_file_collector(changedfileset):
1641 def changed_file_collector(changedfileset):
1640 def collect_changed_files(clnode):
1642 def collect_changed_files(clnode):
1641 c = cl.read(clnode)
1643 c = cl.read(clnode)
1642 for fname in c[3]:
1644 for fname in c[3]:
1643 changedfileset[fname] = 1
1645 changedfileset[fname] = 1
1644 return collect_changed_files
1646 return collect_changed_files
1645
1647
1646 def lookuprevlink_func(revlog):
1648 def lookuprevlink_func(revlog):
1647 def lookuprevlink(n):
1649 def lookuprevlink(n):
1648 return cl.node(revlog.linkrev(n))
1650 return cl.node(revlog.linkrev(n))
1649 return lookuprevlink
1651 return lookuprevlink
1650
1652
1651 def gengroup():
1653 def gengroup():
1652 # construct a list of all changed files
1654 # construct a list of all changed files
1653 changedfiles = {}
1655 changedfiles = {}
1654
1656
1655 for chnk in cl.group(nodes, identity,
1657 for chnk in cl.group(nodes, identity,
1656 changed_file_collector(changedfiles)):
1658 changed_file_collector(changedfiles)):
1657 yield chnk
1659 yield chnk
1658 changedfiles = changedfiles.keys()
1660 changedfiles = changedfiles.keys()
1659 changedfiles.sort()
1661 changedfiles.sort()
1660
1662
1661 mnfst = self.manifest
1663 mnfst = self.manifest
1662 nodeiter = gennodelst(mnfst)
1664 nodeiter = gennodelst(mnfst)
1663 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1665 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1664 yield chnk
1666 yield chnk
1665
1667
1666 for fname in changedfiles:
1668 for fname in changedfiles:
1667 filerevlog = self.file(fname)
1669 filerevlog = self.file(fname)
1668 nodeiter = gennodelst(filerevlog)
1670 nodeiter = gennodelst(filerevlog)
1669 nodeiter = list(nodeiter)
1671 nodeiter = list(nodeiter)
1670 if nodeiter:
1672 if nodeiter:
1671 yield changegroup.genchunk(fname)
1673 yield changegroup.genchunk(fname)
1672 lookup = lookuprevlink_func(filerevlog)
1674 lookup = lookuprevlink_func(filerevlog)
1673 for chnk in filerevlog.group(nodeiter, lookup):
1675 for chnk in filerevlog.group(nodeiter, lookup):
1674 yield chnk
1676 yield chnk
1675
1677
1676 yield changegroup.closechunk()
1678 yield changegroup.closechunk()
1677
1679
1678 if nodes:
1680 if nodes:
1679 self.hook('outgoing', node=hex(nodes[0]), source=source)
1681 self.hook('outgoing', node=hex(nodes[0]), source=source)
1680
1682
1681 return util.chunkbuffer(gengroup())
1683 return util.chunkbuffer(gengroup())
1682
1684
1683 def addchangegroup(self, source, srctype, url):
1685 def addchangegroup(self, source, srctype, url):
1684 """add changegroup to repo.
1686 """add changegroup to repo.
1685
1687
1686 return values:
1688 return values:
1687 - nothing changed or no source: 0
1689 - nothing changed or no source: 0
1688 - more heads than before: 1+added heads (2..n)
1690 - more heads than before: 1+added heads (2..n)
1689 - less heads than before: -1-removed heads (-2..-n)
1691 - less heads than before: -1-removed heads (-2..-n)
1690 - number of heads stays the same: 1
1692 - number of heads stays the same: 1
1691 """
1693 """
1692 def csmap(x):
1694 def csmap(x):
1693 self.ui.debug(_("add changeset %s\n") % short(x))
1695 self.ui.debug(_("add changeset %s\n") % short(x))
1694 return cl.count()
1696 return cl.count()
1695
1697
1696 def revmap(x):
1698 def revmap(x):
1697 return cl.rev(x)
1699 return cl.rev(x)
1698
1700
1699 if not source:
1701 if not source:
1700 return 0
1702 return 0
1701
1703
1702 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1704 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1703
1705
1704 changesets = files = revisions = 0
1706 changesets = files = revisions = 0
1705
1707
1706 tr = self.transaction()
1708 tr = self.transaction()
1707
1709
1708 # write changelog data to temp files so concurrent readers will not see
1710 # write changelog data to temp files so concurrent readers will not see
1709 # inconsistent view
1711 # inconsistent view
1710 cl = None
1712 cl = None
1711 try:
1713 try:
1712 cl = appendfile.appendchangelog(self.sopener,
1714 cl = appendfile.appendchangelog(self.sopener,
1713 self.changelog.version)
1715 self.changelog.version)
1714
1716
1715 oldheads = len(cl.heads())
1717 oldheads = len(cl.heads())
1716
1718
1717 # pull off the changeset group
1719 # pull off the changeset group
1718 self.ui.status(_("adding changesets\n"))
1720 self.ui.status(_("adding changesets\n"))
1719 cor = cl.count() - 1
1721 cor = cl.count() - 1
1720 chunkiter = changegroup.chunkiter(source)
1722 chunkiter = changegroup.chunkiter(source)
1721 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1723 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1722 raise util.Abort(_("received changelog group is empty"))
1724 raise util.Abort(_("received changelog group is empty"))
1723 cnr = cl.count() - 1
1725 cnr = cl.count() - 1
1724 changesets = cnr - cor
1726 changesets = cnr - cor
1725
1727
1726 # pull off the manifest group
1728 # pull off the manifest group
1727 self.ui.status(_("adding manifests\n"))
1729 self.ui.status(_("adding manifests\n"))
1728 chunkiter = changegroup.chunkiter(source)
1730 chunkiter = changegroup.chunkiter(source)
1729 # no need to check for empty manifest group here:
1731 # no need to check for empty manifest group here:
1730 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1732 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1731 # no new manifest will be created and the manifest group will
1733 # no new manifest will be created and the manifest group will
1732 # be empty during the pull
1734 # be empty during the pull
1733 self.manifest.addgroup(chunkiter, revmap, tr)
1735 self.manifest.addgroup(chunkiter, revmap, tr)
1734
1736
1735 # process the files
1737 # process the files
1736 self.ui.status(_("adding file changes\n"))
1738 self.ui.status(_("adding file changes\n"))
1737 while 1:
1739 while 1:
1738 f = changegroup.getchunk(source)
1740 f = changegroup.getchunk(source)
1739 if not f:
1741 if not f:
1740 break
1742 break
1741 self.ui.debug(_("adding %s revisions\n") % f)
1743 self.ui.debug(_("adding %s revisions\n") % f)
1742 fl = self.file(f)
1744 fl = self.file(f)
1743 o = fl.count()
1745 o = fl.count()
1744 chunkiter = changegroup.chunkiter(source)
1746 chunkiter = changegroup.chunkiter(source)
1745 if fl.addgroup(chunkiter, revmap, tr) is None:
1747 if fl.addgroup(chunkiter, revmap, tr) is None:
1746 raise util.Abort(_("received file revlog group is empty"))
1748 raise util.Abort(_("received file revlog group is empty"))
1747 revisions += fl.count() - o
1749 revisions += fl.count() - o
1748 files += 1
1750 files += 1
1749
1751
1750 cl.writedata()
1752 cl.writedata()
1751 finally:
1753 finally:
1752 if cl:
1754 if cl:
1753 cl.cleanup()
1755 cl.cleanup()
1754
1756
1755 # make changelog see real files again
1757 # make changelog see real files again
1756 self.changelog = changelog.changelog(self.sopener,
1758 self.changelog = changelog.changelog(self.sopener,
1757 self.changelog.version)
1759 self.changelog.version)
1758 self.changelog.checkinlinesize(tr)
1760 self.changelog.checkinlinesize(tr)
1759
1761
1760 newheads = len(self.changelog.heads())
1762 newheads = len(self.changelog.heads())
1761 heads = ""
1763 heads = ""
1762 if oldheads and newheads != oldheads:
1764 if oldheads and newheads != oldheads:
1763 heads = _(" (%+d heads)") % (newheads - oldheads)
1765 heads = _(" (%+d heads)") % (newheads - oldheads)
1764
1766
1765 self.ui.status(_("added %d changesets"
1767 self.ui.status(_("added %d changesets"
1766 " with %d changes to %d files%s\n")
1768 " with %d changes to %d files%s\n")
1767 % (changesets, revisions, files, heads))
1769 % (changesets, revisions, files, heads))
1768
1770
1769 if changesets > 0:
1771 if changesets > 0:
1770 self.hook('pretxnchangegroup', throw=True,
1772 self.hook('pretxnchangegroup', throw=True,
1771 node=hex(self.changelog.node(cor+1)), source=srctype,
1773 node=hex(self.changelog.node(cor+1)), source=srctype,
1772 url=url)
1774 url=url)
1773
1775
1774 tr.close()
1776 tr.close()
1775
1777
1776 if changesets > 0:
1778 if changesets > 0:
1777 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1779 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1778 source=srctype, url=url)
1780 source=srctype, url=url)
1779
1781
1780 for i in xrange(cor + 1, cnr + 1):
1782 for i in xrange(cor + 1, cnr + 1):
1781 self.hook("incoming", node=hex(self.changelog.node(i)),
1783 self.hook("incoming", node=hex(self.changelog.node(i)),
1782 source=srctype, url=url)
1784 source=srctype, url=url)
1783
1785
1784 # never return 0 here:
1786 # never return 0 here:
1785 if newheads < oldheads:
1787 if newheads < oldheads:
1786 return newheads - oldheads - 1
1788 return newheads - oldheads - 1
1787 else:
1789 else:
1788 return newheads - oldheads + 1
1790 return newheads - oldheads + 1
1789
1791
1790
1792
1791 def stream_in(self, remote):
1793 def stream_in(self, remote):
1792 fp = remote.stream_out()
1794 fp = remote.stream_out()
1793 l = fp.readline()
1795 l = fp.readline()
1794 try:
1796 try:
1795 resp = int(l)
1797 resp = int(l)
1796 except ValueError:
1798 except ValueError:
1797 raise util.UnexpectedOutput(
1799 raise util.UnexpectedOutput(
1798 _('Unexpected response from remote server:'), l)
1800 _('Unexpected response from remote server:'), l)
1799 if resp == 1:
1801 if resp == 1:
1800 raise util.Abort(_('operation forbidden by server'))
1802 raise util.Abort(_('operation forbidden by server'))
1801 elif resp == 2:
1803 elif resp == 2:
1802 raise util.Abort(_('locking the remote repository failed'))
1804 raise util.Abort(_('locking the remote repository failed'))
1803 elif resp != 0:
1805 elif resp != 0:
1804 raise util.Abort(_('the server sent an unknown error code'))
1806 raise util.Abort(_('the server sent an unknown error code'))
1805 self.ui.status(_('streaming all changes\n'))
1807 self.ui.status(_('streaming all changes\n'))
1806 l = fp.readline()
1808 l = fp.readline()
1807 try:
1809 try:
1808 total_files, total_bytes = map(int, l.split(' ', 1))
1810 total_files, total_bytes = map(int, l.split(' ', 1))
1809 except ValueError, TypeError:
1811 except ValueError, TypeError:
1810 raise util.UnexpectedOutput(
1812 raise util.UnexpectedOutput(
1811 _('Unexpected response from remote server:'), l)
1813 _('Unexpected response from remote server:'), l)
1812 self.ui.status(_('%d files to transfer, %s of data\n') %
1814 self.ui.status(_('%d files to transfer, %s of data\n') %
1813 (total_files, util.bytecount(total_bytes)))
1815 (total_files, util.bytecount(total_bytes)))
1814 start = time.time()
1816 start = time.time()
1815 for i in xrange(total_files):
1817 for i in xrange(total_files):
1816 # XXX doesn't support '\n' or '\r' in filenames
1818 # XXX doesn't support '\n' or '\r' in filenames
1817 l = fp.readline()
1819 l = fp.readline()
1818 try:
1820 try:
1819 name, size = l.split('\0', 1)
1821 name, size = l.split('\0', 1)
1820 size = int(size)
1822 size = int(size)
1821 except ValueError, TypeError:
1823 except ValueError, TypeError:
1822 raise util.UnexpectedOutput(
1824 raise util.UnexpectedOutput(
1823 _('Unexpected response from remote server:'), l)
1825 _('Unexpected response from remote server:'), l)
1824 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1826 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1825 ofp = self.sopener(name, 'w')
1827 ofp = self.sopener(name, 'w')
1826 for chunk in util.filechunkiter(fp, limit=size):
1828 for chunk in util.filechunkiter(fp, limit=size):
1827 ofp.write(chunk)
1829 ofp.write(chunk)
1828 ofp.close()
1830 ofp.close()
1829 elapsed = time.time() - start
1831 elapsed = time.time() - start
1830 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1832 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1831 (util.bytecount(total_bytes), elapsed,
1833 (util.bytecount(total_bytes), elapsed,
1832 util.bytecount(total_bytes / elapsed)))
1834 util.bytecount(total_bytes / elapsed)))
1833 self.reload()
1835 self.reload()
1834 return len(self.heads()) + 1
1836 return len(self.heads()) + 1
1835
1837
1836 def clone(self, remote, heads=[], stream=False):
1838 def clone(self, remote, heads=[], stream=False):
1837 '''clone remote repository.
1839 '''clone remote repository.
1838
1840
1839 keyword arguments:
1841 keyword arguments:
1840 heads: list of revs to clone (forces use of pull)
1842 heads: list of revs to clone (forces use of pull)
1841 stream: use streaming clone if possible'''
1843 stream: use streaming clone if possible'''
1842
1844
1843 # now, all clients that can request uncompressed clones can
1845 # now, all clients that can request uncompressed clones can
1844 # read repo formats supported by all servers that can serve
1846 # read repo formats supported by all servers that can serve
1845 # them.
1847 # them.
1846
1848
1847 # if revlog format changes, client will have to check version
1849 # if revlog format changes, client will have to check version
1848 # and format flags on "stream" capability, and use
1850 # and format flags on "stream" capability, and use
1849 # uncompressed only if compatible.
1851 # uncompressed only if compatible.
1850
1852
1851 if stream and not heads and remote.capable('stream'):
1853 if stream and not heads and remote.capable('stream'):
1852 return self.stream_in(remote)
1854 return self.stream_in(remote)
1853 return self.pull(remote, heads)
1855 return self.pull(remote, heads)
1854
1856
1855 # used to avoid circular references so destructors work
1857 # used to avoid circular references so destructors work
1856 def aftertrans(files):
1858 def aftertrans(files):
1857 renamefiles = [tuple(t) for t in files]
1859 renamefiles = [tuple(t) for t in files]
1858 def a():
1860 def a():
1859 for src, dest in renamefiles:
1861 for src, dest in renamefiles:
1860 util.rename(src, dest)
1862 util.rename(src, dest)
1861 return a
1863 return a
1862
1864
1863 def instance(ui, path, create):
1865 def instance(ui, path, create):
1864 return localrepository(ui, util.drop_scheme('file', path), create)
1866 return localrepository(ui, util.drop_scheme('file', path), create)
1865
1867
1866 def islocal(path):
1868 def islocal(path):
1867 return True
1869 return True
@@ -1,1386 +1,1386 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specfic implementations
2 util.py - Mercurial utility functions and platform specfic implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
17 import os, threading, time, calendar, ConfigParser, locale
17 import os, threading, time, calendar, ConfigParser, locale
18
18
19 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
19 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
20 or "ascii"
20 or "ascii"
21 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
21 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
22 _fallbackencoding = 'ISO-8859-1'
22 _fallbackencoding = 'ISO-8859-1'
23
23
24 def tolocal(s):
24 def tolocal(s):
25 """
25 """
26 Convert a string from internal UTF-8 to local encoding
26 Convert a string from internal UTF-8 to local encoding
27
27
28 All internal strings should be UTF-8 but some repos before the
28 All internal strings should be UTF-8 but some repos before the
29 implementation of locale support may contain latin1 or possibly
29 implementation of locale support may contain latin1 or possibly
30 other character sets. We attempt to decode everything strictly
30 other character sets. We attempt to decode everything strictly
31 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
31 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
32 replace unknown characters.
32 replace unknown characters.
33 """
33 """
34 for e in ('UTF-8', _fallbackencoding):
34 for e in ('UTF-8', _fallbackencoding):
35 try:
35 try:
36 u = s.decode(e) # attempt strict decoding
36 u = s.decode(e) # attempt strict decoding
37 return u.encode(_encoding, "replace")
37 return u.encode(_encoding, "replace")
38 except LookupError, k:
38 except LookupError, k:
39 raise Abort(_("%s, please check your locale settings") % k)
39 raise Abort(_("%s, please check your locale settings") % k)
40 except UnicodeDecodeError:
40 except UnicodeDecodeError:
41 pass
41 pass
42 u = s.decode("utf-8", "replace") # last ditch
42 u = s.decode("utf-8", "replace") # last ditch
43 return u.encode(_encoding, "replace")
43 return u.encode(_encoding, "replace")
44
44
45 def fromlocal(s):
45 def fromlocal(s):
46 """
46 """
47 Convert a string from the local character encoding to UTF-8
47 Convert a string from the local character encoding to UTF-8
48
48
49 We attempt to decode strings using the encoding mode set by
49 We attempt to decode strings using the encoding mode set by
50 HG_ENCODINGMODE, which defaults to 'strict'. In this mode, unknown
50 HG_ENCODINGMODE, which defaults to 'strict'. In this mode, unknown
51 characters will cause an error message. Other modes include
51 characters will cause an error message. Other modes include
52 'replace', which replaces unknown characters with a special
52 'replace', which replaces unknown characters with a special
53 Unicode character, and 'ignore', which drops the character.
53 Unicode character, and 'ignore', which drops the character.
54 """
54 """
55 try:
55 try:
56 return s.decode(_encoding, _encodingmode).encode("utf-8")
56 return s.decode(_encoding, _encodingmode).encode("utf-8")
57 except UnicodeDecodeError, inst:
57 except UnicodeDecodeError, inst:
58 sub = s[max(0, inst.start-10):inst.start+10]
58 sub = s[max(0, inst.start-10):inst.start+10]
59 raise Abort("decoding near '%s': %s!" % (sub, inst))
59 raise Abort("decoding near '%s': %s!" % (sub, inst))
60 except LookupError, k:
60 except LookupError, k:
61 raise Abort(_("%s, please check your locale settings") % k)
61 raise Abort(_("%s, please check your locale settings") % k)
62
62
63 def locallen(s):
63 def locallen(s):
64 """Find the length in characters of a local string"""
64 """Find the length in characters of a local string"""
65 return len(s.decode(_encoding, "replace"))
65 return len(s.decode(_encoding, "replace"))
66
66
67 def localsub(s, a, b=None):
67 def localsub(s, a, b=None):
68 try:
68 try:
69 u = s.decode(_encoding, _encodingmode)
69 u = s.decode(_encoding, _encodingmode)
70 if b is not None:
70 if b is not None:
71 u = u[a:b]
71 u = u[a:b]
72 else:
72 else:
73 u = u[:a]
73 u = u[:a]
74 return u.encode(_encoding, _encodingmode)
74 return u.encode(_encoding, _encodingmode)
75 except UnicodeDecodeError, inst:
75 except UnicodeDecodeError, inst:
76 sub = s[max(0, inst.start-10), inst.start+10]
76 sub = s[max(0, inst.start-10), inst.start+10]
77 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
77 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
78
78
79 # used by parsedate
79 # used by parsedate
80 defaultdateformats = (
80 defaultdateformats = (
81 '%Y-%m-%d %H:%M:%S',
81 '%Y-%m-%d %H:%M:%S',
82 '%Y-%m-%d %I:%M:%S%p',
82 '%Y-%m-%d %I:%M:%S%p',
83 '%Y-%m-%d %H:%M',
83 '%Y-%m-%d %H:%M',
84 '%Y-%m-%d %I:%M%p',
84 '%Y-%m-%d %I:%M%p',
85 '%Y-%m-%d',
85 '%Y-%m-%d',
86 '%m-%d',
86 '%m-%d',
87 '%m/%d',
87 '%m/%d',
88 '%m/%d/%y',
88 '%m/%d/%y',
89 '%m/%d/%Y',
89 '%m/%d/%Y',
90 '%a %b %d %H:%M:%S %Y',
90 '%a %b %d %H:%M:%S %Y',
91 '%a %b %d %I:%M:%S%p %Y',
91 '%a %b %d %I:%M:%S%p %Y',
92 '%b %d %H:%M:%S %Y',
92 '%b %d %H:%M:%S %Y',
93 '%b %d %I:%M:%S%p %Y',
93 '%b %d %I:%M:%S%p %Y',
94 '%b %d %H:%M:%S',
94 '%b %d %H:%M:%S',
95 '%b %d %I:%M:%S%p',
95 '%b %d %I:%M:%S%p',
96 '%b %d %H:%M',
96 '%b %d %H:%M',
97 '%b %d %I:%M%p',
97 '%b %d %I:%M%p',
98 '%b %d %Y',
98 '%b %d %Y',
99 '%b %d',
99 '%b %d',
100 '%H:%M:%S',
100 '%H:%M:%S',
101 '%I:%M:%SP',
101 '%I:%M:%SP',
102 '%H:%M',
102 '%H:%M',
103 '%I:%M%p',
103 '%I:%M%p',
104 )
104 )
105
105
106 extendeddateformats = defaultdateformats + (
106 extendeddateformats = defaultdateformats + (
107 "%Y",
107 "%Y",
108 "%Y-%m",
108 "%Y-%m",
109 "%b",
109 "%b",
110 "%b %Y",
110 "%b %Y",
111 )
111 )
112
112
113 class SignalInterrupt(Exception):
113 class SignalInterrupt(Exception):
114 """Exception raised on SIGTERM and SIGHUP."""
114 """Exception raised on SIGTERM and SIGHUP."""
115
115
116 # like SafeConfigParser but with case-sensitive keys
116 # like SafeConfigParser but with case-sensitive keys
117 class configparser(ConfigParser.SafeConfigParser):
117 class configparser(ConfigParser.SafeConfigParser):
118 def optionxform(self, optionstr):
118 def optionxform(self, optionstr):
119 return optionstr
119 return optionstr
120
120
121 def cachefunc(func):
121 def cachefunc(func):
122 '''cache the result of function calls'''
122 '''cache the result of function calls'''
123 # XXX doesn't handle keywords args
123 # XXX doesn't handle keywords args
124 cache = {}
124 cache = {}
125 if func.func_code.co_argcount == 1:
125 if func.func_code.co_argcount == 1:
126 # we gain a small amount of time because
126 # we gain a small amount of time because
127 # we don't need to pack/unpack the list
127 # we don't need to pack/unpack the list
128 def f(arg):
128 def f(arg):
129 if arg not in cache:
129 if arg not in cache:
130 cache[arg] = func(arg)
130 cache[arg] = func(arg)
131 return cache[arg]
131 return cache[arg]
132 else:
132 else:
133 def f(*args):
133 def f(*args):
134 if args not in cache:
134 if args not in cache:
135 cache[args] = func(*args)
135 cache[args] = func(*args)
136 return cache[args]
136 return cache[args]
137
137
138 return f
138 return f
139
139
140 def pipefilter(s, cmd):
140 def pipefilter(s, cmd):
141 '''filter string S through command CMD, returning its output'''
141 '''filter string S through command CMD, returning its output'''
142 (pout, pin) = popen2.popen2(cmd, -1, 'b')
142 (pout, pin) = popen2.popen2(cmd, -1, 'b')
143 def writer():
143 def writer():
144 try:
144 try:
145 pin.write(s)
145 pin.write(s)
146 pin.close()
146 pin.close()
147 except IOError, inst:
147 except IOError, inst:
148 if inst.errno != errno.EPIPE:
148 if inst.errno != errno.EPIPE:
149 raise
149 raise
150
150
151 # we should use select instead on UNIX, but this will work on most
151 # we should use select instead on UNIX, but this will work on most
152 # systems, including Windows
152 # systems, including Windows
153 w = threading.Thread(target=writer)
153 w = threading.Thread(target=writer)
154 w.start()
154 w.start()
155 f = pout.read()
155 f = pout.read()
156 pout.close()
156 pout.close()
157 w.join()
157 w.join()
158 return f
158 return f
159
159
160 def tempfilter(s, cmd):
160 def tempfilter(s, cmd):
161 '''filter string S through a pair of temporary files with CMD.
161 '''filter string S through a pair of temporary files with CMD.
162 CMD is used as a template to create the real command to be run,
162 CMD is used as a template to create the real command to be run,
163 with the strings INFILE and OUTFILE replaced by the real names of
163 with the strings INFILE and OUTFILE replaced by the real names of
164 the temporary files generated.'''
164 the temporary files generated.'''
165 inname, outname = None, None
165 inname, outname = None, None
166 try:
166 try:
167 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
167 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 fp = os.fdopen(infd, 'wb')
168 fp = os.fdopen(infd, 'wb')
169 fp.write(s)
169 fp.write(s)
170 fp.close()
170 fp.close()
171 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
171 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 os.close(outfd)
172 os.close(outfd)
173 cmd = cmd.replace('INFILE', inname)
173 cmd = cmd.replace('INFILE', inname)
174 cmd = cmd.replace('OUTFILE', outname)
174 cmd = cmd.replace('OUTFILE', outname)
175 code = os.system(cmd)
175 code = os.system(cmd)
176 if code: raise Abort(_("command '%s' failed: %s") %
176 if code: raise Abort(_("command '%s' failed: %s") %
177 (cmd, explain_exit(code)))
177 (cmd, explain_exit(code)))
178 return open(outname, 'rb').read()
178 return open(outname, 'rb').read()
179 finally:
179 finally:
180 try:
180 try:
181 if inname: os.unlink(inname)
181 if inname: os.unlink(inname)
182 except: pass
182 except: pass
183 try:
183 try:
184 if outname: os.unlink(outname)
184 if outname: os.unlink(outname)
185 except: pass
185 except: pass
186
186
# map of filter-scheme prefixes to their implementations; consulted by
# filter() below.  the text after the prefix is the command to run.
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
191
191
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # dispatch on an explicit scheme prefix; fall back to a plain pipe
    for prefix, handler in filtertable.iteritems():
        if not cmd.startswith(prefix):
            continue
        return handler(s, cmd[len(prefix):].lstrip())
    return pipefilter(s, cmd)
198
198
def find_in_path(name, path, default=None):
    '''find name in search path. path can be string (will be split
    with os.pathsep), or iterable thing that returns strings. if name
    found, return path to name. else return default.'''
    if isinstance(path, str):
        path = path.split(os.pathsep)
    for directory in path:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    # not found anywhere on the search path
    return default
210
210
def binary(s):
    """return true if a string is binary data using diff's heuristic"""
    # a NUL byte within the first 4k marks the data as binary
    return bool(s) and '\0' in s[:4096]
216
216
def unique(g):
    """return the uniq elements of iterable g"""
    seen = {}
    result = []
    for item in g:
        # keep only the first occurrence, preserving order
        if item in seen:
            continue
        seen[item] = 1
        result.append(item)
    return result
226
226
# common failure exception raised throughout this module
# (see tempfilter, canonpath, audit_path, getuser)
class Abort(Exception):
    """Raised if a command needs to print an error and exit."""
229
229
# a specialization of Abort that carries command output with the error
class UnexpectedOutput(Abort):
    """Raised to print an error with part of output and exit."""
232
232
def always(fn):
    """constant-true predicate: match any file name"""
    return True

def never(fn):
    """constant-false predicate: match no file name"""
    return False
235
235
def patkind(name, dflt_pat='glob'):
    """Split a string into an optional pattern kind prefix and the
    actual pattern."""
    for kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'):
        if name.startswith(kind + ':'):
            # NOTE: split() yields a [kind, pattern] list here, while the
            # default branch yields a tuple -- preserved for compatibility
            return name.split(':', 1)
    return dflt_pat, name
242
242
def globre(pat, head='^', tail='$'):
    "convert a glob pattern into a regexp"
    i, n = 0, len(pat)
    res = ''
    group = False
    # look at the next character without consuming it ('' / False at end)
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                # '**' matches across path separators
                i += 1
                res += '.*'
            else:
                # '*' matches within a single path component only
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # character class: scan for the closing ']', allowing a
            # literal ']' or '!' as the first member
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat the '[' literally
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # glob negation '!' becomes regexp negation '^'
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    # escape a leading '^' so it is matched literally
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            # '{a,b}' alternation opens a non-capturing group
            group = True
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group = False
        elif c == ',' and group:
            # ',' separates alternatives only inside a '{...}' group
            res += '|'
        elif c == '\\':
            # backslash escapes the following character, if any
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            # any other character is matched literally
            res += re.escape(c)
    return head + res + tail
294
294
# characters whose presence marks a pattern as a glob
# (used by contains_glob inside _matcher)
_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
296
296
def pathto(n1, n2):
    '''return the relative path from one place to another.
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.
    '''
    if not n1:
        return localpath(n2)
    parts1 = n1.split(os.sep)
    parts2 = n2.split('/')
    parts1.reverse()
    parts2.reverse()
    # strip the common leading components (now at the tails of the
    # reversed lists)
    while parts1 and parts2 and parts1[-1] == parts2[-1]:
        parts1.pop()
        parts2.pop()
    parts2.reverse()
    # climb out of what remains of n1, then descend into n2
    return os.sep.join((['..'] * len(parts1)) + parts2)
312
312
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root"""
    # normalize root to have exactly one trailing separator so the
    # prefix test below is exact
    if root == os.sep:
        rootsep = os.sep
    elif root.endswith(os.sep):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        # relative names are interpreted relative to cwd inside root
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if name != rootsep and name.startswith(rootsep):
        # fast path: name is lexically inside root
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                # name does not exist; cannot be under root
                break
            if samestat(name_st, root_st):
                # reached root: rel holds the relative components in reverse
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                # reached the filesystem root without meeting repo root
                break
            name = dirname

        raise Abort('%s not under root' % myname)
357
357
def matcher(canonroot, cwd='', names=None, inc=None, exc=None, head='', src=None):
    '''build a matcher with 'glob' as the default pattern kind.
    see _matcher for the meaning of the arguments and the return value.

    None defaults replace the former mutable default arguments
    (names=['.'], inc=[], exc=[]) so no list object is shared
    across calls; explicit arguments behave exactly as before.'''
    if names is None:
        names = ['.']
    if inc is None:
        inc = []
    if exc is None:
        exc = []
    return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
360
360
def cmdmatcher(canonroot, cwd='', names=None, inc=None, exc=None, head='', src=None):
    '''build a matcher with the platform's command-line default pattern
    kind: 'glob' on windows (no shell expansion), 'relpath' elsewhere.
    see _matcher for the meaning of the arguments and the return value.

    None defaults replace the former mutable default arguments
    (names=['.'], inc=[], exc=[]) so no list object is shared
    across calls; explicit arguments behave exactly as before.'''
    if names is None:
        names = ['.']
    if inc is None:
        inc = []
    if exc is None:
        exc = []
    if os.name == 'nt':
        dflt_pat = 'glob'
    else:
        dflt_pat = 'relpath'
    return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
367
367
def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    head - a regex to prepend to patterns to control whether a match is rooted

    a pattern is one of:
    'glob:<rooted glob>'
    're:<rooted regexp>'
    'path:<rooted path>'
    'relglob:<relative glob>'
    'relpath:<relative path>'
    'relre:<relative regexp>'
    '<rooted path or regexp>'

    returns:
    a 3-tuple containing
    - list of explicit non-pattern names passed in
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in

    todo:
    make head regex a rooted bool
    """

    def contains_glob(name):
        # true if any glob metacharacter appears in name
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if kind == 're':
            return name
        elif kind == 'path':
            # a rooted path matches itself and anything below it
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            # a relative glob may match at any directory level
            return head + globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return head + re.escape(name) + tail
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return head + globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns

        returns None when pats is empty so callers can substitute a
        default with 'or'."""
        if not pats:
            return
        matches = []
        for k, p in pats:
            try:
                pat = '(?:%s)' % regex(k, p, tail)
                matches.append(re.compile(pat).match)
            except re.error:
                if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
                else: raise Abort("invalid pattern (%s): %s" % (k, p))

        def buildfn(text):
            # return the first successful match object, else None
            for m in matches:
                r = m(text)
                if r:
                    return r

        return buildfn

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split(os.sep):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root)

    # sort the incoming names into literal files and real patterns, and
    # collect the directory roots that bound where a walk must look
    pats = []
    files = []
    roots = []
    for kind, name in [patkind(p, dflt_pat) for p in names]:
        if kind in ('glob', 'relpath'):
            # resolve against cwd/root; '' means the whole repository
            name = canonpath(canonroot, cwd, name)
            if name == '':
                kind, name = 'glob', '**'
        if kind in ('glob', 'path', 're'):
            pats.append((kind, name))
        if kind == 'glob':
            root = globprefix(name)
            if root: roots.append(root)
        elif kind == 'relpath':
            files.append((kind, name))
            roots.append(name)

    # '$' anchors patterns at end-of-name; '(?:/|$)' also accepts any
    # name beneath a matching directory
    patmatch = matchfn(pats, '$') or always
    filematch = matchfn(files, '(?:/|$)') or always
    incmatch = always
    if inc:
        inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
        excmatch = matchfn(exckinds, '(?:/|$)')

    return (roots,
            lambda fn: (incmatch(fn) and not excmatch(fn) and
                        (fn.endswith('/') or
                         (not pats and not files) or
                         (pats and patmatch(fn)) or
                         (files and filematch(fn)))),
            (inc or exc or (pats and pats != [('glob', '**')])) and True)
483
483
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    # remember the environment values we are about to override so they
    # can be restored afterwards (None means "was not set")
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        # quote the whole command line for the windows shell
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                # onerr may be a ui-like object with a warn method...
                onerr.warn(errmsg + '\n')
            except AttributeError:
                # ...or an exception class to raise with the message
                raise onerr(errmsg)
        return rc
    finally:
        # always restore the environment and working directory
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
530
530
def rename(src, dst):
    """forcibly rename a file, replacing dst if it already exists"""
    try:
        os.rename(src, dst)
    except OSError:
        # the exception object was previously bound ('except OSError, err')
        # but never used; the binding is dropped, which is also valid
        # syntax on python 3.
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we create
        # temporary file, delete it, rename destination to that name,
        # then delete that. then rename is safe to do.
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
        os.close(fd)
        os.unlink(temp)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)
548
548
def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # prune any directories left empty by the removal; ignore failure
    # (directory not empty, or already gone)
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass
557
557
def copyfile(src, dest):
    """copy a file, preserving mode

    Raises Abort if shutil reports a copy error."""
    try:
        shutil.copyfile(src, dest)
        shutil.copymode(src, dest)
    except shutil.Error:
        # BUG FIX: this module *is* util, so the former 'util.Abort'
        # reference was a NameError at raise time; raise Abort directly.
        # sys.exc_info() keeps the handler valid on both py2 and py3.
        raise Abort(str(sys.exc_info()[1]))
565
565
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # default: hardlink only when source and destination live on
        # the same device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if not os.path.isdir(src):
        # single file
        if not hardlink:
            shutil.copy(src, dst)
        else:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                # linking failed; fall back to a plain copy
                hardlink = False
                shutil.copy(src, dst)
    else:
        # directory: recreate it and recurse into its entries
        os.mkdir(dst)
        for entry in os.listdir(src):
            copyfiles(os.path.join(src, entry),
                      os.path.join(dst, entry), hardlink)
588
588
def audit_path(path):
    """Abort if path contains dangerous components"""
    parts = os.path.normcase(path).split(os.sep)
    # reject absolute/drive-qualified paths, paths into the .hg
    # directory, empty first components, and parent-dir escapes
    first = parts[0]
    if (os.path.splitdrive(path)[0]
        or first in ('.hg', '')
        or os.pardir in parts):
        raise Abort(_("path contains illegal component: %s\n") % path)
595
595
596 def _makelock_file(info, pathname):
596 def _makelock_file(info, pathname):
597 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
597 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
598 os.write(ld, info)
598 os.write(ld, info)
599 os.close(ld)
599 os.close(ld)
600
600
def _readlock_file(pathname):
    # posixfile handles platform quirks of opening the lock file
    fp = posixfile(pathname)
    return fp.read()
603
603
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    # lstat: count links of the entry itself, not of a symlink target
    st = os.lstat(pathname)
    return st.st_nlink
607
607
# os.link is missing on some platforms; provide a stub that fails
# with a clear message instead of an AttributeError
if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))
613
613
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    fileno = getattr(fp, 'fileno', None)
    if fileno is None:
        # no fileno available: fall back to stat'ing by name
        return os.stat(fp.name)
    return os.fstat(fileno())
620
620
# default file implementation: the builtin (python 2) file object;
# used by _readlock_file above
posixfile = file
622
622
def is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        # platform id 1 corresponds to the windows 9x product line
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        # non-windows python has no getwindowsversion; guess from the
        # shell named in COMSPEC
        return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
629
629
# hook: platform code may install a fallback used by getuser() when
# getpass fails with ImportError (e.g. no pwd module on windows)
getuser_fallback = None
631
631
def getuser():
    '''return name of current user'''
    try:
        return getpass.getuser()
    except ImportError:
        # import of pwd will fail on windows - try fallback
        pass
    if getuser_fallback:
        return getuser_fallback()
    # raised if win32api not available
    raise Abort(_('user name not available - set USERNAME '
                  'environment variable'))
643
643
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    try:
        import pwd
    except ImportError:
        # no pwd module (e.g. windows): name unavailable
        return None
    if uid is None:
        uid = os.getuid()
    try:
        return pwd.getpwuid(uid)[0]
    except KeyError:
        # unknown uid: fall back to its numeric form
        return str(uid)
658
658
def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    try:
        import grp
    except ImportError:
        # no grp module (e.g. windows): name unavailable
        return None
    if gid is None:
        gid = os.getgid()
    try:
        return grp.getgrgid(gid)[0]
    except KeyError:
        # unknown gid: fall back to its numeric form
        return str(gid)
673
673
674 # File system features
674 # File system features
675
675
def checkfolding(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.  Returns True when the filesystem is
    case-sensitive.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    # build a case-flipped sibling name to probe with
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            # both spellings resolve to the same file: case-folding fs
            return False
        return True
    except OSError:
        # narrowed from a bare 'except:' so unrelated errors
        # (e.g. KeyboardInterrupt) are no longer swallowed;
        # a failed stat means the folded name does not exist
        return True
695
695
def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """
    # create a scratch file, flip its exec bits, and check whether the
    # change sticks; filesystems without mode support ignore the chmod
    fh, fn = tempfile.mkstemp("", "", path)
    os.close(fh)
    m = os.stat(fn).st_mode
    os.chmod(fn, m ^ 0111)
    r = (os.stat(fn).st_mode != m)
    os.unlink(fn)
    return r
709
709
def execfunc(path, fallback):
    '''return an is_exec() function with default to fallback'''
    if not checkexec(path):
        # exec bits unsupported on this filesystem: use the caller's
        # fallback predicate
        return fallback
    return lambda x: is_exec(os.path.join(path, x))
715
715
def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp here is not racy: os.symlink refuses to clobber an
    # existing file, so a name collision simply raises OSError
    probe = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", probe)
        os.unlink(probe)
        return True
    except OSError:
        return False
727
727
def linkfunc(path, fallback):
    '''return an is_link() function with default to fallback'''
    if not checklink(path):
        # symlinks unsupported on this filesystem: use the caller's
        # fallback predicate
        return fallback
    return lambda x: is_link(os.path.join(path, x))
733
733
# Platform specific variants
# (the definitions below override the generic ones when running on
# windows; the posix branch follows later in the file)
if os.name == 'nt':
    import msvcrt
    # name of the null device on windows
    nulldev = 'NUL:'
738
738
    class winstdout:
        '''stdout on windows misbehaves if sent through a pipe'''

        def __init__(self, fp):
            # wrap the real output stream
            self.fp = fp

        def __getattr__(self, key):
            # delegate every other attribute to the wrapped stream
            return getattr(self.fp, key)

        def close(self):
            try:
                self.fp.close()
            except: pass

        def write(self, s):
            try:
                return self.fp.write(s)
            except IOError, inst:
                # errno 0 here is treated as a broken pipe; translate it
                # into the conventional EPIPE error
                if inst.errno != 0: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')
760
760
    # install the wrapper so normal output survives broken pipes
    sys.stdout = winstdout(sys.stdout)
762
762
    def system_rcpath():
        try:
            return system_rcpath_win32()
        except:
            # win32 lookup unavailable; use the historic default location
            return [r'c:\mercurial\mercurial.ini']
768
768
    def os_rcpath():
        '''return default os-specific hgrc search path'''
        path = system_rcpath()
        path.append(user_rcpath())
        # USERPROFILE may point somewhere other than ~ on windows
        userprofile = os.environ.get('USERPROFILE')
        if userprofile:
            path.append(os.path.join(userprofile, 'mercurial.ini'))
        return path
777
777
778 def user_rcpath():
778 def user_rcpath():
779 '''return os-specific hgrc search path to the user dir'''
779 '''return os-specific hgrc search path to the user dir'''
780 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
780 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
781
781
782 def parse_patch_output(output_line):
782 def parse_patch_output(output_line):
783 """parses the output produced by patch and returns the file name"""
783 """parses the output produced by patch and returns the file name"""
784 pf = output_line[14:]
784 pf = output_line[14:]
785 if pf[0] == '`':
785 if pf[0] == '`':
786 pf = pf[1:-1] # Remove the quotes
786 pf = pf[1:-1] # Remove the quotes
787 return pf
787 return pf
788
788
789 def testpid(pid):
789 def testpid(pid):
790 '''return False if pid dead, True if running or not known'''
790 '''return False if pid dead, True if running or not known'''
791 return True
791 return True
792
792
793 def set_exec(f, mode):
793 def set_exec(f, mode):
794 pass
794 pass
795
795
796 def set_link(f, mode):
796 def set_link(f, mode):
797 pass
797 pass
798
798
799 def set_binary(fd):
799 def set_binary(fd):
800 msvcrt.setmode(fd.fileno(), os.O_BINARY)
800 msvcrt.setmode(fd.fileno(), os.O_BINARY)
801
801
802 def pconvert(path):
802 def pconvert(path):
803 return path.replace("\\", "/")
803 return path.replace("\\", "/")
804
804
805 def localpath(path):
805 def localpath(path):
806 return path.replace('/', '\\')
806 return path.replace('/', '\\')
807
807
808 def normpath(path):
808 def normpath(path):
809 return pconvert(os.path.normpath(path))
809 return pconvert(os.path.normpath(path))
810
810
811 makelock = _makelock_file
811 makelock = _makelock_file
812 readlock = _readlock_file
812 readlock = _readlock_file
813
813
814 def samestat(s1, s2):
814 def samestat(s1, s2):
815 return False
815 return False
816
816
817 def shellquote(s):
817 def shellquote(s):
818 return '"%s"' % s.replace('"', '\\"')
818 return '"%s"' % s.replace('"', '\\"')
819
819
820 def explain_exit(code):
820 def explain_exit(code):
821 return _("exited with status %d") % code, code
821 return _("exited with status %d") % code, code
822
822
823 # if you change this stub into a real check, please try to implement the
823 # if you change this stub into a real check, please try to implement the
824 # username and groupname functions above, too.
824 # username and groupname functions above, too.
825 def isowner(fp, st=None):
825 def isowner(fp, st=None):
826 return True
826 return True
827
827
828 try:
828 try:
829 # override functions with win32 versions if possible
829 # override functions with win32 versions if possible
830 from util_win32 import *
830 from util_win32 import *
831 if not is_win_9x():
831 if not is_win_9x():
832 posixfile = posixfile_nt
832 posixfile = posixfile_nt
833 except ImportError:
833 except ImportError:
834 pass
834 pass
835
835
836 else:
836 else:
837 nulldev = '/dev/null'
837 nulldev = '/dev/null'
838 _umask = os.umask(0)
838 _umask = os.umask(0)
839 os.umask(_umask)
839 os.umask(_umask)
840
840
841 def rcfiles(path):
841 def rcfiles(path):
842 rcs = [os.path.join(path, 'hgrc')]
842 rcs = [os.path.join(path, 'hgrc')]
843 rcdir = os.path.join(path, 'hgrc.d')
843 rcdir = os.path.join(path, 'hgrc.d')
844 try:
844 try:
845 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
845 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
846 if f.endswith(".rc")])
846 if f.endswith(".rc")])
847 except OSError:
847 except OSError:
848 pass
848 pass
849 return rcs
849 return rcs
850
850
851 def os_rcpath():
851 def os_rcpath():
852 '''return default os-specific hgrc search path'''
852 '''return default os-specific hgrc search path'''
853 path = []
853 path = []
854 # old mod_python does not set sys.argv
854 # old mod_python does not set sys.argv
855 if len(getattr(sys, 'argv', [])) > 0:
855 if len(getattr(sys, 'argv', [])) > 0:
856 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
856 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
857 '/../etc/mercurial'))
857 '/../etc/mercurial'))
858 path.extend(rcfiles('/etc/mercurial'))
858 path.extend(rcfiles('/etc/mercurial'))
859 path.append(os.path.expanduser('~/.hgrc'))
859 path.append(os.path.expanduser('~/.hgrc'))
860 path = [os.path.normpath(f) for f in path]
860 path = [os.path.normpath(f) for f in path]
861 return path
861 return path
862
862
863 def parse_patch_output(output_line):
863 def parse_patch_output(output_line):
864 """parses the output produced by patch and returns the file name"""
864 """parses the output produced by patch and returns the file name"""
865 pf = output_line[14:]
865 pf = output_line[14:]
866 if pf.startswith("'") and pf.endswith("'") and " " in pf:
866 if pf.startswith("'") and pf.endswith("'") and " " in pf:
867 pf = pf[1:-1] # Remove the quotes
867 pf = pf[1:-1] # Remove the quotes
868 return pf
868 return pf
869
869
870 def is_exec(f):
870 def is_exec(f):
871 """check whether a file is executable"""
871 """check whether a file is executable"""
872 return (os.lstat(f).st_mode & 0100 != 0)
872 return (os.lstat(f).st_mode & 0100 != 0)
873
873
874 def set_exec(f, mode):
874 def set_exec(f, mode):
875 s = os.lstat(f).st_mode
875 s = os.lstat(f).st_mode
876 if (s & 0100 != 0) == mode:
876 if (s & 0100 != 0) == mode:
877 return
877 return
878 if mode:
878 if mode:
879 # Turn on +x for every +r bit when making a file executable
879 # Turn on +x for every +r bit when making a file executable
880 # and obey umask.
880 # and obey umask.
881 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
881 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
882 else:
882 else:
883 os.chmod(f, s & 0666)
883 os.chmod(f, s & 0666)
884
884
885 def is_link(f):
885 def is_link(f):
886 """check whether a file is a symlink"""
886 """check whether a file is a symlink"""
887 return (os.lstat(f).st_mode & 0120000 == 0120000)
887 return (os.lstat(f).st_mode & 0120000 == 0120000)
888
888
889 def set_link(f, mode):
889 def set_link(f, mode):
890 """make a file a symbolic link/regular file
890 """make a file a symbolic link/regular file
891
891
892 if a file is changed to a link, its contents become the link data
892 if a file is changed to a link, its contents become the link data
893 if a link is changed to a file, its link data become its contents
893 if a link is changed to a file, its link data become its contents
894 """
894 """
895
895
896 m = is_link(f)
896 m = is_link(f)
897 if m == bool(mode):
897 if m == bool(mode):
898 return
898 return
899
899
900 if mode: # switch file to link
900 if mode: # switch file to link
901 data = file(f).read()
901 data = file(f).read()
902 os.unlink(f)
902 os.unlink(f)
903 os.symlink(data, f)
903 os.symlink(data, f)
904 else:
904 else:
905 data = os.readlink(f)
905 data = os.readlink(f)
906 os.unlink(f)
906 os.unlink(f)
907 file(f, "w").write(data)
907 file(f, "w").write(data)
908
908
909 def set_binary(fd):
909 def set_binary(fd):
910 pass
910 pass
911
911
912 def pconvert(path):
912 def pconvert(path):
913 return path
913 return path
914
914
915 def localpath(path):
915 def localpath(path):
916 return path
916 return path
917
917
918 normpath = os.path.normpath
918 normpath = os.path.normpath
919 samestat = os.path.samestat
919 samestat = os.path.samestat
920
920
921 def makelock(info, pathname):
921 def makelock(info, pathname):
922 try:
922 try:
923 os.symlink(info, pathname)
923 os.symlink(info, pathname)
924 except OSError, why:
924 except OSError, why:
925 if why.errno == errno.EEXIST:
925 if why.errno == errno.EEXIST:
926 raise
926 raise
927 else:
927 else:
928 _makelock_file(info, pathname)
928 _makelock_file(info, pathname)
929
929
930 def readlock(pathname):
930 def readlock(pathname):
931 try:
931 try:
932 return os.readlink(pathname)
932 return os.readlink(pathname)
933 except OSError, why:
933 except OSError, why:
934 if why.errno == errno.EINVAL:
934 if why.errno == errno.EINVAL:
935 return _readlock_file(pathname)
935 return _readlock_file(pathname)
936 else:
936 else:
937 raise
937 raise
938
938
939 def shellquote(s):
939 def shellquote(s):
940 return "'%s'" % s.replace("'", "'\\''")
940 return "'%s'" % s.replace("'", "'\\''")
941
941
942 def testpid(pid):
942 def testpid(pid):
943 '''return False if pid dead, True if running or not sure'''
943 '''return False if pid dead, True if running or not sure'''
944 try:
944 try:
945 os.kill(pid, 0)
945 os.kill(pid, 0)
946 return True
946 return True
947 except OSError, inst:
947 except OSError, inst:
948 return inst.errno != errno.ESRCH
948 return inst.errno != errno.ESRCH
949
949
950 def explain_exit(code):
950 def explain_exit(code):
951 """return a 2-tuple (desc, code) describing a process's status"""
951 """return a 2-tuple (desc, code) describing a process's status"""
952 if os.WIFEXITED(code):
952 if os.WIFEXITED(code):
953 val = os.WEXITSTATUS(code)
953 val = os.WEXITSTATUS(code)
954 return _("exited with status %d") % val, val
954 return _("exited with status %d") % val, val
955 elif os.WIFSIGNALED(code):
955 elif os.WIFSIGNALED(code):
956 val = os.WTERMSIG(code)
956 val = os.WTERMSIG(code)
957 return _("killed by signal %d") % val, val
957 return _("killed by signal %d") % val, val
958 elif os.WIFSTOPPED(code):
958 elif os.WIFSTOPPED(code):
959 val = os.WSTOPSIG(code)
959 val = os.WSTOPSIG(code)
960 return _("stopped by signal %d") % val, val
960 return _("stopped by signal %d") % val, val
961 raise ValueError(_("invalid exit code"))
961 raise ValueError(_("invalid exit code"))
962
962
963 def isowner(fp, st=None):
963 def isowner(fp, st=None):
964 """Return True if the file object f belongs to the current user.
964 """Return True if the file object f belongs to the current user.
965
965
966 The return value of a util.fstat(f) may be passed as the st argument.
966 The return value of a util.fstat(f) may be passed as the st argument.
967 """
967 """
968 if st is None:
968 if st is None:
969 st = fstat(fp)
969 st = fstat(fp)
970 return st.st_uid == os.getuid()
970 return st.st_uid == os.getuid()
971
971
972 def _buildencodefun():
972 def _buildencodefun():
973 e = '_'
973 e = '_'
974 win_reserved = [ord(x) for x in '\\:*?"<>|']
974 win_reserved = [ord(x) for x in '\\:*?"<>|']
975 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
975 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
976 for x in (range(32) + range(126, 256) + win_reserved):
976 for x in (range(32) + range(126, 256) + win_reserved):
977 cmap[chr(x)] = "~%02x" % x
977 cmap[chr(x)] = "~%02x" % x
978 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
978 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
979 cmap[chr(x)] = e + chr(x).lower()
979 cmap[chr(x)] = e + chr(x).lower()
980 dmap = {}
980 dmap = {}
981 for k, v in cmap.iteritems():
981 for k, v in cmap.iteritems():
982 dmap[v] = k
982 dmap[v] = k
983 def decode(s):
983 def decode(s):
984 i = 0
984 i = 0
985 while i < len(s):
985 while i < len(s):
986 for l in xrange(1, 4):
986 for l in xrange(1, 4):
987 try:
987 try:
988 yield dmap[s[i:i+l]]
988 yield dmap[s[i:i+l]]
989 i += l
989 i += l
990 break
990 break
991 except KeyError:
991 except KeyError:
992 pass
992 pass
993 else:
993 else:
994 raise KeyError
994 raise KeyError
995 return (lambda s: "".join([cmap[c] for c in s]),
995 return (lambda s: "".join([cmap[c] for c in s]),
996 lambda s: "".join(list(decode(s))))
996 lambda s: "".join(list(decode(s))))
997
997
998 encodefilename, decodefilename = _buildencodefun()
998 encodefilename, decodefilename = _buildencodefun()
999
999
1000 def encodedopener(openerfn, fn):
1000 def encodedopener(openerfn, fn):
1001 def o(path, *args, **kw):
1001 def o(path, *args, **kw):
1002 return openerfn(fn(path), *args, **kw)
1002 return openerfn(fn(path), *args, **kw)
1003 return o
1003 return o
1004
1004
1005 def opener(base, audit=True):
1005 def opener(base, audit=True):
1006 """
1006 """
1007 return a function that opens files relative to base
1007 return a function that opens files relative to base
1008
1008
1009 this function is used to hide the details of COW semantics and
1009 this function is used to hide the details of COW semantics and
1010 remote file access from higher level code.
1010 remote file access from higher level code.
1011 """
1011 """
1012 p = base
1012 p = base
1013 audit_p = audit
1013 audit_p = audit
1014
1014
1015 def mktempcopy(name):
1015 def mktempcopy(name):
1016 d, fn = os.path.split(name)
1016 d, fn = os.path.split(name)
1017 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1017 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1018 os.close(fd)
1018 os.close(fd)
1019 ofp = posixfile(temp, "wb")
1019 ofp = posixfile(temp, "wb")
1020 try:
1020 try:
1021 try:
1021 try:
1022 ifp = posixfile(name, "rb")
1022 ifp = posixfile(name, "rb")
1023 except IOError, inst:
1023 except IOError, inst:
1024 if not getattr(inst, 'filename', None):
1024 if not getattr(inst, 'filename', None):
1025 inst.filename = name
1025 inst.filename = name
1026 raise
1026 raise
1027 for chunk in filechunkiter(ifp):
1027 for chunk in filechunkiter(ifp):
1028 ofp.write(chunk)
1028 ofp.write(chunk)
1029 ifp.close()
1029 ifp.close()
1030 ofp.close()
1030 ofp.close()
1031 except:
1031 except:
1032 try: os.unlink(temp)
1032 try: os.unlink(temp)
1033 except: pass
1033 except: pass
1034 raise
1034 raise
1035 st = os.lstat(name)
1035 st = os.lstat(name)
1036 os.chmod(temp, st.st_mode)
1036 os.chmod(temp, st.st_mode)
1037 return temp
1037 return temp
1038
1038
1039 class atomictempfile(posixfile):
1039 class atomictempfile(posixfile):
1040 """the file will only be copied when rename is called"""
1040 """the file will only be copied when rename is called"""
1041 def __init__(self, name, mode):
1041 def __init__(self, name, mode):
1042 self.__name = name
1042 self.__name = name
1043 self.temp = mktempcopy(name)
1043 self.temp = mktempcopy(name)
1044 posixfile.__init__(self, self.temp, mode)
1044 posixfile.__init__(self, self.temp, mode)
1045 def rename(self):
1045 def rename(self):
1046 if not self.closed:
1046 if not self.closed:
1047 posixfile.close(self)
1047 posixfile.close(self)
1048 rename(self.temp, localpath(self.__name))
1048 rename(self.temp, localpath(self.__name))
1049 def __del__(self):
1049 def __del__(self):
1050 if not self.closed:
1050 if not self.closed:
1051 try:
1051 try:
1052 os.unlink(self.temp)
1052 os.unlink(self.temp)
1053 except: pass
1053 except: pass
1054 posixfile.close(self)
1054 posixfile.close(self)
1055
1055
1056 class atomicfile(atomictempfile):
1056 class atomicfile(atomictempfile):
1057 """the file will only be copied on close"""
1057 """the file will only be copied on close"""
1058 def __init__(self, name, mode):
1058 def __init__(self, name, mode):
1059 atomictempfile.__init__(self, name, mode)
1059 atomictempfile.__init__(self, name, mode)
1060 def close(self):
1060 def close(self):
1061 self.rename()
1061 self.rename()
1062 def __del__(self):
1062 def __del__(self):
1063 self.rename()
1063 self.rename()
1064
1064
1065 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1065 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1066 if audit_p:
1066 if audit_p:
1067 audit_path(path)
1067 audit_path(path)
1068 f = os.path.join(p, path)
1068 f = os.path.join(p, path)
1069
1069
1070 if not text:
1070 if not text:
1071 mode += "b" # for that other OS
1071 mode += "b" # for that other OS
1072
1072
1073 if mode[0] != "r":
1073 if mode[0] != "r":
1074 try:
1074 try:
1075 nlink = nlinks(f)
1075 nlink = nlinks(f)
1076 except OSError:
1076 except OSError:
1077 d = os.path.dirname(f)
1077 d = os.path.dirname(f)
1078 if not os.path.isdir(d):
1078 if not os.path.isdir(d):
1079 os.makedirs(d)
1079 os.makedirs(d)
1080 else:
1080 else:
1081 if atomic:
1081 if atomic:
1082 return atomicfile(f, mode)
1082 return atomicfile(f, mode)
1083 elif atomictemp:
1083 elif atomictemp:
1084 return atomictempfile(f, mode)
1084 return atomictempfile(f, mode)
1085 if nlink > 1:
1085 if nlink > 1:
1086 rename(mktempcopy(f), f)
1086 rename(mktempcopy(f), f)
1087 return posixfile(f, mode)
1087 return posixfile(f, mode)
1088
1088
1089 return o
1089 return o
1090
1090
1091 class chunkbuffer(object):
1091 class chunkbuffer(object):
1092 """Allow arbitrary sized chunks of data to be efficiently read from an
1092 """Allow arbitrary sized chunks of data to be efficiently read from an
1093 iterator over chunks of arbitrary size."""
1093 iterator over chunks of arbitrary size."""
1094
1094
1095 def __init__(self, in_iter, targetsize = 2**16):
1095 def __init__(self, in_iter, targetsize = 2**16):
1096 """in_iter is the iterator that's iterating over the input chunks.
1096 """in_iter is the iterator that's iterating over the input chunks.
1097 targetsize is how big a buffer to try to maintain."""
1097 targetsize is how big a buffer to try to maintain."""
1098 self.in_iter = iter(in_iter)
1098 self.in_iter = iter(in_iter)
1099 self.buf = ''
1099 self.buf = ''
1100 self.targetsize = int(targetsize)
1100 self.targetsize = int(targetsize)
1101 if self.targetsize <= 0:
1101 if self.targetsize <= 0:
1102 raise ValueError(_("targetsize must be greater than 0, was %d") %
1102 raise ValueError(_("targetsize must be greater than 0, was %d") %
1103 targetsize)
1103 targetsize)
1104 self.iterempty = False
1104 self.iterempty = False
1105
1105
1106 def fillbuf(self):
1106 def fillbuf(self):
1107 """Ignore target size; read every chunk from iterator until empty."""
1107 """Ignore target size; read every chunk from iterator until empty."""
1108 if not self.iterempty:
1108 if not self.iterempty:
1109 collector = cStringIO.StringIO()
1109 collector = cStringIO.StringIO()
1110 collector.write(self.buf)
1110 collector.write(self.buf)
1111 for ch in self.in_iter:
1111 for ch in self.in_iter:
1112 collector.write(ch)
1112 collector.write(ch)
1113 self.buf = collector.getvalue()
1113 self.buf = collector.getvalue()
1114 self.iterempty = True
1114 self.iterempty = True
1115
1115
1116 def read(self, l):
1116 def read(self, l):
1117 """Read L bytes of data from the iterator of chunks of data.
1117 """Read L bytes of data from the iterator of chunks of data.
1118 Returns less than L bytes if the iterator runs dry."""
1118 Returns less than L bytes if the iterator runs dry."""
1119 if l > len(self.buf) and not self.iterempty:
1119 if l > len(self.buf) and not self.iterempty:
1120 # Clamp to a multiple of self.targetsize
1120 # Clamp to a multiple of self.targetsize
1121 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1121 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1122 collector = cStringIO.StringIO()
1122 collector = cStringIO.StringIO()
1123 collector.write(self.buf)
1123 collector.write(self.buf)
1124 collected = len(self.buf)
1124 collected = len(self.buf)
1125 for chunk in self.in_iter:
1125 for chunk in self.in_iter:
1126 collector.write(chunk)
1126 collector.write(chunk)
1127 collected += len(chunk)
1127 collected += len(chunk)
1128 if collected >= targetsize:
1128 if collected >= targetsize:
1129 break
1129 break
1130 if collected < targetsize:
1130 if collected < targetsize:
1131 self.iterempty = True
1131 self.iterempty = True
1132 self.buf = collector.getvalue()
1132 self.buf = collector.getvalue()
1133 s, self.buf = self.buf[:l], buffer(self.buf, l)
1133 s, self.buf = self.buf[:l], buffer(self.buf, l)
1134 return s
1134 return s
1135
1135
1136 def filechunkiter(f, size=65536, limit=None):
1136 def filechunkiter(f, size=65536, limit=None):
1137 """Create a generator that produces the data in the file size
1137 """Create a generator that produces the data in the file size
1138 (default 65536) bytes at a time, up to optional limit (default is
1138 (default 65536) bytes at a time, up to optional limit (default is
1139 to read all data). Chunks may be less than size bytes if the
1139 to read all data). Chunks may be less than size bytes if the
1140 chunk is the last chunk in the file, or the file is a socket or
1140 chunk is the last chunk in the file, or the file is a socket or
1141 some other type of file that sometimes reads less data than is
1141 some other type of file that sometimes reads less data than is
1142 requested."""
1142 requested."""
1143 assert size >= 0
1143 assert size >= 0
1144 assert limit is None or limit >= 0
1144 assert limit is None or limit >= 0
1145 while True:
1145 while True:
1146 if limit is None: nbytes = size
1146 if limit is None: nbytes = size
1147 else: nbytes = min(limit, size)
1147 else: nbytes = min(limit, size)
1148 s = nbytes and f.read(nbytes)
1148 s = nbytes and f.read(nbytes)
1149 if not s: break
1149 if not s: break
1150 if limit: limit -= len(s)
1150 if limit: limit -= len(s)
1151 yield s
1151 yield s
1152
1152
1153 def makedate():
1153 def makedate():
1154 lt = time.localtime()
1154 lt = time.localtime()
1155 if lt[8] == 1 and time.daylight:
1155 if lt[8] == 1 and time.daylight:
1156 tz = time.altzone
1156 tz = time.altzone
1157 else:
1157 else:
1158 tz = time.timezone
1158 tz = time.timezone
1159 return time.mktime(lt), tz
1159 return time.mktime(lt), tz
1160
1160
1161 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1161 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1162 """represent a (unixtime, offset) tuple as a localized time.
1162 """represent a (unixtime, offset) tuple as a localized time.
1163 unixtime is seconds since the epoch, and offset is the time zone's
1163 unixtime is seconds since the epoch, and offset is the time zone's
1164 number of seconds away from UTC. if timezone is false, do not
1164 number of seconds away from UTC. if timezone is false, do not
1165 append time zone to string."""
1165 append time zone to string."""
1166 t, tz = date or makedate()
1166 t, tz = date or makedate()
1167 s = time.strftime(format, time.gmtime(float(t) - tz))
1167 s = time.strftime(format, time.gmtime(float(t) - tz))
1168 if timezone:
1168 if timezone:
1169 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1169 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1170 return s
1170 return s
1171
1171
1172 def strdate(string, format, defaults):
1172 def strdate(string, format, defaults):
1173 """parse a localized time string and return a (unixtime, offset) tuple.
1173 """parse a localized time string and return a (unixtime, offset) tuple.
1174 if the string cannot be parsed, ValueError is raised."""
1174 if the string cannot be parsed, ValueError is raised."""
1175 def timezone(string):
1175 def timezone(string):
1176 tz = string.split()[-1]
1176 tz = string.split()[-1]
1177 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1177 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1178 tz = int(tz)
1178 tz = int(tz)
1179 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1179 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1180 return offset
1180 return offset
1181 if tz == "GMT" or tz == "UTC":
1181 if tz == "GMT" or tz == "UTC":
1182 return 0
1182 return 0
1183 return None
1183 return None
1184
1184
1185 # NOTE: unixtime = localunixtime + offset
1185 # NOTE: unixtime = localunixtime + offset
1186 offset, date = timezone(string), string
1186 offset, date = timezone(string), string
1187 if offset != None:
1187 if offset != None:
1188 date = " ".join(string.split()[:-1])
1188 date = " ".join(string.split()[:-1])
1189
1189
1190 # add missing elements from defaults
1190 # add missing elements from defaults
1191 for part in defaults:
1191 for part in defaults:
1192 found = [True for p in part if ("%"+p) in format]
1192 found = [True for p in part if ("%"+p) in format]
1193 if not found:
1193 if not found:
1194 date += "@" + defaults[part]
1194 date += "@" + defaults[part]
1195 format += "@%" + part[0]
1195 format += "@%" + part[0]
1196
1196
1197 timetuple = time.strptime(date, format)
1197 timetuple = time.strptime(date, format)
1198 localunixtime = int(calendar.timegm(timetuple))
1198 localunixtime = int(calendar.timegm(timetuple))
1199 if offset is None:
1199 if offset is None:
1200 # local timezone
1200 # local timezone
1201 unixtime = int(time.mktime(timetuple))
1201 unixtime = int(time.mktime(timetuple))
1202 offset = unixtime - localunixtime
1202 offset = unixtime - localunixtime
1203 else:
1203 else:
1204 unixtime = localunixtime + offset
1204 unixtime = localunixtime + offset
1205 return unixtime, offset
1205 return unixtime, offset
1206
1206
1207 def parsedate(string, formats=None, defaults=None):
1207 def parsedate(string, formats=None, defaults=None):
1208 """parse a localized time string and return a (unixtime, offset) tuple.
1208 """parse a localized time string and return a (unixtime, offset) tuple.
1209 The date may be a "unixtime offset" string or in one of the specified
1209 The date may be a "unixtime offset" string or in one of the specified
1210 formats."""
1210 formats."""
1211 if not string:
1211 if not string:
1212 return 0, 0
1212 return 0, 0
1213 if not formats:
1213 if not formats:
1214 formats = defaultdateformats
1214 formats = defaultdateformats
1215 string = string.strip()
1215 string = string.strip()
1216 try:
1216 try:
1217 when, offset = map(int, string.split(' '))
1217 when, offset = map(int, string.split(' '))
1218 except ValueError:
1218 except ValueError:
1219 # fill out defaults
1219 # fill out defaults
1220 if not defaults:
1220 if not defaults:
1221 defaults = {}
1221 defaults = {}
1222 now = makedate()
1222 now = makedate()
1223 for part in "d mb yY HI M S".split():
1223 for part in "d mb yY HI M S".split():
1224 if part not in defaults:
1224 if part not in defaults:
1225 if part[0] in "HMS":
1225 if part[0] in "HMS":
1226 defaults[part] = "00"
1226 defaults[part] = "00"
1227 elif part[0] in "dm":
1227 elif part[0] in "dm":
1228 defaults[part] = "1"
1228 defaults[part] = "1"
1229 else:
1229 else:
1230 defaults[part] = datestr(now, "%" + part[0], False)
1230 defaults[part] = datestr(now, "%" + part[0], False)
1231
1231
1232 for format in formats:
1232 for format in formats:
1233 try:
1233 try:
1234 when, offset = strdate(string, format, defaults)
1234 when, offset = strdate(string, format, defaults)
1235 except ValueError:
1235 except ValueError:
1236 pass
1236 pass
1237 else:
1237 else:
1238 break
1238 break
1239 else:
1239 else:
1240 raise Abort(_('invalid date: %r ') % string)
1240 raise Abort(_('invalid date: %r ') % string)
1241 # validate explicit (probably user-specified) date and
1241 # validate explicit (probably user-specified) date and
1242 # time zone offset. values must fit in signed 32 bits for
1242 # time zone offset. values must fit in signed 32 bits for
1243 # current 32-bit linux runtimes. timezones go from UTC-12
1243 # current 32-bit linux runtimes. timezones go from UTC-12
1244 # to UTC+14
1244 # to UTC+14
1245 if abs(when) > 0x7fffffff:
1245 if abs(when) > 0x7fffffff:
1246 raise Abort(_('date exceeds 32 bits: %d') % when)
1246 raise Abort(_('date exceeds 32 bits: %d') % when)
1247 if offset < -50400 or offset > 43200:
1247 if offset < -50400 or offset > 43200:
1248 raise Abort(_('impossible time zone offset: %d') % offset)
1248 raise Abort(_('impossible time zone offset: %d') % offset)
1249 return when, offset
1249 return when, offset
1250
1250
1251 def matchdate(date):
1251 def matchdate(date):
1252 """Return a function that matches a given date match specifier
1252 """Return a function that matches a given date match specifier
1253
1253
1254 Formats include:
1254 Formats include:
1255
1255
1256 '{date}' match a given date to the accuracy provided
1256 '{date}' match a given date to the accuracy provided
1257
1257
1258 '<{date}' on or before a given date
1258 '<{date}' on or before a given date
1259
1259
1260 '>{date}' on or after a given date
1260 '>{date}' on or after a given date
1261
1261
1262 """
1262 """
1263
1263
1264 def lower(date):
1264 def lower(date):
1265 return parsedate(date, extendeddateformats)[0]
1265 return parsedate(date, extendeddateformats)[0]
1266
1266
1267 def upper(date):
1267 def upper(date):
1268 d = dict(mb="12", HI="23", M="59", S="59")
1268 d = dict(mb="12", HI="23", M="59", S="59")
1269 for days in "31 30 29".split():
1269 for days in "31 30 29".split():
1270 try:
1270 try:
1271 d["d"] = days
1271 d["d"] = days
1272 return parsedate(date, extendeddateformats, d)[0]
1272 return parsedate(date, extendeddateformats, d)[0]
1273 except:
1273 except:
1274 pass
1274 pass
1275 d["d"] = "28"
1275 d["d"] = "28"
1276 return parsedate(date, extendeddateformats, d)[0]
1276 return parsedate(date, extendeddateformats, d)[0]
1277
1277
1278 if date[0] == "<":
1278 if date[0] == "<":
1279 when = upper(date[1:])
1279 when = upper(date[1:])
1280 return lambda x: x <= when
1280 return lambda x: x <= when
1281 elif date[0] == ">":
1281 elif date[0] == ">":
1282 when = lower(date[1:])
1282 when = lower(date[1:])
1283 return lambda x: x >= when
1283 return lambda x: x >= when
1284 elif date[0] == "-":
1284 elif date[0] == "-":
1285 try:
1285 try:
1286 days = int(date[1:])
1286 days = int(date[1:])
1287 except ValueError:
1287 except ValueError:
1288 raise Abort(_("invalid day spec: %s") % date[1:])
1288 raise Abort(_("invalid day spec: %s") % date[1:])
1289 when = makedate()[0] - days * 3600 * 24
1289 when = makedate()[0] - days * 3600 * 24
1290 return lambda x: x >= when
1290 return lambda x: x >= when
1291 elif " to " in date:
1291 elif " to " in date:
1292 a, b = date.split(" to ")
1292 a, b = date.split(" to ")
1293 start, stop = lower(a), upper(b)
1293 start, stop = lower(a), upper(b)
1294 return lambda x: x >= start and x <= stop
1294 return lambda x: x >= start and x <= stop
1295 else:
1295 else:
1296 start, stop = lower(date), upper(date)
1296 start, stop = lower(date), upper(date)
1297 return lambda x: x >= start and x <= stop
1297 return lambda x: x >= start and x <= stop
1298
1298
1299 def shortuser(user):
1299 def shortuser(user):
1300 """Return a short representation of a user name or email address."""
1300 """Return a short representation of a user name or email address."""
1301 f = user.find('@')
1301 f = user.find('@')
1302 if f >= 0:
1302 if f >= 0:
1303 user = user[:f]
1303 user = user[:f]
1304 f = user.find('<')
1304 f = user.find('<')
1305 if f >= 0:
1305 if f >= 0:
1306 user = user[f+1:]
1306 user = user[f+1:]
1307 f = user.find(' ')
1307 f = user.find(' ')
1308 if f >= 0:
1308 if f >= 0:
1309 user = user[:f]
1309 user = user[:f]
1310 f = user.find('.')
1310 f = user.find('.')
1311 if f >= 0:
1311 if f >= 0:
1312 user = user[:f]
1312 user = user[:f]
1313 return user
1313 return user
1314
1314
1315 def ellipsis(text, maxlength=400):
1315 def ellipsis(text, maxlength=400):
1316 """Trim string to at most maxlength (default: 400) characters."""
1316 """Trim string to at most maxlength (default: 400) characters."""
1317 if len(text) <= maxlength:
1317 if len(text) <= maxlength:
1318 return text
1318 return text
1319 else:
1319 else:
1320 return "%s..." % (text[:maxlength-3])
1320 return "%s..." % (text[:maxlength-3])
1321
1321
1322 def walkrepos(path):
1322 def walkrepos(path):
1323 '''yield every hg repository under path, recursively.'''
1323 '''yield every hg repository under path, recursively.'''
1324 def errhandler(err):
1324 def errhandler(err):
1325 if err.filename == path:
1325 if err.filename == path:
1326 raise err
1326 raise err
1327
1327
1328 for root, dirs, files in os.walk(path, onerror=errhandler):
1328 for root, dirs, files in os.walk(path, onerror=errhandler):
1329 for d in dirs:
1329 for d in dirs:
1330 if d == '.hg':
1330 if d == '.hg':
1331 yield root
1331 yield root
1332 dirs[:] = []
1332 dirs[:] = []
1333 break
1333 break
1334
1334
1335 _rcpath = None
1335 _rcpath = None
1336
1336
1337 def rcpath():
1337 def rcpath():
1338 '''return hgrc search path. if env var HGRCPATH is set, use it.
1338 '''return hgrc search path. if env var HGRCPATH is set, use it.
1339 for each item in path, if directory, use files ending in .rc,
1339 for each item in path, if directory, use files ending in .rc,
1340 else use item.
1340 else use item.
1341 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1341 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1342 if no HGRCPATH, use default os-specific path.'''
1342 if no HGRCPATH, use default os-specific path.'''
1343 global _rcpath
1343 global _rcpath
1344 if _rcpath is None:
1344 if _rcpath is None:
1345 if 'HGRCPATH' in os.environ:
1345 if 'HGRCPATH' in os.environ:
1346 _rcpath = []
1346 _rcpath = []
1347 for p in os.environ['HGRCPATH'].split(os.pathsep):
1347 for p in os.environ['HGRCPATH'].split(os.pathsep):
1348 if not p: continue
1348 if not p: continue
1349 if os.path.isdir(p):
1349 if os.path.isdir(p):
1350 for f in os.listdir(p):
1350 for f in os.listdir(p):
1351 if f.endswith('.rc'):
1351 if f.endswith('.rc'):
1352 _rcpath.append(os.path.join(p, f))
1352 _rcpath.append(os.path.join(p, f))
1353 else:
1353 else:
1354 _rcpath.append(p)
1354 _rcpath.append(p)
1355 else:
1355 else:
1356 _rcpath = os_rcpath()
1356 _rcpath = os_rcpath()
1357 return _rcpath
1357 return _rcpath
1358
1358
1359 def bytecount(nbytes):
1359 def bytecount(nbytes):
1360 '''return byte count formatted as readable string, with units'''
1360 '''return byte count formatted as readable string, with units'''
1361
1361
1362 units = (
1362 units = (
1363 (100, 1<<30, _('%.0f GB')),
1363 (100, 1<<30, _('%.0f GB')),
1364 (10, 1<<30, _('%.1f GB')),
1364 (10, 1<<30, _('%.1f GB')),
1365 (1, 1<<30, _('%.2f GB')),
1365 (1, 1<<30, _('%.2f GB')),
1366 (100, 1<<20, _('%.0f MB')),
1366 (100, 1<<20, _('%.0f MB')),
1367 (10, 1<<20, _('%.1f MB')),
1367 (10, 1<<20, _('%.1f MB')),
1368 (1, 1<<20, _('%.2f MB')),
1368 (1, 1<<20, _('%.2f MB')),
1369 (100, 1<<10, _('%.0f KB')),
1369 (100, 1<<10, _('%.0f KB')),
1370 (10, 1<<10, _('%.1f KB')),
1370 (10, 1<<10, _('%.1f KB')),
1371 (1, 1<<10, _('%.2f KB')),
1371 (1, 1<<10, _('%.2f KB')),
1372 (1, 1, _('%.0f bytes')),
1372 (1, 1, _('%.0f bytes')),
1373 )
1373 )
1374
1374
1375 for multiplier, divisor, format in units:
1375 for multiplier, divisor, format in units:
1376 if nbytes >= divisor * multiplier:
1376 if nbytes >= divisor * multiplier:
1377 return format % (nbytes / float(divisor))
1377 return format % (nbytes / float(divisor))
1378 return units[-1][2] % nbytes
1378 return units[-1][2] % nbytes
1379
1379
1380 def drop_scheme(scheme, path):
1380 def drop_scheme(scheme, path):
1381 sc = scheme + ':'
1381 sc = scheme + ':'
1382 if path.startswith(sc):
1382 if path.startswith(sc):
1383 path = path[len(sc):]
1383 path = path[len(sc):]
1384 if path.startswith('//'):
1384 if path.startswith('//'):
1385 path = path[2:]
1385 path = path[2:]
1386 return path
1386 return path
@@ -1,67 +1,66 b''
1 % commit date test
1 % commit date test
2 abort: impossible time zone offset: 4444444
2 abort: impossible time zone offset: 4444444
3 transaction abort!
3 transaction abort!
4 rollback completed
4 rollback completed
5 abort: invalid date: '1\t15.1'
5 abort: invalid date: '1\t15.1'
6 transaction abort!
6 transaction abort!
7 rollback completed
7 rollback completed
8 abort: invalid date: 'foo bar'
8 abort: invalid date: 'foo bar'
9 transaction abort!
9 transaction abort!
10 rollback completed
10 rollback completed
11 nothing changed
11 nothing changed
12 % partial commit test
12 % partial commit test
13 trouble committing bar!
13 trouble committing bar!
14 abort: No such file or directory: .../test/bar
14 abort: No such file or directory: .../test/bar
15 adding dir/file
15 adding dir/file
16 dir/file
16 dir/file
17 adding dir.file
17 adding dir.file
18 abort: no match under directory .../test/dir!
18 abort: no match under directory .../test/dir!
19 abort: no match under directory .../test/bleh!
19 abort: no match under directory .../test/bleh!
20 abort: no match under directory .../test/dir2!
20 abort: no match under directory .../test/dir2!
21 dir/file
21 dir/file
22 does-not-exist: No such file or directory
22 does-not-exist: No such file or directory
23 abort: file .../test/does-not-exist not found!
23 abort: file .../test/does-not-exist not found!
24 baz: unsupported file type (type is symbolic link)
24 abort: file .../test/baz not tracked!
25 abort: can't commit .../test/baz: unsupported file type!
26 abort: file .../test/quux not tracked!
25 abort: file .../test/quux not tracked!
27 dir/file
26 dir/file
28 % partial subdir commit test
27 % partial subdir commit test
29 adding bar/bar
28 adding bar/bar
30 adding foo/foo
29 adding foo/foo
31 % subdir log 1
30 % subdir log 1
32 changeset: 0:6ef3cb06bb80
31 changeset: 0:6ef3cb06bb80
33 user: test
32 user: test
34 date: Mon Jan 12 13:46:40 1970 +0000
33 date: Mon Jan 12 13:46:40 1970 +0000
35 files: foo/foo
34 files: foo/foo
36 description:
35 description:
37 commit-subdir-1
36 commit-subdir-1
38
37
39
38
40 % subdir log 2
39 % subdir log 2
41 changeset: 1:f2e51572cf5a
40 changeset: 1:f2e51572cf5a
42 tag: tip
41 tag: tip
43 user: test
42 user: test
44 date: Mon Jan 12 13:46:41 1970 +0000
43 date: Mon Jan 12 13:46:41 1970 +0000
45 files: bar/bar
44 files: bar/bar
46 description:
45 description:
47 commit-subdir-2
46 commit-subdir-2
48
47
49
48
50 % full log
49 % full log
51 changeset: 1:f2e51572cf5a
50 changeset: 1:f2e51572cf5a
52 tag: tip
51 tag: tip
53 user: test
52 user: test
54 date: Mon Jan 12 13:46:41 1970 +0000
53 date: Mon Jan 12 13:46:41 1970 +0000
55 files: bar/bar
54 files: bar/bar
56 description:
55 description:
57 commit-subdir-2
56 commit-subdir-2
58
57
59
58
60 changeset: 0:6ef3cb06bb80
59 changeset: 0:6ef3cb06bb80
61 user: test
60 user: test
62 date: Mon Jan 12 13:46:40 1970 +0000
61 date: Mon Jan 12 13:46:40 1970 +0000
63 files: foo/foo
62 files: foo/foo
64 description:
63 description:
65 commit-subdir-1
64 commit-subdir-1
66
65
67
66
@@ -1,15 +1,16 b''
1 adding bar
1 adding foo
2 adding foo
2 adding bomb
3 adding bomb
3 adding a.c
4 adding a.c
4 adding dir/a.o
5 adding dir/a.o
5 adding dir/b.o
6 adding dir/b.o
7 M dir/b.o
6 ! a.c
8 ! a.c
7 ! dir/a.o
9 ! dir/a.o
8 ! dir/b.o
9 ? .hgignore
10 ? .hgignore
10 a.c: unsupported file type (type is fifo)
11 a.c: unsupported file type (type is fifo)
11 ! a.c
12 ! a.c
12 # test absolute path through symlink outside repo
13 # test absolute path through symlink outside repo
13 A f
14 A f
14 # try symlink outside repo to file inside
15 # try symlink outside repo to file inside
15 abort: ../z not under root
16 abort: ../z not under root
General Comments 0
You need to be logged in to leave comments. Login now