##// END OF EJS Templates
context: clean up parents()
Matt Mackall -
r6742:2d54e7c1 default
parent child Browse files
Show More
@@ -1,753 +1,757 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import nullid, nullrev, short
8 from node import nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import ancestor, bdiff, revlog, util, os, errno
10 import ancestor, bdiff, revlog, util, os, errno
11
11
12 class changectx(object):
12 class changectx(object):
13 """A changecontext object makes access to data related to a particular
13 """A changecontext object makes access to data related to a particular
14 changeset convenient."""
14 changeset convenient."""
15 def __init__(self, repo, changeid=''):
15 def __init__(self, repo, changeid=''):
16 """changeid is a revision number, node, or tag"""
16 """changeid is a revision number, node, or tag"""
17 if changeid == '':
17 if changeid == '':
18 changeid = '.'
18 changeid = '.'
19 self._repo = repo
19 self._repo = repo
20 self._node = self._repo.lookup(changeid)
20 self._node = self._repo.lookup(changeid)
21 self._rev = self._repo.changelog.rev(self._node)
21 self._rev = self._repo.changelog.rev(self._node)
22
22
23 def __str__(self):
23 def __str__(self):
24 return short(self.node())
24 return short(self.node())
25
25
26 def __repr__(self):
26 def __repr__(self):
27 return "<changectx %s>" % str(self)
27 return "<changectx %s>" % str(self)
28
28
29 def __hash__(self):
29 def __hash__(self):
30 try:
30 try:
31 return hash(self._rev)
31 return hash(self._rev)
32 except AttributeError:
32 except AttributeError:
33 return id(self)
33 return id(self)
34
34
35 def __eq__(self, other):
35 def __eq__(self, other):
36 try:
36 try:
37 return self._rev == other._rev
37 return self._rev == other._rev
38 except AttributeError:
38 except AttributeError:
39 return False
39 return False
40
40
41 def __ne__(self, other):
41 def __ne__(self, other):
42 return not (self == other)
42 return not (self == other)
43
43
44 def __nonzero__(self):
44 def __nonzero__(self):
45 return self._rev != nullrev
45 return self._rev != nullrev
46
46
47 def __getattr__(self, name):
47 def __getattr__(self, name):
48 if name == '_changeset':
48 if name == '_changeset':
49 self._changeset = self._repo.changelog.read(self.node())
49 self._changeset = self._repo.changelog.read(self.node())
50 return self._changeset
50 return self._changeset
51 elif name == '_manifest':
51 elif name == '_manifest':
52 self._manifest = self._repo.manifest.read(self._changeset[0])
52 self._manifest = self._repo.manifest.read(self._changeset[0])
53 return self._manifest
53 return self._manifest
54 elif name == '_manifestdelta':
54 elif name == '_manifestdelta':
55 md = self._repo.manifest.readdelta(self._changeset[0])
55 md = self._repo.manifest.readdelta(self._changeset[0])
56 self._manifestdelta = md
56 self._manifestdelta = md
57 return self._manifestdelta
57 return self._manifestdelta
58 elif name == '_parents':
59 p = self._repo.changelog.parents(self._node)
60 if p[1] == nullid:
61 p = p[:-1]
62 self._parents = [changectx(self._repo, x) for x in p]
63 return self._parents
58 else:
64 else:
59 raise AttributeError, name
65 raise AttributeError, name
60
66
61 def __contains__(self, key):
67 def __contains__(self, key):
62 return key in self._manifest
68 return key in self._manifest
63
69
64 def __getitem__(self, key):
70 def __getitem__(self, key):
65 return self.filectx(key)
71 return self.filectx(key)
66
72
67 def __iter__(self):
73 def __iter__(self):
68 a = self._manifest.keys()
74 a = self._manifest.keys()
69 a.sort()
75 a.sort()
70 for f in a:
76 for f in a:
71 yield f
77 yield f
72
78
73 def changeset(self): return self._changeset
79 def changeset(self): return self._changeset
74 def manifest(self): return self._manifest
80 def manifest(self): return self._manifest
75
81
76 def rev(self): return self._rev
82 def rev(self): return self._rev
77 def node(self): return self._node
83 def node(self): return self._node
78 def user(self): return self._changeset[1]
84 def user(self): return self._changeset[1]
79 def date(self): return self._changeset[2]
85 def date(self): return self._changeset[2]
80 def files(self): return self._changeset[3]
86 def files(self): return self._changeset[3]
81 def description(self): return self._changeset[4]
87 def description(self): return self._changeset[4]
82 def branch(self): return self._changeset[5].get("branch")
88 def branch(self): return self._changeset[5].get("branch")
83 def extra(self): return self._changeset[5]
89 def extra(self): return self._changeset[5]
84 def tags(self): return self._repo.nodetags(self._node)
90 def tags(self): return self._repo.nodetags(self._node)
85
91
86 def parents(self):
92 def parents(self):
87 """return contexts for each parent changeset"""
93 """return contexts for each parent changeset"""
88 p = self._repo.changelog.parents(self._node)
94 return self._parents
89 return [changectx(self._repo, x) for x in p]
90
95
91 def children(self):
96 def children(self):
92 """return contexts for each child changeset"""
97 """return contexts for each child changeset"""
93 c = self._repo.changelog.children(self._node)
98 c = self._repo.changelog.children(self._node)
94 return [changectx(self._repo, x) for x in c]
99 return [changectx(self._repo, x) for x in c]
95
100
96 def _fileinfo(self, path):
101 def _fileinfo(self, path):
97 if '_manifest' in self.__dict__:
102 if '_manifest' in self.__dict__:
98 try:
103 try:
99 return self._manifest[path], self._manifest.flags(path)
104 return self._manifest[path], self._manifest.flags(path)
100 except KeyError:
105 except KeyError:
101 raise revlog.LookupError(self._node, path,
106 raise revlog.LookupError(self._node, path,
102 _('not found in manifest'))
107 _('not found in manifest'))
103 if '_manifestdelta' in self.__dict__ or path in self.files():
108 if '_manifestdelta' in self.__dict__ or path in self.files():
104 if path in self._manifestdelta:
109 if path in self._manifestdelta:
105 return self._manifestdelta[path], self._manifestdelta.flags(path)
110 return self._manifestdelta[path], self._manifestdelta.flags(path)
106 node, flag = self._repo.manifest.find(self._changeset[0], path)
111 node, flag = self._repo.manifest.find(self._changeset[0], path)
107 if not node:
112 if not node:
108 raise revlog.LookupError(self._node, path,
113 raise revlog.LookupError(self._node, path,
109 _('not found in manifest'))
114 _('not found in manifest'))
110
115
111 return node, flag
116 return node, flag
112
117
113 def filenode(self, path):
118 def filenode(self, path):
114 return self._fileinfo(path)[0]
119 return self._fileinfo(path)[0]
115
120
116 def fileflags(self, path):
121 def fileflags(self, path):
117 try:
122 try:
118 return self._fileinfo(path)[1]
123 return self._fileinfo(path)[1]
119 except revlog.LookupError:
124 except revlog.LookupError:
120 return ''
125 return ''
121
126
122 def filectx(self, path, fileid=None, filelog=None):
127 def filectx(self, path, fileid=None, filelog=None):
123 """get a file context from this changeset"""
128 """get a file context from this changeset"""
124 if fileid is None:
129 if fileid is None:
125 fileid = self.filenode(path)
130 fileid = self.filenode(path)
126 return filectx(self._repo, path, fileid=fileid,
131 return filectx(self._repo, path, fileid=fileid,
127 changectx=self, filelog=filelog)
132 changectx=self, filelog=filelog)
128
133
129 def filectxs(self):
134 def filectxs(self):
130 """generate a file context for each file in this changeset's
135 """generate a file context for each file in this changeset's
131 manifest"""
136 manifest"""
132 mf = self.manifest()
137 mf = self.manifest()
133 m = mf.keys()
138 m = mf.keys()
134 m.sort()
139 m.sort()
135 for f in m:
140 for f in m:
136 yield self.filectx(f, fileid=mf[f])
141 yield self.filectx(f, fileid=mf[f])
137
142
138 def ancestor(self, c2):
143 def ancestor(self, c2):
139 """
144 """
140 return the ancestor context of self and c2
145 return the ancestor context of self and c2
141 """
146 """
142 n = self._repo.changelog.ancestor(self._node, c2._node)
147 n = self._repo.changelog.ancestor(self._node, c2._node)
143 return changectx(self._repo, n)
148 return changectx(self._repo, n)
144
149
145 class filectx(object):
150 class filectx(object):
146 """A filecontext object makes access to data related to a particular
151 """A filecontext object makes access to data related to a particular
147 filerevision convenient."""
152 filerevision convenient."""
148 def __init__(self, repo, path, changeid=None, fileid=None,
153 def __init__(self, repo, path, changeid=None, fileid=None,
149 filelog=None, changectx=None):
154 filelog=None, changectx=None):
150 """changeid can be a changeset revision, node, or tag.
155 """changeid can be a changeset revision, node, or tag.
151 fileid can be a file revision or node."""
156 fileid can be a file revision or node."""
152 self._repo = repo
157 self._repo = repo
153 self._path = path
158 self._path = path
154
159
155 assert (changeid is not None
160 assert (changeid is not None
156 or fileid is not None
161 or fileid is not None
157 or changectx is not None)
162 or changectx is not None)
158
163
159 if filelog:
164 if filelog:
160 self._filelog = filelog
165 self._filelog = filelog
161
166
162 if changeid is not None:
167 if changeid is not None:
163 self._changeid = changeid
168 self._changeid = changeid
164 if changectx is not None:
169 if changectx is not None:
165 self._changectx = changectx
170 self._changectx = changectx
166 if fileid is not None:
171 if fileid is not None:
167 self._fileid = fileid
172 self._fileid = fileid
168
173
169 def __getattr__(self, name):
174 def __getattr__(self, name):
170 if name == '_changectx':
175 if name == '_changectx':
171 self._changectx = changectx(self._repo, self._changeid)
176 self._changectx = changectx(self._repo, self._changeid)
172 return self._changectx
177 return self._changectx
173 elif name == '_filelog':
178 elif name == '_filelog':
174 self._filelog = self._repo.file(self._path)
179 self._filelog = self._repo.file(self._path)
175 return self._filelog
180 return self._filelog
176 elif name == '_changeid':
181 elif name == '_changeid':
177 if '_changectx' in self.__dict__:
182 if '_changectx' in self.__dict__:
178 self._changeid = self._changectx.rev()
183 self._changeid = self._changectx.rev()
179 else:
184 else:
180 self._changeid = self._filelog.linkrev(self._filenode)
185 self._changeid = self._filelog.linkrev(self._filenode)
181 return self._changeid
186 return self._changeid
182 elif name == '_filenode':
187 elif name == '_filenode':
183 if '_fileid' in self.__dict__:
188 if '_fileid' in self.__dict__:
184 self._filenode = self._filelog.lookup(self._fileid)
189 self._filenode = self._filelog.lookup(self._fileid)
185 else:
190 else:
186 self._filenode = self._changectx.filenode(self._path)
191 self._filenode = self._changectx.filenode(self._path)
187 return self._filenode
192 return self._filenode
188 elif name == '_filerev':
193 elif name == '_filerev':
189 self._filerev = self._filelog.rev(self._filenode)
194 self._filerev = self._filelog.rev(self._filenode)
190 return self._filerev
195 return self._filerev
191 elif name == '_repopath':
196 elif name == '_repopath':
192 self._repopath = self._path
197 self._repopath = self._path
193 return self._repopath
198 return self._repopath
194 else:
199 else:
195 raise AttributeError, name
200 raise AttributeError, name
196
201
197 def __nonzero__(self):
202 def __nonzero__(self):
198 try:
203 try:
199 n = self._filenode
204 n = self._filenode
200 return True
205 return True
201 except revlog.LookupError:
206 except revlog.LookupError:
202 # file is missing
207 # file is missing
203 return False
208 return False
204
209
205 def __str__(self):
210 def __str__(self):
206 return "%s@%s" % (self.path(), short(self.node()))
211 return "%s@%s" % (self.path(), short(self.node()))
207
212
208 def __repr__(self):
213 def __repr__(self):
209 return "<filectx %s>" % str(self)
214 return "<filectx %s>" % str(self)
210
215
211 def __hash__(self):
216 def __hash__(self):
212 try:
217 try:
213 return hash((self._path, self._fileid))
218 return hash((self._path, self._fileid))
214 except AttributeError:
219 except AttributeError:
215 return id(self)
220 return id(self)
216
221
217 def __eq__(self, other):
222 def __eq__(self, other):
218 try:
223 try:
219 return (self._path == other._path
224 return (self._path == other._path
220 and self._fileid == other._fileid)
225 and self._fileid == other._fileid)
221 except AttributeError:
226 except AttributeError:
222 return False
227 return False
223
228
224 def __ne__(self, other):
229 def __ne__(self, other):
225 return not (self == other)
230 return not (self == other)
226
231
227 def filectx(self, fileid):
232 def filectx(self, fileid):
228 '''opens an arbitrary revision of the file without
233 '''opens an arbitrary revision of the file without
229 opening a new filelog'''
234 opening a new filelog'''
230 return filectx(self._repo, self._path, fileid=fileid,
235 return filectx(self._repo, self._path, fileid=fileid,
231 filelog=self._filelog)
236 filelog=self._filelog)
232
237
233 def filerev(self): return self._filerev
238 def filerev(self): return self._filerev
234 def filenode(self): return self._filenode
239 def filenode(self): return self._filenode
235 def fileflags(self): return self._changectx.fileflags(self._path)
240 def fileflags(self): return self._changectx.fileflags(self._path)
236 def isexec(self): return 'x' in self.fileflags()
241 def isexec(self): return 'x' in self.fileflags()
237 def islink(self): return 'l' in self.fileflags()
242 def islink(self): return 'l' in self.fileflags()
238 def filelog(self): return self._filelog
243 def filelog(self): return self._filelog
239
244
240 def rev(self):
245 def rev(self):
241 if '_changectx' in self.__dict__:
246 if '_changectx' in self.__dict__:
242 return self._changectx.rev()
247 return self._changectx.rev()
243 if '_changeid' in self.__dict__:
248 if '_changeid' in self.__dict__:
244 return self._changectx.rev()
249 return self._changectx.rev()
245 return self._filelog.linkrev(self._filenode)
250 return self._filelog.linkrev(self._filenode)
246
251
247 def linkrev(self): return self._filelog.linkrev(self._filenode)
252 def linkrev(self): return self._filelog.linkrev(self._filenode)
248 def node(self): return self._changectx.node()
253 def node(self): return self._changectx.node()
249 def user(self): return self._changectx.user()
254 def user(self): return self._changectx.user()
250 def date(self): return self._changectx.date()
255 def date(self): return self._changectx.date()
251 def files(self): return self._changectx.files()
256 def files(self): return self._changectx.files()
252 def description(self): return self._changectx.description()
257 def description(self): return self._changectx.description()
253 def branch(self): return self._changectx.branch()
258 def branch(self): return self._changectx.branch()
254 def manifest(self): return self._changectx.manifest()
259 def manifest(self): return self._changectx.manifest()
255 def changectx(self): return self._changectx
260 def changectx(self): return self._changectx
256
261
257 def data(self): return self._filelog.read(self._filenode)
262 def data(self): return self._filelog.read(self._filenode)
258 def path(self): return self._path
263 def path(self): return self._path
259 def size(self): return self._filelog.size(self._filerev)
264 def size(self): return self._filelog.size(self._filerev)
260
265
261 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
266 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
262
267
263 def renamed(self):
268 def renamed(self):
264 """check if file was actually renamed in this changeset revision
269 """check if file was actually renamed in this changeset revision
265
270
266 If rename logged in file revision, we report copy for changeset only
271 If rename logged in file revision, we report copy for changeset only
267 if file revisions linkrev points back to the changeset in question
272 if file revisions linkrev points back to the changeset in question
268 or both changeset parents contain different file revisions.
273 or both changeset parents contain different file revisions.
269 """
274 """
270
275
271 renamed = self._filelog.renamed(self._filenode)
276 renamed = self._filelog.renamed(self._filenode)
272 if not renamed:
277 if not renamed:
273 return renamed
278 return renamed
274
279
275 if self.rev() == self.linkrev():
280 if self.rev() == self.linkrev():
276 return renamed
281 return renamed
277
282
278 name = self.path()
283 name = self.path()
279 fnode = self._filenode
284 fnode = self._filenode
280 for p in self._changectx.parents():
285 for p in self._changectx.parents():
281 try:
286 try:
282 if fnode == p.filenode(name):
287 if fnode == p.filenode(name):
283 return None
288 return None
284 except revlog.LookupError:
289 except revlog.LookupError:
285 pass
290 pass
286 return renamed
291 return renamed
287
292
288 def parents(self):
293 def parents(self):
289 p = self._path
294 p = self._path
290 fl = self._filelog
295 fl = self._filelog
291 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
296 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
292
297
293 r = self._filelog.renamed(self._filenode)
298 r = self._filelog.renamed(self._filenode)
294 if r:
299 if r:
295 pl[0] = (r[0], r[1], None)
300 pl[0] = (r[0], r[1], None)
296
301
297 return [filectx(self._repo, p, fileid=n, filelog=l)
302 return [filectx(self._repo, p, fileid=n, filelog=l)
298 for p,n,l in pl if n != nullid]
303 for p,n,l in pl if n != nullid]
299
304
300 def children(self):
305 def children(self):
301 # hard for renames
306 # hard for renames
302 c = self._filelog.children(self._filenode)
307 c = self._filelog.children(self._filenode)
303 return [filectx(self._repo, self._path, fileid=x,
308 return [filectx(self._repo, self._path, fileid=x,
304 filelog=self._filelog) for x in c]
309 filelog=self._filelog) for x in c]
305
310
306 def annotate(self, follow=False, linenumber=None):
311 def annotate(self, follow=False, linenumber=None):
307 '''returns a list of tuples of (ctx, line) for each line
312 '''returns a list of tuples of (ctx, line) for each line
308 in the file, where ctx is the filectx of the node where
313 in the file, where ctx is the filectx of the node where
309 that line was last changed.
314 that line was last changed.
310 This returns tuples of ((ctx, linenumber), line) for each line,
315 This returns tuples of ((ctx, linenumber), line) for each line,
311 if "linenumber" parameter is NOT "None".
316 if "linenumber" parameter is NOT "None".
312 In such tuples, linenumber means one at the first appearance
317 In such tuples, linenumber means one at the first appearance
313 in the managed file.
318 in the managed file.
314 To reduce annotation cost,
319 To reduce annotation cost,
315 this returns fixed value(False is used) as linenumber,
320 this returns fixed value(False is used) as linenumber,
316 if "linenumber" parameter is "False".'''
321 if "linenumber" parameter is "False".'''
317
322
318 def decorate_compat(text, rev):
323 def decorate_compat(text, rev):
319 return ([rev] * len(text.splitlines()), text)
324 return ([rev] * len(text.splitlines()), text)
320
325
321 def without_linenumber(text, rev):
326 def without_linenumber(text, rev):
322 return ([(rev, False)] * len(text.splitlines()), text)
327 return ([(rev, False)] * len(text.splitlines()), text)
323
328
324 def with_linenumber(text, rev):
329 def with_linenumber(text, rev):
325 size = len(text.splitlines())
330 size = len(text.splitlines())
326 return ([(rev, i) for i in xrange(1, size + 1)], text)
331 return ([(rev, i) for i in xrange(1, size + 1)], text)
327
332
328 decorate = (((linenumber is None) and decorate_compat) or
333 decorate = (((linenumber is None) and decorate_compat) or
329 (linenumber and with_linenumber) or
334 (linenumber and with_linenumber) or
330 without_linenumber)
335 without_linenumber)
331
336
332 def pair(parent, child):
337 def pair(parent, child):
333 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
338 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
334 child[0][b1:b2] = parent[0][a1:a2]
339 child[0][b1:b2] = parent[0][a1:a2]
335 return child
340 return child
336
341
337 getlog = util.cachefunc(lambda x: self._repo.file(x))
342 getlog = util.cachefunc(lambda x: self._repo.file(x))
338 def getctx(path, fileid):
343 def getctx(path, fileid):
339 log = path == self._path and self._filelog or getlog(path)
344 log = path == self._path and self._filelog or getlog(path)
340 return filectx(self._repo, path, fileid=fileid, filelog=log)
345 return filectx(self._repo, path, fileid=fileid, filelog=log)
341 getctx = util.cachefunc(getctx)
346 getctx = util.cachefunc(getctx)
342
347
343 def parents(f):
348 def parents(f):
344 # we want to reuse filectx objects as much as possible
349 # we want to reuse filectx objects as much as possible
345 p = f._path
350 p = f._path
346 if f._filerev is None: # working dir
351 if f._filerev is None: # working dir
347 pl = [(n.path(), n.filerev()) for n in f.parents()]
352 pl = [(n.path(), n.filerev()) for n in f.parents()]
348 else:
353 else:
349 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
354 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
350
355
351 if follow:
356 if follow:
352 r = f.renamed()
357 r = f.renamed()
353 if r:
358 if r:
354 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
359 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
355
360
356 return [getctx(p, n) for p, n in pl if n != nullrev]
361 return [getctx(p, n) for p, n in pl if n != nullrev]
357
362
358 # use linkrev to find the first changeset where self appeared
363 # use linkrev to find the first changeset where self appeared
359 if self.rev() != self.linkrev():
364 if self.rev() != self.linkrev():
360 base = self.filectx(self.filerev())
365 base = self.filectx(self.filerev())
361 else:
366 else:
362 base = self
367 base = self
363
368
364 # find all ancestors
369 # find all ancestors
365 needed = {base: 1}
370 needed = {base: 1}
366 visit = [base]
371 visit = [base]
367 files = [base._path]
372 files = [base._path]
368 while visit:
373 while visit:
369 f = visit.pop(0)
374 f = visit.pop(0)
370 for p in parents(f):
375 for p in parents(f):
371 if p not in needed:
376 if p not in needed:
372 needed[p] = 1
377 needed[p] = 1
373 visit.append(p)
378 visit.append(p)
374 if p._path not in files:
379 if p._path not in files:
375 files.append(p._path)
380 files.append(p._path)
376 else:
381 else:
377 # count how many times we'll use this
382 # count how many times we'll use this
378 needed[p] += 1
383 needed[p] += 1
379
384
380 # sort by revision (per file) which is a topological order
385 # sort by revision (per file) which is a topological order
381 visit = []
386 visit = []
382 for f in files:
387 for f in files:
383 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
388 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
384 visit.extend(fn)
389 visit.extend(fn)
385 visit.sort()
390 visit.sort()
386 hist = {}
391 hist = {}
387
392
388 for r, f in visit:
393 for r, f in visit:
389 curr = decorate(f.data(), f)
394 curr = decorate(f.data(), f)
390 for p in parents(f):
395 for p in parents(f):
391 if p != nullid:
396 if p != nullid:
392 curr = pair(hist[p], curr)
397 curr = pair(hist[p], curr)
393 # trim the history of unneeded revs
398 # trim the history of unneeded revs
394 needed[p] -= 1
399 needed[p] -= 1
395 if not needed[p]:
400 if not needed[p]:
396 del hist[p]
401 del hist[p]
397 hist[f] = curr
402 hist[f] = curr
398
403
399 return zip(hist[f][0], hist[f][1].splitlines(1))
404 return zip(hist[f][0], hist[f][1].splitlines(1))
400
405
401 def ancestor(self, fc2):
406 def ancestor(self, fc2):
402 """
407 """
403 find the common ancestor file context, if any, of self, and fc2
408 find the common ancestor file context, if any, of self, and fc2
404 """
409 """
405
410
406 acache = {}
411 acache = {}
407
412
408 # prime the ancestor cache for the working directory
413 # prime the ancestor cache for the working directory
409 for c in (self, fc2):
414 for c in (self, fc2):
410 if c._filerev == None:
415 if c._filerev == None:
411 pl = [(n.path(), n.filenode()) for n in c.parents()]
416 pl = [(n.path(), n.filenode()) for n in c.parents()]
412 acache[(c._path, None)] = pl
417 acache[(c._path, None)] = pl
413
418
414 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
419 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
415 def parents(vertex):
420 def parents(vertex):
416 if vertex in acache:
421 if vertex in acache:
417 return acache[vertex]
422 return acache[vertex]
418 f, n = vertex
423 f, n = vertex
419 if f not in flcache:
424 if f not in flcache:
420 flcache[f] = self._repo.file(f)
425 flcache[f] = self._repo.file(f)
421 fl = flcache[f]
426 fl = flcache[f]
422 pl = [(f, p) for p in fl.parents(n) if p != nullid]
427 pl = [(f, p) for p in fl.parents(n) if p != nullid]
423 re = fl.renamed(n)
428 re = fl.renamed(n)
424 if re:
429 if re:
425 pl.append(re)
430 pl.append(re)
426 acache[vertex] = pl
431 acache[vertex] = pl
427 return pl
432 return pl
428
433
429 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
434 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
430 v = ancestor.ancestor(a, b, parents)
435 v = ancestor.ancestor(a, b, parents)
431 if v:
436 if v:
432 f, n = v
437 f, n = v
433 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
438 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
434
439
435 return None
440 return None
436
441
437 class workingctx(changectx):
442 class workingctx(changectx):
438 """A workingctx object makes access to data related to
443 """A workingctx object makes access to data related to
439 the current working directory convenient.
444 the current working directory convenient.
440 parents - a pair of parent nodeids, or None to use the dirstate.
445 parents - a pair of parent nodeids, or None to use the dirstate.
441 date - any valid date string or (unixtime, offset), or None.
446 date - any valid date string or (unixtime, offset), or None.
442 user - username string, or None.
447 user - username string, or None.
443 extra - a dictionary of extra values, or None.
448 extra - a dictionary of extra values, or None.
444 changes - a list of file lists as returned by localrepo.status()
449 changes - a list of file lists as returned by localrepo.status()
445 or None to use the repository status.
450 or None to use the repository status.
446 """
451 """
447 def __init__(self, repo, parents=None, text="", user=None, date=None,
452 def __init__(self, repo, parents=None, text="", user=None, date=None,
448 extra=None, changes=None):
453 extra=None, changes=None):
449 self._repo = repo
454 self._repo = repo
450 self._rev = None
455 self._rev = None
451 self._node = None
456 self._node = None
452 self._text = text
457 self._text = text
453 if date:
458 if date:
454 self._date = util.parsedate(date)
459 self._date = util.parsedate(date)
455 else:
460 else:
456 self._date = util.makedate()
461 self._date = util.makedate()
457 if user:
462 if user:
458 self._user = user
463 self._user = user
459 else:
464 else:
460 self._user = self._repo.ui.username()
465 self._user = self._repo.ui.username()
461 if parents:
466 if parents:
462 p1, p2 = parents
467 p1, p2 = parents
463 self._parents = [self._repo.changectx(p) for p in (p1, p2)]
468 self._parents = [self._repo.changectx(p) for p in (p1, p2)]
464 if changes:
469 if changes:
465 self._status = list(changes)
470 self._status = list(changes)
466
471
467 self._extra = {}
472 self._extra = {}
468 if extra:
473 if extra:
469 self._extra = extra.copy()
474 self._extra = extra.copy()
470 if 'branch' not in self._extra:
475 if 'branch' not in self._extra:
471 branch = self._repo.dirstate.branch()
476 branch = self._repo.dirstate.branch()
472 try:
477 try:
473 branch = branch.decode('UTF-8').encode('UTF-8')
478 branch = branch.decode('UTF-8').encode('UTF-8')
474 except UnicodeDecodeError:
479 except UnicodeDecodeError:
475 raise util.Abort(_('branch name not in UTF-8!'))
480 raise util.Abort(_('branch name not in UTF-8!'))
476 self._extra['branch'] = branch
481 self._extra['branch'] = branch
477 if self._extra['branch'] == '':
482 if self._extra['branch'] == '':
478 self._extra['branch'] = 'default'
483 self._extra['branch'] = 'default'
479
484
480 def __str__(self):
485 def __str__(self):
481 return str(self._parents[0]) + "+"
486 return str(self._parents[0]) + "+"
482
487
483 def __nonzero__(self):
488 def __nonzero__(self):
484 return True
489 return True
485
490
486 def __getattr__(self, name):
491 def __getattr__(self, name):
487 if name == '_parents':
488 self._parents = self._repo.parents()
489 return self._parents
490 if name == '_status':
492 if name == '_status':
491 self._status = self._repo.status()
493 self._status = self._repo.status()
492 return self._status
494 return self._status
493 if name == '_manifest':
495 if name == '_manifest':
494 self._buildmanifest()
496 self._buildmanifest()
495 return self._manifest
497 return self._manifest
498 elif name == '_parents':
499 p = self._repo.dirstate.parents()
500 if p[1] == nullid:
501 p = p[:-1]
502 self._parents = [changectx(self._repo, x) for x in p]
503 return self._parents
496 else:
504 else:
497 raise AttributeError, name
505 raise AttributeError, name
498
506
499 def _buildmanifest(self):
507 def _buildmanifest(self):
500 """generate a manifest corresponding to the working directory"""
508 """generate a manifest corresponding to the working directory"""
501
509
502 man = self._parents[0].manifest().copy()
510 man = self._parents[0].manifest().copy()
503 copied = self._repo.dirstate.copies()
511 copied = self._repo.dirstate.copies()
504 is_exec = util.execfunc(self._repo.root,
512 is_exec = util.execfunc(self._repo.root,
505 lambda p: man.execf(copied.get(p,p)))
513 lambda p: man.execf(copied.get(p,p)))
506 is_link = util.linkfunc(self._repo.root,
514 is_link = util.linkfunc(self._repo.root,
507 lambda p: man.linkf(copied.get(p,p)))
515 lambda p: man.linkf(copied.get(p,p)))
508 modified, added, removed, deleted, unknown = self._status[:5]
516 modified, added, removed, deleted, unknown = self._status[:5]
509 for i, l in (("a", added), ("m", modified), ("u", unknown)):
517 for i, l in (("a", added), ("m", modified), ("u", unknown)):
510 for f in l:
518 for f in l:
511 man[f] = man.get(copied.get(f, f), nullid) + i
519 man[f] = man.get(copied.get(f, f), nullid) + i
512 try:
520 try:
513 man.set(f, is_exec(f), is_link(f))
521 man.set(f, is_exec(f), is_link(f))
514 except OSError:
522 except OSError:
515 pass
523 pass
516
524
517 for f in deleted + removed:
525 for f in deleted + removed:
518 if f in man:
526 if f in man:
519 del man[f]
527 del man[f]
520
528
521 self._manifest = man
529 self._manifest = man
522
530
523 def manifest(self): return self._manifest
531 def manifest(self): return self._manifest
524
532
525 def user(self): return self._user
533 def user(self): return self._user
526 def date(self): return self._date
534 def date(self): return self._date
527 def description(self): return self._text
535 def description(self): return self._text
528 def files(self):
536 def files(self):
529 f = self.modified() + self.added() + self.removed()
537 f = self.modified() + self.added() + self.removed()
530 f.sort()
538 f.sort()
531 return f
539 return f
532
540
533 def modified(self): return self._status[0]
541 def modified(self): return self._status[0]
534 def added(self): return self._status[1]
542 def added(self): return self._status[1]
535 def removed(self): return self._status[2]
543 def removed(self): return self._status[2]
536 def deleted(self): return self._status[3]
544 def deleted(self): return self._status[3]
537 def unknown(self): return self._status[4]
545 def unknown(self): return self._status[4]
538 def clean(self): return self._status[5]
546 def clean(self): return self._status[5]
539 def branch(self): return self._extra['branch']
547 def branch(self): return self._extra['branch']
540 def extra(self): return self._extra
548 def extra(self): return self._extra
541
549
542 def tags(self):
550 def tags(self):
543 t = []
551 t = []
544 [t.extend(p.tags()) for p in self.parents()]
552 [t.extend(p.tags()) for p in self.parents()]
545 return t
553 return t
546
554
547 def parents(self):
548 """return contexts for each parent changeset"""
549 return self._parents
550
551 def children(self):
555 def children(self):
552 return []
556 return []
553
557
554 def fileflags(self, path):
558 def fileflags(self, path):
555 if '_manifest' in self.__dict__:
559 if '_manifest' in self.__dict__:
556 try:
560 try:
557 return self._manifest.flags(path)
561 return self._manifest.flags(path)
558 except KeyError:
562 except KeyError:
559 return ''
563 return ''
560
564
561 pnode = self._parents[0].changeset()[0]
565 pnode = self._parents[0].changeset()[0]
562 orig = self._repo.dirstate.copies().get(path, path)
566 orig = self._repo.dirstate.copies().get(path, path)
563 node, flag = self._repo.manifest.find(pnode, orig)
567 node, flag = self._repo.manifest.find(pnode, orig)
564 is_link = util.linkfunc(self._repo.root,
568 is_link = util.linkfunc(self._repo.root,
565 lambda p: flag and 'l' in flag)
569 lambda p: flag and 'l' in flag)
566 is_exec = util.execfunc(self._repo.root,
570 is_exec = util.execfunc(self._repo.root,
567 lambda p: flag and 'x' in flag)
571 lambda p: flag and 'x' in flag)
568 try:
572 try:
569 return (is_link(path) and 'l' or '') + (is_exec(path) and 'x' or '')
573 return (is_link(path) and 'l' or '') + (is_exec(path) and 'x' or '')
570 except OSError:
574 except OSError:
571 pass
575 pass
572
576
573 if not node or path in self.deleted() or path in self.removed():
577 if not node or path in self.deleted() or path in self.removed():
574 return ''
578 return ''
575 return flag
579 return flag
576
580
577 def filectx(self, path, filelog=None):
581 def filectx(self, path, filelog=None):
578 """get a file context from the working directory"""
582 """get a file context from the working directory"""
579 return workingfilectx(self._repo, path, workingctx=self,
583 return workingfilectx(self._repo, path, workingctx=self,
580 filelog=filelog)
584 filelog=filelog)
581
585
582 def ancestor(self, c2):
586 def ancestor(self, c2):
583 """return the ancestor context of self and c2"""
587 """return the ancestor context of self and c2"""
584 return self._parents[0].ancestor(c2) # punt on two parents for now
588 return self._parents[0].ancestor(c2) # punt on two parents for now
585
589
586 class workingfilectx(filectx):
590 class workingfilectx(filectx):
587 """A workingfilectx object makes access to data related to a particular
591 """A workingfilectx object makes access to data related to a particular
588 file in the working directory convenient."""
592 file in the working directory convenient."""
589 def __init__(self, repo, path, filelog=None, workingctx=None):
593 def __init__(self, repo, path, filelog=None, workingctx=None):
590 """changeid can be a changeset revision, node, or tag.
594 """changeid can be a changeset revision, node, or tag.
591 fileid can be a file revision or node."""
595 fileid can be a file revision or node."""
592 self._repo = repo
596 self._repo = repo
593 self._path = path
597 self._path = path
594 self._changeid = None
598 self._changeid = None
595 self._filerev = self._filenode = None
599 self._filerev = self._filenode = None
596
600
597 if filelog:
601 if filelog:
598 self._filelog = filelog
602 self._filelog = filelog
599 if workingctx:
603 if workingctx:
600 self._changectx = workingctx
604 self._changectx = workingctx
601
605
602 def __getattr__(self, name):
606 def __getattr__(self, name):
603 if name == '_changectx':
607 if name == '_changectx':
604 self._changectx = workingctx(self._repo)
608 self._changectx = workingctx(self._repo)
605 return self._changectx
609 return self._changectx
606 elif name == '_repopath':
610 elif name == '_repopath':
607 self._repopath = (self._repo.dirstate.copied(self._path)
611 self._repopath = (self._repo.dirstate.copied(self._path)
608 or self._path)
612 or self._path)
609 return self._repopath
613 return self._repopath
610 elif name == '_filelog':
614 elif name == '_filelog':
611 self._filelog = self._repo.file(self._repopath)
615 self._filelog = self._repo.file(self._repopath)
612 return self._filelog
616 return self._filelog
613 else:
617 else:
614 raise AttributeError, name
618 raise AttributeError, name
615
619
616 def __nonzero__(self):
620 def __nonzero__(self):
617 return True
621 return True
618
622
619 def __str__(self):
623 def __str__(self):
620 return "%s@%s" % (self.path(), self._changectx)
624 return "%s@%s" % (self.path(), self._changectx)
621
625
622 def filectx(self, fileid):
626 def filectx(self, fileid):
623 '''opens an arbitrary revision of the file without
627 '''opens an arbitrary revision of the file without
624 opening a new filelog'''
628 opening a new filelog'''
625 return filectx(self._repo, self._repopath, fileid=fileid,
629 return filectx(self._repo, self._repopath, fileid=fileid,
626 filelog=self._filelog)
630 filelog=self._filelog)
627
631
628 def rev(self):
632 def rev(self):
629 if '_changectx' in self.__dict__:
633 if '_changectx' in self.__dict__:
630 return self._changectx.rev()
634 return self._changectx.rev()
631 return self._filelog.linkrev(self._filenode)
635 return self._filelog.linkrev(self._filenode)
632
636
633 def data(self): return self._repo.wread(self._path)
637 def data(self): return self._repo.wread(self._path)
634 def renamed(self):
638 def renamed(self):
635 rp = self._repopath
639 rp = self._repopath
636 if rp == self._path:
640 if rp == self._path:
637 return None
641 return None
638 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
642 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
639
643
640 def parents(self):
644 def parents(self):
641 '''return parent filectxs, following copies if necessary'''
645 '''return parent filectxs, following copies if necessary'''
642 p = self._path
646 p = self._path
643 rp = self._repopath
647 rp = self._repopath
644 pcl = self._changectx._parents
648 pcl = self._changectx._parents
645 fl = self._filelog
649 fl = self._filelog
646 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
650 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
647 if len(pcl) > 1:
651 if len(pcl) > 1:
648 if rp != p:
652 if rp != p:
649 fl = None
653 fl = None
650 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
654 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
651
655
652 return [filectx(self._repo, p, fileid=n, filelog=l)
656 return [filectx(self._repo, p, fileid=n, filelog=l)
653 for p,n,l in pl if n != nullid]
657 for p,n,l in pl if n != nullid]
654
658
655 def children(self):
659 def children(self):
656 return []
660 return []
657
661
658 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
662 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
659 def date(self):
663 def date(self):
660 t, tz = self._changectx.date()
664 t, tz = self._changectx.date()
661 try:
665 try:
662 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
666 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
663 except OSError, err:
667 except OSError, err:
664 if err.errno != errno.ENOENT: raise
668 if err.errno != errno.ENOENT: raise
665 return (t, tz)
669 return (t, tz)
666
670
667 def cmp(self, text): return self._repo.wread(self._path) == text
671 def cmp(self, text): return self._repo.wread(self._path) == text
668
672
669 class memctx(object):
673 class memctx(object):
670 """A memctx is a subset of changectx supposed to be built on memory
674 """A memctx is a subset of changectx supposed to be built on memory
671 and passed to commit functions.
675 and passed to commit functions.
672
676
673 NOTE: this interface and the related memfilectx are experimental and
677 NOTE: this interface and the related memfilectx are experimental and
674 may change without notice.
678 may change without notice.
675
679
676 parents - a pair of parent nodeids.
680 parents - a pair of parent nodeids.
677 filectxfn - a callable taking (repo, memctx, path) arguments and
681 filectxfn - a callable taking (repo, memctx, path) arguments and
678 returning a memctx object.
682 returning a memctx object.
679 date - any valid date string or (unixtime, offset), or None.
683 date - any valid date string or (unixtime, offset), or None.
680 user - username string, or None.
684 user - username string, or None.
681 extra - a dictionary of extra values, or None.
685 extra - a dictionary of extra values, or None.
682 """
686 """
683 def __init__(self, repo, parents, text, files, filectxfn, user=None,
687 def __init__(self, repo, parents, text, files, filectxfn, user=None,
684 date=None, extra=None):
688 date=None, extra=None):
685 self._repo = repo
689 self._repo = repo
686 self._rev = None
690 self._rev = None
687 self._node = None
691 self._node = None
688 self._text = text
692 self._text = text
689 self._date = date and util.parsedate(date) or util.makedate()
693 self._date = date and util.parsedate(date) or util.makedate()
690 self._user = user or self._repo.ui.username()
694 self._user = user or self._repo.ui.username()
691 parents = [(p or nullid) for p in parents]
695 parents = [(p or nullid) for p in parents]
692 p1, p2 = parents
696 p1, p2 = parents
693 self._parents = [self._repo.changectx(p) for p in (p1, p2)]
697 self._parents = [self._repo.changectx(p) for p in (p1, p2)]
694 files = list(files)
698 files = list(files)
695 files.sort()
699 files.sort()
696 self._status = [files, [], [], [], []]
700 self._status = [files, [], [], [], []]
697 self._filectxfn = filectxfn
701 self._filectxfn = filectxfn
698
702
699 self._extra = extra and extra.copy() or {}
703 self._extra = extra and extra.copy() or {}
700 if 'branch' not in self._extra:
704 if 'branch' not in self._extra:
701 self._extra['branch'] = 'default'
705 self._extra['branch'] = 'default'
702 elif self._extra.get('branch') == '':
706 elif self._extra.get('branch') == '':
703 self._extra['branch'] = 'default'
707 self._extra['branch'] = 'default'
704
708
705 def __str__(self):
709 def __str__(self):
706 return str(self._parents[0]) + "+"
710 return str(self._parents[0]) + "+"
707
711
708 def __nonzero__(self):
712 def __nonzero__(self):
709 return True
713 return True
710
714
711 def user(self): return self._user
715 def user(self): return self._user
712 def date(self): return self._date
716 def date(self): return self._date
713 def description(self): return self._text
717 def description(self): return self._text
714 def files(self): return self.modified()
718 def files(self): return self.modified()
715 def modified(self): return self._status[0]
719 def modified(self): return self._status[0]
716 def added(self): return self._status[1]
720 def added(self): return self._status[1]
717 def removed(self): return self._status[2]
721 def removed(self): return self._status[2]
718 def deleted(self): return self._status[3]
722 def deleted(self): return self._status[3]
719 def unknown(self): return self._status[4]
723 def unknown(self): return self._status[4]
720 def clean(self): return self._status[5]
724 def clean(self): return self._status[5]
721 def branch(self): return self._extra['branch']
725 def branch(self): return self._extra['branch']
722 def extra(self): return self._extra
726 def extra(self): return self._extra
723
727
724 def parents(self):
728 def parents(self):
725 """return contexts for each parent changeset"""
729 """return contexts for each parent changeset"""
726 return self._parents
730 return self._parents
727
731
728 def filectx(self, path, filelog=None):
732 def filectx(self, path, filelog=None):
729 """get a file context from the working directory"""
733 """get a file context from the working directory"""
730 return self._filectxfn(self._repo, self, path)
734 return self._filectxfn(self._repo, self, path)
731
735
732 class memfilectx(object):
736 class memfilectx(object):
733 """A memfilectx is a subset of filectx supposed to be built by client
737 """A memfilectx is a subset of filectx supposed to be built by client
734 code and passed to commit functions.
738 code and passed to commit functions.
735 """
739 """
736 def __init__(self, path, data, islink, isexec, copied):
740 def __init__(self, path, data, islink, isexec, copied):
737 """copied is the source file path, or None."""
741 """copied is the source file path, or None."""
738 self._path = path
742 self._path = path
739 self._data = data
743 self._data = data
740 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
744 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
741 self._copied = None
745 self._copied = None
742 if copied:
746 if copied:
743 self._copied = (copied, nullid)
747 self._copied = (copied, nullid)
744
748
745 def __nonzero__(self): return True
749 def __nonzero__(self): return True
746 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
750 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
747 def path(self): return self._path
751 def path(self): return self._path
748 def data(self): return self._data
752 def data(self): return self._data
749 def fileflags(self): return self._flags
753 def fileflags(self): return self._flags
750 def isexec(self): return 'x' in self._flags
754 def isexec(self): return 'x' in self._flags
751 def islink(self): return 'l' in self._flags
755 def islink(self): return 'l' in self._flags
752 def renamed(self): return self._copied
756 def renamed(self): return self._copied
753
757
@@ -1,2141 +1,2132 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui
12 import lock, transaction, stat, errno, ui
13 import os, revlog, time, util, extensions, hook, inspect
13 import os, revlog, time, util, extensions, hook, inspect
14 import match as match_
14 import match as match_
15
15
16 class localrepository(repo.repository):
16 class localrepository(repo.repository):
17 capabilities = util.set(('lookup', 'changegroupsubset'))
17 capabilities = util.set(('lookup', 'changegroupsubset'))
18 supported = ('revlogv1', 'store')
18 supported = ('revlogv1', 'store')
19
19
20 def __init__(self, parentui, path=None, create=0):
20 def __init__(self, parentui, path=None, create=0):
21 repo.repository.__init__(self)
21 repo.repository.__init__(self)
22 self.root = os.path.realpath(path)
22 self.root = os.path.realpath(path)
23 self.path = os.path.join(self.root, ".hg")
23 self.path = os.path.join(self.root, ".hg")
24 self.origroot = path
24 self.origroot = path
25 self.opener = util.opener(self.path)
25 self.opener = util.opener(self.path)
26 self.wopener = util.opener(self.root)
26 self.wopener = util.opener(self.root)
27
27
28 if not os.path.isdir(self.path):
28 if not os.path.isdir(self.path):
29 if create:
29 if create:
30 if not os.path.exists(path):
30 if not os.path.exists(path):
31 os.mkdir(path)
31 os.mkdir(path)
32 os.mkdir(self.path)
32 os.mkdir(self.path)
33 requirements = ["revlogv1"]
33 requirements = ["revlogv1"]
34 if parentui.configbool('format', 'usestore', True):
34 if parentui.configbool('format', 'usestore', True):
35 os.mkdir(os.path.join(self.path, "store"))
35 os.mkdir(os.path.join(self.path, "store"))
36 requirements.append("store")
36 requirements.append("store")
37 # create an invalid changelog
37 # create an invalid changelog
38 self.opener("00changelog.i", "a").write(
38 self.opener("00changelog.i", "a").write(
39 '\0\0\0\2' # represents revlogv2
39 '\0\0\0\2' # represents revlogv2
40 ' dummy changelog to prevent using the old repo layout'
40 ' dummy changelog to prevent using the old repo layout'
41 )
41 )
42 reqfile = self.opener("requires", "w")
42 reqfile = self.opener("requires", "w")
43 for r in requirements:
43 for r in requirements:
44 reqfile.write("%s\n" % r)
44 reqfile.write("%s\n" % r)
45 reqfile.close()
45 reqfile.close()
46 else:
46 else:
47 raise repo.RepoError(_("repository %s not found") % path)
47 raise repo.RepoError(_("repository %s not found") % path)
48 elif create:
48 elif create:
49 raise repo.RepoError(_("repository %s already exists") % path)
49 raise repo.RepoError(_("repository %s already exists") % path)
50 else:
50 else:
51 # find requirements
51 # find requirements
52 try:
52 try:
53 requirements = self.opener("requires").read().splitlines()
53 requirements = self.opener("requires").read().splitlines()
54 except IOError, inst:
54 except IOError, inst:
55 if inst.errno != errno.ENOENT:
55 if inst.errno != errno.ENOENT:
56 raise
56 raise
57 requirements = []
57 requirements = []
58 # check them
58 # check them
59 for r in requirements:
59 for r in requirements:
60 if r not in self.supported:
60 if r not in self.supported:
61 raise repo.RepoError(_("requirement '%s' not supported") % r)
61 raise repo.RepoError(_("requirement '%s' not supported") % r)
62
62
63 # setup store
63 # setup store
64 if "store" in requirements:
64 if "store" in requirements:
65 self.encodefn = util.encodefilename
65 self.encodefn = util.encodefilename
66 self.decodefn = util.decodefilename
66 self.decodefn = util.decodefilename
67 self.spath = os.path.join(self.path, "store")
67 self.spath = os.path.join(self.path, "store")
68 else:
68 else:
69 self.encodefn = lambda x: x
69 self.encodefn = lambda x: x
70 self.decodefn = lambda x: x
70 self.decodefn = lambda x: x
71 self.spath = self.path
71 self.spath = self.path
72
72
73 try:
73 try:
74 # files in .hg/ will be created using this mode
74 # files in .hg/ will be created using this mode
75 mode = os.stat(self.spath).st_mode
75 mode = os.stat(self.spath).st_mode
76 # avoid some useless chmods
76 # avoid some useless chmods
77 if (0777 & ~util._umask) == (0777 & mode):
77 if (0777 & ~util._umask) == (0777 & mode):
78 mode = None
78 mode = None
79 except OSError:
79 except OSError:
80 mode = None
80 mode = None
81
81
82 self._createmode = mode
82 self._createmode = mode
83 self.opener.createmode = mode
83 self.opener.createmode = mode
84 sopener = util.opener(self.spath)
84 sopener = util.opener(self.spath)
85 sopener.createmode = mode
85 sopener.createmode = mode
86 self.sopener = util.encodedopener(sopener, self.encodefn)
86 self.sopener = util.encodedopener(sopener, self.encodefn)
87
87
88 self.ui = ui.ui(parentui=parentui)
88 self.ui = ui.ui(parentui=parentui)
89 try:
89 try:
90 self.ui.readconfig(self.join("hgrc"), self.root)
90 self.ui.readconfig(self.join("hgrc"), self.root)
91 extensions.loadall(self.ui)
91 extensions.loadall(self.ui)
92 except IOError:
92 except IOError:
93 pass
93 pass
94
94
95 self.tagscache = None
95 self.tagscache = None
96 self._tagstypecache = None
96 self._tagstypecache = None
97 self.branchcache = None
97 self.branchcache = None
98 self._ubranchcache = None # UTF-8 version of branchcache
98 self._ubranchcache = None # UTF-8 version of branchcache
99 self._branchcachetip = None
99 self._branchcachetip = None
100 self.nodetagscache = None
100 self.nodetagscache = None
101 self.filterpats = {}
101 self.filterpats = {}
102 self._datafilters = {}
102 self._datafilters = {}
103 self._transref = self._lockref = self._wlockref = None
103 self._transref = self._lockref = self._wlockref = None
104
104
105 def __getattr__(self, name):
105 def __getattr__(self, name):
106 if name == 'changelog':
106 if name == 'changelog':
107 self.changelog = changelog.changelog(self.sopener)
107 self.changelog = changelog.changelog(self.sopener)
108 self.sopener.defversion = self.changelog.version
108 self.sopener.defversion = self.changelog.version
109 return self.changelog
109 return self.changelog
110 if name == 'manifest':
110 if name == 'manifest':
111 self.changelog
111 self.changelog
112 self.manifest = manifest.manifest(self.sopener)
112 self.manifest = manifest.manifest(self.sopener)
113 return self.manifest
113 return self.manifest
114 if name == 'dirstate':
114 if name == 'dirstate':
115 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
115 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
116 return self.dirstate
116 return self.dirstate
117 else:
117 else:
118 raise AttributeError, name
118 raise AttributeError, name
119
119
120 def url(self):
120 def url(self):
121 return 'file:' + self.root
121 return 'file:' + self.root
122
122
123 def hook(self, name, throw=False, **args):
123 def hook(self, name, throw=False, **args):
124 return hook.hook(self.ui, self, name, throw, **args)
124 return hook.hook(self.ui, self, name, throw, **args)
125
125
126 tag_disallowed = ':\r\n'
126 tag_disallowed = ':\r\n'
127
127
128 def _tag(self, names, node, message, local, user, date, parent=None,
128 def _tag(self, names, node, message, local, user, date, parent=None,
129 extra={}):
129 extra={}):
130 use_dirstate = parent is None
130 use_dirstate = parent is None
131
131
132 if isinstance(names, str):
132 if isinstance(names, str):
133 allchars = names
133 allchars = names
134 names = (names,)
134 names = (names,)
135 else:
135 else:
136 allchars = ''.join(names)
136 allchars = ''.join(names)
137 for c in self.tag_disallowed:
137 for c in self.tag_disallowed:
138 if c in allchars:
138 if c in allchars:
139 raise util.Abort(_('%r cannot be used in a tag name') % c)
139 raise util.Abort(_('%r cannot be used in a tag name') % c)
140
140
141 for name in names:
141 for name in names:
142 self.hook('pretag', throw=True, node=hex(node), tag=name,
142 self.hook('pretag', throw=True, node=hex(node), tag=name,
143 local=local)
143 local=local)
144
144
145 def writetags(fp, names, munge, prevtags):
145 def writetags(fp, names, munge, prevtags):
146 fp.seek(0, 2)
146 fp.seek(0, 2)
147 if prevtags and prevtags[-1] != '\n':
147 if prevtags and prevtags[-1] != '\n':
148 fp.write('\n')
148 fp.write('\n')
149 for name in names:
149 for name in names:
150 m = munge and munge(name) or name
150 m = munge and munge(name) or name
151 if self._tagstypecache and name in self._tagstypecache:
151 if self._tagstypecache and name in self._tagstypecache:
152 old = self.tagscache.get(name, nullid)
152 old = self.tagscache.get(name, nullid)
153 fp.write('%s %s\n' % (hex(old), m))
153 fp.write('%s %s\n' % (hex(old), m))
154 fp.write('%s %s\n' % (hex(node), m))
154 fp.write('%s %s\n' % (hex(node), m))
155 fp.close()
155 fp.close()
156
156
157 prevtags = ''
157 prevtags = ''
158 if local:
158 if local:
159 try:
159 try:
160 fp = self.opener('localtags', 'r+')
160 fp = self.opener('localtags', 'r+')
161 except IOError, err:
161 except IOError, err:
162 fp = self.opener('localtags', 'a')
162 fp = self.opener('localtags', 'a')
163 else:
163 else:
164 prevtags = fp.read()
164 prevtags = fp.read()
165
165
166 # local tags are stored in the current charset
166 # local tags are stored in the current charset
167 writetags(fp, names, None, prevtags)
167 writetags(fp, names, None, prevtags)
168 for name in names:
168 for name in names:
169 self.hook('tag', node=hex(node), tag=name, local=local)
169 self.hook('tag', node=hex(node), tag=name, local=local)
170 return
170 return
171
171
172 if use_dirstate:
172 if use_dirstate:
173 try:
173 try:
174 fp = self.wfile('.hgtags', 'rb+')
174 fp = self.wfile('.hgtags', 'rb+')
175 except IOError, err:
175 except IOError, err:
176 fp = self.wfile('.hgtags', 'ab')
176 fp = self.wfile('.hgtags', 'ab')
177 else:
177 else:
178 prevtags = fp.read()
178 prevtags = fp.read()
179 else:
179 else:
180 try:
180 try:
181 prevtags = self.filectx('.hgtags', parent).data()
181 prevtags = self.filectx('.hgtags', parent).data()
182 except revlog.LookupError:
182 except revlog.LookupError:
183 pass
183 pass
184 fp = self.wfile('.hgtags', 'wb')
184 fp = self.wfile('.hgtags', 'wb')
185 if prevtags:
185 if prevtags:
186 fp.write(prevtags)
186 fp.write(prevtags)
187
187
188 # committed tags are stored in UTF-8
188 # committed tags are stored in UTF-8
189 writetags(fp, names, util.fromlocal, prevtags)
189 writetags(fp, names, util.fromlocal, prevtags)
190
190
191 if use_dirstate and '.hgtags' not in self.dirstate:
191 if use_dirstate and '.hgtags' not in self.dirstate:
192 self.add(['.hgtags'])
192 self.add(['.hgtags'])
193
193
194 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
194 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
195 extra=extra)
195 extra=extra)
196
196
197 for name in names:
197 for name in names:
198 self.hook('tag', node=hex(node), tag=name, local=local)
198 self.hook('tag', node=hex(node), tag=name, local=local)
199
199
200 return tagnode
200 return tagnode
201
201
202 def tag(self, names, node, message, local, user, date):
202 def tag(self, names, node, message, local, user, date):
203 '''tag a revision with one or more symbolic names.
203 '''tag a revision with one or more symbolic names.
204
204
205 names is a list of strings or, when adding a single tag, names may be a
205 names is a list of strings or, when adding a single tag, names may be a
206 string.
206 string.
207
207
208 if local is True, the tags are stored in a per-repository file.
208 if local is True, the tags are stored in a per-repository file.
209 otherwise, they are stored in the .hgtags file, and a new
209 otherwise, they are stored in the .hgtags file, and a new
210 changeset is committed with the change.
210 changeset is committed with the change.
211
211
212 keyword arguments:
212 keyword arguments:
213
213
214 local: whether to store tags in non-version-controlled file
214 local: whether to store tags in non-version-controlled file
215 (default False)
215 (default False)
216
216
217 message: commit message to use if committing
217 message: commit message to use if committing
218
218
219 user: name of user to use if committing
219 user: name of user to use if committing
220
220
221 date: date tuple to use if committing'''
221 date: date tuple to use if committing'''
222
222
223 for x in self.status()[:5]:
223 for x in self.status()[:5]:
224 if '.hgtags' in x:
224 if '.hgtags' in x:
225 raise util.Abort(_('working copy of .hgtags is changed '
225 raise util.Abort(_('working copy of .hgtags is changed '
226 '(please commit .hgtags manually)'))
226 '(please commit .hgtags manually)'))
227
227
228 self._tag(names, node, message, local, user, date)
228 self._tag(names, node, message, local, user, date)
229
229
230 def tags(self):
230 def tags(self):
231 '''return a mapping of tag to node'''
231 '''return a mapping of tag to node'''
232 if self.tagscache:
232 if self.tagscache:
233 return self.tagscache
233 return self.tagscache
234
234
235 globaltags = {}
235 globaltags = {}
236 tagtypes = {}
236 tagtypes = {}
237
237
238 def readtags(lines, fn, tagtype):
238 def readtags(lines, fn, tagtype):
239 filetags = {}
239 filetags = {}
240 count = 0
240 count = 0
241
241
242 def warn(msg):
242 def warn(msg):
243 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
243 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
244
244
245 for l in lines:
245 for l in lines:
246 count += 1
246 count += 1
247 if not l:
247 if not l:
248 continue
248 continue
249 s = l.split(" ", 1)
249 s = l.split(" ", 1)
250 if len(s) != 2:
250 if len(s) != 2:
251 warn(_("cannot parse entry"))
251 warn(_("cannot parse entry"))
252 continue
252 continue
253 node, key = s
253 node, key = s
254 key = util.tolocal(key.strip()) # stored in UTF-8
254 key = util.tolocal(key.strip()) # stored in UTF-8
255 try:
255 try:
256 bin_n = bin(node)
256 bin_n = bin(node)
257 except TypeError:
257 except TypeError:
258 warn(_("node '%s' is not well formed") % node)
258 warn(_("node '%s' is not well formed") % node)
259 continue
259 continue
260 if bin_n not in self.changelog.nodemap:
260 if bin_n not in self.changelog.nodemap:
261 warn(_("tag '%s' refers to unknown node") % key)
261 warn(_("tag '%s' refers to unknown node") % key)
262 continue
262 continue
263
263
264 h = []
264 h = []
265 if key in filetags:
265 if key in filetags:
266 n, h = filetags[key]
266 n, h = filetags[key]
267 h.append(n)
267 h.append(n)
268 filetags[key] = (bin_n, h)
268 filetags[key] = (bin_n, h)
269
269
270 for k, nh in filetags.items():
270 for k, nh in filetags.items():
271 if k not in globaltags:
271 if k not in globaltags:
272 globaltags[k] = nh
272 globaltags[k] = nh
273 tagtypes[k] = tagtype
273 tagtypes[k] = tagtype
274 continue
274 continue
275
275
276 # we prefer the global tag if:
276 # we prefer the global tag if:
277 # it supercedes us OR
277 # it supercedes us OR
278 # mutual supercedes and it has a higher rank
278 # mutual supercedes and it has a higher rank
279 # otherwise we win because we're tip-most
279 # otherwise we win because we're tip-most
280 an, ah = nh
280 an, ah = nh
281 bn, bh = globaltags[k]
281 bn, bh = globaltags[k]
282 if (bn != an and an in bh and
282 if (bn != an and an in bh and
283 (bn not in ah or len(bh) > len(ah))):
283 (bn not in ah or len(bh) > len(ah))):
284 an = bn
284 an = bn
285 ah.extend([n for n in bh if n not in ah])
285 ah.extend([n for n in bh if n not in ah])
286 globaltags[k] = an, ah
286 globaltags[k] = an, ah
287 tagtypes[k] = tagtype
287 tagtypes[k] = tagtype
288
288
289 # read the tags file from each head, ending with the tip
289 # read the tags file from each head, ending with the tip
290 f = None
290 f = None
291 for rev, node, fnode in self._hgtagsnodes():
291 for rev, node, fnode in self._hgtagsnodes():
292 f = (f and f.filectx(fnode) or
292 f = (f and f.filectx(fnode) or
293 self.filectx('.hgtags', fileid=fnode))
293 self.filectx('.hgtags', fileid=fnode))
294 readtags(f.data().splitlines(), f, "global")
294 readtags(f.data().splitlines(), f, "global")
295
295
296 try:
296 try:
297 data = util.fromlocal(self.opener("localtags").read())
297 data = util.fromlocal(self.opener("localtags").read())
298 # localtags are stored in the local character set
298 # localtags are stored in the local character set
299 # while the internal tag table is stored in UTF-8
299 # while the internal tag table is stored in UTF-8
300 readtags(data.splitlines(), "localtags", "local")
300 readtags(data.splitlines(), "localtags", "local")
301 except IOError:
301 except IOError:
302 pass
302 pass
303
303
304 self.tagscache = {}
304 self.tagscache = {}
305 self._tagstypecache = {}
305 self._tagstypecache = {}
306 for k,nh in globaltags.items():
306 for k,nh in globaltags.items():
307 n = nh[0]
307 n = nh[0]
308 if n != nullid:
308 if n != nullid:
309 self.tagscache[k] = n
309 self.tagscache[k] = n
310 self._tagstypecache[k] = tagtypes[k]
310 self._tagstypecache[k] = tagtypes[k]
311 self.tagscache['tip'] = self.changelog.tip()
311 self.tagscache['tip'] = self.changelog.tip()
312 return self.tagscache
312 return self.tagscache
313
313
def tagtype(self, tagname):
    '''
    return the type of the given tag. result can be:

    'local'  : a local tag
    'global' : a global tag
    None     : tag does not exist
    '''

    # populate the tag type cache as a side effect of computing tags
    self.tags()
    cache = self._tagstypecache
    return cache.get(tagname)
326
326
def _hgtagsnodes(self):
    """Return (rev, node, .hgtags filenode) tuples, one per head that has
    a .hgtags file, oldest head first; when several heads share the same
    .hgtags filenode only the newest entry is kept."""
    seen = {}   # .hgtags filenode -> index of its latest entry in found
    found = []
    for head in reversed(self.heads()):
        ctx = self.changectx(head)
        try:
            fnode = ctx.filenode('.hgtags')
        except revlog.LookupError:
            # this head has no .hgtags file
            continue
        found.append((ctx.rev(), head, fnode))
        if fnode in seen:
            # drop the older duplicate for this filenode
            found[seen[fnode]] = None
        seen[fnode] = len(found) - 1
    return [entry for entry in found if entry]
344
344
def tagslist(self):
    '''return a list of (tag, node) pairs ordered by revision'''
    def revof(node):
        try:
            return self.changelog.rev(node)
        except:
            return -2  # unknown nodes sort to the beginning of the list
    decorated = [(revof(n), t, n) for t, n in self.tags().items()]
    decorated.sort()
    return [(t, n) for r, t, n in decorated]
356
356
def nodetags(self, node):
    '''return the list of tags pointing at the given node'''
    if not self.nodetagscache:
        # build the reverse mapping node -> [tags] lazily
        cache = {}
        for tag, tagged in self.tags().items():
            cache.setdefault(tagged, []).append(tag)
        self.nodetagscache = cache
    return self.nodetagscache.get(node, [])
364
364
365 def _branchtags(self, partial, lrev):
365 def _branchtags(self, partial, lrev):
366 tiprev = self.changelog.count() - 1
366 tiprev = self.changelog.count() - 1
367 if lrev != tiprev:
367 if lrev != tiprev:
368 self._updatebranchcache(partial, lrev+1, tiprev+1)
368 self._updatebranchcache(partial, lrev+1, tiprev+1)
369 self._writebranchcache(partial, self.changelog.tip(), tiprev)
369 self._writebranchcache(partial, self.changelog.tip(), tiprev)
370
370
371 return partial
371 return partial
372
372
def branchtags(self):
    """Return a dict mapping branch name (local encoding) -> tip node.

    The cached dict is reused as long as the changelog tip is unchanged;
    otherwise the cache is refreshed incrementally from the last point it
    was known to be valid for.
    """
    tip = self.changelog.tip()
    if self.branchcache is not None and self._branchcachetip == tip:
        return self.branchcache

    oldtip = self._branchcachetip
    self._branchcachetip = tip
    if self.branchcache is None:
        self.branchcache = {} # avoid recursion in changectx
    else:
        self.branchcache.clear() # keep using the same dict
    if oldtip is None or oldtip not in self.changelog.nodemap:
        # no usable previous state (first run, or the old tip was
        # stripped): start from the on-disk cache
        partial, last, lrev = self._readbranchcache()
    else:
        # incremental update from the previously cached tip
        lrev = self.changelog.rev(oldtip)
        partial = self._ubranchcache

    self._branchtags(partial, lrev)

    # the branch cache is stored on disk as UTF-8, but in the local
    # charset internally
    for k, v in partial.items():
        self.branchcache[util.tolocal(k)] = v
    self._ubranchcache = partial
    return self.branchcache
398
398
def _readbranchcache(self):
    """Read branch.cache from disk.

    Returns (partial, last, lrev) where `partial` maps branch name
    (UTF-8) -> tip node and last/lrev identify the changelog entry the
    cache was written against.  Any read or parse problem yields an
    empty cache ({}, nullid, nullrev).
    """
    partial = {}
    try:
        f = self.opener("branch.cache")
        lines = f.read().split('\n')
        f.close()
    except (IOError, OSError):
        return {}, nullid, nullrev

    try:
        # first line is "<tip hex> <tip rev>"
        last, lrev = lines.pop(0).split(" ", 1)
        last, lrev = bin(last), int(lrev)
        if not (lrev < self.changelog.count() and
                self.changelog.node(lrev) == last): # sanity check
            # invalidate the cache
            raise ValueError('invalidating branch cache (tip differs)')
        # remaining lines are "<node hex> <branch name>"
        for l in lines:
            if not l: continue
            node, label = l.split(" ", 1)
            partial[label.strip()] = bin(node)
    except (KeyboardInterrupt, util.SignalInterrupt):
        raise
    except Exception, inst:
        # any other corruption: warn in debug mode and fall back to an
        # empty cache rather than aborting
        if self.ui.debugflag:
            self.ui.warn(str(inst), '\n')
        partial, last, lrev = {}, nullid, nullrev
    return partial, last, lrev
426
426
def _writebranchcache(self, branches, tip, tiprev):
    """Persist the branch cache atomically; write failures are ignored
    (the cache is only an optimization)."""
    try:
        f = self.opener("branch.cache", "w", atomictemp=True)
        # header line, then one "<node hex> <branch>" line per branch
        f.write("%s %s\n" % (hex(tip), tiprev))
        for name, node in branches.iteritems():
            f.write("%s %s\n" % (hex(node), name))
        f.rename()
    except (IOError, OSError):
        pass
436
436
def _updatebranchcache(self, partial, start, end):
    """Fold revisions [start, end) into `partial`, recording the last
    (tip-most) node seen for each branch name."""
    for rev in xrange(start, end):
        ctx = self.changectx(rev)
        partial[ctx.branch()] = ctx.node()
442
442
def lookup(self, key):
    """Resolve `key` to a changelog node.

    Resolution order: '.' (first working-directory parent), 'null',
    exact changelog match, tag name, branch name, then unambiguous node
    prefix.  Raises repo.RepoError when nothing matches.
    """
    if key == '.':
        return self.dirstate.parents()[0]
    elif key == 'null':
        return nullid
    n = self.changelog._match(key)
    if n:
        return n
    if key in self.tags():
        return self.tags()[key]
    if key in self.branchtags():
        return self.branchtags()[key]
    n = self.changelog._partialmatch(key)
    if n:
        return n
    try:
        # show binary nodes as hex in the error message
        if len(key) == 20:
            key = hex(key)
    except:
        pass
    raise repo.RepoError(_("unknown revision '%s'") % key)
464
464
def local(self):
    """Return True: this repository is accessed directly, not through a
    remote protocol."""
    return True
467
467
def join(self, f):
    # join f onto self.path (NOTE(review): presumably the repository's
    # .hg directory -- confirm against __init__)
    return os.path.join(self.path, f)
470
470
def sjoin(self, f):
    """Join f onto the store path, after passing it through the store
    filename encoder."""
    return os.path.join(self.spath, self.encodefn(f))
474
474
def wjoin(self, f):
    # path of f inside the working directory (self.root)
    return os.path.join(self.root, f)
477
477
def rjoin(self, f):
    # like wjoin, but f is first run through util.pconvert
    # (NOTE(review): presumably path-separator normalization -- see util)
    return os.path.join(self.root, util.pconvert(f))
480
480
def file(self, f):
    # return the filelog tracking file f; a leading '/' is stripped
    # (note: an empty name raises IndexError on f[0])
    if f[0] == '/':
        f = f[1:]
    return filelog.filelog(self.sopener, f)
485
485
def changectx(self, changeid):
    """Return the change context for `changeid`; None means the working
    directory."""
    # use identity comparison: `== None` can invoke an arbitrary __eq__
    # on changeid objects, `is None` cannot
    if changeid is None:
        return context.workingctx(self)
    return context.changectx(self, changeid)
490
490
def parents(self, changeid=None):
    '''get list of changectxs for parents of changeid'''
    # delegate to the context object instead of duplicating the
    # dirstate/changelog parent-resolution logic here
    return self.changectx(changeid).parents()
def filectx(self, path, changeid=None, fileid=None):
    """Return a file context for `path`.

    changeid can be a changeset revision, node, or tag.
    fileid can be a file revision or node."""
    return context.filectx(self, path, changeid, fileid)
508
499
def getcwd(self):
    # current working directory handling lives in the dirstate
    return self.dirstate.getcwd()
511
502
def pathto(self, f, cwd=None):
    # render f for display relative to cwd; delegated to the dirstate
    return self.dirstate.pathto(f, cwd)
514
505
def wfile(self, f, mode='r'):
    # open file f from the working directory with the given mode
    return self.wopener(f, mode)
517
508
def _link(self, f):
    # True if working-directory file f is a symbolic link
    return os.path.islink(self.wjoin(f))
520
511
def _filter(self, filter, filename, data):
    """Run `data` for `filename` through the filters configured in the
    config section named by `filter` (e.g. "encode"/"decode"); the first
    filter whose pattern matches `filename` is applied and its result
    returned.  Unfiltered data is returned unchanged.
    """
    if filter not in self.filterpats:
        # compile the (pattern, function, params) list once per section
        l = []
        for pat, cmd in self.ui.configitems(filter):
            mf = util.matcher(self.root, "", [pat], [], [])[1]
            fn = None
            params = cmd
            for name, filterfn in self._datafilters.iteritems():
                if cmd.startswith(name):
                    # registered in-process data filter; the rest of the
                    # command string becomes its parameters
                    fn = filterfn
                    params = cmd[len(name):].lstrip()
                    break
            if not fn:
                # no registered filter: fall back to util.filter (the
                # external command form)
                fn = lambda s, c, **kwargs: util.filter(s, c)
            # Wrap old filters not supporting keyword arguments
            if not inspect.getargspec(fn)[2]:
                oldfn = fn
                fn = lambda s, c, **kwargs: oldfn(s, c)
            l.append((mf, fn, params))
        self.filterpats[filter] = l

    for mf, fn, cmd in self.filterpats[filter]:
        if mf(filename):
            self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
            data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
            break

    return data
549
540
def adddatafilter(self, name, filter):
    # register an in-process data filter; filter-config commands
    # beginning with `name` will use it (see _filter)
    self._datafilters[name] = filter
552
543
def wread(self, filename):
    """Read `filename` from the working directory (the link target for
    symlinks) and apply the configured "encode" filters."""
    if self._link(filename):
        raw = os.readlink(self.wjoin(filename))
    else:
        raw = self.wopener(filename, 'r').read()
    return self._filter("encode", filename, raw)
559
550
def wwrite(self, filename, data, flags):
    """Write `data` to `filename` in the working directory after the
    "decode" filters, then set its flags (e.g. exec/link)."""
    data = self._filter("decode", filename, data)
    try:
        # remove any existing file first
        # (NOTE(review): presumably so hardlinked copies/symlinks are
        # not written through -- confirm)
        os.unlink(self.wjoin(filename))
    except OSError:
        pass
    self.wopener(filename, 'w').write(data)
    util.set_flags(self.wjoin(filename), flags)
568
559
def wwritedata(self, filename, data):
    # return data as it would appear in the working directory, i.e.
    # after the "decode" filters, without writing anything
    return self._filter("decode", filename, data)
571
562
def transaction(self):
    """Open a repository transaction and return it; if one is already
    active, return a nested transaction instead."""
    if self._transref and self._transref():
        return self._transref().nest()

    # abort here if the journal already exists
    if os.path.exists(self.sjoin("journal")):
        raise repo.RepoError(_("journal already exists - run hg recover"))

    # save dirstate for rollback
    try:
        ds = self.opener("dirstate").read()
    except IOError:
        ds = ""
    self.opener("journal.dirstate", "w").write(ds)
    self.opener("journal.branch", "w").write(self.dirstate.branch())

    # journal.* files become the undo.* files when the transaction
    # completes (NOTE(review): aftertrans presumably performs these
    # renames on close -- confirm against aftertrans)
    renames = [(self.sjoin("journal"), self.sjoin("undo")),
               (self.join("journal.dirstate"), self.join("undo.dirstate")),
               (self.join("journal.branch"), self.join("undo.branch"))]
    tr = transaction.transaction(self.ui.warn, self.sopener,
                                 self.sjoin("journal"),
                                 aftertrans(renames),
                                 self._createmode)
    # keep only a weak reference so the transaction object can be
    # collected (and thus aborted) when the caller drops it
    self._transref = weakref.ref(tr)
    return tr
597
588
def recover(self):
    """Roll back an interrupted transaction; return True if one was
    found and rolled back, False otherwise."""
    l = self.lock()
    try:
        journal = self.sjoin("journal")
        if not os.path.exists(journal):
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
        self.ui.status(_("rolling back interrupted transaction\n"))
        transaction.rollback(self.sopener, journal)
        # caches now describe a stale changelog; drop them
        self.invalidate()
        return True
    finally:
        del l
611
602
def rollback(self):
    """Undo the last completed transaction, restoring the dirstate and
    branch saved in the undo.* files."""
    wlock = lock = None
    try:
        wlock = self.wlock()
        lock = self.lock()
        if os.path.exists(self.sjoin("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("undo"))
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            try:
                branch = self.opener("undo.branch").read()
                self.dirstate.setbranch(branch)
            except IOError:
                # undo.branch missing: keep the current branch
                self.ui.warn(_("Named branch could not be reset, "
                               "current branch still is: %s\n")
                             % util.tolocal(self.dirstate.branch()))
            # drop all caches derived from the rolled-back changelog
            self.invalidate()
            self.dirstate.invalidate()
        else:
            self.ui.warn(_("no rollback information available\n"))
    finally:
        del lock, wlock
634
625
def invalidate(self):
    """Drop all cached repository state so it is recomputed on the next
    access."""
    # changelog/manifest are lazily recreated once deleted
    for attr in ("changelog", "manifest"):
        if attr in self.__dict__:
            delattr(self, attr)
    # reset every derived cache to its "unloaded" state
    for cache in ("tagscache", "_tagstypecache", "nodetagscache",
                  "branchcache", "_ubranchcache", "_branchcachetip"):
        setattr(self, cache, None)
645
636
def _lock(self, lockname, wait, releasefn, acquirefn, desc):
    """Acquire the lock file `lockname`.

    With wait=False a held lock raises immediately; otherwise we warn
    and retry with the configured ui.timeout (default 600s).  Runs
    `acquirefn` once the lock is held; `releasefn` is passed on to the
    lock object.  Returns the lock.
    """
    try:
        # first attempt: no timeout, fail fast if held
        l = lock.lock(lockname, 0, releasefn, desc=desc)
    except lock.LockHeld, inst:
        if not wait:
            raise
        self.ui.warn(_("waiting for lock on %s held by %r\n") %
                     (desc, inst.locker))
        # default to 600 seconds timeout
        l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                      releasefn, desc=desc)
    if acquirefn:
        acquirefn()
    return l
660
651
def lock(self, wait=True):
    """Return the repository (store) lock, reusing a still-live one.

    On acquire, caches are invalidated (acquirefn=self.invalidate); only
    a weak reference to the lock is kept so dropping it releases it.
    """
    if self._lockref and self._lockref():
        return self._lockref()

    l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                   _('repository %s') % self.origroot)
    self._lockref = weakref.ref(l)
    return l
669
660
def wlock(self, wait=True):
    """Return the working-directory lock, reusing a still-live one.

    On acquire the dirstate is invalidated (reloaded); on release it is
    written out (releasefn=self.dirstate.write).
    """
    if self._wlockref and self._wlockref():
        return self._wlockref()

    l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                   self.dirstate.invalidate, _('working directory of %s') %
                   self.origroot)
    self._wlockref = weakref.ref(l)
    return l
679
670
def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """
    commit an individual file as part of a larger transaction

    Returns the new filelog node (or the existing parent node when the
    file is unmodified).  Appends the filename to `changelist` when a
    new revision is actually recorded.
    """

    fn = fctx.path()
    t = fctx.data()
    fl = self.file(fn)
    # parent file nodes from each manifest (nullid when absent)
    fp1 = manifest1.get(fn, nullid)
    fp2 = manifest2.get(fn, nullid)

    meta = {}
    cp = fctx.renamed()
    if cp and cp[0] != fn:
        cp = cp[0]
        # Mark the new revision of this file as a copy of another
        # file.  This copy data will effectively act as a parent
        # of this new revision.  If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent.  For example:
        #
        # 0 --- 1 --- 3   rev1 changes file foo
        #   \       /     rev2 renames foo to bar and changes it
        #    \- 2 -/      rev3 should have bar with all changes and
        #                 should record that bar descends from
        #                 bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3   rev4 reverts the content change from rev2
        #   \       /     merging rev3 and rev4 should use bar@rev2
        #    \- 2 --- 4   as the merge base
        #
        meta["copy"] = cp
        if not manifest2: # not a branch merge
            meta["copyrev"] = hex(manifest1[cp])
            fp2 = nullid
        elif fp2 != nullid: # copied on remote side
            meta["copyrev"] = hex(manifest1[cp])
        elif fp1 != nullid: # copied on local side, reversed
            meta["copyrev"] = hex(manifest2[cp])
            fp2 = fp1
        elif cp in manifest2: # directory rename on local side
            meta["copyrev"] = hex(manifest2[cp])
        else: # directory rename on remote side
            meta["copyrev"] = hex(manifest1[cp])
        self.ui.debug(_(" %s: copy %s:%s\n") %
                      (fn, cp, meta["copyrev"]))
        fp1 = nullid
    elif fp2 != nullid:
        # is one parent an ancestor of the other?
        fpa = fl.ancestor(fp1, fp2)
        if fpa == fp1:
            fp1, fp2 = fp2, nullid
        elif fpa == fp2:
            fp2 = nullid

    # is the file unmodified from the parent? report existing entry
    if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
        return fp1

    changelist.append(fn)
    return fl.add(t, meta, tr, linkrev, fp1, fp2)
743
734
def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
    """Commit `files` verbatim with explicit parents (defaulting to the
    dirstate parents), allowing an otherwise-empty changeset."""
    # NOTE: the mutable default `extra={}` is shared across calls; it is
    # only read here (and copied downstream), so this is safe but fragile
    if p1 is None:
        p1, p2 = self.dirstate.parents()
    return self.commit(files=files, text=text, user=user, date=date,
                       p1=p1, p2=p2, extra=extra, empty_ok=True)
749
740
def commit(self, files=None, text="", user=None, date=None,
           match=None, force=False, force_editor=False,
           p1=None, p2=None, extra={}, empty_ok=False):
    """Commit changes to the repository.

    With p1 unset (the normal path) the dirstate supplies the parents
    and the change lists; otherwise (rawcommit) the given `files` are
    committed verbatim against the given parents.  Returns whatever
    _commitctx returns (the new node, or None when nothing changed).
    """
    wlock = lock = None
    if files:
        files = util.unique(files)
    try:
        wlock = self.wlock()
        lock = self.lock()
        use_dirstate = (p1 is None) # not rawcommit

        if use_dirstate:
            p1, p2 = self.dirstate.parents()
            update_dirstate = True

            # a partial commit of a merge would silently drop the other
            # parent's changes, so refuse file/pattern limits here
            if (not force and p2 != nullid and
                (match and (match.files() or match.anypats()))):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            if files:
                # explicit file list: classify each file by its
                # dirstate state
                modified, removed = [], []
                for f in files:
                    s = self.dirstate[f]
                    if s in 'nma':
                        modified.append(f)
                    elif s == 'r':
                        removed.append(f)
                    else:
                        self.ui.warn(_("%s not tracked!\n") % f)
                # status tuple shape: (modified, added, removed, ...)
                changes = [modified, [], removed, [], []]
            else:
                changes = self.status(match=match)
        else:
            # rawcommit path: caller supplied the parents
            p1, p2 = p1, p2 or nullid
            update_dirstate = (self.dirstate.parents()[0] == p1)
            changes = [files, [], [], [], []]

        wctx = context.workingctx(self, (p1, p2), text, user, date,
                                  extra, changes)
        return self._commitctx(wctx, force, force_editor, empty_ok,
                               use_dirstate, update_dirstate)
    finally:
        del lock, wlock
794
785
def commitctx(self, ctx):
    """Commit the given (memory) context verbatim: forced, allowing an
    empty changeset and bypassing the dirstate entirely."""
    wlock = lock = None
    try:
        # lock order matters: wlock before lock (matches commit())
        wlock = self.wlock()
        lock = self.lock()
        return self._commitctx(ctx, force=True, force_editor=False,
                               empty_ok=True, use_dirstate=False,
                               update_dirstate=False)
    finally:
        del lock, wlock
805
796
806 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
797 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
807 use_dirstate=True, update_dirstate=True):
798 use_dirstate=True, update_dirstate=True):
808 tr = None
799 tr = None
809 valid = 0 # don't save the dirstate if this isn't set
800 valid = 0 # don't save the dirstate if this isn't set
810 try:
801 try:
811 commit = wctx.modified() + wctx.added()
802 commit = wctx.modified() + wctx.added()
812 remove = wctx.removed()
803 remove = wctx.removed()
813 extra = wctx.extra().copy()
804 extra = wctx.extra().copy()
814 branchname = extra['branch']
805 branchname = extra['branch']
815 user = wctx.user()
806 user = wctx.user()
816 text = wctx.description()
807 text = wctx.description()
817
808
818 p1, p2 = [p.node() for p in wctx.parents()]
809 p1, p2 = [p.node() for p in wctx.parents()]
819 c1 = self.changelog.read(p1)
810 c1 = self.changelog.read(p1)
820 c2 = self.changelog.read(p2)
811 c2 = self.changelog.read(p2)
821 m1 = self.manifest.read(c1[0]).copy()
812 m1 = self.manifest.read(c1[0]).copy()
822 m2 = self.manifest.read(c2[0])
813 m2 = self.manifest.read(c2[0])
823
814
824 if use_dirstate:
815 if use_dirstate:
825 oldname = c1[5].get("branch") # stored in UTF-8
816 oldname = c1[5].get("branch") # stored in UTF-8
826 if (not commit and not remove and not force and p2 == nullid
817 if (not commit and not remove and not force and p2 == nullid
827 and branchname == oldname):
818 and branchname == oldname):
828 self.ui.status(_("nothing changed\n"))
819 self.ui.status(_("nothing changed\n"))
829 return None
820 return None
830
821
831 xp1 = hex(p1)
822 xp1 = hex(p1)
832 if p2 == nullid: xp2 = ''
823 if p2 == nullid: xp2 = ''
833 else: xp2 = hex(p2)
824 else: xp2 = hex(p2)
834
825
835 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
826 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
836
827
837 tr = self.transaction()
828 tr = self.transaction()
838 trp = weakref.proxy(tr)
829 trp = weakref.proxy(tr)
839
830
840 # check in files
831 # check in files
841 new = {}
832 new = {}
842 changed = []
833 changed = []
843 linkrev = self.changelog.count()
834 linkrev = self.changelog.count()
844 commit.sort()
835 commit.sort()
845 for f in commit:
836 for f in commit:
846 self.ui.note(f + "\n")
837 self.ui.note(f + "\n")
847 try:
838 try:
848 fctx = wctx.filectx(f)
839 fctx = wctx.filectx(f)
849 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
840 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
850 new_exec = fctx.isexec()
841 new_exec = fctx.isexec()
851 new_link = fctx.islink()
842 new_link = fctx.islink()
852 if ((not changed or changed[-1] != f) and
843 if ((not changed or changed[-1] != f) and
853 m2.get(f) != new[f]):
844 m2.get(f) != new[f]):
854 # mention the file in the changelog if some
845 # mention the file in the changelog if some
855 # flag changed, even if there was no content
846 # flag changed, even if there was no content
856 # change.
847 # change.
857 old_exec = m1.execf(f)
848 old_exec = m1.execf(f)
858 old_link = m1.linkf(f)
849 old_link = m1.linkf(f)
859 if old_exec != new_exec or old_link != new_link:
850 if old_exec != new_exec or old_link != new_link:
860 changed.append(f)
851 changed.append(f)
861 m1.set(f, new_exec, new_link)
852 m1.set(f, new_exec, new_link)
862 if use_dirstate:
853 if use_dirstate:
863 self.dirstate.normal(f)
854 self.dirstate.normal(f)
864
855
865 except (OSError, IOError):
856 except (OSError, IOError):
866 if use_dirstate:
857 if use_dirstate:
867 self.ui.warn(_("trouble committing %s!\n") % f)
858 self.ui.warn(_("trouble committing %s!\n") % f)
868 raise
859 raise
869 else:
860 else:
870 remove.append(f)
861 remove.append(f)
871
862
872 # update manifest
863 # update manifest
873 m1.update(new)
864 m1.update(new)
874 remove.sort()
865 remove.sort()
875 removed = []
866 removed = []
876
867
877 for f in remove:
868 for f in remove:
878 if f in m1:
869 if f in m1:
879 del m1[f]
870 del m1[f]
880 removed.append(f)
871 removed.append(f)
881 elif f in m2:
872 elif f in m2:
882 removed.append(f)
873 removed.append(f)
883 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
874 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
884 (new, removed))
875 (new, removed))
885
876
886 # add changeset
877 # add changeset
887 if (not empty_ok and not text) or force_editor:
878 if (not empty_ok and not text) or force_editor:
888 edittext = []
879 edittext = []
889 if text:
880 if text:
890 edittext.append(text)
881 edittext.append(text)
891 edittext.append("")
882 edittext.append("")
892 edittext.append(_("HG: Enter commit message."
883 edittext.append(_("HG: Enter commit message."
893 " Lines beginning with 'HG:' are removed."))
884 " Lines beginning with 'HG:' are removed."))
894 edittext.append("HG: --")
885 edittext.append("HG: --")
895 edittext.append("HG: user: %s" % user)
886 edittext.append("HG: user: %s" % user)
896 if p2 != nullid:
887 if p2 != nullid:
897 edittext.append("HG: branch merge")
888 edittext.append("HG: branch merge")
898 if branchname:
889 if branchname:
899 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
890 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
900 edittext.extend(["HG: changed %s" % f for f in changed])
891 edittext.extend(["HG: changed %s" % f for f in changed])
901 edittext.extend(["HG: removed %s" % f for f in removed])
892 edittext.extend(["HG: removed %s" % f for f in removed])
902 if not changed and not remove:
893 if not changed and not remove:
903 edittext.append("HG: no files changed")
894 edittext.append("HG: no files changed")
904 edittext.append("")
895 edittext.append("")
905 # run editor in the repository root
896 # run editor in the repository root
906 olddir = os.getcwd()
897 olddir = os.getcwd()
907 os.chdir(self.root)
898 os.chdir(self.root)
908 text = self.ui.edit("\n".join(edittext), user)
899 text = self.ui.edit("\n".join(edittext), user)
909 os.chdir(olddir)
900 os.chdir(olddir)
910
901
911 lines = [line.rstrip() for line in text.rstrip().splitlines()]
902 lines = [line.rstrip() for line in text.rstrip().splitlines()]
912 while lines and not lines[0]:
903 while lines and not lines[0]:
913 del lines[0]
904 del lines[0]
914 if not lines and use_dirstate:
905 if not lines and use_dirstate:
915 raise util.Abort(_("empty commit message"))
906 raise util.Abort(_("empty commit message"))
916 text = '\n'.join(lines)
907 text = '\n'.join(lines)
917
908
918 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
909 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
919 user, wctx.date(), extra)
910 user, wctx.date(), extra)
920 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
911 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
921 parent2=xp2)
912 parent2=xp2)
922 tr.close()
913 tr.close()
923
914
924 if self.branchcache:
915 if self.branchcache:
925 self.branchtags()
916 self.branchtags()
926
917
927 if use_dirstate or update_dirstate:
918 if use_dirstate or update_dirstate:
928 self.dirstate.setparents(n)
919 self.dirstate.setparents(n)
929 if use_dirstate:
920 if use_dirstate:
930 for f in removed:
921 for f in removed:
931 self.dirstate.forget(f)
922 self.dirstate.forget(f)
932 valid = 1 # our dirstate updates are complete
923 valid = 1 # our dirstate updates are complete
933
924
934 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
925 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
935 return n
926 return n
936 finally:
927 finally:
937 if not valid: # don't save our updated dirstate
928 if not valid: # don't save our updated dirstate
938 self.dirstate.invalidate()
929 self.dirstate.invalidate()
939 del tr
930 del tr
940
931
941 def walk(self, match, node=None):
932 def walk(self, match, node=None):
942 '''
933 '''
943 walk recursively through the directory tree or a given
934 walk recursively through the directory tree or a given
944 changeset, finding all files matched by the match
935 changeset, finding all files matched by the match
945 function
936 function
946 '''
937 '''
947
938
948 if node:
939 if node:
949 fdict = dict.fromkeys(match.files())
940 fdict = dict.fromkeys(match.files())
950 # for dirstate.walk, files=['.'] means "walk the whole tree".
941 # for dirstate.walk, files=['.'] means "walk the whole tree".
951 # follow that here, too
942 # follow that here, too
952 fdict.pop('.', None)
943 fdict.pop('.', None)
953 mdict = self.manifest.read(self.changelog.read(node)[0])
944 mdict = self.manifest.read(self.changelog.read(node)[0])
954 mfiles = mdict.keys()
945 mfiles = mdict.keys()
955 mfiles.sort()
946 mfiles.sort()
956 for fn in mfiles:
947 for fn in mfiles:
957 for ffn in fdict:
948 for ffn in fdict:
958 # match if the file is the exact name or a directory
949 # match if the file is the exact name or a directory
959 if ffn == fn or fn.startswith("%s/" % ffn):
950 if ffn == fn or fn.startswith("%s/" % ffn):
960 del fdict[ffn]
951 del fdict[ffn]
961 break
952 break
962 if match(fn):
953 if match(fn):
963 yield fn
954 yield fn
964 ffiles = fdict.keys()
955 ffiles = fdict.keys()
965 ffiles.sort()
956 ffiles.sort()
966 for fn in ffiles:
957 for fn in ffiles:
967 if match.bad(fn, 'No such file in rev ' + short(node)) \
958 if match.bad(fn, 'No such file in rev ' + short(node)) \
968 and match(fn):
959 and match(fn):
969 yield fn
960 yield fn
970 else:
961 else:
971 for fn in self.dirstate.walk(match):
962 for fn in self.dirstate.walk(match):
972 yield fn
963 yield fn
973
964
974 def status(self, node1=None, node2=None, match=None,
965 def status(self, node1=None, node2=None, match=None,
975 list_ignored=False, list_clean=False, list_unknown=True):
966 list_ignored=False, list_clean=False, list_unknown=True):
976 """return status of files between two nodes or node and working directory
967 """return status of files between two nodes or node and working directory
977
968
978 If node1 is None, use the first dirstate parent instead.
969 If node1 is None, use the first dirstate parent instead.
979 If node2 is None, compare node1 with working directory.
970 If node2 is None, compare node1 with working directory.
980 """
971 """
981
972
982 def fcmp(fn, getnode):
973 def fcmp(fn, getnode):
983 t1 = self.wread(fn)
974 t1 = self.wread(fn)
984 return self.file(fn).cmp(getnode(fn), t1)
975 return self.file(fn).cmp(getnode(fn), t1)
985
976
986 def mfmatches(node):
977 def mfmatches(node):
987 change = self.changelog.read(node)
978 change = self.changelog.read(node)
988 mf = self.manifest.read(change[0]).copy()
979 mf = self.manifest.read(change[0]).copy()
989 for fn in mf.keys():
980 for fn in mf.keys():
990 if not match(fn):
981 if not match(fn):
991 del mf[fn]
982 del mf[fn]
992 return mf
983 return mf
993
984
994 if not match:
985 if not match:
995 match = match_.always(self.root, self.getcwd())
986 match = match_.always(self.root, self.getcwd())
996
987
997 modified, added, removed, deleted, unknown = [], [], [], [], []
988 modified, added, removed, deleted, unknown = [], [], [], [], []
998 ignored, clean = [], []
989 ignored, clean = [], []
999
990
1000 compareworking = False
991 compareworking = False
1001 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
992 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
1002 compareworking = True
993 compareworking = True
1003
994
1004 if not compareworking:
995 if not compareworking:
1005 # read the manifest from node1 before the manifest from node2,
996 # read the manifest from node1 before the manifest from node2,
1006 # so that we'll hit the manifest cache if we're going through
997 # so that we'll hit the manifest cache if we're going through
1007 # all the revisions in parent->child order.
998 # all the revisions in parent->child order.
1008 mf1 = mfmatches(node1)
999 mf1 = mfmatches(node1)
1009
1000
1010 # are we comparing the working directory?
1001 # are we comparing the working directory?
1011 if not node2:
1002 if not node2:
1012 (lookup, modified, added, removed, deleted, unknown,
1003 (lookup, modified, added, removed, deleted, unknown,
1013 ignored, clean) = self.dirstate.status(match, list_ignored,
1004 ignored, clean) = self.dirstate.status(match, list_ignored,
1014 list_clean, list_unknown)
1005 list_clean, list_unknown)
1015 # are we comparing working dir against its parent?
1006 # are we comparing working dir against its parent?
1016 if compareworking:
1007 if compareworking:
1017 if lookup:
1008 if lookup:
1018 fixup = []
1009 fixup = []
1019 # do a full compare of any files that might have changed
1010 # do a full compare of any files that might have changed
1020 ctx = self.changectx('')
1011 ctx = self.changectx('')
1021 mexec = lambda f: 'x' in ctx.fileflags(f)
1012 mexec = lambda f: 'x' in ctx.fileflags(f)
1022 mlink = lambda f: 'l' in ctx.fileflags(f)
1013 mlink = lambda f: 'l' in ctx.fileflags(f)
1023 is_exec = util.execfunc(self.root, mexec)
1014 is_exec = util.execfunc(self.root, mexec)
1024 is_link = util.linkfunc(self.root, mlink)
1015 is_link = util.linkfunc(self.root, mlink)
1025 def flags(f):
1016 def flags(f):
1026 return is_link(f) and 'l' or is_exec(f) and 'x' or ''
1017 return is_link(f) and 'l' or is_exec(f) and 'x' or ''
1027 for f in lookup:
1018 for f in lookup:
1028 if (f not in ctx or flags(f) != ctx.fileflags(f)
1019 if (f not in ctx or flags(f) != ctx.fileflags(f)
1029 or ctx[f].cmp(self.wread(f))):
1020 or ctx[f].cmp(self.wread(f))):
1030 modified.append(f)
1021 modified.append(f)
1031 else:
1022 else:
1032 fixup.append(f)
1023 fixup.append(f)
1033 if list_clean:
1024 if list_clean:
1034 clean.append(f)
1025 clean.append(f)
1035
1026
1036 # update dirstate for files that are actually clean
1027 # update dirstate for files that are actually clean
1037 if fixup:
1028 if fixup:
1038 wlock = None
1029 wlock = None
1039 try:
1030 try:
1040 try:
1031 try:
1041 wlock = self.wlock(False)
1032 wlock = self.wlock(False)
1042 except lock.LockException:
1033 except lock.LockException:
1043 pass
1034 pass
1044 if wlock:
1035 if wlock:
1045 for f in fixup:
1036 for f in fixup:
1046 self.dirstate.normal(f)
1037 self.dirstate.normal(f)
1047 finally:
1038 finally:
1048 del wlock
1039 del wlock
1049 else:
1040 else:
1050 # we are comparing working dir against non-parent
1041 # we are comparing working dir against non-parent
1051 # generate a pseudo-manifest for the working dir
1042 # generate a pseudo-manifest for the working dir
1052 # XXX: create it in dirstate.py ?
1043 # XXX: create it in dirstate.py ?
1053 mf2 = mfmatches(self.dirstate.parents()[0])
1044 mf2 = mfmatches(self.dirstate.parents()[0])
1054 is_exec = util.execfunc(self.root, mf2.execf)
1045 is_exec = util.execfunc(self.root, mf2.execf)
1055 is_link = util.linkfunc(self.root, mf2.linkf)
1046 is_link = util.linkfunc(self.root, mf2.linkf)
1056 for f in lookup + modified + added:
1047 for f in lookup + modified + added:
1057 mf2[f] = ""
1048 mf2[f] = ""
1058 mf2.set(f, is_exec(f), is_link(f))
1049 mf2.set(f, is_exec(f), is_link(f))
1059 for f in removed:
1050 for f in removed:
1060 if f in mf2:
1051 if f in mf2:
1061 del mf2[f]
1052 del mf2[f]
1062
1053
1063 else:
1054 else:
1064 # we are comparing two revisions
1055 # we are comparing two revisions
1065 mf2 = mfmatches(node2)
1056 mf2 = mfmatches(node2)
1066
1057
1067 if not compareworking:
1058 if not compareworking:
1068 # flush lists from dirstate before comparing manifests
1059 # flush lists from dirstate before comparing manifests
1069 modified, added, clean = [], [], []
1060 modified, added, clean = [], [], []
1070
1061
1071 # make sure to sort the files so we talk to the disk in a
1062 # make sure to sort the files so we talk to the disk in a
1072 # reasonable order
1063 # reasonable order
1073 mf2keys = mf2.keys()
1064 mf2keys = mf2.keys()
1074 mf2keys.sort()
1065 mf2keys.sort()
1075 getnode = lambda fn: mf1.get(fn, nullid)
1066 getnode = lambda fn: mf1.get(fn, nullid)
1076 for fn in mf2keys:
1067 for fn in mf2keys:
1077 if fn in mf1:
1068 if fn in mf1:
1078 if (mf1.flags(fn) != mf2.flags(fn) or
1069 if (mf1.flags(fn) != mf2.flags(fn) or
1079 (mf1[fn] != mf2[fn] and
1070 (mf1[fn] != mf2[fn] and
1080 (mf2[fn] != "" or fcmp(fn, getnode)))):
1071 (mf2[fn] != "" or fcmp(fn, getnode)))):
1081 modified.append(fn)
1072 modified.append(fn)
1082 elif list_clean:
1073 elif list_clean:
1083 clean.append(fn)
1074 clean.append(fn)
1084 del mf1[fn]
1075 del mf1[fn]
1085 else:
1076 else:
1086 added.append(fn)
1077 added.append(fn)
1087
1078
1088 removed = mf1.keys()
1079 removed = mf1.keys()
1089
1080
1090 # sort and return results:
1081 # sort and return results:
1091 for l in modified, added, removed, deleted, unknown, ignored, clean:
1082 for l in modified, added, removed, deleted, unknown, ignored, clean:
1092 l.sort()
1083 l.sort()
1093 return (modified, added, removed, deleted, unknown, ignored, clean)
1084 return (modified, added, removed, deleted, unknown, ignored, clean)
1094
1085
1095 def add(self, list):
1086 def add(self, list):
1096 wlock = self.wlock()
1087 wlock = self.wlock()
1097 try:
1088 try:
1098 rejected = []
1089 rejected = []
1099 for f in list:
1090 for f in list:
1100 p = self.wjoin(f)
1091 p = self.wjoin(f)
1101 try:
1092 try:
1102 st = os.lstat(p)
1093 st = os.lstat(p)
1103 except:
1094 except:
1104 self.ui.warn(_("%s does not exist!\n") % f)
1095 self.ui.warn(_("%s does not exist!\n") % f)
1105 rejected.append(f)
1096 rejected.append(f)
1106 continue
1097 continue
1107 if st.st_size > 10000000:
1098 if st.st_size > 10000000:
1108 self.ui.warn(_("%s: files over 10MB may cause memory and"
1099 self.ui.warn(_("%s: files over 10MB may cause memory and"
1109 " performance problems\n"
1100 " performance problems\n"
1110 "(use 'hg revert %s' to unadd the file)\n")
1101 "(use 'hg revert %s' to unadd the file)\n")
1111 % (f, f))
1102 % (f, f))
1112 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1103 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1113 self.ui.warn(_("%s not added: only files and symlinks "
1104 self.ui.warn(_("%s not added: only files and symlinks "
1114 "supported currently\n") % f)
1105 "supported currently\n") % f)
1115 rejected.append(p)
1106 rejected.append(p)
1116 elif self.dirstate[f] in 'amn':
1107 elif self.dirstate[f] in 'amn':
1117 self.ui.warn(_("%s already tracked!\n") % f)
1108 self.ui.warn(_("%s already tracked!\n") % f)
1118 elif self.dirstate[f] == 'r':
1109 elif self.dirstate[f] == 'r':
1119 self.dirstate.normallookup(f)
1110 self.dirstate.normallookup(f)
1120 else:
1111 else:
1121 self.dirstate.add(f)
1112 self.dirstate.add(f)
1122 return rejected
1113 return rejected
1123 finally:
1114 finally:
1124 del wlock
1115 del wlock
1125
1116
1126 def forget(self, list):
1117 def forget(self, list):
1127 wlock = self.wlock()
1118 wlock = self.wlock()
1128 try:
1119 try:
1129 for f in list:
1120 for f in list:
1130 if self.dirstate[f] != 'a':
1121 if self.dirstate[f] != 'a':
1131 self.ui.warn(_("%s not added!\n") % f)
1122 self.ui.warn(_("%s not added!\n") % f)
1132 else:
1123 else:
1133 self.dirstate.forget(f)
1124 self.dirstate.forget(f)
1134 finally:
1125 finally:
1135 del wlock
1126 del wlock
1136
1127
1137 def remove(self, list, unlink=False):
1128 def remove(self, list, unlink=False):
1138 wlock = None
1129 wlock = None
1139 try:
1130 try:
1140 if unlink:
1131 if unlink:
1141 for f in list:
1132 for f in list:
1142 try:
1133 try:
1143 util.unlink(self.wjoin(f))
1134 util.unlink(self.wjoin(f))
1144 except OSError, inst:
1135 except OSError, inst:
1145 if inst.errno != errno.ENOENT:
1136 if inst.errno != errno.ENOENT:
1146 raise
1137 raise
1147 wlock = self.wlock()
1138 wlock = self.wlock()
1148 for f in list:
1139 for f in list:
1149 if unlink and os.path.exists(self.wjoin(f)):
1140 if unlink and os.path.exists(self.wjoin(f)):
1150 self.ui.warn(_("%s still exists!\n") % f)
1141 self.ui.warn(_("%s still exists!\n") % f)
1151 elif self.dirstate[f] == 'a':
1142 elif self.dirstate[f] == 'a':
1152 self.dirstate.forget(f)
1143 self.dirstate.forget(f)
1153 elif f not in self.dirstate:
1144 elif f not in self.dirstate:
1154 self.ui.warn(_("%s not tracked!\n") % f)
1145 self.ui.warn(_("%s not tracked!\n") % f)
1155 else:
1146 else:
1156 self.dirstate.remove(f)
1147 self.dirstate.remove(f)
1157 finally:
1148 finally:
1158 del wlock
1149 del wlock
1159
1150
1160 def undelete(self, list):
1151 def undelete(self, list):
1161 wlock = None
1152 wlock = None
1162 try:
1153 try:
1163 manifests = [self.manifest.read(self.changelog.read(p)[0])
1154 manifests = [self.manifest.read(self.changelog.read(p)[0])
1164 for p in self.dirstate.parents() if p != nullid]
1155 for p in self.dirstate.parents() if p != nullid]
1165 wlock = self.wlock()
1156 wlock = self.wlock()
1166 for f in list:
1157 for f in list:
1167 if self.dirstate[f] != 'r':
1158 if self.dirstate[f] != 'r':
1168 self.ui.warn("%s not removed!\n" % f)
1159 self.ui.warn("%s not removed!\n" % f)
1169 else:
1160 else:
1170 m = f in manifests[0] and manifests[0] or manifests[1]
1161 m = f in manifests[0] and manifests[0] or manifests[1]
1171 t = self.file(f).read(m[f])
1162 t = self.file(f).read(m[f])
1172 self.wwrite(f, t, m.flags(f))
1163 self.wwrite(f, t, m.flags(f))
1173 self.dirstate.normal(f)
1164 self.dirstate.normal(f)
1174 finally:
1165 finally:
1175 del wlock
1166 del wlock
1176
1167
1177 def copy(self, source, dest):
1168 def copy(self, source, dest):
1178 wlock = None
1169 wlock = None
1179 try:
1170 try:
1180 p = self.wjoin(dest)
1171 p = self.wjoin(dest)
1181 if not (os.path.exists(p) or os.path.islink(p)):
1172 if not (os.path.exists(p) or os.path.islink(p)):
1182 self.ui.warn(_("%s does not exist!\n") % dest)
1173 self.ui.warn(_("%s does not exist!\n") % dest)
1183 elif not (os.path.isfile(p) or os.path.islink(p)):
1174 elif not (os.path.isfile(p) or os.path.islink(p)):
1184 self.ui.warn(_("copy failed: %s is not a file or a "
1175 self.ui.warn(_("copy failed: %s is not a file or a "
1185 "symbolic link\n") % dest)
1176 "symbolic link\n") % dest)
1186 else:
1177 else:
1187 wlock = self.wlock()
1178 wlock = self.wlock()
1188 if dest not in self.dirstate:
1179 if dest not in self.dirstate:
1189 self.dirstate.add(dest)
1180 self.dirstate.add(dest)
1190 self.dirstate.copy(source, dest)
1181 self.dirstate.copy(source, dest)
1191 finally:
1182 finally:
1192 del wlock
1183 del wlock
1193
1184
1194 def heads(self, start=None):
1185 def heads(self, start=None):
1195 heads = self.changelog.heads(start)
1186 heads = self.changelog.heads(start)
1196 # sort the output in rev descending order
1187 # sort the output in rev descending order
1197 heads = [(-self.changelog.rev(h), h) for h in heads]
1188 heads = [(-self.changelog.rev(h), h) for h in heads]
1198 heads.sort()
1189 heads.sort()
1199 return [n for (r, n) in heads]
1190 return [n for (r, n) in heads]
1200
1191
1201 def branchheads(self, branch=None, start=None):
1192 def branchheads(self, branch=None, start=None):
1202 branch = branch is None and self.changectx(None).branch() or branch
1193 branch = branch is None and self.changectx(None).branch() or branch
1203 branches = self.branchtags()
1194 branches = self.branchtags()
1204 if branch not in branches:
1195 if branch not in branches:
1205 return []
1196 return []
1206 # The basic algorithm is this:
1197 # The basic algorithm is this:
1207 #
1198 #
1208 # Start from the branch tip since there are no later revisions that can
1199 # Start from the branch tip since there are no later revisions that can
1209 # possibly be in this branch, and the tip is a guaranteed head.
1200 # possibly be in this branch, and the tip is a guaranteed head.
1210 #
1201 #
1211 # Remember the tip's parents as the first ancestors, since these by
1202 # Remember the tip's parents as the first ancestors, since these by
1212 # definition are not heads.
1203 # definition are not heads.
1213 #
1204 #
1214 # Step backwards from the brach tip through all the revisions. We are
1205 # Step backwards from the brach tip through all the revisions. We are
1215 # guaranteed by the rules of Mercurial that we will now be visiting the
1206 # guaranteed by the rules of Mercurial that we will now be visiting the
1216 # nodes in reverse topological order (children before parents).
1207 # nodes in reverse topological order (children before parents).
1217 #
1208 #
1218 # If a revision is one of the ancestors of a head then we can toss it
1209 # If a revision is one of the ancestors of a head then we can toss it
1219 # out of the ancestors set (we've already found it and won't be
1210 # out of the ancestors set (we've already found it and won't be
1220 # visiting it again) and put its parents in the ancestors set.
1211 # visiting it again) and put its parents in the ancestors set.
1221 #
1212 #
1222 # Otherwise, if a revision is in the branch it's another head, since it
1213 # Otherwise, if a revision is in the branch it's another head, since it
1223 # wasn't in the ancestor list of an existing head. So add it to the
1214 # wasn't in the ancestor list of an existing head. So add it to the
1224 # head list, and add its parents to the ancestor list.
1215 # head list, and add its parents to the ancestor list.
1225 #
1216 #
1226 # If it is not in the branch ignore it.
1217 # If it is not in the branch ignore it.
1227 #
1218 #
1228 # Once we have a list of heads, use nodesbetween to filter out all the
1219 # Once we have a list of heads, use nodesbetween to filter out all the
1229 # heads that cannot be reached from startrev. There may be a more
1220 # heads that cannot be reached from startrev. There may be a more
1230 # efficient way to do this as part of the previous algorithm.
1221 # efficient way to do this as part of the previous algorithm.
1231
1222
1232 set = util.set
1223 set = util.set
1233 heads = [self.changelog.rev(branches[branch])]
1224 heads = [self.changelog.rev(branches[branch])]
1234 # Don't care if ancestors contains nullrev or not.
1225 # Don't care if ancestors contains nullrev or not.
1235 ancestors = set(self.changelog.parentrevs(heads[0]))
1226 ancestors = set(self.changelog.parentrevs(heads[0]))
1236 for rev in xrange(heads[0] - 1, nullrev, -1):
1227 for rev in xrange(heads[0] - 1, nullrev, -1):
1237 if rev in ancestors:
1228 if rev in ancestors:
1238 ancestors.update(self.changelog.parentrevs(rev))
1229 ancestors.update(self.changelog.parentrevs(rev))
1239 ancestors.remove(rev)
1230 ancestors.remove(rev)
1240 elif self.changectx(rev).branch() == branch:
1231 elif self.changectx(rev).branch() == branch:
1241 heads.append(rev)
1232 heads.append(rev)
1242 ancestors.update(self.changelog.parentrevs(rev))
1233 ancestors.update(self.changelog.parentrevs(rev))
1243 heads = [self.changelog.node(rev) for rev in heads]
1234 heads = [self.changelog.node(rev) for rev in heads]
1244 if start is not None:
1235 if start is not None:
1245 heads = self.changelog.nodesbetween([start], heads)[2]
1236 heads = self.changelog.nodesbetween([start], heads)[2]
1246 return heads
1237 return heads
1247
1238
1248 def branches(self, nodes):
1239 def branches(self, nodes):
1249 if not nodes:
1240 if not nodes:
1250 nodes = [self.changelog.tip()]
1241 nodes = [self.changelog.tip()]
1251 b = []
1242 b = []
1252 for n in nodes:
1243 for n in nodes:
1253 t = n
1244 t = n
1254 while 1:
1245 while 1:
1255 p = self.changelog.parents(n)
1246 p = self.changelog.parents(n)
1256 if p[1] != nullid or p[0] == nullid:
1247 if p[1] != nullid or p[0] == nullid:
1257 b.append((t, n, p[0], p[1]))
1248 b.append((t, n, p[0], p[1]))
1258 break
1249 break
1259 n = p[0]
1250 n = p[0]
1260 return b
1251 return b
1261
1252
1262 def between(self, pairs):
1253 def between(self, pairs):
1263 r = []
1254 r = []
1264
1255
1265 for top, bottom in pairs:
1256 for top, bottom in pairs:
1266 n, l, i = top, [], 0
1257 n, l, i = top, [], 0
1267 f = 1
1258 f = 1
1268
1259
1269 while n != bottom:
1260 while n != bottom:
1270 p = self.changelog.parents(n)[0]
1261 p = self.changelog.parents(n)[0]
1271 if i == f:
1262 if i == f:
1272 l.append(n)
1263 l.append(n)
1273 f = f * 2
1264 f = f * 2
1274 n = p
1265 n = p
1275 i += 1
1266 i += 1
1276
1267
1277 r.append(l)
1268 r.append(l)
1278
1269
1279 return r
1270 return r
1280
1271
1281 def findincoming(self, remote, base=None, heads=None, force=False):
1272 def findincoming(self, remote, base=None, heads=None, force=False):
1282 """Return list of roots of the subsets of missing nodes from remote
1273 """Return list of roots of the subsets of missing nodes from remote
1283
1274
1284 If base dict is specified, assume that these nodes and their parents
1275 If base dict is specified, assume that these nodes and their parents
1285 exist on the remote side and that no child of a node of base exists
1276 exist on the remote side and that no child of a node of base exists
1286 in both remote and self.
1277 in both remote and self.
1287 Furthermore base will be updated to include the nodes that exists
1278 Furthermore base will be updated to include the nodes that exists
1288 in self and remote but no children exists in self and remote.
1279 in self and remote but no children exists in self and remote.
1289 If a list of heads is specified, return only nodes which are heads
1280 If a list of heads is specified, return only nodes which are heads
1290 or ancestors of these heads.
1281 or ancestors of these heads.
1291
1282
1292 All the ancestors of base are in self and in remote.
1283 All the ancestors of base are in self and in remote.
1293 All the descendants of the list returned are missing in self.
1284 All the descendants of the list returned are missing in self.
1294 (and so we know that the rest of the nodes are missing in remote, see
1285 (and so we know that the rest of the nodes are missing in remote, see
1295 outgoing)
1286 outgoing)
1296 """
1287 """
1297 m = self.changelog.nodemap
1288 m = self.changelog.nodemap
1298 search = []
1289 search = []
1299 fetch = {}
1290 fetch = {}
1300 seen = {}
1291 seen = {}
1301 seenbranch = {}
1292 seenbranch = {}
1302 if base == None:
1293 if base == None:
1303 base = {}
1294 base = {}
1304
1295
1305 if not heads:
1296 if not heads:
1306 heads = remote.heads()
1297 heads = remote.heads()
1307
1298
1308 if self.changelog.tip() == nullid:
1299 if self.changelog.tip() == nullid:
1309 base[nullid] = 1
1300 base[nullid] = 1
1310 if heads != [nullid]:
1301 if heads != [nullid]:
1311 return [nullid]
1302 return [nullid]
1312 return []
1303 return []
1313
1304
1314 # assume we're closer to the tip than the root
1305 # assume we're closer to the tip than the root
1315 # and start by examining the heads
1306 # and start by examining the heads
1316 self.ui.status(_("searching for changes\n"))
1307 self.ui.status(_("searching for changes\n"))
1317
1308
1318 unknown = []
1309 unknown = []
1319 for h in heads:
1310 for h in heads:
1320 if h not in m:
1311 if h not in m:
1321 unknown.append(h)
1312 unknown.append(h)
1322 else:
1313 else:
1323 base[h] = 1
1314 base[h] = 1
1324
1315
1325 if not unknown:
1316 if not unknown:
1326 return []
1317 return []
1327
1318
1328 req = dict.fromkeys(unknown)
1319 req = dict.fromkeys(unknown)
1329 reqcnt = 0
1320 reqcnt = 0
1330
1321
1331 # search through remote branches
1322 # search through remote branches
1332 # a 'branch' here is a linear segment of history, with four parts:
1323 # a 'branch' here is a linear segment of history, with four parts:
1333 # head, root, first parent, second parent
1324 # head, root, first parent, second parent
1334 # (a branch always has two parents (or none) by definition)
1325 # (a branch always has two parents (or none) by definition)
1335 unknown = remote.branches(unknown)
1326 unknown = remote.branches(unknown)
1336 while unknown:
1327 while unknown:
1337 r = []
1328 r = []
1338 while unknown:
1329 while unknown:
1339 n = unknown.pop(0)
1330 n = unknown.pop(0)
1340 if n[0] in seen:
1331 if n[0] in seen:
1341 continue
1332 continue
1342
1333
1343 self.ui.debug(_("examining %s:%s\n")
1334 self.ui.debug(_("examining %s:%s\n")
1344 % (short(n[0]), short(n[1])))
1335 % (short(n[0]), short(n[1])))
1345 if n[0] == nullid: # found the end of the branch
1336 if n[0] == nullid: # found the end of the branch
1346 pass
1337 pass
1347 elif n in seenbranch:
1338 elif n in seenbranch:
1348 self.ui.debug(_("branch already found\n"))
1339 self.ui.debug(_("branch already found\n"))
1349 continue
1340 continue
1350 elif n[1] and n[1] in m: # do we know the base?
1341 elif n[1] and n[1] in m: # do we know the base?
1351 self.ui.debug(_("found incomplete branch %s:%s\n")
1342 self.ui.debug(_("found incomplete branch %s:%s\n")
1352 % (short(n[0]), short(n[1])))
1343 % (short(n[0]), short(n[1])))
1353 search.append(n) # schedule branch range for scanning
1344 search.append(n) # schedule branch range for scanning
1354 seenbranch[n] = 1
1345 seenbranch[n] = 1
1355 else:
1346 else:
1356 if n[1] not in seen and n[1] not in fetch:
1347 if n[1] not in seen and n[1] not in fetch:
1357 if n[2] in m and n[3] in m:
1348 if n[2] in m and n[3] in m:
1358 self.ui.debug(_("found new changeset %s\n") %
1349 self.ui.debug(_("found new changeset %s\n") %
1359 short(n[1]))
1350 short(n[1]))
1360 fetch[n[1]] = 1 # earliest unknown
1351 fetch[n[1]] = 1 # earliest unknown
1361 for p in n[2:4]:
1352 for p in n[2:4]:
1362 if p in m:
1353 if p in m:
1363 base[p] = 1 # latest known
1354 base[p] = 1 # latest known
1364
1355
1365 for p in n[2:4]:
1356 for p in n[2:4]:
1366 if p not in req and p not in m:
1357 if p not in req and p not in m:
1367 r.append(p)
1358 r.append(p)
1368 req[p] = 1
1359 req[p] = 1
1369 seen[n[0]] = 1
1360 seen[n[0]] = 1
1370
1361
1371 if r:
1362 if r:
1372 reqcnt += 1
1363 reqcnt += 1
1373 self.ui.debug(_("request %d: %s\n") %
1364 self.ui.debug(_("request %d: %s\n") %
1374 (reqcnt, " ".join(map(short, r))))
1365 (reqcnt, " ".join(map(short, r))))
1375 for p in xrange(0, len(r), 10):
1366 for p in xrange(0, len(r), 10):
1376 for b in remote.branches(r[p:p+10]):
1367 for b in remote.branches(r[p:p+10]):
1377 self.ui.debug(_("received %s:%s\n") %
1368 self.ui.debug(_("received %s:%s\n") %
1378 (short(b[0]), short(b[1])))
1369 (short(b[0]), short(b[1])))
1379 unknown.append(b)
1370 unknown.append(b)
1380
1371
1381 # do binary search on the branches we found
1372 # do binary search on the branches we found
1382 while search:
1373 while search:
1383 n = search.pop(0)
1374 n = search.pop(0)
1384 reqcnt += 1
1375 reqcnt += 1
1385 l = remote.between([(n[0], n[1])])[0]
1376 l = remote.between([(n[0], n[1])])[0]
1386 l.append(n[1])
1377 l.append(n[1])
1387 p = n[0]
1378 p = n[0]
1388 f = 1
1379 f = 1
1389 for i in l:
1380 for i in l:
1390 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1381 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1391 if i in m:
1382 if i in m:
1392 if f <= 2:
1383 if f <= 2:
1393 self.ui.debug(_("found new branch changeset %s\n") %
1384 self.ui.debug(_("found new branch changeset %s\n") %
1394 short(p))
1385 short(p))
1395 fetch[p] = 1
1386 fetch[p] = 1
1396 base[i] = 1
1387 base[i] = 1
1397 else:
1388 else:
1398 self.ui.debug(_("narrowed branch search to %s:%s\n")
1389 self.ui.debug(_("narrowed branch search to %s:%s\n")
1399 % (short(p), short(i)))
1390 % (short(p), short(i)))
1400 search.append((p, i))
1391 search.append((p, i))
1401 break
1392 break
1402 p, f = i, f * 2
1393 p, f = i, f * 2
1403
1394
1404 # sanity check our fetch list
1395 # sanity check our fetch list
1405 for f in fetch.keys():
1396 for f in fetch.keys():
1406 if f in m:
1397 if f in m:
1407 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1398 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1408
1399
1409 if base.keys() == [nullid]:
1400 if base.keys() == [nullid]:
1410 if force:
1401 if force:
1411 self.ui.warn(_("warning: repository is unrelated\n"))
1402 self.ui.warn(_("warning: repository is unrelated\n"))
1412 else:
1403 else:
1413 raise util.Abort(_("repository is unrelated"))
1404 raise util.Abort(_("repository is unrelated"))
1414
1405
1415 self.ui.debug(_("found new changesets starting at ") +
1406 self.ui.debug(_("found new changesets starting at ") +
1416 " ".join([short(f) for f in fetch]) + "\n")
1407 " ".join([short(f) for f in fetch]) + "\n")
1417
1408
1418 self.ui.debug(_("%d total queries\n") % reqcnt)
1409 self.ui.debug(_("%d total queries\n") % reqcnt)
1419
1410
1420 return fetch.keys()
1411 return fetch.keys()
1421
1412
1422 def findoutgoing(self, remote, base=None, heads=None, force=False):
1413 def findoutgoing(self, remote, base=None, heads=None, force=False):
1423 """Return list of nodes that are roots of subsets not in remote
1414 """Return list of nodes that are roots of subsets not in remote
1424
1415
1425 If base dict is specified, assume that these nodes and their parents
1416 If base dict is specified, assume that these nodes and their parents
1426 exist on the remote side.
1417 exist on the remote side.
1427 If a list of heads is specified, return only nodes which are heads
1418 If a list of heads is specified, return only nodes which are heads
1428 or ancestors of these heads, and return a second element which
1419 or ancestors of these heads, and return a second element which
1429 contains all remote heads which get new children.
1420 contains all remote heads which get new children.
1430 """
1421 """
1431 if base == None:
1422 if base == None:
1432 base = {}
1423 base = {}
1433 self.findincoming(remote, base, heads, force=force)
1424 self.findincoming(remote, base, heads, force=force)
1434
1425
1435 self.ui.debug(_("common changesets up to ")
1426 self.ui.debug(_("common changesets up to ")
1436 + " ".join(map(short, base.keys())) + "\n")
1427 + " ".join(map(short, base.keys())) + "\n")
1437
1428
1438 remain = dict.fromkeys(self.changelog.nodemap)
1429 remain = dict.fromkeys(self.changelog.nodemap)
1439
1430
1440 # prune everything remote has from the tree
1431 # prune everything remote has from the tree
1441 del remain[nullid]
1432 del remain[nullid]
1442 remove = base.keys()
1433 remove = base.keys()
1443 while remove:
1434 while remove:
1444 n = remove.pop(0)
1435 n = remove.pop(0)
1445 if n in remain:
1436 if n in remain:
1446 del remain[n]
1437 del remain[n]
1447 for p in self.changelog.parents(n):
1438 for p in self.changelog.parents(n):
1448 remove.append(p)
1439 remove.append(p)
1449
1440
1450 # find every node whose parents have been pruned
1441 # find every node whose parents have been pruned
1451 subset = []
1442 subset = []
1452 # find every remote head that will get new children
1443 # find every remote head that will get new children
1453 updated_heads = {}
1444 updated_heads = {}
1454 for n in remain:
1445 for n in remain:
1455 p1, p2 = self.changelog.parents(n)
1446 p1, p2 = self.changelog.parents(n)
1456 if p1 not in remain and p2 not in remain:
1447 if p1 not in remain and p2 not in remain:
1457 subset.append(n)
1448 subset.append(n)
1458 if heads:
1449 if heads:
1459 if p1 in heads:
1450 if p1 in heads:
1460 updated_heads[p1] = True
1451 updated_heads[p1] = True
1461 if p2 in heads:
1452 if p2 in heads:
1462 updated_heads[p2] = True
1453 updated_heads[p2] = True
1463
1454
1464 # this is the set of all roots we have to push
1455 # this is the set of all roots we have to push
1465 if heads:
1456 if heads:
1466 return subset, updated_heads.keys()
1457 return subset, updated_heads.keys()
1467 else:
1458 else:
1468 return subset
1459 return subset
1469
1460
1470 def pull(self, remote, heads=None, force=False):
1461 def pull(self, remote, heads=None, force=False):
1471 lock = self.lock()
1462 lock = self.lock()
1472 try:
1463 try:
1473 fetch = self.findincoming(remote, heads=heads, force=force)
1464 fetch = self.findincoming(remote, heads=heads, force=force)
1474 if fetch == [nullid]:
1465 if fetch == [nullid]:
1475 self.ui.status(_("requesting all changes\n"))
1466 self.ui.status(_("requesting all changes\n"))
1476
1467
1477 if not fetch:
1468 if not fetch:
1478 self.ui.status(_("no changes found\n"))
1469 self.ui.status(_("no changes found\n"))
1479 return 0
1470 return 0
1480
1471
1481 if heads is None:
1472 if heads is None:
1482 cg = remote.changegroup(fetch, 'pull')
1473 cg = remote.changegroup(fetch, 'pull')
1483 else:
1474 else:
1484 if 'changegroupsubset' not in remote.capabilities:
1475 if 'changegroupsubset' not in remote.capabilities:
1485 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1476 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1486 cg = remote.changegroupsubset(fetch, heads, 'pull')
1477 cg = remote.changegroupsubset(fetch, heads, 'pull')
1487 return self.addchangegroup(cg, 'pull', remote.url())
1478 return self.addchangegroup(cg, 'pull', remote.url())
1488 finally:
1479 finally:
1489 del lock
1480 del lock
1490
1481
1491 def push(self, remote, force=False, revs=None):
1482 def push(self, remote, force=False, revs=None):
1492 # there are two ways to push to remote repo:
1483 # there are two ways to push to remote repo:
1493 #
1484 #
1494 # addchangegroup assumes local user can lock remote
1485 # addchangegroup assumes local user can lock remote
1495 # repo (local filesystem, old ssh servers).
1486 # repo (local filesystem, old ssh servers).
1496 #
1487 #
1497 # unbundle assumes local user cannot lock remote repo (new ssh
1488 # unbundle assumes local user cannot lock remote repo (new ssh
1498 # servers, http servers).
1489 # servers, http servers).
1499
1490
1500 if remote.capable('unbundle'):
1491 if remote.capable('unbundle'):
1501 return self.push_unbundle(remote, force, revs)
1492 return self.push_unbundle(remote, force, revs)
1502 return self.push_addchangegroup(remote, force, revs)
1493 return self.push_addchangegroup(remote, force, revs)
1503
1494
1504 def prepush(self, remote, force, revs):
1495 def prepush(self, remote, force, revs):
1505 base = {}
1496 base = {}
1506 remote_heads = remote.heads()
1497 remote_heads = remote.heads()
1507 inc = self.findincoming(remote, base, remote_heads, force=force)
1498 inc = self.findincoming(remote, base, remote_heads, force=force)
1508
1499
1509 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1500 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1510 if revs is not None:
1501 if revs is not None:
1511 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1502 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1512 else:
1503 else:
1513 bases, heads = update, self.changelog.heads()
1504 bases, heads = update, self.changelog.heads()
1514
1505
1515 if not bases:
1506 if not bases:
1516 self.ui.status(_("no changes found\n"))
1507 self.ui.status(_("no changes found\n"))
1517 return None, 1
1508 return None, 1
1518 elif not force:
1509 elif not force:
1519 # check if we're creating new remote heads
1510 # check if we're creating new remote heads
1520 # to be a remote head after push, node must be either
1511 # to be a remote head after push, node must be either
1521 # - unknown locally
1512 # - unknown locally
1522 # - a local outgoing head descended from update
1513 # - a local outgoing head descended from update
1523 # - a remote head that's known locally and not
1514 # - a remote head that's known locally and not
1524 # ancestral to an outgoing head
1515 # ancestral to an outgoing head
1525
1516
1526 warn = 0
1517 warn = 0
1527
1518
1528 if remote_heads == [nullid]:
1519 if remote_heads == [nullid]:
1529 warn = 0
1520 warn = 0
1530 elif not revs and len(heads) > len(remote_heads):
1521 elif not revs and len(heads) > len(remote_heads):
1531 warn = 1
1522 warn = 1
1532 else:
1523 else:
1533 newheads = list(heads)
1524 newheads = list(heads)
1534 for r in remote_heads:
1525 for r in remote_heads:
1535 if r in self.changelog.nodemap:
1526 if r in self.changelog.nodemap:
1536 desc = self.changelog.heads(r, heads)
1527 desc = self.changelog.heads(r, heads)
1537 l = [h for h in heads if h in desc]
1528 l = [h for h in heads if h in desc]
1538 if not l:
1529 if not l:
1539 newheads.append(r)
1530 newheads.append(r)
1540 else:
1531 else:
1541 newheads.append(r)
1532 newheads.append(r)
1542 if len(newheads) > len(remote_heads):
1533 if len(newheads) > len(remote_heads):
1543 warn = 1
1534 warn = 1
1544
1535
1545 if warn:
1536 if warn:
1546 self.ui.warn(_("abort: push creates new remote heads!\n"))
1537 self.ui.warn(_("abort: push creates new remote heads!\n"))
1547 self.ui.status(_("(did you forget to merge?"
1538 self.ui.status(_("(did you forget to merge?"
1548 " use push -f to force)\n"))
1539 " use push -f to force)\n"))
1549 return None, 0
1540 return None, 0
1550 elif inc:
1541 elif inc:
1551 self.ui.warn(_("note: unsynced remote changes!\n"))
1542 self.ui.warn(_("note: unsynced remote changes!\n"))
1552
1543
1553
1544
1554 if revs is None:
1545 if revs is None:
1555 cg = self.changegroup(update, 'push')
1546 cg = self.changegroup(update, 'push')
1556 else:
1547 else:
1557 cg = self.changegroupsubset(update, revs, 'push')
1548 cg = self.changegroupsubset(update, revs, 'push')
1558 return cg, remote_heads
1549 return cg, remote_heads
1559
1550
1560 def push_addchangegroup(self, remote, force, revs):
1551 def push_addchangegroup(self, remote, force, revs):
1561 lock = remote.lock()
1552 lock = remote.lock()
1562 try:
1553 try:
1563 ret = self.prepush(remote, force, revs)
1554 ret = self.prepush(remote, force, revs)
1564 if ret[0] is not None:
1555 if ret[0] is not None:
1565 cg, remote_heads = ret
1556 cg, remote_heads = ret
1566 return remote.addchangegroup(cg, 'push', self.url())
1557 return remote.addchangegroup(cg, 'push', self.url())
1567 return ret[1]
1558 return ret[1]
1568 finally:
1559 finally:
1569 del lock
1560 del lock
1570
1561
1571 def push_unbundle(self, remote, force, revs):
1562 def push_unbundle(self, remote, force, revs):
1572 # local repo finds heads on server, finds out what revs it
1563 # local repo finds heads on server, finds out what revs it
1573 # must push. once revs transferred, if server finds it has
1564 # must push. once revs transferred, if server finds it has
1574 # different heads (someone else won commit/push race), server
1565 # different heads (someone else won commit/push race), server
1575 # aborts.
1566 # aborts.
1576
1567
1577 ret = self.prepush(remote, force, revs)
1568 ret = self.prepush(remote, force, revs)
1578 if ret[0] is not None:
1569 if ret[0] is not None:
1579 cg, remote_heads = ret
1570 cg, remote_heads = ret
1580 if force: remote_heads = ['force']
1571 if force: remote_heads = ['force']
1581 return remote.unbundle(cg, remote_heads, 'push')
1572 return remote.unbundle(cg, remote_heads, 'push')
1582 return ret[1]
1573 return ret[1]
1583
1574
1584 def changegroupinfo(self, nodes, source):
1575 def changegroupinfo(self, nodes, source):
1585 if self.ui.verbose or source == 'bundle':
1576 if self.ui.verbose or source == 'bundle':
1586 self.ui.status(_("%d changesets found\n") % len(nodes))
1577 self.ui.status(_("%d changesets found\n") % len(nodes))
1587 if self.ui.debugflag:
1578 if self.ui.debugflag:
1588 self.ui.debug(_("List of changesets:\n"))
1579 self.ui.debug(_("List of changesets:\n"))
1589 for node in nodes:
1580 for node in nodes:
1590 self.ui.debug("%s\n" % hex(node))
1581 self.ui.debug("%s\n" % hex(node))
1591
1582
1592 def changegroupsubset(self, bases, heads, source, extranodes=None):
1583 def changegroupsubset(self, bases, heads, source, extranodes=None):
1593 """This function generates a changegroup consisting of all the nodes
1584 """This function generates a changegroup consisting of all the nodes
1594 that are descendents of any of the bases, and ancestors of any of
1585 that are descendents of any of the bases, and ancestors of any of
1595 the heads.
1586 the heads.
1596
1587
1597 It is fairly complex as determining which filenodes and which
1588 It is fairly complex as determining which filenodes and which
1598 manifest nodes need to be included for the changeset to be complete
1589 manifest nodes need to be included for the changeset to be complete
1599 is non-trivial.
1590 is non-trivial.
1600
1591
1601 Another wrinkle is doing the reverse, figuring out which changeset in
1592 Another wrinkle is doing the reverse, figuring out which changeset in
1602 the changegroup a particular filenode or manifestnode belongs to.
1593 the changegroup a particular filenode or manifestnode belongs to.
1603
1594
1604 The caller can specify some nodes that must be included in the
1595 The caller can specify some nodes that must be included in the
1605 changegroup using the extranodes argument. It should be a dict
1596 changegroup using the extranodes argument. It should be a dict
1606 where the keys are the filenames (or 1 for the manifest), and the
1597 where the keys are the filenames (or 1 for the manifest), and the
1607 values are lists of (node, linknode) tuples, where node is a wanted
1598 values are lists of (node, linknode) tuples, where node is a wanted
1608 node and linknode is the changelog node that should be transmitted as
1599 node and linknode is the changelog node that should be transmitted as
1609 the linkrev.
1600 the linkrev.
1610 """
1601 """
1611
1602
1612 self.hook('preoutgoing', throw=True, source=source)
1603 self.hook('preoutgoing', throw=True, source=source)
1613
1604
1614 # Set up some initial variables
1605 # Set up some initial variables
1615 # Make it easy to refer to self.changelog
1606 # Make it easy to refer to self.changelog
1616 cl = self.changelog
1607 cl = self.changelog
1617 # msng is short for missing - compute the list of changesets in this
1608 # msng is short for missing - compute the list of changesets in this
1618 # changegroup.
1609 # changegroup.
1619 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1610 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1620 self.changegroupinfo(msng_cl_lst, source)
1611 self.changegroupinfo(msng_cl_lst, source)
1621 # Some bases may turn out to be superfluous, and some heads may be
1612 # Some bases may turn out to be superfluous, and some heads may be
1622 # too. nodesbetween will return the minimal set of bases and heads
1613 # too. nodesbetween will return the minimal set of bases and heads
1623 # necessary to re-create the changegroup.
1614 # necessary to re-create the changegroup.
1624
1615
1625 # Known heads are the list of heads that it is assumed the recipient
1616 # Known heads are the list of heads that it is assumed the recipient
1626 # of this changegroup will know about.
1617 # of this changegroup will know about.
1627 knownheads = {}
1618 knownheads = {}
1628 # We assume that all parents of bases are known heads.
1619 # We assume that all parents of bases are known heads.
1629 for n in bases:
1620 for n in bases:
1630 for p in cl.parents(n):
1621 for p in cl.parents(n):
1631 if p != nullid:
1622 if p != nullid:
1632 knownheads[p] = 1
1623 knownheads[p] = 1
1633 knownheads = knownheads.keys()
1624 knownheads = knownheads.keys()
1634 if knownheads:
1625 if knownheads:
1635 # Now that we know what heads are known, we can compute which
1626 # Now that we know what heads are known, we can compute which
1636 # changesets are known. The recipient must know about all
1627 # changesets are known. The recipient must know about all
1637 # changesets required to reach the known heads from the null
1628 # changesets required to reach the known heads from the null
1638 # changeset.
1629 # changeset.
1639 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1630 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1640 junk = None
1631 junk = None
1641 # Transform the list into an ersatz set.
1632 # Transform the list into an ersatz set.
1642 has_cl_set = dict.fromkeys(has_cl_set)
1633 has_cl_set = dict.fromkeys(has_cl_set)
1643 else:
1634 else:
1644 # If there were no known heads, the recipient cannot be assumed to
1635 # If there were no known heads, the recipient cannot be assumed to
1645 # know about any changesets.
1636 # know about any changesets.
1646 has_cl_set = {}
1637 has_cl_set = {}
1647
1638
1648 # Make it easy to refer to self.manifest
1639 # Make it easy to refer to self.manifest
1649 mnfst = self.manifest
1640 mnfst = self.manifest
1650 # We don't know which manifests are missing yet
1641 # We don't know which manifests are missing yet
1651 msng_mnfst_set = {}
1642 msng_mnfst_set = {}
1652 # Nor do we know which filenodes are missing.
1643 # Nor do we know which filenodes are missing.
1653 msng_filenode_set = {}
1644 msng_filenode_set = {}
1654
1645
1655 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1646 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1656 junk = None
1647 junk = None
1657
1648
1658 # A changeset always belongs to itself, so the changenode lookup
1649 # A changeset always belongs to itself, so the changenode lookup
1659 # function for a changenode is identity.
1650 # function for a changenode is identity.
1660 def identity(x):
1651 def identity(x):
1661 return x
1652 return x
1662
1653
1663 # A function generating function. Sets up an environment for the
1654 # A function generating function. Sets up an environment for the
1664 # inner function.
1655 # inner function.
1665 def cmp_by_rev_func(revlog):
1656 def cmp_by_rev_func(revlog):
1666 # Compare two nodes by their revision number in the environment's
1657 # Compare two nodes by their revision number in the environment's
1667 # revision history. Since the revision number both represents the
1658 # revision history. Since the revision number both represents the
1668 # most efficient order to read the nodes in, and represents a
1659 # most efficient order to read the nodes in, and represents a
1669 # topological sorting of the nodes, this function is often useful.
1660 # topological sorting of the nodes, this function is often useful.
1670 def cmp_by_rev(a, b):
1661 def cmp_by_rev(a, b):
1671 return cmp(revlog.rev(a), revlog.rev(b))
1662 return cmp(revlog.rev(a), revlog.rev(b))
1672 return cmp_by_rev
1663 return cmp_by_rev
1673
1664
1674 # If we determine that a particular file or manifest node must be a
1665 # If we determine that a particular file or manifest node must be a
1675 # node that the recipient of the changegroup will already have, we can
1666 # node that the recipient of the changegroup will already have, we can
1676 # also assume the recipient will have all the parents. This function
1667 # also assume the recipient will have all the parents. This function
1677 # prunes them from the set of missing nodes.
1668 # prunes them from the set of missing nodes.
1678 def prune_parents(revlog, hasset, msngset):
1669 def prune_parents(revlog, hasset, msngset):
1679 haslst = hasset.keys()
1670 haslst = hasset.keys()
1680 haslst.sort(cmp_by_rev_func(revlog))
1671 haslst.sort(cmp_by_rev_func(revlog))
1681 for node in haslst:
1672 for node in haslst:
1682 parentlst = [p for p in revlog.parents(node) if p != nullid]
1673 parentlst = [p for p in revlog.parents(node) if p != nullid]
1683 while parentlst:
1674 while parentlst:
1684 n = parentlst.pop()
1675 n = parentlst.pop()
1685 if n not in hasset:
1676 if n not in hasset:
1686 hasset[n] = 1
1677 hasset[n] = 1
1687 p = [p for p in revlog.parents(n) if p != nullid]
1678 p = [p for p in revlog.parents(n) if p != nullid]
1688 parentlst.extend(p)
1679 parentlst.extend(p)
1689 for n in hasset:
1680 for n in hasset:
1690 msngset.pop(n, None)
1681 msngset.pop(n, None)
1691
1682
1692 # This is a function generating function used to set up an environment
1683 # This is a function generating function used to set up an environment
1693 # for the inner function to execute in.
1684 # for the inner function to execute in.
1694 def manifest_and_file_collector(changedfileset):
1685 def manifest_and_file_collector(changedfileset):
1695 # This is an information gathering function that gathers
1686 # This is an information gathering function that gathers
1696 # information from each changeset node that goes out as part of
1687 # information from each changeset node that goes out as part of
1697 # the changegroup. The information gathered is a list of which
1688 # the changegroup. The information gathered is a list of which
1698 # manifest nodes are potentially required (the recipient may
1689 # manifest nodes are potentially required (the recipient may
1699 # already have them) and total list of all files which were
1690 # already have them) and total list of all files which were
1700 # changed in any changeset in the changegroup.
1691 # changed in any changeset in the changegroup.
1701 #
1692 #
1702 # We also remember the first changenode we saw any manifest
1693 # We also remember the first changenode we saw any manifest
1703 # referenced by so we can later determine which changenode 'owns'
1694 # referenced by so we can later determine which changenode 'owns'
1704 # the manifest.
1695 # the manifest.
1705 def collect_manifests_and_files(clnode):
1696 def collect_manifests_and_files(clnode):
1706 c = cl.read(clnode)
1697 c = cl.read(clnode)
1707 for f in c[3]:
1698 for f in c[3]:
1708 # This is to make sure we only have one instance of each
1699 # This is to make sure we only have one instance of each
1709 # filename string for each filename.
1700 # filename string for each filename.
1710 changedfileset.setdefault(f, f)
1701 changedfileset.setdefault(f, f)
1711 msng_mnfst_set.setdefault(c[0], clnode)
1702 msng_mnfst_set.setdefault(c[0], clnode)
1712 return collect_manifests_and_files
1703 return collect_manifests_and_files
1713
1704
1714 # Figure out which manifest nodes (of the ones we think might be part
1705 # Figure out which manifest nodes (of the ones we think might be part
1715 # of the changegroup) the recipient must know about and remove them
1706 # of the changegroup) the recipient must know about and remove them
1716 # from the changegroup.
1707 # from the changegroup.
1717 def prune_manifests():
1708 def prune_manifests():
1718 has_mnfst_set = {}
1709 has_mnfst_set = {}
1719 for n in msng_mnfst_set:
1710 for n in msng_mnfst_set:
1720 # If a 'missing' manifest thinks it belongs to a changenode
1711 # If a 'missing' manifest thinks it belongs to a changenode
1721 # the recipient is assumed to have, obviously the recipient
1712 # the recipient is assumed to have, obviously the recipient
1722 # must have that manifest.
1713 # must have that manifest.
1723 linknode = cl.node(mnfst.linkrev(n))
1714 linknode = cl.node(mnfst.linkrev(n))
1724 if linknode in has_cl_set:
1715 if linknode in has_cl_set:
1725 has_mnfst_set[n] = 1
1716 has_mnfst_set[n] = 1
1726 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1717 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1727
1718
1728 # Use the information collected in collect_manifests_and_files to say
1719 # Use the information collected in collect_manifests_and_files to say
1729 # which changenode any manifestnode belongs to.
1720 # which changenode any manifestnode belongs to.
1730 def lookup_manifest_link(mnfstnode):
1721 def lookup_manifest_link(mnfstnode):
1731 return msng_mnfst_set[mnfstnode]
1722 return msng_mnfst_set[mnfstnode]
1732
1723
1733 # A function generating function that sets up the initial environment
1724 # A function generating function that sets up the initial environment
1734 # the inner function.
1725 # the inner function.
1735 def filenode_collector(changedfiles):
1726 def filenode_collector(changedfiles):
1736 next_rev = [0]
1727 next_rev = [0]
1737 # This gathers information from each manifestnode included in the
1728 # This gathers information from each manifestnode included in the
1738 # changegroup about which filenodes the manifest node references
1729 # changegroup about which filenodes the manifest node references
1739 # so we can include those in the changegroup too.
1730 # so we can include those in the changegroup too.
1740 #
1731 #
1741 # It also remembers which changenode each filenode belongs to. It
1732 # It also remembers which changenode each filenode belongs to. It
1742 # does this by assuming the a filenode belongs to the changenode
1733 # does this by assuming the a filenode belongs to the changenode
1743 # the first manifest that references it belongs to.
1734 # the first manifest that references it belongs to.
1744 def collect_msng_filenodes(mnfstnode):
1735 def collect_msng_filenodes(mnfstnode):
1745 r = mnfst.rev(mnfstnode)
1736 r = mnfst.rev(mnfstnode)
1746 if r == next_rev[0]:
1737 if r == next_rev[0]:
1747 # If the last rev we looked at was the one just previous,
1738 # If the last rev we looked at was the one just previous,
1748 # we only need to see a diff.
1739 # we only need to see a diff.
1749 deltamf = mnfst.readdelta(mnfstnode)
1740 deltamf = mnfst.readdelta(mnfstnode)
1750 # For each line in the delta
1741 # For each line in the delta
1751 for f, fnode in deltamf.items():
1742 for f, fnode in deltamf.items():
1752 f = changedfiles.get(f, None)
1743 f = changedfiles.get(f, None)
1753 # And if the file is in the list of files we care
1744 # And if the file is in the list of files we care
1754 # about.
1745 # about.
1755 if f is not None:
1746 if f is not None:
1756 # Get the changenode this manifest belongs to
1747 # Get the changenode this manifest belongs to
1757 clnode = msng_mnfst_set[mnfstnode]
1748 clnode = msng_mnfst_set[mnfstnode]
1758 # Create the set of filenodes for the file if
1749 # Create the set of filenodes for the file if
1759 # there isn't one already.
1750 # there isn't one already.
1760 ndset = msng_filenode_set.setdefault(f, {})
1751 ndset = msng_filenode_set.setdefault(f, {})
1761 # And set the filenode's changelog node to the
1752 # And set the filenode's changelog node to the
1762 # manifest's if it hasn't been set already.
1753 # manifest's if it hasn't been set already.
1763 ndset.setdefault(fnode, clnode)
1754 ndset.setdefault(fnode, clnode)
1764 else:
1755 else:
1765 # Otherwise we need a full manifest.
1756 # Otherwise we need a full manifest.
1766 m = mnfst.read(mnfstnode)
1757 m = mnfst.read(mnfstnode)
1767 # For every file in we care about.
1758 # For every file in we care about.
1768 for f in changedfiles:
1759 for f in changedfiles:
1769 fnode = m.get(f, None)
1760 fnode = m.get(f, None)
1770 # If it's in the manifest
1761 # If it's in the manifest
1771 if fnode is not None:
1762 if fnode is not None:
1772 # See comments above.
1763 # See comments above.
1773 clnode = msng_mnfst_set[mnfstnode]
1764 clnode = msng_mnfst_set[mnfstnode]
1774 ndset = msng_filenode_set.setdefault(f, {})
1765 ndset = msng_filenode_set.setdefault(f, {})
1775 ndset.setdefault(fnode, clnode)
1766 ndset.setdefault(fnode, clnode)
1776 # Remember the revision we hope to see next.
1767 # Remember the revision we hope to see next.
1777 next_rev[0] = r + 1
1768 next_rev[0] = r + 1
1778 return collect_msng_filenodes
1769 return collect_msng_filenodes
1779
1770
1780 # We have a list of filenodes we think we need for a file, lets remove
1771 # We have a list of filenodes we think we need for a file, lets remove
1781 # all those we now the recipient must have.
1772 # all those we now the recipient must have.
1782 def prune_filenodes(f, filerevlog):
1773 def prune_filenodes(f, filerevlog):
1783 msngset = msng_filenode_set[f]
1774 msngset = msng_filenode_set[f]
1784 hasset = {}
1775 hasset = {}
1785 # If a 'missing' filenode thinks it belongs to a changenode we
1776 # If a 'missing' filenode thinks it belongs to a changenode we
1786 # assume the recipient must have, then the recipient must have
1777 # assume the recipient must have, then the recipient must have
1787 # that filenode.
1778 # that filenode.
1788 for n in msngset:
1779 for n in msngset:
1789 clnode = cl.node(filerevlog.linkrev(n))
1780 clnode = cl.node(filerevlog.linkrev(n))
1790 if clnode in has_cl_set:
1781 if clnode in has_cl_set:
1791 hasset[n] = 1
1782 hasset[n] = 1
1792 prune_parents(filerevlog, hasset, msngset)
1783 prune_parents(filerevlog, hasset, msngset)
1793
1784
1794 # A function generator function that sets up the a context for the
1785 # A function generator function that sets up the a context for the
1795 # inner function.
1786 # inner function.
1796 def lookup_filenode_link_func(fname):
1787 def lookup_filenode_link_func(fname):
1797 msngset = msng_filenode_set[fname]
1788 msngset = msng_filenode_set[fname]
1798 # Lookup the changenode the filenode belongs to.
1789 # Lookup the changenode the filenode belongs to.
1799 def lookup_filenode_link(fnode):
1790 def lookup_filenode_link(fnode):
1800 return msngset[fnode]
1791 return msngset[fnode]
1801 return lookup_filenode_link
1792 return lookup_filenode_link
1802
1793
1803 # Add the nodes that were explicitly requested.
1794 # Add the nodes that were explicitly requested.
1804 def add_extra_nodes(name, nodes):
1795 def add_extra_nodes(name, nodes):
1805 if not extranodes or name not in extranodes:
1796 if not extranodes or name not in extranodes:
1806 return
1797 return
1807
1798
1808 for node, linknode in extranodes[name]:
1799 for node, linknode in extranodes[name]:
1809 if node not in nodes:
1800 if node not in nodes:
1810 nodes[node] = linknode
1801 nodes[node] = linknode
1811
1802
1812 # Now that we have all theses utility functions to help out and
1803 # Now that we have all theses utility functions to help out and
1813 # logically divide up the task, generate the group.
1804 # logically divide up the task, generate the group.
1814 def gengroup():
1805 def gengroup():
1815 # The set of changed files starts empty.
1806 # The set of changed files starts empty.
1816 changedfiles = {}
1807 changedfiles = {}
1817 # Create a changenode group generator that will call our functions
1808 # Create a changenode group generator that will call our functions
1818 # back to lookup the owning changenode and collect information.
1809 # back to lookup the owning changenode and collect information.
1819 group = cl.group(msng_cl_lst, identity,
1810 group = cl.group(msng_cl_lst, identity,
1820 manifest_and_file_collector(changedfiles))
1811 manifest_and_file_collector(changedfiles))
1821 for chnk in group:
1812 for chnk in group:
1822 yield chnk
1813 yield chnk
1823
1814
1824 # The list of manifests has been collected by the generator
1815 # The list of manifests has been collected by the generator
1825 # calling our functions back.
1816 # calling our functions back.
1826 prune_manifests()
1817 prune_manifests()
1827 add_extra_nodes(1, msng_mnfst_set)
1818 add_extra_nodes(1, msng_mnfst_set)
1828 msng_mnfst_lst = msng_mnfst_set.keys()
1819 msng_mnfst_lst = msng_mnfst_set.keys()
1829 # Sort the manifestnodes by revision number.
1820 # Sort the manifestnodes by revision number.
1830 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1821 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1831 # Create a generator for the manifestnodes that calls our lookup
1822 # Create a generator for the manifestnodes that calls our lookup
1832 # and data collection functions back.
1823 # and data collection functions back.
1833 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1824 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1834 filenode_collector(changedfiles))
1825 filenode_collector(changedfiles))
1835 for chnk in group:
1826 for chnk in group:
1836 yield chnk
1827 yield chnk
1837
1828
1838 # These are no longer needed, dereference and toss the memory for
1829 # These are no longer needed, dereference and toss the memory for
1839 # them.
1830 # them.
1840 msng_mnfst_lst = None
1831 msng_mnfst_lst = None
1841 msng_mnfst_set.clear()
1832 msng_mnfst_set.clear()
1842
1833
1843 if extranodes:
1834 if extranodes:
1844 for fname in extranodes:
1835 for fname in extranodes:
1845 if isinstance(fname, int):
1836 if isinstance(fname, int):
1846 continue
1837 continue
1847 add_extra_nodes(fname,
1838 add_extra_nodes(fname,
1848 msng_filenode_set.setdefault(fname, {}))
1839 msng_filenode_set.setdefault(fname, {}))
1849 changedfiles[fname] = 1
1840 changedfiles[fname] = 1
1850 changedfiles = changedfiles.keys()
1841 changedfiles = changedfiles.keys()
1851 changedfiles.sort()
1842 changedfiles.sort()
1852 # Go through all our files in order sorted by name.
1843 # Go through all our files in order sorted by name.
1853 for fname in changedfiles:
1844 for fname in changedfiles:
1854 filerevlog = self.file(fname)
1845 filerevlog = self.file(fname)
1855 if filerevlog.count() == 0:
1846 if filerevlog.count() == 0:
1856 raise util.Abort(_("empty or missing revlog for %s") % fname)
1847 raise util.Abort(_("empty or missing revlog for %s") % fname)
1857 # Toss out the filenodes that the recipient isn't really
1848 # Toss out the filenodes that the recipient isn't really
1858 # missing.
1849 # missing.
1859 if fname in msng_filenode_set:
1850 if fname in msng_filenode_set:
1860 prune_filenodes(fname, filerevlog)
1851 prune_filenodes(fname, filerevlog)
1861 msng_filenode_lst = msng_filenode_set[fname].keys()
1852 msng_filenode_lst = msng_filenode_set[fname].keys()
1862 else:
1853 else:
1863 msng_filenode_lst = []
1854 msng_filenode_lst = []
1864 # If any filenodes are left, generate the group for them,
1855 # If any filenodes are left, generate the group for them,
1865 # otherwise don't bother.
1856 # otherwise don't bother.
1866 if len(msng_filenode_lst) > 0:
1857 if len(msng_filenode_lst) > 0:
1867 yield changegroup.chunkheader(len(fname))
1858 yield changegroup.chunkheader(len(fname))
1868 yield fname
1859 yield fname
1869 # Sort the filenodes by their revision #
1860 # Sort the filenodes by their revision #
1870 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1861 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1871 # Create a group generator and only pass in a changenode
1862 # Create a group generator and only pass in a changenode
1872 # lookup function as we need to collect no information
1863 # lookup function as we need to collect no information
1873 # from filenodes.
1864 # from filenodes.
1874 group = filerevlog.group(msng_filenode_lst,
1865 group = filerevlog.group(msng_filenode_lst,
1875 lookup_filenode_link_func(fname))
1866 lookup_filenode_link_func(fname))
1876 for chnk in group:
1867 for chnk in group:
1877 yield chnk
1868 yield chnk
1878 if fname in msng_filenode_set:
1869 if fname in msng_filenode_set:
1879 # Don't need this anymore, toss it to free memory.
1870 # Don't need this anymore, toss it to free memory.
1880 del msng_filenode_set[fname]
1871 del msng_filenode_set[fname]
1881 # Signal that no more groups are left.
1872 # Signal that no more groups are left.
1882 yield changegroup.closechunk()
1873 yield changegroup.closechunk()
1883
1874
1884 if msng_cl_lst:
1875 if msng_cl_lst:
1885 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1876 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1886
1877
1887 return util.chunkbuffer(gengroup())
1878 return util.chunkbuffer(gengroup())
1888
1879
1889 def changegroup(self, basenodes, source):
1880 def changegroup(self, basenodes, source):
1890 """Generate a changegroup of all nodes that we have that a recipient
1881 """Generate a changegroup of all nodes that we have that a recipient
1891 doesn't.
1882 doesn't.
1892
1883
1893 This is much easier than the previous function as we can assume that
1884 This is much easier than the previous function as we can assume that
1894 the recipient has any changenode we aren't sending them."""
1885 the recipient has any changenode we aren't sending them."""
1895
1886
1896 self.hook('preoutgoing', throw=True, source=source)
1887 self.hook('preoutgoing', throw=True, source=source)
1897
1888
1898 cl = self.changelog
1889 cl = self.changelog
1899 nodes = cl.nodesbetween(basenodes, None)[0]
1890 nodes = cl.nodesbetween(basenodes, None)[0]
1900 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1891 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1901 self.changegroupinfo(nodes, source)
1892 self.changegroupinfo(nodes, source)
1902
1893
1903 def identity(x):
1894 def identity(x):
1904 return x
1895 return x
1905
1896
1906 def gennodelst(revlog):
1897 def gennodelst(revlog):
1907 for r in xrange(0, revlog.count()):
1898 for r in xrange(0, revlog.count()):
1908 n = revlog.node(r)
1899 n = revlog.node(r)
1909 if revlog.linkrev(n) in revset:
1900 if revlog.linkrev(n) in revset:
1910 yield n
1901 yield n
1911
1902
1912 def changed_file_collector(changedfileset):
1903 def changed_file_collector(changedfileset):
1913 def collect_changed_files(clnode):
1904 def collect_changed_files(clnode):
1914 c = cl.read(clnode)
1905 c = cl.read(clnode)
1915 for fname in c[3]:
1906 for fname in c[3]:
1916 changedfileset[fname] = 1
1907 changedfileset[fname] = 1
1917 return collect_changed_files
1908 return collect_changed_files
1918
1909
1919 def lookuprevlink_func(revlog):
1910 def lookuprevlink_func(revlog):
1920 def lookuprevlink(n):
1911 def lookuprevlink(n):
1921 return cl.node(revlog.linkrev(n))
1912 return cl.node(revlog.linkrev(n))
1922 return lookuprevlink
1913 return lookuprevlink
1923
1914
1924 def gengroup():
1915 def gengroup():
1925 # construct a list of all changed files
1916 # construct a list of all changed files
1926 changedfiles = {}
1917 changedfiles = {}
1927
1918
1928 for chnk in cl.group(nodes, identity,
1919 for chnk in cl.group(nodes, identity,
1929 changed_file_collector(changedfiles)):
1920 changed_file_collector(changedfiles)):
1930 yield chnk
1921 yield chnk
1931 changedfiles = changedfiles.keys()
1922 changedfiles = changedfiles.keys()
1932 changedfiles.sort()
1923 changedfiles.sort()
1933
1924
1934 mnfst = self.manifest
1925 mnfst = self.manifest
1935 nodeiter = gennodelst(mnfst)
1926 nodeiter = gennodelst(mnfst)
1936 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1927 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1937 yield chnk
1928 yield chnk
1938
1929
1939 for fname in changedfiles:
1930 for fname in changedfiles:
1940 filerevlog = self.file(fname)
1931 filerevlog = self.file(fname)
1941 if filerevlog.count() == 0:
1932 if filerevlog.count() == 0:
1942 raise util.Abort(_("empty or missing revlog for %s") % fname)
1933 raise util.Abort(_("empty or missing revlog for %s") % fname)
1943 nodeiter = gennodelst(filerevlog)
1934 nodeiter = gennodelst(filerevlog)
1944 nodeiter = list(nodeiter)
1935 nodeiter = list(nodeiter)
1945 if nodeiter:
1936 if nodeiter:
1946 yield changegroup.chunkheader(len(fname))
1937 yield changegroup.chunkheader(len(fname))
1947 yield fname
1938 yield fname
1948 lookup = lookuprevlink_func(filerevlog)
1939 lookup = lookuprevlink_func(filerevlog)
1949 for chnk in filerevlog.group(nodeiter, lookup):
1940 for chnk in filerevlog.group(nodeiter, lookup):
1950 yield chnk
1941 yield chnk
1951
1942
1952 yield changegroup.closechunk()
1943 yield changegroup.closechunk()
1953
1944
1954 if nodes:
1945 if nodes:
1955 self.hook('outgoing', node=hex(nodes[0]), source=source)
1946 self.hook('outgoing', node=hex(nodes[0]), source=source)
1956
1947
1957 return util.chunkbuffer(gengroup())
1948 return util.chunkbuffer(gengroup())
1958
1949
    def addchangegroup(self, source, srctype, url, emptyok=False):
        """add changegroup to repo.

        source: a chunked stream as produced by changegroup()/
                changegroupsubset()
        srctype: origin tag ('push', 'pull', ...) passed on to hooks
        url: origin URL, also passed on to hooks
        emptyok: tolerate an empty changelog group (used by rollback-style
                 callers)

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            # Log each incoming changeset; its link value is the changelog
            # slot it is about to occupy.
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            # Translate a changelog node into its revision number.
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        tr = self.transaction()
        try:
            # A weak proxy keeps the revlogs from pinning the transaction
            # alive, so its destructor (rollback on error) can still run.
            trp = weakref.proxy(tr)
            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, trp)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                # An empty chunk marks the end of the file groups.
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, trp) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            # make changelog see real files again
            cl.finalize(trp)

            newheads = len(self.changelog.heads())
            heads = ""
            if oldheads and newheads != oldheads:
                heads = _(" (%+d heads)") % (newheads - oldheads)

            self.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                           % (changesets, revisions, files, heads))

            if changesets > 0:
                # cor+1 is the first newly added revision.
                self.hook('pretxnchangegroup', throw=True,
                          node=hex(self.changelog.node(cor+1)), source=srctype,
                          url=url)

            tr.close()
        finally:
            # Drop the reference so the transaction destructor can fire
            # (rolls back if tr.close() was never reached).
            del tr

        if changesets > 0:
            # forcefully update the on-disk branch cache
            self.ui.debug(_("updating the branch cache\n"))
            self.branchtags()
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
2061
2052
2062
2053
2063 def stream_in(self, remote):
2054 def stream_in(self, remote):
2064 fp = remote.stream_out()
2055 fp = remote.stream_out()
2065 l = fp.readline()
2056 l = fp.readline()
2066 try:
2057 try:
2067 resp = int(l)
2058 resp = int(l)
2068 except ValueError:
2059 except ValueError:
2069 raise util.UnexpectedOutput(
2060 raise util.UnexpectedOutput(
2070 _('Unexpected response from remote server:'), l)
2061 _('Unexpected response from remote server:'), l)
2071 if resp == 1:
2062 if resp == 1:
2072 raise util.Abort(_('operation forbidden by server'))
2063 raise util.Abort(_('operation forbidden by server'))
2073 elif resp == 2:
2064 elif resp == 2:
2074 raise util.Abort(_('locking the remote repository failed'))
2065 raise util.Abort(_('locking the remote repository failed'))
2075 elif resp != 0:
2066 elif resp != 0:
2076 raise util.Abort(_('the server sent an unknown error code'))
2067 raise util.Abort(_('the server sent an unknown error code'))
2077 self.ui.status(_('streaming all changes\n'))
2068 self.ui.status(_('streaming all changes\n'))
2078 l = fp.readline()
2069 l = fp.readline()
2079 try:
2070 try:
2080 total_files, total_bytes = map(int, l.split(' ', 1))
2071 total_files, total_bytes = map(int, l.split(' ', 1))
2081 except (ValueError, TypeError):
2072 except (ValueError, TypeError):
2082 raise util.UnexpectedOutput(
2073 raise util.UnexpectedOutput(
2083 _('Unexpected response from remote server:'), l)
2074 _('Unexpected response from remote server:'), l)
2084 self.ui.status(_('%d files to transfer, %s of data\n') %
2075 self.ui.status(_('%d files to transfer, %s of data\n') %
2085 (total_files, util.bytecount(total_bytes)))
2076 (total_files, util.bytecount(total_bytes)))
2086 start = time.time()
2077 start = time.time()
2087 for i in xrange(total_files):
2078 for i in xrange(total_files):
2088 # XXX doesn't support '\n' or '\r' in filenames
2079 # XXX doesn't support '\n' or '\r' in filenames
2089 l = fp.readline()
2080 l = fp.readline()
2090 try:
2081 try:
2091 name, size = l.split('\0', 1)
2082 name, size = l.split('\0', 1)
2092 size = int(size)
2083 size = int(size)
2093 except ValueError, TypeError:
2084 except ValueError, TypeError:
2094 raise util.UnexpectedOutput(
2085 raise util.UnexpectedOutput(
2095 _('Unexpected response from remote server:'), l)
2086 _('Unexpected response from remote server:'), l)
2096 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2087 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2097 ofp = self.sopener(name, 'w')
2088 ofp = self.sopener(name, 'w')
2098 for chunk in util.filechunkiter(fp, limit=size):
2089 for chunk in util.filechunkiter(fp, limit=size):
2099 ofp.write(chunk)
2090 ofp.write(chunk)
2100 ofp.close()
2091 ofp.close()
2101 elapsed = time.time() - start
2092 elapsed = time.time() - start
2102 if elapsed <= 0:
2093 if elapsed <= 0:
2103 elapsed = 0.001
2094 elapsed = 0.001
2104 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2095 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2105 (util.bytecount(total_bytes), elapsed,
2096 (util.bytecount(total_bytes), elapsed,
2106 util.bytecount(total_bytes / elapsed)))
2097 util.bytecount(total_bytes / elapsed)))
2107 self.invalidate()
2098 self.invalidate()
2108 return len(self.heads()) + 1
2099 return len(self.heads()) + 1
2109
2100
2110 def clone(self, remote, heads=[], stream=False):
2101 def clone(self, remote, heads=[], stream=False):
2111 '''clone remote repository.
2102 '''clone remote repository.
2112
2103
2113 keyword arguments:
2104 keyword arguments:
2114 heads: list of revs to clone (forces use of pull)
2105 heads: list of revs to clone (forces use of pull)
2115 stream: use streaming clone if possible'''
2106 stream: use streaming clone if possible'''
2116
2107
2117 # now, all clients that can request uncompressed clones can
2108 # now, all clients that can request uncompressed clones can
2118 # read repo formats supported by all servers that can serve
2109 # read repo formats supported by all servers that can serve
2119 # them.
2110 # them.
2120
2111
2121 # if revlog format changes, client will have to check version
2112 # if revlog format changes, client will have to check version
2122 # and format flags on "stream" capability, and use
2113 # and format flags on "stream" capability, and use
2123 # uncompressed only if compatible.
2114 # uncompressed only if compatible.
2124
2115
2125 if stream and not heads and remote.capable('stream'):
2116 if stream and not heads and remote.capable('stream'):
2126 return self.stream_in(remote)
2117 return self.stream_in(remote)
2127 return self.pull(remote, heads)
2118 return self.pull(remote, heads)
2128
2119
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames each (src, dest) pair in *files*.

    The pairs are copied into fresh tuples eagerly, so the returned
    closure holds no reference back to the caller's list.
    """
    renamefiles = [tuple(pair) for pair in files]

    def renamer():
        for src, dest in renamefiles:
            util.rename(src, dest)
    return renamer
2136
2127
def instance(ui, path, create):
    """Open (or create) the local repository at *path*."""
    # Strip an optional 'file' scheme prefix before handing the path to
    # the repository constructor.
    repopath = util.drop_scheme('file', path)
    return localrepository(ui, repopath, create)
2139
2130
def islocal(path):
    """Repositories handled by this module are always local."""
    return True
General Comments 0
You need to be logged in to leave comments. Login now