cleanup: prefer matchmod.{always,never}() over accessing matchers directly...
Martin von Zweigbergk
r41822:1db5ae4b default
@@ -1,1418 +1,1418 @@
# changegroup.py - Mercurial changegroup manipulation functions
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import os
import struct
import weakref

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)

from . import (
    error,
    match as matchmod,
    mdiff,
    phases,
    pycompat,
    repository,
    util,
)

_CHANGEGROUPV1_DELTA_HEADER = struct.Struct("20s20s20s20s")
_CHANGEGROUPV2_DELTA_HEADER = struct.Struct("20s20s20s20s20s")
_CHANGEGROUPV3_DELTA_HEADER = struct.Struct(">20s20s20s20s20sH")

LFS_REQUIREMENT = 'lfs'

readexactly = util.readexactly

def getchunk(stream):
    """return the next chunk from stream as a string"""
    d = readexactly(stream, 4)
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        if l:
            raise error.Abort(_("invalid chunk length %d") % l)
        return ""
    return readexactly(stream, l - 4)

def chunkheader(length):
    """return a changegroup chunk header (string)"""
    return struct.pack(">l", length + 4)

def closechunk():
    """return a changegroup chunk header (string) for a zero-length chunk"""
    return struct.pack(">l", 0)
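
def _examplechunkroundtrip():
    # Hypothetical helper, for illustration only (not part of the
    # original module): round-trip one payload through the framing
    # above. The 4-byte big-endian length counts itself, and a zero
    # length marks the chunk that terminates a section.
    import io
    stream = io.BytesIO(chunkheader(5) + b'hello' + closechunk())
    payload = getchunk(stream)     # -> b'hello'
    terminator = getchunk(stream)  # -> "": end of this section
    return payload, terminator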

def _fileheader(path):
    """Obtain a changegroup chunk header for a named path."""
    return chunkheader(len(path)) + path

def writechunks(ui, chunks, filename, vfs=None):
    """Write chunks to a file and return its filename.

    The stream is assumed to be a bundle file.
    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    """
    fh = None
    cleanup = None
    try:
        if filename:
            if vfs:
                fh = vfs.open(filename, "wb")
            else:
                # Increase default buffer size because default is usually
                # small (4k is common on Linux).
                fh = open(filename, "wb", 131072)
        else:
            fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, r"wb")
        cleanup = filename
        for c in chunks:
            fh.write(c)
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            if filename and vfs:
                vfs.unlink(cleanup)
            else:
                os.unlink(cleanup)
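
def _examplewritebundle(ui):
    # Hypothetical usage sketch, for illustration only: persist a
    # (trivially small) chunk stream. With filename=None a temporary
    # 'hg-bundle-*.hg' file is created and its path is returned; if
    # writing fails, the partial file is unlinked by writechunks().
    chunks = [chunkheader(4) + b'data', closechunk()]
    return writechunks(ui, chunks, None)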

class cg1unpacker(object):
    """Unpacker for cg1 changegroup streams.

    A changegroup unpacker handles the framing of the revision data in
    the wire format. Most consumers will want to use the apply()
    method to add the changes from the changegroup to a repository.

    If you're forwarding a changegroup unmodified to another consumer,
    use getchunks(), which returns an iterator of changegroup
    chunks. This is mostly useful for cases where you need to know the
    data stream has ended by observing the end of the changegroup.

    deltachunk() is useful only if you're applying delta data. Most
    consumers should prefer apply() instead.

    A few other public methods exist. Those are used only for
    bundlerepo and some debug commands - their use is discouraged.
    """
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    deltaheadersize = deltaheader.size
    version = '01'
    _grouplistcount = 1 # One list of files after the manifests

    def __init__(self, fh, alg, extras=None):
        if alg is None:
            alg = 'UN'
        if alg not in util.compengines.supportedbundletypes:
            raise error.Abort(_('unknown stream compression type: %s')
                              % alg)
        if alg == 'BZ':
            alg = '_truncatedBZ'

        compengine = util.compengines.forbundletype(alg)
        self._stream = compengine.decompressorreader(fh)
        self._type = alg
        self.extras = extras or {}
        self.callback = None

    # These methods (compressed, read, seek, tell) all appear to only
    # be used by bundlerepo, but it's a little hard to tell.
    def compressed(self):
        return self._type is not None and self._type != 'UN'
    def read(self, l):
        return self._stream.read(l)
    def seek(self, pos):
        return self._stream.seek(pos)
    def tell(self):
        return self._stream.tell()
    def close(self):
        return self._stream.close()

    def _chunklength(self):
        d = readexactly(self._stream, 4)
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            if l:
                raise error.Abort(_("invalid chunk length %d") % l)
            return 0
        if self.callback:
            self.callback()
        return l - 4

    def changelogheader(self):
        """v10 does not have a changelog header chunk"""
        return {}

    def manifestheader(self):
        """v10 does not have a manifest header chunk"""
        return {}

    def filelogheader(self):
        """return the header of the filelogs chunk, v10 only has the filename"""
        l = self._chunklength()
        if not l:
            return {}
        fname = readexactly(self._stream, l)
        return {'filename': fname}

    def _deltaheader(self, headertuple, prevnode):
        node, p1, p2, cs = headertuple
        if prevnode is None:
            deltabase = p1
        else:
            deltabase = prevnode
        flags = 0
        return node, p1, p2, deltabase, cs, flags

    def deltachunk(self, prevnode):
        l = self._chunklength()
        if not l:
            return {}
        headerdata = readexactly(self._stream, self.deltaheadersize)
        header = self.deltaheader.unpack(headerdata)
        delta = readexactly(self._stream, l - self.deltaheadersize)
        node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
        return (node, p1, p2, cs, deltabase, delta, flags)
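
    def _exampleheaderroundtrip(self):
        # Hypothetical helper, for illustration only (not part of the
        # original class): a cg1 delta header is four 20-byte nodes
        # (node, p1, p2, linknode). The delta base is implicit: p1 for
        # the first chunk of a group, the previous node after that.
        node, p1, p2, cs = b'n' * 20, b'a' * 20, b'b' * 20, b'c' * 20
        packed = _CHANGEGROUPV1_DELTA_HEADER.pack(node, p1, p2, cs)
        assert len(packed) == self.deltaheadersize == 80
        # With no previous node, the delta base resolves to p1.
        return self._deltaheader(_CHANGEGROUPV1_DELTA_HEADER.unpack(packed),
                                 None)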

    def getchunks(self):
194 """returns all the chunks contains in the bundle
194 """returns all the chunks contains in the bundle
195
195
196 Used when you need to forward the binary stream to a file or another
196 Used when you need to forward the binary stream to a file or another
197 network API. To do so, it parse the changegroup data, otherwise it will
197 network API. To do so, it parse the changegroup data, otherwise it will
198 block in case of sshrepo because it don't know the end of the stream.
198 block in case of sshrepo because it don't know the end of the stream.
199 """
199 """
        # For changegroup 1 and 2, we expect 3 parts: changelog, manifestlog,
        # and a list of filelogs. For changegroup 3, we expect 4 parts:
        # changelog, manifestlog, a list of tree manifestlogs, and a list of
        # filelogs.
        #
        # Changelog and manifestlog parts are terminated with empty chunks. The
        # tree and file parts are a list of entry sections. Each entry section
        # is a series of chunks terminating in an empty chunk. The list of these
        # entry sections is terminated in yet another empty chunk, so we know
        # we've reached the end of the tree/file list when we reach an empty
        # chunk that was preceded by no non-empty chunks.

        parts = 0
        while parts < 2 + self._grouplistcount:
            noentries = True
            while True:
                chunk = getchunk(self)
                if not chunk:
                    # The first two empty chunks represent the end of the
                    # changelog and the manifestlog portions. The remaining
                    # empty chunks represent either A) the end of individual
                    # tree or file entries in the file list, or B) the end of
                    # the entire list. It's the end of the entire list if there
                    # were no entries (i.e. noentries is True).
                    if parts < 2:
                        parts += 1
                    elif noentries:
                        parts += 1
                    break
                noentries = False
                yield chunkheader(len(chunk))
                pos = 0
                while pos < len(chunk):
                    next = pos + 2**20
                    yield chunk[pos:next]
                    pos = next
            yield closechunk()
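
    # Hypothetical usage sketch for getchunks(), for illustration only:
    # forwarding a bundle unmodified into a file or another peer:
    #
    #     for chunk in unpacker.getchunks():
    #         fh.write(chunk)
    #
    # Because getchunks() parses the framing itself, it knows where the
    # stream ends and never blocks waiting for more input.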

    def _unpackmanifests(self, repo, revmap, trp, prog):
        self.callback = prog.increment
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifestheader()
        deltas = self.deltaiter()
        repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp)
        prog.complete()
        self.callback = None

    def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
              expectedtotal=None):
        """Add the changegroup returned by source.read() to this repo.
        srctype is a string like 'push', 'pull', or 'unbundle'. url is
        the URL of the repo where this changegroup is coming from.

        Return an integer summarizing the change to this repo:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        repo = repo.unfiltered()
        def csmap(x):
            repo.ui.debug("add changeset %s\n" % short(x))
            return len(cl)

        def revmap(x):
            return cl.rev(x)

        changesets = files = revisions = 0

        try:
            # The transaction may already carry source information. In this
            # case we use the top level data. We overwrite the argument
            # because we need to use the top level value (if they exist)
            # in this function.
            srctype = tr.hookargs.setdefault('source', srctype)
            tr.hookargs.setdefault('url', url)
            repo.hook('prechangegroup',
                      throw=True, **pycompat.strkwargs(tr.hookargs))

            # write changelog data to temp files so concurrent readers
            # will not see an inconsistent view
            cl = repo.changelog
            cl.delayupdate(tr)
            oldheads = set(cl.heads())

            trp = weakref.proxy(tr)
            # pull off the changeset group
            repo.ui.status(_("adding changesets\n"))
            clstart = len(cl)
            progress = repo.ui.makeprogress(_('changesets'), unit=_('chunks'),
                                            total=expectedtotal)
            self.callback = progress.increment

            efiles = set()
            def onchangelog(cl, node):
                efiles.update(cl.readfiles(node))

            self.changelogheader()
            deltas = self.deltaiter()
            cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog)
            efiles = len(efiles)

            if not cgnodes:
                repo.ui.develwarn('applied empty changelog from changegroup',
                                  config='warn-empty-changegroup')
            clend = len(cl)
            changesets = clend - clstart
            progress.complete()
            self.callback = None

            # pull off the manifest group
            repo.ui.status(_("adding manifests\n"))
            # We know that we'll never have more manifests than we had
            # changesets.
            progress = repo.ui.makeprogress(_('manifests'), unit=_('chunks'),
                                            total=changesets)
            self._unpackmanifests(repo, revmap, trp, progress)

            needfiles = {}
            if repo.ui.configbool('server', 'validate'):
                cl = repo.changelog
                ml = repo.manifestlog
                # validate incoming csets have their manifests
                for cset in pycompat.xrange(clstart, clend):
                    mfnode = cl.changelogrevision(cset).manifest
                    mfest = ml[mfnode].readdelta()
                    # store file cgnodes we must see
                    for f, n in mfest.iteritems():
                        needfiles.setdefault(f, set()).add(n)

            # process the files
            repo.ui.status(_("adding file changes\n"))
            newrevs, newfiles = _addchangegroupfiles(
                repo, self, revmap, trp, efiles, needfiles)
            revisions += newrevs
            files += newfiles

            deltaheads = 0
            if oldheads:
                heads = cl.heads()
                deltaheads = len(heads) - len(oldheads)
                for h in heads:
                    if h not in oldheads and repo[h].closesbranch():
                        deltaheads -= 1
            htext = ""
            if deltaheads:
                htext = _(" (%+d heads)") % deltaheads

            repo.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                           % (changesets, revisions, files, htext))
            repo.invalidatevolatilesets()

            if changesets > 0:
                if 'node' not in tr.hookargs:
                    tr.hookargs['node'] = hex(cl.node(clstart))
                    tr.hookargs['node_last'] = hex(cl.node(clend - 1))
                    hookargs = dict(tr.hookargs)
                else:
                    hookargs = dict(tr.hookargs)
                    hookargs['node'] = hex(cl.node(clstart))
                    hookargs['node_last'] = hex(cl.node(clend - 1))
                repo.hook('pretxnchangegroup',
                          throw=True, **pycompat.strkwargs(hookargs))

            added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
            phaseall = None
            if srctype in ('push', 'serve'):
                # Old servers cannot push the boundary themselves.
                # New servers won't push the boundary if the changeset
                # already exists locally as secret
                #
                # We should not use added here but the list of all changes in
                # the bundle
                if repo.publishing():
                    targetphase = phaseall = phases.public
                else:
                    # closer target phase computation

                    # Those changesets have been pushed from the
                    # outside, their phases are going to be pushed
                    # alongside. Therefore `targetphase` is
                    # ignored.
                    targetphase = phaseall = phases.draft
            if added:
                phases.registernew(repo, tr, targetphase, added)
            if phaseall is not None:
                phases.advanceboundary(repo, tr, phaseall, cgnodes)

            if changesets > 0:

                def runhooks():
                    # These hooks run when the lock releases, not when the
                    # transaction closes. So it's possible for the changelog
                    # to have changed since we last saw it.
                    if clstart >= len(repo):
                        return

                    repo.hook("changegroup", **pycompat.strkwargs(hookargs))

                    for n in added:
                        args = hookargs.copy()
                        args['node'] = hex(n)
                        del args['node_last']
                        repo.hook("incoming", **pycompat.strkwargs(args))

                    newheads = [h for h in repo.heads()
                                if h not in oldheads]
                    repo.ui.log("incoming",
                                "%d incoming changes - new heads: %s\n",
                                len(added),
                                ', '.join([hex(c[:6]) for c in newheads]))

                tr.addpostclose('changegroup-runhooks-%020i' % clstart,
                                lambda tr: repo._afterlock(runhooks))
        finally:
            repo.ui.flush()
        # never return 0 here:
        if deltaheads < 0:
            ret = deltaheads - 1
        else:
            ret = deltaheads + 1
        return ret

    def deltaiter(self):
        """
        returns an iterator of the deltas in this changegroup

        Useful for passing to the underlying storage system to be stored.
        """
        chain = None
        for chunkdata in iter(lambda: self.deltachunk(chain), {}):
            # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags)
            yield chunkdata
            chain = chunkdata[0]
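
def _examplelistdeltas(unpacker):
    # Hypothetical sketch, for illustration only: walk the delta stream
    # of one section without applying it. Each chunk is a
    # (node, p1, p2, linknode, deltabase, delta, flags) tuple, and the
    # iterator stops at the empty chunk that closes the section.
    nodes = []
    for node, p1, p2, cs, deltabase, delta, flags in unpacker.deltaiter():
        nodes.append(hex(node))
    return nodes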

class cg2unpacker(cg1unpacker):
    """Unpacker for cg2 streams.

    cg2 streams add support for generaldelta, so the delta header
    format is slightly different. All other features about the data
    remain the same.
    """
    deltaheader = _CHANGEGROUPV2_DELTA_HEADER
    deltaheadersize = deltaheader.size
    version = '02'

    def _deltaheader(self, headertuple, prevnode):
        node, p1, p2, deltabase, cs = headertuple
        flags = 0
        return node, p1, p2, deltabase, cs, flags

class cg3unpacker(cg2unpacker):
    """Unpacker for cg3 streams.

    cg3 streams add support for exchanging treemanifests and revlog
    flags. It adds the revlog flags to the delta header and an empty chunk
    separating manifests and files.
    """
    deltaheader = _CHANGEGROUPV3_DELTA_HEADER
    deltaheadersize = deltaheader.size
    version = '03'
    _grouplistcount = 2 # One list of manifests and one list of files

    def _deltaheader(self, headertuple, prevnode):
        node, p1, p2, deltabase, cs, flags = headertuple
        return node, p1, p2, deltabase, cs, flags

    def _unpackmanifests(self, repo, revmap, trp, prog):
        super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog)
        for chunkdata in iter(self.filelogheader, {}):
            # If we get here, there are directory manifests in the changegroup
            d = chunkdata["filename"]
            repo.ui.debug("adding %s revisions\n" % d)
            deltas = self.deltaiter()
            if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
                raise error.Abort(_("received dir revlog group is empty"))

class headerlessfixup(object):
    def __init__(self, fh, h):
        self._h = h
        self._fh = fh
    def read(self, n):
        if self._h:
            d, self._h = self._h[:n], self._h[n:]
            if len(d) < n:
                d += readexactly(self._fh, n - len(d))
            return d
        return readexactly(self._fh, n)
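
def _examplereattachheader(fh):
    # Hypothetical usage sketch, for illustration only: a caller that
    # has already consumed the leading bytes of a stream (say, to sniff
    # the compression header) can glue them back on so downstream code
    # sees a complete stream again.
    header = readexactly(fh, 4)
    return headerlessfixup(fh, header)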

def _revisiondeltatochunks(delta, headerfn):
    """Serialize a revisiondelta to changegroup chunks."""

    # The captured revision delta may be encoded as a delta against
    # a base revision or as a full revision. The changegroup format
    # requires that everything on the wire be deltas. So for full
    # revisions, we need to invent a header that says to rewrite
    # data.

    if delta.delta is not None:
        prefix, data = b'', delta.delta
    elif delta.basenode == nullid:
        data = delta.revision
        prefix = mdiff.trivialdiffheader(len(data))
    else:
        data = delta.revision
        prefix = mdiff.replacediffheader(delta.baserevisionsize,
                                         len(data))

    meta = headerfn(delta)

    yield chunkheader(len(meta) + len(prefix) + len(data))
    yield meta
    if prefix:
        yield prefix
    yield data
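
def _examplefullrevisionasdelta(data):
    # Hypothetical sketch, for illustration only: a full revision is
    # shipped as a delta against the null revision, i.e. a single hunk
    # replacing the empty range [0, 0) with the whole text. That is
    # exactly what mdiff.trivialdiffheader() encodes.
    prefix = mdiff.trivialdiffheader(len(data))
    return prefix + data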

def _sortnodesellipsis(store, nodes, cl, lookup):
    """Sort nodes for changegroup generation."""
    # Ellipses serving mode.
    #
    # In a perfect world, we'd generate better ellipsis-ified graphs
    # for non-changelog revlogs. In practice, we haven't started doing
    # that yet, so the resulting DAGs for the manifestlog and filelogs
    # are actually full of bogus parentage on all the ellipsis
    # nodes. This has the side effect that, while the contents are
    # correct, the individual DAGs might be completely out of whack in
    # a case like 882681bc3166 and its ancestors (back about 10
    # revisions or so) in the main hg repo.
    #
    # The one invariant we *know* holds is that the new (potentially
    # bogus) DAG shape will be valid if we order the nodes in the
    # order that they're introduced in dramatis personae by the
    # changelog, so what we do is we sort the non-changelog histories
    # by the order in which they are used by the changelog.
    key = lambda n: cl.rev(lookup(n))
    return sorted(nodes, key=key)

def _resolvenarrowrevisioninfo(cl, store, ischangelog, rev, linkrev,
                               linknode, clrevtolocalrev, fullclnodes,
                               precomputedellipsis):
    linkparents = precomputedellipsis[linkrev]
    def local(clrev):
        """Turn a changelog revnum into a local revnum.

        The ellipsis dag is stored as revnums on the changelog,
        but when we're producing ellipsis entries for
        non-changelog revlogs, we need to turn those numbers into
        something local. This does that for us, and during the
        changelog sending phase will also expand the stored
        mappings as needed.
        """
        if clrev == nullrev:
            return nullrev

        if ischangelog:
            return clrev

        # Walk the ellipsis-ized changelog breadth-first looking for a
        # change that has been linked from the current revlog.
        #
        # For a flat manifest revlog only a single step should be necessary
        # as all relevant changelog entries are relevant to the flat
        # manifest.
        #
        # For a filelog or tree manifest dirlog however not every changelog
        # entry will have been relevant, so we need to skip some changelog
        # nodes even after ellipsis-izing.
        walk = [clrev]
        while walk:
            p = walk[0]
            walk = walk[1:]
            if p in clrevtolocalrev:
                return clrevtolocalrev[p]
            elif p in fullclnodes:
                walk.extend([pp for pp in cl.parentrevs(p)
                             if pp != nullrev])
            elif p in precomputedellipsis:
                walk.extend([pp for pp in precomputedellipsis[p]
                             if pp != nullrev])
            else:
                # In this case, we've got an ellipsis with parents
                # outside the current bundle (likely an
                # incremental pull). We "know" that we can use the
                # value of this same revlog at whatever revision
                # is pointed to by linknode. "Know" is in scare
                # quotes because I haven't done enough examination
                # of edge cases to convince myself this is really
                # a fact - it works for all the (admittedly
                # thorough) cases in our testsuite, but I would be
                # somewhat unsurprised to find a case in the wild
                # where this breaks down a bit. That said, I don't
                # know if it would hurt anything.
                for i in pycompat.xrange(rev, 0, -1):
                    if store.linkrev(i) == clrev:
                        return i
                # We failed to resolve a parent for this node, so
                # we crash the changegroup construction.
                raise error.Abort(
                    'unable to resolve parent while packing %r %r'
                    ' for changeset %r' % (store.indexfile, rev, clrev))

        return nullrev

    if not linkparents or (
        store.parentrevs(rev) == (nullrev, nullrev)):
        p1, p2 = nullrev, nullrev
    elif len(linkparents) == 1:
        p1, = sorted(local(p) for p in linkparents)
        p2 = nullrev
    else:
        p1, p2 = sorted(local(p) for p in linkparents)

    p1node, p2node = store.node(p1), store.node(p2)

    return p1node, p2node, linknode

def deltagroup(repo, store, nodes, ischangelog, lookup, forcedeltaparentprev,
               topic=None,
               ellipses=False, clrevtolocalrev=None, fullclnodes=None,
               precomputedellipsis=None):
    """Calculate deltas for a set of revisions.

    Is a generator of ``revisiondelta`` instances.

    If topic is not None, progress detail will be generated using this
    topic name (e.g. changesets, manifests, etc).
    """
    if not nodes:
        return

    cl = repo.changelog

    if ischangelog:
        # `hg log` shows changesets in storage order. To preserve order
        # across clones, send out changesets in storage order.
        nodesorder = 'storage'
    elif ellipses:
        nodes = _sortnodesellipsis(store, nodes, cl, lookup)
        nodesorder = 'nodes'
    else:
        nodesorder = None

    # Perform ellipses filtering and revision massaging. We do this before
    # emitrevisions() because a) filtering out revisions creates less work
    # for emitrevisions() b) dropping revisions would break emitrevisions()'s
    # assumptions about delta choices and we would possibly send a delta
    # referencing a missing base revision.
    #
    # Also, calling lookup() has side-effects with regards to populating
    # data structures. If we don't call lookup() for each node or if we call
    # lookup() after the first pass through each node, things can break -
    # possibly intermittently depending on the python hash seed! For that
    # reason, we store a mapping of all linknodes during the initial node
    # pass rather than use lookup() on the output side.
    if ellipses:
        filtered = []
        adjustedparents = {}
        linknodes = {}

        for node in nodes:
            rev = store.rev(node)
            linknode = lookup(node)
            linkrev = cl.rev(linknode)
            clrevtolocalrev[linkrev] = rev

            # If linknode is in fullclnodes, it means the corresponding
            # changeset was a full changeset and is being sent unaltered.
            if linknode in fullclnodes:
                linknodes[node] = linknode

            # If the corresponding changeset wasn't in the set computed
            # as relevant to us, it should be dropped outright.
            elif linkrev not in precomputedellipsis:
                continue

            else:
                # We could probably do this later and avoid the dict
                # holding state. But it likely doesn't matter.
                p1node, p2node, linknode = _resolvenarrowrevisioninfo(
                    cl, store, ischangelog, rev, linkrev, linknode,
                    clrevtolocalrev, fullclnodes, precomputedellipsis)

                adjustedparents[node] = (p1node, p2node)
                linknodes[node] = linknode

            filtered.append(node)

        nodes = filtered

    # We expect the first pass to be fast, so we only engage the progress
    # meter for constructing the revision deltas.
    progress = None
    if topic is not None:
        progress = repo.ui.makeprogress(topic, unit=_('chunks'),
                                        total=len(nodes))

    configtarget = repo.ui.config('devel', 'bundle.delta')
    if configtarget not in ('', 'p1', 'full'):
        msg = _("""config "devel.bundle.delta" has unknown value: %s""")
        repo.ui.warn(msg % configtarget)

    deltamode = repository.CG_DELTAMODE_STD
    if forcedeltaparentprev:
        deltamode = repository.CG_DELTAMODE_PREV
    elif configtarget == 'p1':
        deltamode = repository.CG_DELTAMODE_P1
    elif configtarget == 'full':
        deltamode = repository.CG_DELTAMODE_FULL

    revisions = store.emitrevisions(
        nodes,
        nodesorder=nodesorder,
        revisiondata=True,
        assumehaveparentrevisions=not ellipses,
        deltamode=deltamode)

    for i, revision in enumerate(revisions):
        if progress:
            progress.update(i + 1)

        if ellipses:
            linknode = linknodes[revision.node]

            if revision.node in adjustedparents:
                p1node, p2node = adjustedparents[revision.node]
                revision.p1node = p1node
                revision.p2node = p2node
                revision.flags |= repository.REVISION_FLAG_ELLIPSIS

        else:
            linknode = lookup(revision.node)

        revision.linknode = linknode
        yield revision

    if progress:
        progress.complete()
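
def _exampledeltagroup(repo, nodes):
    # Hypothetical usage sketch, for illustration only: produce
    # changelog deltas for a set of nodes. For the changelog, lookup()
    # is the identity function, since each node is its own linknode.
    cl = repo.changelog
    return deltagroup(repo, cl, nodes, True, lambda x: x,
                      forcedeltaparentprev=False,
                      topic=_('changesets'))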

class cgpacker(object):
    def __init__(self, repo, oldmatcher, matcher, version,
                 builddeltaheader, manifestsend,
                 forcedeltaparentprev=False,
                 bundlecaps=None, ellipses=False,
                 shallow=False, ellipsisroots=None, fullnodes=None):
        """Given a source repo, construct a bundler.

        oldmatcher is a matcher that matches on files the client already has.
        These will not be included in the changegroup.

        matcher is a matcher that matches on files to include in the
        changegroup. Used to facilitate sparse changegroups.

        forcedeltaparentprev indicates whether delta parents must be against
        the previous revision in a delta group. This should only be used for
        compatibility with changegroup version 1.

        builddeltaheader is a callable that constructs the header for a group
        delta.

        manifestsend is a chunk to send after manifests have been fully emitted.

        ellipses indicates whether ellipsis serving mode is enabled.

        bundlecaps is optional and can be used to specify the set of
        capabilities which can be used to build the bundle. While bundlecaps is
        unused in core Mercurial, extensions rely on this feature to communicate
        capabilities to customize the changegroup packer.

        shallow indicates whether shallow data might be sent. The packer may
        need to pack file contents not introduced by the changes being packed.

        fullnodes is the set of changelog nodes which should not be ellipsis
        nodes. We store this rather than the set of nodes that should be
        ellipsis because for very large histories we expect this to be
        significantly smaller.
        """
        assert oldmatcher
        assert matcher
        self._oldmatcher = oldmatcher
        self._matcher = matcher

        self.version = version
        self._forcedeltaparentprev = forcedeltaparentprev
        self._builddeltaheader = builddeltaheader
        self._manifestsend = manifestsend
        self._ellipses = ellipses

        # Set of capabilities we can use to build the bundle.
        if bundlecaps is None:
            bundlecaps = set()
        self._bundlecaps = bundlecaps
        self._isshallow = shallow
        self._fullclnodes = fullnodes

        # Maps ellipsis revs to their roots at the changelog level.
        self._precomputedellipsis = ellipsisroots

        self._repo = repo

        if self._repo.ui.verbose and not self._repo.ui.debugflag:
            self._verbosenote = self._repo.ui.note
        else:
            self._verbosenote = lambda s: None

    def generate(self, commonrevs, clnodes, fastpathlinkrev, source,
                 changelog=True):
810 """Yield a sequence of changegroup byte chunks.
810 """Yield a sequence of changegroup byte chunks.
811 If changelog is False, changelog data won't be added to changegroup
811 If changelog is False, changelog data won't be added to changegroup
812 """
812 """
813
814         repo = self._repo
815         cl = repo.changelog
816
817         self._verbosenote(_('uncompressed size of bundle content:\n'))
818         size = 0
819
820         clstate, deltas = self._generatechangelog(cl, clnodes,
821                                                   generate=changelog)
822         for delta in deltas:
823             for chunk in _revisiondeltatochunks(delta,
824                                                 self._builddeltaheader):
825                 size += len(chunk)
826                 yield chunk
827
828         close = closechunk()
829         size += len(close)
830         yield closechunk()
831
832         self._verbosenote(_('%8.i (changelog)\n') % size)
833
834         clrevorder = clstate['clrevorder']
835         manifests = clstate['manifests']
836         changedfiles = clstate['changedfiles']
837
838         # We need to make sure that the linkrev in the changegroup refers to
839         # the first changeset that introduced the manifest or file revision.
840         # The fastpath is usually safer than the slowpath, because the filelogs
841         # are walked in revlog order.
842         #
843         # When taking the slowpath when the manifest revlog uses generaldelta,
844         # the manifest may be walked in the "wrong" order. Without 'clrevorder',
845         # we would get an incorrect linkrev (see fix in cc0ff93d0c0c).
846         #
847         # When taking the fastpath, we are only vulnerable to reordering
848         # of the changelog itself. The changelog never uses generaldelta and is
849         # never reordered. To handle this case, we simply take the slowpath,
850         # which already has the 'clrevorder' logic. This was also fixed in
851         # cc0ff93d0c0c.
852
853         # Treemanifests don't work correctly with fastpathlinkrev
854         # either, because we don't discover which directory nodes to
855         # send along with files. This could probably be fixed.
856         fastpathlinkrev = fastpathlinkrev and (
857             'treemanifest' not in repo.requirements)
858
859         fnodes = {} # needed file nodes
860
861         size = 0
862         it = self.generatemanifests(
863             commonrevs, clrevorder, fastpathlinkrev, manifests, fnodes, source,
864             clstate['clrevtomanifestrev'])
865
866         for tree, deltas in it:
867             if tree:
868                 assert self.version == b'03'
869                 chunk = _fileheader(tree)
870                 size += len(chunk)
871                 yield chunk
872
873             for delta in deltas:
874                 chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
875                 for chunk in chunks:
876                     size += len(chunk)
877                     yield chunk
878
879             close = closechunk()
880             size += len(close)
881             yield close
882
883         self._verbosenote(_('%8.i (manifests)\n') % size)
884         yield self._manifestsend
885
886         mfdicts = None
887         if self._ellipses and self._isshallow:
888             mfdicts = [(self._repo.manifestlog[n].read(), lr)
889                        for (n, lr) in manifests.iteritems()]
890
891         manifests.clear()
892         clrevs = set(cl.rev(x) for x in clnodes)
893
894         it = self.generatefiles(changedfiles, commonrevs,
895                                 source, mfdicts, fastpathlinkrev,
896                                 fnodes, clrevs)
897
898         for path, deltas in it:
899             h = _fileheader(path)
900             size = len(h)
901             yield h
902
903             for delta in deltas:
904                 chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
905                 for chunk in chunks:
906                     size += len(chunk)
907                     yield chunk
908
909             close = closechunk()
910             size += len(close)
911             yield close
912
913             self._verbosenote(_('%8.i %s\n') % (size, path))
914
915         yield closechunk()
916
917         if clnodes:
918             repo.hook('outgoing', node=hex(clnodes[0]), source=source)
919
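# Illustrative note (not part of the original module): for a cg3 changegroup,
# the byte stream that generate() yields is laid out roughly as below. The
# bracketed names refer to the helpers used above; the framing itself is the
# length-prefixed chunk format built by chunkheader()/closechunk().
#
#     <changelog deltas> <close>
#     <root-manifest deltas> <close>
#     [_fileheader(tree) <subtree manifest deltas> <close>]...   (cg3 only)
#     <manifestsend>
#     [_fileheader(path) <file deltas> <close>]...               (one per file)
#     <close>                                                    (end of stream)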
920     def _generatechangelog(self, cl, nodes, generate=True):
921         """Generate data for changelog chunks.
922
923         Returns a 2-tuple of a dict containing state and an iterable of
924         byte chunks. The state will not be fully populated until the
925         chunk stream has been fully consumed.
926
927         If generate is False, the state will be fully populated and no chunk
928         stream will be yielded.
929 """
929 """
930 clrevorder = {}
930 clrevorder = {}
931 manifests = {}
931 manifests = {}
932 mfl = self._repo.manifestlog
932 mfl = self._repo.manifestlog
933 changedfiles = set()
933 changedfiles = set()
934 clrevtomanifestrev = {}
934 clrevtomanifestrev = {}
935
935
936 state = {
936 state = {
937 'clrevorder': clrevorder,
937 'clrevorder': clrevorder,
938 'manifests': manifests,
938 'manifests': manifests,
939 'changedfiles': changedfiles,
939 'changedfiles': changedfiles,
940 'clrevtomanifestrev': clrevtomanifestrev,
940 'clrevtomanifestrev': clrevtomanifestrev,
941 }
941 }
942
942
943 if not (generate or self._ellipses):
943 if not (generate or self._ellipses):
944 # sort the nodes in storage order
944 # sort the nodes in storage order
945 nodes = sorted(nodes, key=cl.rev)
945 nodes = sorted(nodes, key=cl.rev)
946 for node in nodes:
946 for node in nodes:
947 c = cl.changelogrevision(node)
947 c = cl.changelogrevision(node)
948 clrevorder[node] = len(clrevorder)
948 clrevorder[node] = len(clrevorder)
949 # record the first changeset introducing this manifest version
949 # record the first changeset introducing this manifest version
950 manifests.setdefault(c.manifest, node)
950 manifests.setdefault(c.manifest, node)
951 # Record a complete list of potentially-changed files in
951 # Record a complete list of potentially-changed files in
952 # this manifest.
952 # this manifest.
953 changedfiles.update(c.files)
953 changedfiles.update(c.files)
954
954
955 return state, ()
955 return state, ()
956
956
957 # Callback for the changelog, used to collect changed files and
957 # Callback for the changelog, used to collect changed files and
958 # manifest nodes.
958 # manifest nodes.
959 # Returns the linkrev node (identity in the changelog case).
959 # Returns the linkrev node (identity in the changelog case).
960 def lookupcl(x):
960 def lookupcl(x):
961 c = cl.changelogrevision(x)
961 c = cl.changelogrevision(x)
962 clrevorder[x] = len(clrevorder)
962 clrevorder[x] = len(clrevorder)
963
963
964 if self._ellipses:
964 if self._ellipses:
965 # Only update manifests if x is going to be sent. Otherwise we
965 # Only update manifests if x is going to be sent. Otherwise we
966 # end up with bogus linkrevs specified for manifests and
966 # end up with bogus linkrevs specified for manifests and
967 # we skip some manifest nodes that we should otherwise
967 # we skip some manifest nodes that we should otherwise
968 # have sent.
968 # have sent.
969 if (x in self._fullclnodes
969 if (x in self._fullclnodes
970 or cl.rev(x) in self._precomputedellipsis):
970 or cl.rev(x) in self._precomputedellipsis):
971
971
972 manifestnode = c.manifest
972 manifestnode = c.manifest
973 # Record the first changeset introducing this manifest
973 # Record the first changeset introducing this manifest
974 # version.
974 # version.
975 manifests.setdefault(manifestnode, x)
975 manifests.setdefault(manifestnode, x)
976 # Set this narrow-specific dict so we have the lowest
976 # Set this narrow-specific dict so we have the lowest
977 # manifest revnum to look up for this cl revnum. (Part of
977 # manifest revnum to look up for this cl revnum. (Part of
978 # mapping changelog ellipsis parents to manifest ellipsis
978 # mapping changelog ellipsis parents to manifest ellipsis
979 # parents)
979 # parents)
980 clrevtomanifestrev.setdefault(
980 clrevtomanifestrev.setdefault(
981 cl.rev(x), mfl.rev(manifestnode))
981 cl.rev(x), mfl.rev(manifestnode))
982 # We can't trust the changed files list in the changeset if the
982 # We can't trust the changed files list in the changeset if the
983 # client requested a shallow clone.
983 # client requested a shallow clone.
984 if self._isshallow:
984 if self._isshallow:
985 changedfiles.update(mfl[c.manifest].read().keys())
985 changedfiles.update(mfl[c.manifest].read().keys())
986 else:
986 else:
987 changedfiles.update(c.files)
987 changedfiles.update(c.files)
988 else:
988 else:
989 # record the first changeset introducing this manifest version
989 # record the first changeset introducing this manifest version
990 manifests.setdefault(c.manifest, x)
990 manifests.setdefault(c.manifest, x)
991 # Record a complete list of potentially-changed files in
991 # Record a complete list of potentially-changed files in
992 # this manifest.
992 # this manifest.
993 changedfiles.update(c.files)
993 changedfiles.update(c.files)
994
994
995 return x
995 return x
996
996
997 gen = deltagroup(
997 gen = deltagroup(
998 self._repo, cl, nodes, True, lookupcl,
998 self._repo, cl, nodes, True, lookupcl,
999 self._forcedeltaparentprev,
999 self._forcedeltaparentprev,
1000 ellipses=self._ellipses,
1000 ellipses=self._ellipses,
1001 topic=_('changesets'),
1001 topic=_('changesets'),
1002 clrevtolocalrev={},
1002 clrevtolocalrev={},
1003 fullclnodes=self._fullclnodes,
1003 fullclnodes=self._fullclnodes,
1004 precomputedellipsis=self._precomputedellipsis)
1004 precomputedellipsis=self._precomputedellipsis)
1005
1005
1006 return state, gen
1006 return state, gen
1007
1007
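# Hypothetical usage sketch (names assumed): the state dict returned by
# _generatechangelog() is only safe to read after the delta generator has
# been drained, per the docstring above.
#
#     state, deltas = packer._generatechangelog(cl, nodes)
#     for delta in deltas:
#         ...  # emit chunks
#     manifests = state['manifests']  # fully populated only after the loop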
1008     def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev,
1009                           manifests, fnodes, source, clrevtolocalrev):
1010         """Returns an iterator of changegroup chunks containing manifests.
1011
1012         `source` is unused here, but is used by extensions like remotefilelog
1013         to change what is sent based on pulls vs. pushes, etc.
1014         """
1015         repo = self._repo
1016         mfl = repo.manifestlog
1017         tmfnodes = {'': manifests}
1018
1019         # Callback for the manifest, used to collect linkrevs for filelog
1020         # revisions.
1021         # Returns the linkrev node (collected in lookupcl).
1022         def makelookupmflinknode(tree, nodes):
1023             if fastpathlinkrev:
1024                 assert not tree
1025                 return manifests.__getitem__
1026
1027             def lookupmflinknode(x):
1028                 """Callback for looking up the linknode for manifests.
1029
1030                 Returns the linkrev node for the specified manifest.
1031
1032                 SIDE EFFECT:
1033
1034                 1) fclnodes gets populated with the list of relevant
1035                    file nodes if we're not using fastpathlinkrev
1036                 2) When treemanifests are in use, collects treemanifest nodes
1037                    to send
1038
1039                 Note that this means manifests must be completely sent to
1040                 the client before you can trust the list of files and
1041                 treemanifests to send.
1042                 """
1043                 clnode = nodes[x]
1044                 mdata = mfl.get(tree, x).readfast(shallow=True)
1045                 for p, n, fl in mdata.iterentries():
1046                     if fl == 't': # subdirectory manifest
1047                         subtree = tree + p + '/'
1048                         tmfclnodes = tmfnodes.setdefault(subtree, {})
1049                         tmfclnode = tmfclnodes.setdefault(n, clnode)
1050                         if clrevorder[clnode] < clrevorder[tmfclnode]:
1051                             tmfclnodes[n] = clnode
1052                     else:
1053                         f = tree + p
1054                         fclnodes = fnodes.setdefault(f, {})
1055                         fclnode = fclnodes.setdefault(n, clnode)
1056                         if clrevorder[clnode] < clrevorder[fclnode]:
1057                             fclnodes[n] = clnode
1058                 return clnode
1059             return lookupmflinknode
1060
1061         while tmfnodes:
1062             tree, nodes = tmfnodes.popitem()
1063
1064             should_visit = self._matcher.visitdir(tree[:-1] or '.')
1065             if tree and not should_visit:
1066                 continue
1067
1068             store = mfl.getstorage(tree)
1069
1070             if not should_visit:
1071                 # No nodes to send because this directory is out of
1072                 # the client's view of the repository (probably
1073                 # because of narrow clones). Do this even for the root
1074                 # directory (tree=='')
1075                 prunednodes = []
1076             else:
1077                 # Avoid sending any manifest nodes we can prove the
1078                 # client already has by checking linkrevs. See the
1079                 # related comment in generatefiles().
1080                 prunednodes = self._prunemanifests(store, nodes, commonrevs)
1081
1082             if tree and not prunednodes:
1083                 continue
1084
1085             lookupfn = makelookupmflinknode(tree, nodes)
1086
1087             deltas = deltagroup(
1088                 self._repo, store, prunednodes, False, lookupfn,
1089                 self._forcedeltaparentprev,
1090                 ellipses=self._ellipses,
1091                 topic=_('manifests'),
1092                 clrevtolocalrev=clrevtolocalrev,
1093                 fullclnodes=self._fullclnodes,
1094                 precomputedellipsis=self._precomputedellipsis)
1095
1096             if not self._oldmatcher.visitdir(store.tree[:-1] or '.'):
1097                 yield tree, deltas
1098             else:
1099                 # 'deltas' is a generator and we need to consume it even if
1100                 # we are not going to send it, because a side effect is that
1101                 # it updates tmfnodes (via lookupfn)
1102                 for d in deltas:
1103                     pass
1104                 if not tree:
1105                     yield tree, []
1106
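# Illustrative walk (hypothetical paths): starting from tmfnodes = {'':
# manifests}, a root-manifest entry ('a', node, 't') queues tree 'a/' for a
# later iteration of the while loop above, entries of 'a/' with flag 't'
# queue 'a/b/', and so on; plain file entries instead land in
# fnodes['a/b/f'], which generatefiles() consumes below.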
1107     def _prunemanifests(self, store, nodes, commonrevs):
1108         # This is split out as a separate method to allow filtering
1109         # commonrevs in extension code.
1110         #
1111         # TODO(augie): this shouldn't be required, instead we should
1112         # make filtering of revisions to send delegated to the store
1113         # layer.
1114         frev, flr = store.rev, store.linkrev
1115         return [n for n in nodes if flr(frev(n)) not in commonrevs]
1116
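# Worked example (hypothetical revisions): with commonrevs = {0, 1}, a
# manifest node whose linkrev is 1 was introduced by a changeset the client
# already has and is pruned; one whose linkrev is 2 survives and is sent.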
1117     # The 'source' parameter is useful for extensions
1118     def generatefiles(self, changedfiles, commonrevs, source,
1119                       mfdicts, fastpathlinkrev, fnodes, clrevs):
1120         changedfiles = [f for f in changedfiles
1121                         if self._matcher(f) and not self._oldmatcher(f)]
1122
1123         if not fastpathlinkrev:
1124             def normallinknodes(unused, fname):
1125                 return fnodes.get(fname, {})
1126         else:
1127             cln = self._repo.changelog.node
1128
1129             def normallinknodes(store, fname):
1130                 flinkrev = store.linkrev
1131                 fnode = store.node
1132                 revs = ((r, flinkrev(r)) for r in store)
1133                 return dict((fnode(r), cln(lr))
1134                             for r, lr in revs if lr in clrevs)
1135
1136         clrevtolocalrev = {}
1137
1138         if self._isshallow:
1139             # In a shallow clone, the linknodes callback needs to also include
1140             # those file nodes that are in the manifests we sent but weren't
1141             # introduced by those manifests.
1142             commonctxs = [self._repo[c] for c in commonrevs]
1143             clrev = self._repo.changelog.rev
1144
1145             def linknodes(flog, fname):
1146                 for c in commonctxs:
1147                     try:
1148                         fnode = c.filenode(fname)
1149                         clrevtolocalrev[c.rev()] = flog.rev(fnode)
1150                     except error.ManifestLookupError:
1151                         pass
1152                 links = normallinknodes(flog, fname)
1153                 if len(links) != len(mfdicts):
1154                     for mf, lr in mfdicts:
1155                         fnode = mf.get(fname, None)
1156                         if fnode in links:
1157                             links[fnode] = min(links[fnode], lr, key=clrev)
1158                         elif fnode:
1159                             links[fnode] = lr
1160                 return links
1161         else:
1162             linknodes = normallinknodes
1163
1164         repo = self._repo
1165         progress = repo.ui.makeprogress(_('files'), unit=_('files'),
1166                                         total=len(changedfiles))
1167         for i, fname in enumerate(sorted(changedfiles)):
1168             filerevlog = repo.file(fname)
1169             if not filerevlog:
1170                 raise error.Abort(_("empty or missing file data for %s") %
1171                                   fname)
1172
1173             clrevtolocalrev.clear()
1174
1175             linkrevnodes = linknodes(filerevlog, fname)
1176             # Lookup table for filenodes; we collected the linkrev nodes above
1177             # in the fastpath case and with lookupmflinknode in the slowpath case.
1178             def lookupfilelog(x):
1179                 return linkrevnodes[x]
1180
1181             frev, flr = filerevlog.rev, filerevlog.linkrev
1182             # Skip sending any filenode we know the client already
1183             # has. This avoids over-sending files relatively
1184             # inexpensively, so it's not a problem if we under-filter
1185             # here.
1186             filenodes = [n for n in linkrevnodes
1187                          if flr(frev(n)) not in commonrevs]
1188
1189             if not filenodes:
1190                 continue
1191
1192             progress.update(i + 1, item=fname)
1193
1194             deltas = deltagroup(
1195                 self._repo, filerevlog, filenodes, False, lookupfilelog,
1196                 self._forcedeltaparentprev,
1197                 ellipses=self._ellipses,
1198                 clrevtolocalrev=clrevtolocalrev,
1199                 fullclnodes=self._fullclnodes,
1200                 precomputedellipsis=self._precomputedellipsis)
1201
1202             yield fname, deltas
1203
1204         progress.complete()
1205
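# Shape of the linknodes callbacks used above (illustrative):
#
#     linknodes(filerevlog, fname) -> {filenode: changelog node}
#
# i.e. for each file revision that may be sent, the changeset that should
# serve as its linkrev on the receiving side.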
1206 def _makecg1packer(repo, oldmatcher, matcher, bundlecaps,
1207                    ellipses=False, shallow=False, ellipsisroots=None,
1208                    fullnodes=None):
1209     builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack(
1210         d.node, d.p1node, d.p2node, d.linknode)
1211
1212     return cgpacker(repo, oldmatcher, matcher, b'01',
1213                     builddeltaheader=builddeltaheader,
1214                     manifestsend=b'',
1215                     forcedeltaparentprev=True,
1216                     bundlecaps=bundlecaps,
1217                     ellipses=ellipses,
1218                     shallow=shallow,
1219                     ellipsisroots=ellipsisroots,
1220                     fullnodes=fullnodes)
1221
1222 def _makecg2packer(repo, oldmatcher, matcher, bundlecaps,
1223                    ellipses=False, shallow=False, ellipsisroots=None,
1224                    fullnodes=None):
1225     builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack(
1226         d.node, d.p1node, d.p2node, d.basenode, d.linknode)
1227
1228     return cgpacker(repo, oldmatcher, matcher, b'02',
1229                     builddeltaheader=builddeltaheader,
1230                     manifestsend=b'',
1231                     bundlecaps=bundlecaps,
1232                     ellipses=ellipses,
1233                     shallow=shallow,
1234                     ellipsisroots=ellipsisroots,
1235                     fullnodes=fullnodes)
1236
1237 def _makecg3packer(repo, oldmatcher, matcher, bundlecaps,
1238                    ellipses=False, shallow=False, ellipsisroots=None,
1239                    fullnodes=None):
1240     builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
1241         d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags)
1242
1243     return cgpacker(repo, oldmatcher, matcher, b'03',
1244                     builddeltaheader=builddeltaheader,
1245                     manifestsend=closechunk(),
1246                     bundlecaps=bundlecaps,
1247                     ellipses=ellipses,
1248                     shallow=shallow,
1249                     ellipsisroots=ellipsisroots,
1250                     fullnodes=fullnodes)
1251
1252 _packermap = {'01': (_makecg1packer, cg1unpacker),
1253               # cg2 adds support for exchanging generaldelta
1254               '02': (_makecg2packer, cg2unpacker),
1255               # cg3 adds support for exchanging revlog flags and treemanifests
1256               '03': (_makecg3packer, cg3unpacker),
1257 }
1258
1259 def allsupportedversions(repo):
1260     versions = set(_packermap.keys())
1261     if not (repo.ui.configbool('experimental', 'changegroup3') or
1262             repo.ui.configbool('experimental', 'treemanifest') or
1263             'treemanifest' in repo.requirements):
1264         versions.discard('03')
1265     return versions
1266
1267 # Changegroup versions that can be applied to the repo
1268 def supportedincomingversions(repo):
1269     return allsupportedversions(repo)
1270
1271 # Changegroup versions that can be created from the repo
1272 def supportedoutgoingversions(repo):
1273     versions = allsupportedversions(repo)
1274     if 'treemanifest' in repo.requirements:
1275         # Versions 01 and 02 support only flat manifests and it's just too
1276         # expensive to convert between the flat manifest and tree manifest on
1277         # the fly. Since tree manifests are hashed differently, all of history
1278         # would have to be converted. Instead, we simply don't even pretend to
1279         # support versions 01 and 02.
1280         versions.discard('01')
1281         versions.discard('02')
1282     if repository.NARROW_REQUIREMENT in repo.requirements:
1283         # Versions 01 and 02 don't support revlog flags, and we need to
1284         # support that for stripping and unbundling to work.
1285         versions.discard('01')
1286         versions.discard('02')
1287     if LFS_REQUIREMENT in repo.requirements:
1288         # Versions 01 and 02 don't support revlog flags, and we need to
1289         # mark LFS entries with REVIDX_EXTSTORED.
1290         versions.discard('01')
1291         versions.discard('02')
1292
1293     return versions
1294
1295 def localversion(repo):
1296     # Finds the best version to use for bundles that are meant to be used
1297     # locally, such as those from strip and shelve, and temporary bundles.
1298     return max(supportedoutgoingversions(repo))
1299
1300 def safeversion(repo):
1301     # Finds the smallest version that it's safe to assume clients of the repo
1302     # will support. For example, all hg versions that support generaldelta also
1303     # support changegroup 02.
1304     versions = supportedoutgoingversions(repo)
1305     if 'generaldelta' in repo.requirements:
1306         versions.discard('01')
1307     assert versions
1308     return min(versions)
1309
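# Example under assumed requirements: with {'generaldelta'} present and the
# experimental changegroup3/treemanifest options off,
# supportedoutgoingversions() yields {'01', '02'}; safeversion() then drops
# '01' and returns '02', while localversion() returns max(...), also '02'.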
1310 def getbundler(version, repo, bundlecaps=None, oldmatcher=None,
1311                matcher=None, ellipses=False, shallow=False,
1312                ellipsisroots=None, fullnodes=None):
1313     assert version in supportedoutgoingversions(repo)
1314
1315     if matcher is None:
1316 -       matcher = matchmod.alwaysmatcher(repo.root, '')
1316 +       matcher = matchmod.always(repo.root, '')
1317     if oldmatcher is None:
1318 -       oldmatcher = matchmod.nevermatcher(repo.root, '')
1318 +       oldmatcher = matchmod.never(repo.root, '')
1319
1320     if version == '01' and not matcher.always():
1321         raise error.ProgrammingError('version 01 changegroups do not support '
1322                                      'sparse file matchers')
1323
1324     if ellipses and version in (b'01', b'02'):
1325         raise error.Abort(
1326             _('ellipsis nodes require at least cg3 on client and server, '
1327               'but negotiated version %s') % version)
1328
1329     # Requested files could include files not in the local store. So
1330     # filter those out.
1331     matcher = repo.narrowmatch(matcher)
1332
1333     fn = _packermap[version][0]
1334     return fn(repo, oldmatcher, matcher, bundlecaps, ellipses=ellipses,
1335               shallow=shallow, ellipsisroots=ellipsisroots,
1336               fullnodes=fullnodes)
1337
1338 def getunbundler(version, fh, alg, extras=None):
1339     return _packermap[version][1](fh, alg, extras=extras)
1340
1341 def _changegroupinfo(repo, nodes, source):
1342     if repo.ui.verbose or source == 'bundle':
1343         repo.ui.status(_("%d changesets found\n") % len(nodes))
1344     if repo.ui.debugflag:
1345         repo.ui.debug("list of changesets:\n")
1346         for node in nodes:
1347             repo.ui.debug("%s\n" % hex(node))
1348
1349 def makechangegroup(repo, outgoing, version, source, fastpath=False,
1350                     bundlecaps=None):
1351     cgstream = makestream(repo, outgoing, version, source,
1352                           fastpath=fastpath, bundlecaps=bundlecaps)
1353     return getunbundler(version, util.chunkbuffer(cgstream), None,
1354                         {'clcount': len(outgoing.missing) })
1355
1356 def makestream(repo, outgoing, version, source, fastpath=False,
1357                bundlecaps=None, matcher=None):
1358     bundler = getbundler(version, repo, bundlecaps=bundlecaps,
1359                          matcher=matcher)
1360
1361     repo = repo.unfiltered()
1362     commonrevs = outgoing.common
1363     csets = outgoing.missing
1364     heads = outgoing.missingheads
1365     # We go through the fast path if we get told to, or if all (unfiltered)
1366     # heads have been requested (since we then know that all linkrevs will
1367     # be pulled by the client).
1368     heads.sort()
1369     fastpathlinkrev = fastpath or (
1370         repo.filtername is None and heads == sorted(repo.heads()))
1371
1372     repo.hook('preoutgoing', throw=True, source=source)
1373     _changegroupinfo(repo, csets, source)
1374     return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
1375
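# Hypothetical end-to-end usage: produce a version '02' changegroup for a
# set of outgoing revisions and write it to a file. 'outgoing' is assumed
# to be a discovery.outgoing instance computed elsewhere; the destination
# path is purely illustrative.
#
#     cgstream = makestream(repo, outgoing, '02', 'push')
#     with open('example.cg', 'wb') as fh:
#         for chunk in cgstream:
#             fh.write(chunk)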
1376 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
1377     revisions = 0
1378     files = 0
1379     progress = repo.ui.makeprogress(_('files'), unit=_('files'),
1380                                     total=expectedfiles)
1381     for chunkdata in iter(source.filelogheader, {}):
1382         files += 1
1383         f = chunkdata["filename"]
1384         repo.ui.debug("adding %s revisions\n" % f)
1385         progress.increment()
1386         fl = repo.file(f)
1387         o = len(fl)
1388         try:
1389             deltas = source.deltaiter()
1390             if not fl.addgroup(deltas, revmap, trp):
1391                 raise error.Abort(_("received file revlog group is empty"))
1392         except error.CensoredBaseError as e:
1393             raise error.Abort(_("received delta base is censored: %s") % e)
1394         revisions += len(fl) - o
1395         if f in needfiles:
1396             needs = needfiles[f]
1397             for new in pycompat.xrange(o, len(fl)):
1398                 n = fl.node(new)
1399                 if n in needs:
1400                     needs.remove(n)
1401                 else:
1402                     raise error.Abort(
1403                         _("received spurious file revlog entry"))
1404             if not needs:
1405                 del needfiles[f]
1406     progress.complete()
1407
1408     for f, needs in needfiles.iteritems():
1409         fl = repo.file(f)
1410         for n in needs:
1411             try:
1412                 fl.rev(n)
1413             except error.LookupError:
1414                 raise error.Abort(
1415                     _('missing file data for %s:%s - run hg verify') %
1416                     (f, hex(n)))
1417
1418     return revisions, files
@@ -1,562 +1,561 @@
1 # fileset.py - file set queries for mercurial
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 from __future__ import absolute_import
9
10 import errno
11 import re
12
13 from .i18n import _
14 from . import (
15     error,
16     filesetlang,
17     match as matchmod,
18     merge,
19     pycompat,
20     registrar,
21     scmutil,
22     util,
23 )
24 from .utils import (
25     stringutil,
26 )
27
28 # common weight constants
29 _WEIGHT_CHECK_FILENAME = filesetlang.WEIGHT_CHECK_FILENAME
30 _WEIGHT_READ_CONTENTS = filesetlang.WEIGHT_READ_CONTENTS
31 _WEIGHT_STATUS = filesetlang.WEIGHT_STATUS
32 _WEIGHT_STATUS_THOROUGH = filesetlang.WEIGHT_STATUS_THOROUGH
33
34 # helpers for processing parsed tree
35 getsymbol = filesetlang.getsymbol
36 getstring = filesetlang.getstring
37 _getkindpat = filesetlang.getkindpat
38 getpattern = filesetlang.getpattern
39 getargs = filesetlang.getargs
40
41 def getmatch(mctx, x):
42     if not x:
43         raise error.ParseError(_("missing argument"))
44     return methods[x[0]](mctx, *x[1:])
45
46 def getmatchwithstatus(mctx, x, hint):
47     keys = set(getstring(hint, 'status hint must be a string').split())
48     return getmatch(mctx.withstatus(keys), x)
49
50 def stringmatch(mctx, x):
51     return mctx.matcher([x])
52
53 def kindpatmatch(mctx, x, y):
54     return stringmatch(mctx, _getkindpat(x, y, matchmod.allpatternkinds,
55                                          _("pattern must be a string")))
56
57 def patternsmatch(mctx, *xs):
58     allkinds = matchmod.allpatternkinds
59     patterns = [getpattern(x, allkinds, _("pattern must be a string"))
60                 for x in xs]
61     return mctx.matcher(patterns)
62
63 def andmatch(mctx, x, y):
64     xm = getmatch(mctx, x)
65     ym = getmatch(mctx.narrowed(xm), y)
66     return matchmod.intersectmatchers(xm, ym)
67
68 def ormatch(mctx, *xs):
69     ms = [getmatch(mctx, x) for x in xs]
70     return matchmod.unionmatcher(ms)
71
72 def notmatch(mctx, x):
73     m = getmatch(mctx, x)
74     return mctx.predicate(lambda f: not m(f), predrepr=('<not %r>', m))
75
76 def minusmatch(mctx, x, y):
77     xm = getmatch(mctx, x)
78     ym = getmatch(mctx.narrowed(xm), y)
79     return matchmod.differencematcher(xm, ym)
80
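# Illustrative mapping (assumed expression): the fileset
#
#     modified() - binary()
#
# parses to a tree that minusmatch() evaluates roughly as
# differencematcher(<modified matcher>, <binary matcher>), with the right
# operand narrowed by the left one first.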
81 def listmatch(mctx, *xs):
81 def listmatch(mctx, *xs):
82 raise error.ParseError(_("can't use a list in this context"),
82 raise error.ParseError(_("can't use a list in this context"),
83 hint=_('see \'hg help "filesets.x or y"\''))
83 hint=_('see \'hg help "filesets.x or y"\''))
84
84
85 def func(mctx, a, b):
85 def func(mctx, a, b):
86 funcname = getsymbol(a)
86 funcname = getsymbol(a)
87 if funcname in symbols:
87 if funcname in symbols:
88 return symbols[funcname](mctx, b)
88 return symbols[funcname](mctx, b)
89
89
90 keep = lambda fn: getattr(fn, '__doc__', None) is not None
90 keep = lambda fn: getattr(fn, '__doc__', None) is not None
91
91
92 syms = [s for (s, fn) in symbols.items() if keep(fn)]
92 syms = [s for (s, fn) in symbols.items() if keep(fn)]
93 raise error.UnknownIdentifier(funcname, syms)
93 raise error.UnknownIdentifier(funcname, syms)
94
94
95 # symbols are callable like:
95 # symbols are callable like:
96 # fun(mctx, x)
96 # fun(mctx, x)
97 # with:
97 # with:
98 # mctx - current matchctx instance
98 # mctx - current matchctx instance
99 # x - argument in tree form
99 # x - argument in tree form
100 symbols = filesetlang.symbols
100 symbols = filesetlang.symbols
101
101
102 predicate = registrar.filesetpredicate(symbols)
102 predicate = registrar.filesetpredicate(symbols)
103
103
104 @predicate('modified()', callstatus=True, weight=_WEIGHT_STATUS)
104 @predicate('modified()', callstatus=True, weight=_WEIGHT_STATUS)
105 def modified(mctx, x):
105 def modified(mctx, x):
106 """File that is modified according to :hg:`status`.
106 """File that is modified according to :hg:`status`.
107 """
107 """
108 # i18n: "modified" is a keyword
108 # i18n: "modified" is a keyword
109 getargs(x, 0, 0, _("modified takes no arguments"))
109 getargs(x, 0, 0, _("modified takes no arguments"))
110 s = set(mctx.status().modified)
110 s = set(mctx.status().modified)
111 return mctx.predicate(s.__contains__, predrepr='modified')
111 return mctx.predicate(s.__contains__, predrepr='modified')
112
112
113 @predicate('added()', callstatus=True, weight=_WEIGHT_STATUS)
113 @predicate('added()', callstatus=True, weight=_WEIGHT_STATUS)
114 def added(mctx, x):
114 def added(mctx, x):
115 """File that is added according to :hg:`status`.
115 """File that is added according to :hg:`status`.
116 """
116 """
117 # i18n: "added" is a keyword
117 # i18n: "added" is a keyword
118 getargs(x, 0, 0, _("added takes no arguments"))
118 getargs(x, 0, 0, _("added takes no arguments"))
119 s = set(mctx.status().added)
119 s = set(mctx.status().added)
120 return mctx.predicate(s.__contains__, predrepr='added')
120 return mctx.predicate(s.__contains__, predrepr='added')
121
121
122 @predicate('removed()', callstatus=True, weight=_WEIGHT_STATUS)
122 @predicate('removed()', callstatus=True, weight=_WEIGHT_STATUS)
123 def removed(mctx, x):
123 def removed(mctx, x):
124 """File that is removed according to :hg:`status`.
124 """File that is removed according to :hg:`status`.
125 """
125 """
126 # i18n: "removed" is a keyword
126 # i18n: "removed" is a keyword
127 getargs(x, 0, 0, _("removed takes no arguments"))
127 getargs(x, 0, 0, _("removed takes no arguments"))
128 s = set(mctx.status().removed)
128 s = set(mctx.status().removed)
129 return mctx.predicate(s.__contains__, predrepr='removed')
129 return mctx.predicate(s.__contains__, predrepr='removed')
130
130
131 @predicate('deleted()', callstatus=True, weight=_WEIGHT_STATUS)
131 @predicate('deleted()', callstatus=True, weight=_WEIGHT_STATUS)
132 def deleted(mctx, x):
132 def deleted(mctx, x):
133 """Alias for ``missing()``.
133 """Alias for ``missing()``.
134 """
134 """
135 # i18n: "deleted" is a keyword
135 # i18n: "deleted" is a keyword
136 getargs(x, 0, 0, _("deleted takes no arguments"))
136 getargs(x, 0, 0, _("deleted takes no arguments"))
137 s = set(mctx.status().deleted)
137 s = set(mctx.status().deleted)
138 return mctx.predicate(s.__contains__, predrepr='deleted')
138 return mctx.predicate(s.__contains__, predrepr='deleted')
139
139
140 @predicate('missing()', callstatus=True, weight=_WEIGHT_STATUS)
140 @predicate('missing()', callstatus=True, weight=_WEIGHT_STATUS)
141 def missing(mctx, x):
141 def missing(mctx, x):
142 """File that is missing according to :hg:`status`.
142 """File that is missing according to :hg:`status`.
143 """
143 """
144 # i18n: "missing" is a keyword
144 # i18n: "missing" is a keyword
145 getargs(x, 0, 0, _("missing takes no arguments"))
145 getargs(x, 0, 0, _("missing takes no arguments"))
146 s = set(mctx.status().deleted)
146 s = set(mctx.status().deleted)
147 return mctx.predicate(s.__contains__, predrepr='deleted')
147 return mctx.predicate(s.__contains__, predrepr='deleted')
148
148
149 @predicate('unknown()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
149 @predicate('unknown()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
150 def unknown(mctx, x):
150 def unknown(mctx, x):
151 """File that is unknown according to :hg:`status`."""
151 """File that is unknown according to :hg:`status`."""
152 # i18n: "unknown" is a keyword
152 # i18n: "unknown" is a keyword
153 getargs(x, 0, 0, _("unknown takes no arguments"))
153 getargs(x, 0, 0, _("unknown takes no arguments"))
154 s = set(mctx.status().unknown)
154 s = set(mctx.status().unknown)
155 return mctx.predicate(s.__contains__, predrepr='unknown')
155 return mctx.predicate(s.__contains__, predrepr='unknown')
156
156
157 @predicate('ignored()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
157 @predicate('ignored()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
158 def ignored(mctx, x):
158 def ignored(mctx, x):
159 """File that is ignored according to :hg:`status`."""
159 """File that is ignored according to :hg:`status`."""
160 # i18n: "ignored" is a keyword
160 # i18n: "ignored" is a keyword
161 getargs(x, 0, 0, _("ignored takes no arguments"))
161 getargs(x, 0, 0, _("ignored takes no arguments"))
162 s = set(mctx.status().ignored)
162 s = set(mctx.status().ignored)
163 return mctx.predicate(s.__contains__, predrepr='ignored')
163 return mctx.predicate(s.__contains__, predrepr='ignored')
164
164
165 @predicate('clean()', callstatus=True, weight=_WEIGHT_STATUS)
165 @predicate('clean()', callstatus=True, weight=_WEIGHT_STATUS)
166 def clean(mctx, x):
166 def clean(mctx, x):
167 """File that is clean according to :hg:`status`.
167 """File that is clean according to :hg:`status`.
168 """
168 """
169 # i18n: "clean" is a keyword
169 # i18n: "clean" is a keyword
170 getargs(x, 0, 0, _("clean takes no arguments"))
170 getargs(x, 0, 0, _("clean takes no arguments"))
171 s = set(mctx.status().clean)
171 s = set(mctx.status().clean)
172 return mctx.predicate(s.__contains__, predrepr='clean')
172 return mctx.predicate(s.__contains__, predrepr='clean')
173
173
174 @predicate('tracked()')
174 @predicate('tracked()')
175 def tracked(mctx, x):
175 def tracked(mctx, x):
176 """File that is under Mercurial control."""
176 """File that is under Mercurial control."""
177 # i18n: "tracked" is a keyword
177 # i18n: "tracked" is a keyword
178 getargs(x, 0, 0, _("tracked takes no arguments"))
178 getargs(x, 0, 0, _("tracked takes no arguments"))
179 return mctx.predicate(mctx.ctx.__contains__, predrepr='tracked')
179 return mctx.predicate(mctx.ctx.__contains__, predrepr='tracked')
180
180
181 @predicate('binary()', weight=_WEIGHT_READ_CONTENTS)
181 @predicate('binary()', weight=_WEIGHT_READ_CONTENTS)
182 def binary(mctx, x):
182 def binary(mctx, x):
183 """File that appears to be binary (contains NUL bytes).
183 """File that appears to be binary (contains NUL bytes).
184 """
184 """
185 # i18n: "binary" is a keyword
185 # i18n: "binary" is a keyword
186 getargs(x, 0, 0, _("binary takes no arguments"))
186 getargs(x, 0, 0, _("binary takes no arguments"))
187 return mctx.fpredicate(lambda fctx: fctx.isbinary(),
187 return mctx.fpredicate(lambda fctx: fctx.isbinary(),
188 predrepr='binary', cache=True)
188 predrepr='binary', cache=True)
189
189
190 @predicate('exec()')
190 @predicate('exec()')
191 def exec_(mctx, x):
191 def exec_(mctx, x):
192 """File that is marked as executable.
192 """File that is marked as executable.
193 """
193 """
194 # i18n: "exec" is a keyword
194 # i18n: "exec" is a keyword
195 getargs(x, 0, 0, _("exec takes no arguments"))
195 getargs(x, 0, 0, _("exec takes no arguments"))
196 ctx = mctx.ctx
196 ctx = mctx.ctx
197 return mctx.predicate(lambda f: ctx.flags(f) == 'x', predrepr='exec')
197 return mctx.predicate(lambda f: ctx.flags(f) == 'x', predrepr='exec')
198
198
199 @predicate('symlink()')
199 @predicate('symlink()')
200 def symlink(mctx, x):
200 def symlink(mctx, x):
201 """File that is marked as a symlink.
201 """File that is marked as a symlink.
202 """
202 """
203 # i18n: "symlink" is a keyword
203 # i18n: "symlink" is a keyword
204 getargs(x, 0, 0, _("symlink takes no arguments"))
204 getargs(x, 0, 0, _("symlink takes no arguments"))
205 ctx = mctx.ctx
205 ctx = mctx.ctx
206 return mctx.predicate(lambda f: ctx.flags(f) == 'l', predrepr='symlink')
206 return mctx.predicate(lambda f: ctx.flags(f) == 'l', predrepr='symlink')
207
207
208 @predicate('resolved()', weight=_WEIGHT_STATUS)
208 @predicate('resolved()', weight=_WEIGHT_STATUS)
209 def resolved(mctx, x):
209 def resolved(mctx, x):
210 """File that is marked resolved according to :hg:`resolve -l`.
210 """File that is marked resolved according to :hg:`resolve -l`.
211 """
211 """
212 # i18n: "resolved" is a keyword
212 # i18n: "resolved" is a keyword
213 getargs(x, 0, 0, _("resolved takes no arguments"))
213 getargs(x, 0, 0, _("resolved takes no arguments"))
214 if mctx.ctx.rev() is not None:
214 if mctx.ctx.rev() is not None:
215 return mctx.never()
215 return mctx.never()
216 ms = merge.mergestate.read(mctx.ctx.repo())
216 ms = merge.mergestate.read(mctx.ctx.repo())
217 return mctx.predicate(lambda f: f in ms and ms[f] == 'r',
217 return mctx.predicate(lambda f: f in ms and ms[f] == 'r',
218 predrepr='resolved')
218 predrepr='resolved')
219
219
220 @predicate('unresolved()', weight=_WEIGHT_STATUS)
220 @predicate('unresolved()', weight=_WEIGHT_STATUS)
221 def unresolved(mctx, x):
221 def unresolved(mctx, x):
222 """File that is marked unresolved according to :hg:`resolve -l`.
222 """File that is marked unresolved according to :hg:`resolve -l`.
223 """
223 """
224 # i18n: "unresolved" is a keyword
224 # i18n: "unresolved" is a keyword
225 getargs(x, 0, 0, _("unresolved takes no arguments"))
225 getargs(x, 0, 0, _("unresolved takes no arguments"))
226 if mctx.ctx.rev() is not None:
226 if mctx.ctx.rev() is not None:
227 return mctx.never()
227 return mctx.never()
228 ms = merge.mergestate.read(mctx.ctx.repo())
228 ms = merge.mergestate.read(mctx.ctx.repo())
229 return mctx.predicate(lambda f: f in ms and ms[f] == 'u',
229 return mctx.predicate(lambda f: f in ms and ms[f] == 'u',
230 predrepr='unresolved')
230 predrepr='unresolved')
231
231
232 @predicate('hgignore()', weight=_WEIGHT_STATUS)
232 @predicate('hgignore()', weight=_WEIGHT_STATUS)
233 def hgignore(mctx, x):
233 def hgignore(mctx, x):
234 """File that matches the active .hgignore pattern.
234 """File that matches the active .hgignore pattern.
235 """
235 """
236 # i18n: "hgignore" is a keyword
236 # i18n: "hgignore" is a keyword
237 getargs(x, 0, 0, _("hgignore takes no arguments"))
237 getargs(x, 0, 0, _("hgignore takes no arguments"))
238 return mctx.ctx.repo().dirstate._ignore
238 return mctx.ctx.repo().dirstate._ignore
239
239
240 @predicate('portable()', weight=_WEIGHT_CHECK_FILENAME)
240 @predicate('portable()', weight=_WEIGHT_CHECK_FILENAME)
241 def portable(mctx, x):
241 def portable(mctx, x):
242 """File that has a portable name. (This doesn't include filenames with case
242 """File that has a portable name. (This doesn't include filenames with case
243 collisions.)
243 collisions.)
244 """
244 """
245 # i18n: "portable" is a keyword
245 # i18n: "portable" is a keyword
246 getargs(x, 0, 0, _("portable takes no arguments"))
246 getargs(x, 0, 0, _("portable takes no arguments"))
247 return mctx.predicate(lambda f: util.checkwinfilename(f) is None,
247 return mctx.predicate(lambda f: util.checkwinfilename(f) is None,
248 predrepr='portable')
248 predrepr='portable')
249
249
250 @predicate('grep(regex)', weight=_WEIGHT_READ_CONTENTS)
250 @predicate('grep(regex)', weight=_WEIGHT_READ_CONTENTS)
251 def grep(mctx, x):
251 def grep(mctx, x):
252 """File contains the given regular expression.
252 """File contains the given regular expression.
253 """
253 """
254 try:
254 try:
255 # i18n: "grep" is a keyword
255 # i18n: "grep" is a keyword
256 r = re.compile(getstring(x, _("grep requires a pattern")))
256 r = re.compile(getstring(x, _("grep requires a pattern")))
257 except re.error as e:
257 except re.error as e:
258 raise error.ParseError(_('invalid match pattern: %s') %
258 raise error.ParseError(_('invalid match pattern: %s') %
259 stringutil.forcebytestr(e))
259 stringutil.forcebytestr(e))
260 return mctx.fpredicate(lambda fctx: r.search(fctx.data()),
260 return mctx.fpredicate(lambda fctx: r.search(fctx.data()),
261 predrepr=('grep(%r)', r.pattern), cache=True)
261 predrepr=('grep(%r)', r.pattern), cache=True)
262
262
def _sizetomax(s):
    try:
        s = s.strip().lower()
        for k, v in util._sizeunits:
            if s.endswith(k):
                # max(4k) = 5k - 1, max(4.5k) = 4.6k - 1
                n = s[:-len(k)]
                inc = 1.0
                if "." in n:
                    inc /= 10 ** len(n.split(".")[1])
                return int((float(n) + inc) * v) - 1
        # no extension, this is a precise value
        return int(s)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)

def sizematcher(expr):
    """Return a function(size) -> bool from the ``size()`` expression"""
    expr = expr.strip()
    if '-' in expr: # do we have a range?
        a, b = expr.split('-', 1)
        a = util.sizetoint(a)
        b = util.sizetoint(b)
        return lambda x: x >= a and x <= b
    elif expr.startswith("<="):
        a = util.sizetoint(expr[2:])
        return lambda x: x <= a
    elif expr.startswith("<"):
        a = util.sizetoint(expr[1:])
        return lambda x: x < a
    elif expr.startswith(">="):
        a = util.sizetoint(expr[2:])
        return lambda x: x >= a
    elif expr.startswith(">"):
        a = util.sizetoint(expr[1:])
        return lambda x: x > a
    else:
        a = util.sizetoint(expr)
        b = _sizetomax(expr)
        return lambda x: x >= a and x <= b

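# The rounding rule in _sizetomax() above, exercised standalone with a
# hypothetical unit table standing in for util._sizeunits (illustration
# only): size('1k') must reach up to 2047 bytes, and a one-decimal value
# such as '4.5k' gets an upper bound just below 4.6k.
_DEMO_SIZEUNITS = [('k', 1024), ('b', 1)]

def _sizetomax_demo(s):
    s = s.strip().lower()
    for k, v in _DEMO_SIZEUNITS:
        if s.endswith(k):
            n = s[:-len(k)]
            inc = 1.0
            if "." in n:
                inc /= 10 ** len(n.split(".")[1])
            return int((float(n) + inc) * v) - 1
    return int(s)          # no unit suffix: a precise value

assert _sizetomax_demo('1k') == 2047
assert _sizetomax_demo('4.5k') == int(4.6 * 1024) - 1
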
@predicate('size(expression)', weight=_WEIGHT_STATUS)
def size(mctx, x):
    """File size matches the given expression. Examples:

    - size('1k') - files from 1024 to 2047 bytes
    - size('< 20k') - files less than 20480 bytes
    - size('>= .5MB') - files at least 524288 bytes
    - size('4k - 1MB') - files from 4096 bytes to 1048576 bytes
    """
    # i18n: "size" is a keyword
    expr = getstring(x, _("size requires an expression"))
    m = sizematcher(expr)
    return mctx.fpredicate(lambda fctx: m(fctx.size()),
                           predrepr=('size(%r)', expr), cache=True)

@predicate('encoding(name)', weight=_WEIGHT_READ_CONTENTS)
def encoding(mctx, x):
    """File can be successfully decoded with the given character
    encoding. May not be useful for encodings other than ASCII and
    UTF-8.
    """

    # i18n: "encoding" is a keyword
    enc = getstring(x, _("encoding requires an encoding name"))

    def encp(fctx):
        d = fctx.data()
        try:
            d.decode(pycompat.sysstr(enc))
            return True
        except LookupError:
            raise error.Abort(_("unknown encoding '%s'") % enc)
        except UnicodeDecodeError:
            return False

    return mctx.fpredicate(encp, predrepr=('encoding(%r)', enc), cache=True)

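# The decode-based test in encp() above, standalone (illustration only):
# bytes that decode cleanly pass, bytes invalid in the requested encoding
# raise UnicodeDecodeError. An unknown codec would raise LookupError and
# abort, which this simplified helper does not model.
def _decodes_as_demo(data, encname):
    try:
        data.decode(encname)
        return True
    except UnicodeDecodeError:
        return False

assert _decodes_as_demo(b'caf\xc3\xa9', 'utf-8')        # valid UTF-8
assert not _decodes_as_demo(b'caf\xc3\xa9', 'ascii')    # non-ASCII bytes
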
@predicate('eol(style)', weight=_WEIGHT_READ_CONTENTS)
def eol(mctx, x):
    """File contains newlines of the given style (dos, unix, mac). Binary
    files are excluded; files with mixed line endings match multiple
    styles.
    """

    # i18n: "eol" is a keyword
    enc = getstring(x, _("eol requires a style name"))

    def eolp(fctx):
        if fctx.isbinary():
            return False
        d = fctx.data()
        if (enc == 'dos' or enc == 'win') and '\r\n' in d:
            return True
        elif enc == 'unix' and re.search('(?<!\r)\n', d):
            return True
        elif enc == 'mac' and re.search('\r(?!\n)', d):
            return True
        return False
    return mctx.fpredicate(eolp, predrepr=('eol(%r)', enc), cache=True)

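# The two lookaround patterns above, exercised on their own (illustration
# only): '(?<!\r)\n' matches a bare LF (unix) and '\r(?!\n)' a bare CR
# (mac), which is why a file with mixed endings can match several styles.
import re

assert re.search(br'(?<!\r)\n', b'a\nb')           # unix newline found
assert not re.search(br'(?<!\r)\n', b'a\r\nb')     # a pure dos file
assert re.search(br'\r(?!\n)', b'a\rb')            # mac newline found
assert re.search(b'\r\n', b'a\r\nb')               # dos newline found
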
@predicate('copied()')
def copied(mctx, x):
    """File that is recorded as being copied.
    """
    # i18n: "copied" is a keyword
    getargs(x, 0, 0, _("copied takes no arguments"))
    def copiedp(fctx):
        p = fctx.parents()
        return p and p[0].path() != fctx.path()
    return mctx.fpredicate(copiedp, predrepr='copied', cache=True)

@predicate('revs(revs, pattern)', weight=_WEIGHT_STATUS)
def revs(mctx, x):
    """Evaluate set in the specified revisions. If the revset matches
    multiple revs, this will return files matching the pattern in any of
    the revisions.
    """
    # i18n: "revs" is a keyword
    r, x = getargs(x, 2, 2, _("revs takes two arguments"))
    # i18n: "revs" is a keyword
    revspec = getstring(r, _("first argument to revs must be a revision"))
    repo = mctx.ctx.repo()
    revs = scmutil.revrange(repo, [revspec])

    matchers = []
    for r in revs:
        ctx = repo[r]
        mc = mctx.switch(ctx.p1(), ctx)
        matchers.append(getmatch(mc, x))
    if not matchers:
        return mctx.never()
    if len(matchers) == 1:
        return matchers[0]
    return matchmod.unionmatcher(matchers)

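# The union step above, reduced to plain callables (an illustration, not
# matchmod.unionmatcher): one predicate is built per revision, and a file
# matches when any single-revision predicate does.
def _union_demo(predicates):
    return lambda f: any(p(f) for p in predicates)

_m = _union_demo([lambda f: f == 'a.txt', lambda f: f.endswith('.py')])
assert _m('a.txt') and _m('b.py') and not _m('c.c')
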
@predicate('status(base, rev, pattern)', weight=_WEIGHT_STATUS)
def status(mctx, x):
    """Evaluate predicate using status change between ``base`` and
    ``rev``. Examples:

    - ``status(3, 7, added())`` - matches files added from "3" to "7"
    """
    repo = mctx.ctx.repo()
    # i18n: "status" is a keyword
    b, r, x = getargs(x, 3, 3, _("status takes three arguments"))
    # i18n: "status" is a keyword
    baseerr = _("first argument to status must be a revision")
    baserevspec = getstring(b, baseerr)
    if not baserevspec:
        raise error.ParseError(baseerr)
    reverr = _("second argument to status must be a revision")
    revspec = getstring(r, reverr)
    if not revspec:
        raise error.ParseError(reverr)
    basectx, ctx = scmutil.revpair(repo, [baserevspec, revspec])
    mc = mctx.switch(basectx, ctx)
    return getmatch(mc, x)

@predicate('subrepo([pattern])')
def subrepo(mctx, x):
    """Subrepositories whose paths match the given pattern.
    """
    # i18n: "subrepo" is a keyword
    getargs(x, 0, 1, _("subrepo takes at most one argument"))
    ctx = mctx.ctx
    sstate = ctx.substate
    if x:
        pat = getpattern(x, matchmod.allpatternkinds,
                         # i18n: "subrepo" is a keyword
                         _("subrepo requires a pattern or no arguments"))
        fast = not matchmod.patkind(pat)
        if fast:
            def m(s):
                return (s == pat)
        else:
            m = matchmod.match(ctx.repo().root, '', [pat], ctx=ctx)
        return mctx.predicate(lambda f: f in sstate and m(f),
                              predrepr=('subrepo(%r)', pat))
    else:
        return mctx.predicate(sstate.__contains__, predrepr='subrepo')

methods = {
    'withstatus': getmatchwithstatus,
    'string': stringmatch,
    'symbol': stringmatch,
    'kindpat': kindpatmatch,
    'patterns': patternsmatch,
    'and': andmatch,
    'or': ormatch,
    'minus': minusmatch,
    'list': listmatch,
    'not': notmatch,
    'func': func,
}

class matchctx(object):
    def __init__(self, basectx, ctx, badfn=None):
        self._basectx = basectx
        self.ctx = ctx
        self._badfn = badfn
        self._match = None
        self._status = None

    def narrowed(self, match):
        """Create matchctx for a sub-tree narrowed by the given matcher"""
        mctx = matchctx(self._basectx, self.ctx, self._badfn)
        mctx._match = match
        # leave the wider status, which we don't have to care about
        mctx._status = self._status
        return mctx

    def switch(self, basectx, ctx):
        mctx = matchctx(basectx, ctx, self._badfn)
        mctx._match = self._match
        return mctx

    def withstatus(self, keys):
        """Create matchctx which has precomputed status specified by the keys"""
        mctx = matchctx(self._basectx, self.ctx, self._badfn)
        mctx._match = self._match
        mctx._buildstatus(keys)
        return mctx

    def _buildstatus(self, keys):
        self._status = self._basectx.status(self.ctx, self._match,
                                            listignored='ignored' in keys,
                                            listclean='clean' in keys,
                                            listunknown='unknown' in keys)

    def status(self):
        return self._status

    def matcher(self, patterns):
        return self.ctx.match(patterns, badfn=self._badfn)

    def predicate(self, predfn, predrepr=None, cache=False):
        """Create a matcher to select files by predfn(filename)"""
        if cache:
            predfn = util.cachefunc(predfn)
        repo = self.ctx.repo()
        return matchmod.predicatematcher(repo.root, repo.getcwd(), predfn,
                                         predrepr=predrepr,
                                         badfn=self._badfn)

    def fpredicate(self, predfn, predrepr=None, cache=False):
        """Create a matcher to select files by predfn(fctx) at the current
        revision

        Missing files are ignored.
        """
        ctx = self.ctx
        if ctx.rev() is None:
            def fctxpredfn(f):
                try:
                    fctx = ctx[f]
                except error.LookupError:
                    return False
                try:
                    fctx.audit()
                except error.Abort:
                    return False
                try:
                    return predfn(fctx)
                except (IOError, OSError) as e:
                    # open()-ing a directory fails with EACCES on Windows
                    if e.errno in (errno.ENOENT, errno.EACCES, errno.ENOTDIR,
                                   errno.EISDIR):
                        return False
                    raise
        else:
            def fctxpredfn(f):
                try:
                    fctx = ctx[f]
                except error.LookupError:
                    return False
                return predfn(fctx)
        return self.predicate(fctxpredfn, predrepr=predrepr, cache=cache)

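# The working-copy error handling in fpredicate() above, standalone
# (illustration only, hypothetical helper names): probing a path that is
# missing or is a directory makes the predicate answer False rather than
# aborting. The final assert assumes the path does not exist on disk.
import errno

def _safe_probe_demo(path, predfn):
    try:
        return predfn(path)
    except (IOError, OSError) as e:
        # open()-ing a directory fails with EACCES on Windows
        if e.errno in (errno.ENOENT, errno.EACCES, errno.ENOTDIR,
                       errno.EISDIR):
            return False
        raise

def _read_nonempty_demo(path):
    with open(path, 'rb') as fp:
        return len(fp.read()) > 0

assert _safe_probe_demo('no-such-file-demo', _read_nonempty_demo) is False
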
    def never(self):
        """Create a matcher to select nothing"""
        repo = self.ctx.repo()
-        return matchmod.nevermatcher(repo.root, repo.getcwd(),
-                                     badfn=self._badfn)
+        return matchmod.never(repo.root, repo.getcwd(), badfn=self._badfn)

def match(ctx, expr, badfn=None):
    """Create a matcher for a single fileset expression"""
    tree = filesetlang.parse(expr)
    tree = filesetlang.analyze(tree)
    tree = filesetlang.optimize(tree)
    mctx = matchctx(ctx.p1(), ctx, badfn=badfn)
    return getmatch(mctx, tree)


def loadpredicate(ui, extname, registrarobj):
    """Load fileset predicates from specified registrarobj
    """
    for name, func in registrarobj._table.iteritems():
        symbols[name] = func

# tell hggettext to extract docstrings from these functions:
i18nfunctions = symbols.values()
@@ -1,1840 +1,1840 b''
# subrepo.py - sub-repository classes and factory
#
# Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import copy
import errno
import hashlib
import os
import re
import stat
import subprocess
import sys
import tarfile
import xml.dom.minidom

from .i18n import _
from . import (
    cmdutil,
    encoding,
    error,
    exchange,
    logcmdutil,
    match as matchmod,
    node,
    pathutil,
    phases,
    pycompat,
    scmutil,
    subrepoutil,
    util,
    vfs as vfsmod,
)
from .utils import (
    dateutil,
    procutil,
    stringutil,
)

hg = None
reporelpath = subrepoutil.reporelpath
subrelpath = subrepoutil.subrelpath
_abssource = subrepoutil._abssource
propertycache = util.propertycache

def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    expandedpath = util.urllocalpath(util.expandpath(path))
    u = util.url(expandedpath)
    if not u.scheme:
        path = util.normpath(os.path.abspath(u.path))
    return path

def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]

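# The cache-file naming scheme above, standalone (illustration only):
# SHA-1 the expanded remote path and keep the first 12 hex digits, giving
# a short, filesystem-safe name that is stable per remote repository.
import hashlib

def _cachename_demo(remotepath):
    return hashlib.sha1(remotepath).hexdigest()[0:12]

assert len(_cachename_demo(b'/some/remote/repo')) == 12
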
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""
    def __init__(self, *args, **kw):
        self.subrepo = kw.pop(r'subrepo', None)
        self.cause = kw.pop(r'cause', None)
        error.Abort.__init__(self, *args, **kw)

def annotatesubrepoerror(func):
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort as ex:
            # This exception has already been handled
            raise ex
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = (stringutil.forcebytestr(ex) + ' '
                        + _('(in subrepository "%s")') % subrepo)
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod

def _updateprompt(ui, sub, dirty, local, remote):
    if dirty:
        msg = (_(' subrepository sources for %s differ\n'
                 'use (l)ocal source (%s) or (r)emote source (%s)?'
                 '$$ &Local $$ &Remote')
               % (subrelpath(sub), local, remote))
    else:
        msg = (_(' subrepository sources for %s differ (in checked out '
                 'version)\n'
                 'use (l)ocal source (%s) or (r)emote source (%s)?'
                 '$$ &Local $$ &Remote')
               % (subrelpath(sub), local, remote))
    return ui.promptchoice(msg, 0)

def _sanitize(ui, vfs, ignore):
    for dirname, dirs, names in vfs.walk():
        for i, d in enumerate(dirs):
            if d.lower() == ignore:
                del dirs[i]
                break
        if vfs.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % vfs.join(dirname))
                vfs.unlink(vfs.reljoin(dirname, f))

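# A rough standalone equivalent of _sanitize() above, using os.walk in
# place of vfs.walk (illustration only): prune directories matching the
# ignore name, then delete any 'hgrc' sitting directly under a '.hg'
# directory, since a hostile subrepo could otherwise inject config.
import os

def _sanitize_demo(root, ignore):
    for dirname, dirs, names in os.walk(root):
        dirs[:] = [d for d in dirs if d.lower() != ignore]
        if os.path.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                os.unlink(os.path.join(dirname, f))
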
def _auditsubrepopath(repo, path):
    # sanity check for potentially unsafe paths such as '~' and '$FOO'
    if path.startswith('~') or '$' in path or util.expandpath(path) != path:
        raise error.Abort(_('subrepo path contains illegal component: %s')
                          % path)
    # auditor doesn't check if the path itself is a symlink
    pathutil.pathauditor(repo.root)(path)
    if repo.wvfs.islink(path):
        raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)

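# The path sanity rule above as a standalone check (illustration only):
# reject anything that smells like user or variable expansion before it is
# ever joined to the repository root.
import os

def _suspicious_subrepo_path_demo(path):
    expanded = os.path.expanduser(os.path.expandvars(path))
    return path.startswith('~') or '$' in path or expanded != path

assert _suspicious_subrepo_path_demo('~/evil')
assert _suspicious_subrepo_path_demo('$FOO/evil')
assert not _suspicious_subrepo_path_demo('lib/vendored')
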
SUBREPO_ALLOWED_DEFAULTS = {
    'hg': True,
    'git': False,
    'svn': False,
}

def _checktype(ui, kind):
    # subrepos.allowed is a master kill switch. If disabled, subrepos are
    # disabled period.
    if not ui.configbool('subrepos', 'allowed', True):
        raise error.Abort(_('subrepos not enabled'),
                          hint=_("see 'hg help config.subrepos' for details"))

    default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
    if not ui.configbool('subrepos', '%s:allowed' % kind, default):
        raise error.Abort(_('%s subrepos not allowed') % kind,
                          hint=_("see 'hg help config.subrepos' for details"))

    if kind not in types:
        raise error.Abort(_('unknown subrepo type %s') % kind)

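# The two-level gate above, standalone (illustration only, with a plain
# dict standing in for ui.configbool): a master 'subrepos.allowed' switch,
# then a per-kind override whose default comes from
# SUBREPO_ALLOWED_DEFAULTS, so hg subrepos work out of the box while git
# and svn must be enabled explicitly.
_DEMO_DEFAULTS = {'hg': True, 'git': False, 'svn': False}

def _subrepo_allowed_demo(config, kind):
    if not config.get('subrepos.allowed', True):
        return False        # master kill switch
    default = _DEMO_DEFAULTS.get(kind, False)
    return config.get('subrepos.%s:allowed' % kind, default)

assert _subrepo_allowed_demo({}, 'hg')
assert not _subrepo_allowed_demo({}, 'git')
assert _subrepo_allowed_demo({'subrepos.git:allowed': True}, 'git')
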
def subrepo(ctx, path, allowwdir=False, allowcreate=True):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    repo = ctx.repo()
    _auditsubrepopath(repo, path)
    state = ctx.substate[path]
    _checktype(repo.ui, state[2])
    if allowwdir:
        state = (state[0], ctx.subrev(path), state[2])
    return types[state[2]](ctx, path, state[:2], allowcreate)

def nullsubrepo(ctx, path, pctx):
    """return an empty subrepo in pctx for the extant subrepo in ctx"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    repo = ctx.repo()
    _auditsubrepopath(repo, path)
    state = ctx.substate[path]
    _checktype(repo.ui, state[2])
    subrev = ''
    if state[2] == 'hg':
        subrev = "0" * 40
    return types[state[2]](pctx, path, (state[0], subrev), True)

# subrepo classes need to implement the following abstract class:

class abstractsubrepo(object):

    def __init__(self, ctx, path):
        """Initialize abstractsubrepo part

        ``ctx`` is the context referring to this subrepository in the
        parent repository.

        ``path`` is the path to this subrepository as seen from the
        innermost repository.
        """
        self.ui = ctx.repo().ui
        self._ctx = ctx
        self._path = path

    def addwebdirpath(self, serverpath, webconf):
        """Add the hgwebdir entries for this subrepo, and any of its subrepos.

        ``serverpath`` is the path component of the URL for this repo.

        ``webconf`` is the dictionary of hgwebdir entries.
        """
        pass

    def storeclean(self, path):
        """
        returns true if the repository has not changed since it was last
        cloned from or pushed to a given repository.
        """
        return False

    def dirty(self, ignoreupdate=False, missing=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate. If missing
        is true, check for deleted files.
        """
        raise NotImplementedError

    def dirtyreason(self, ignoreupdate=False, missing=False):
        """return reason string if it is ``dirty()``

        Returned string should have enough information for the message
        of exception.

        This returns None, otherwise.
        """
        if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
            return _('uncommitted changes in subrepository "%s"'
                     ) % subrelpath(self)

    def bailifchanged(self, ignoreupdate=False, hint=None):
        """raise Abort if subrepository is ``dirty()``
        """
        dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
                                       missing=True)
        if dirtyreason:
            raise error.Abort(dirtyreason, hint=hint)

    def basestate(self):
        """current working directory base state, disregarding .hgsubstate
        state and working directory modifications"""
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def phase(self, state):
        """returns phase of specified state in the subrepository.
        """
        return phases.public

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, opts):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
        return []

    def addremove(self, matcher, prefix, uipathfn, opts):
        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
        return 1

    def cat(self, match, fm, fntemplate, prefix, **opts):
        return 1

    def status(self, rev2, **opts):
        return scmutil.status([], [], [], [], [], [], [])

    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        pass

    def outgoing(self, ui, dest, opts):
        return 1

    def incoming(self, ui, source, opts):
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name, decode):
        """return file data, optionally passed through repo decoders"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return ''

    def matchfileset(self, expr, badfn=None):
        """Resolve the fileset expression for this repo"""
-        return matchmod.nevermatcher(self.wvfs.base, '', badfn=badfn)
+        return matchmod.never(self.wvfs.base, '', badfn=badfn)

    def printfiles(self, ui, m, fm, fmt, subrepos):
        """handle the files command for this subrepo"""
        return 1

    def archive(self, archiver, prefix, match=None, decode=True):
        if match is not None:
            files = [f for f in self.files() if match(f)]
        else:
            files = self.files()
        total = len(files)
        relpath = subrelpath(self)
        progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
                                        unit=_('files'), total=total)
        progress.update(0)
        for name in files:
            flags = self.fileflags(name)
            mode = 'x' in flags and 0o755 or 0o644
            symlink = 'l' in flags
            archiver.addfile(prefix + name, mode, symlink,
                             self.filedata(name, decode))
            progress.increment()
        progress.complete()
        return total

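# The flag handling in archive() above, standalone (illustration only): an
# 'x' flag maps to an executable archive mode, anything else to a plain
# file mode, and 'l' marks the entry as a symlink.
def _archive_mode_demo(flags):
    mode = 0o755 if 'x' in flags else 0o644
    return mode, 'l' in flags

assert _archive_mode_demo('x') == (0o755, False)
assert _archive_mode_demo('l') == (0o644, True)
assert _archive_mode_demo('') == (0o644, False)
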
    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''

    def forget(self, match, prefix, uipathfn, dryrun, interactive):
        return ([], [])

    def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos,
                    dryrun, warnings):
        """remove the matched files from the subrepository and the filesystem,
        possibly by force and/or after the file has been removed from the
        filesystem. Return 0 on success, 1 on any warning.
        """
        warnings.append(_("warning: removefiles not implemented (%s)")
                        % self._path)
        return 1

    def revert(self, substate, *pats, **opts):
        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
                     % (substate[0], substate[2]))
        return []

    def shortid(self, revid):
        return revid

    def unshare(self):
        '''
        convert this repository from shared to normal storage.
        '''

    def verify(self):
        '''verify the integrity of the repository. Return 0 on success or
        warning, 1 on any error.
        '''
        return 0

    @propertycache
    def wvfs(self):
        """return vfs to access the working directory of this subrepository
        """
        return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)

class hgsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        super(hgsubrepo, self).__init__(ctx, path)
        self._state = state
        r = ctx.repo()
        root = r.wjoin(path)
        create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
        # repository constructor does expand variables in path, which is
        # unsafe since subrepo path might come from untrusted source.
        if os.path.realpath(util.expandpath(root)) != root:
            raise error.Abort(_('subrepo path contains illegal component: %s')
                              % path)
        self._repo = hg.repository(r.baseui, root, create=create)
        if self._repo.root != root:
            raise error.ProgrammingError('failed to reject unsafe subrepo '
                                         'path: %s (expanded to %s)'
                                         % (root, self._repo.root))

        # Propagate the parent's --hidden option
        if r is r.unfiltered():
            self._repo = self._repo.unfiltered()

        self.ui = self._repo.ui
        for s, k in [('ui', 'commitsubrepos')]:
            v = r.ui.config(s, k)
            if v:
                self.ui.setconfig(s, k, v, 'subrepo')
        # internal config: ui._usedassubrepo
        self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
        self._initrepo(r, state[0], create)

    @annotatesubrepoerror
    def addwebdirpath(self, serverpath, webconf):
        cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)

    def storeclean(self, path):
        with self._repo.lock():
            return self._storeclean(path)

    def _storeclean(self, path):
        clean = True
        itercache = self._calcstorehash(path)
        for filehash in self._readstorehashcache(path):
            if filehash != next(itercache, None):
                clean = False
                break
        if clean:
            # if not empty:
            # the cached and current pull states have a different size
            clean = next(itercache, None) is None
        return clean

    def _calcstorehash(self, remotepath):
        '''calculate a unique "store hash"

        This method is used to detect when there are changes that may
        require a push to a given remote path.'''
        # sort the files that will be hashed in increasing (likely) file size
        filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
        yield '# %s\n' % _expandedabspath(remotepath)
        vfs = self._repo.vfs
        for relname in filelist:
            filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
            yield '%s = %s\n' % (relname, filehash)

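# The store-hash idea above, self-contained (hypothetical file list and
# reader callback, illustration only): hash a few key store files, one
# line per file, so two listings compare equal iff none of them changed.
import hashlib

def _calcstorehash_demo(remotepath, readfile):
    yield b'# ' + remotepath + b'\n'
    for relname in (b'bookmarks', b'store/00changelog.i'):
        filehash = hashlib.sha1(readfile(relname)).hexdigest()
        yield relname + b' = ' + filehash.encode('ascii') + b'\n'

_demo_lines = list(_calcstorehash_demo(b'/remote', lambda name: b''))
assert len(_demo_lines) == 3
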
    @propertycache
    def _cachestorehashvfs(self):
        return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))

    def _readstorehashcache(self, remotepath):
        '''read the store hash cache for a given remote repository'''
        cachefile = _getstorehashcachename(remotepath)
        return self._cachestorehashvfs.tryreadlines(cachefile, 'r')

    def _cachestorehash(self, remotepath):
        '''cache the current store hash

        Each remote repo requires its own store hash cache, because a subrepo
        store may be "clean" versus a given remote repo, but not versus another
        '''
        cachefile = _getstorehashcachename(remotepath)
        with self._repo.lock():
            storehash = list(self._calcstorehash(remotepath))
            vfs = self._cachestorehashvfs
            vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)

    def _getctx(self):
        '''fetch the context for this subrepo revision, possibly a workingctx
        '''
        if self._ctx.rev() is None:
            return self._repo[None] # workingctx if parent is workingctx
        else:
            rev = self._state[1]
            return self._repo[rev]

    @annotatesubrepoerror
    def _initrepo(self, parentrepo, source, create):
        self._repo._subparent = parentrepo
        self._repo._subsource = source

        if create:
            lines = ['[paths]\n']

            def addpathconfig(key, value):
                if value:
                    lines.append('%s = %s\n' % (key, value))
                    self.ui.setconfig('paths', key, value, 'subrepo')

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)

            self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))

    @annotatesubrepoerror
    def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
        return cmdutil.add(ui, self._repo, match, prefix, uipathfn,
                           explicitonly, **opts)

    @annotatesubrepoerror
    def addremove(self, m, prefix, uipathfn, opts):
        # In the same way as sub directories are processed, once in a subrepo,
        # always enter any of its subrepos. Don't corrupt the options that will
        # be used to process sibling subrepos however.
        opts = copy.copy(opts)
        opts['subrepos'] = True
        return scmutil.addremove(self._repo, m, prefix, uipathfn, opts)

    @annotatesubrepoerror
    def cat(self, match, fm, fntemplate, prefix, **opts):
        rev = self._state[1]
        ctx = self._repo[rev]
        return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
                           prefix, **opts)

    @annotatesubrepoerror
    def status(self, rev2, **opts):
        try:
            rev1 = self._state[1]
            ctx1 = self._repo[rev1]
            ctx2 = self._repo[rev2]
            return self._repo.status(ctx1, ctx2, **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))
            return scmutil.status([], [], [], [], [], [], [])

    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        try:
            node1 = node.bin(self._state[1])
            # We currently expect node2 to come from substate and be
            # in hex format
            if node2 is not None:
                node2 = node.bin(node2)
            logcmdutil.diffordiffstat(ui, self._repo, diffopts, node1, node2,
                                      match, prefix=prefix, listsubrepos=True,
                                      **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                         % (inst, subrelpath(self)))

    @annotatesubrepoerror
    def archive(self, archiver, prefix, match=None, decode=True):
        self._get(self._state + ('hg',))
        files = self.files()
        if match:
            files = [f for f in files if match(f)]
        rev = self._state[1]
        ctx = self._repo[rev]
        scmutil.prefetchfiles(self._repo, [ctx.rev()],
                              scmutil.matchfiles(self._repo, files))
        total = abstractsubrepo.archive(self, archiver, prefix, match)
        for subpath in ctx.substate:
            s = subrepo(ctx, subpath, True)
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = prefix + subpath + '/'
            total += s.archive(archiver, subprefix, submatch,
                               decode)
        return total

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        r = self._state[1]
        if r == '' and not ignoreupdate: # no state recorded
            return True
        w = self._repo[None]
        if r != w.p1().hex() and not ignoreupdate:
            # different version checked out
            return True
        return w.dirty(missing=missing) # working directory changed

596 def basestate(self):
596 def basestate(self):
597 return self._repo['.'].hex()
597 return self._repo['.'].hex()
598
598
599 def checknested(self, path):
599 def checknested(self, path):
600 return self._repo._checknested(self._repo.wjoin(path))
600 return self._repo._checknested(self._repo.wjoin(path))
601
601
602 @annotatesubrepoerror
602 @annotatesubrepoerror
603 def commit(self, text, user, date):
603 def commit(self, text, user, date):
604 # don't bother committing in the subrepo if it's only been
604 # don't bother committing in the subrepo if it's only been
605 # updated
605 # updated
606 if not self.dirty(True):
606 if not self.dirty(True):
607 return self._repo['.'].hex()
607 return self._repo['.'].hex()
608 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
608 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
609 n = self._repo.commit(text, user, date)
609 n = self._repo.commit(text, user, date)
610 if not n:
610 if not n:
611 return self._repo['.'].hex() # different version checked out
611 return self._repo['.'].hex() # different version checked out
612 return node.hex(n)
612 return node.hex(n)
613
613
614 @annotatesubrepoerror
614 @annotatesubrepoerror
615 def phase(self, state):
615 def phase(self, state):
616 return self._repo[state or '.'].phase()
616 return self._repo[state or '.'].phase()
617
617
618 @annotatesubrepoerror
618 @annotatesubrepoerror
619 def remove(self):
619 def remove(self):
620 # we can't fully delete the repository as it may contain
620 # we can't fully delete the repository as it may contain
621 # local-only history
621 # local-only history
622 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
622 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
623 hg.clean(self._repo, node.nullid, False)
623 hg.clean(self._repo, node.nullid, False)
624
624
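    # _get makes sure the subrepo contains `revision`, cloning or pulling
    # from `source` as needed. It returns True when the revision was already
    # available locally and False when it had to be fetched; callers such as
    # get() below use this to decide whether hidden-revision handling is
    # needed.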
    def _get(self, state):
        source, revision, kind = state
        parentrepo = self._repo._subparent

        if revision in self._repo.unfiltered():
            # Allow shared subrepos tracked at null to set up the sharedpath
            if len(self._repo) != 0 or not parentrepo.shared():
                return True
        self._repo._subsource = source
        srcurl = _abssource(self._repo)

        # Defer creating the peer until after the status message is logged, in
        # case there are network problems.
        getpeer = lambda: hg.peer(self._repo, {}, srcurl)

        if len(self._repo) == 0:
            # use self._repo.vfs instead of self.wvfs to remove .hg only
            self._repo.vfs.rmtree()

            # A remote subrepo could be shared if there is a local copy
            # relative to the parent's share source. But clone pooling doesn't
            # assemble the repos in a tree, so that can't be consistently done.
            # A simpler option is for the user to configure clone pooling, and
            # work with that.
            if parentrepo.shared() and hg.islocal(srcurl):
                self.ui.status(_('sharing subrepo %s from %s\n')
                               % (subrelpath(self), srcurl))
                shared = hg.share(self._repo._subparent.baseui,
                                  getpeer(), self._repo.root,
                                  update=False, bookmarks=False)
                self._repo = shared.local()
            else:
                # TODO: find a common place for this and this code in the
                # share.py wrap of the clone command.
                if parentrepo.shared():
                    pool = self.ui.config('share', 'pool')
                    if pool:
                        pool = util.expandpath(pool)

                    shareopts = {
                        'pool': pool,
                        'mode': self.ui.config('share', 'poolnaming'),
                    }
                else:
                    shareopts = {}

                self.ui.status(_('cloning subrepo %s from %s\n')
                               % (subrelpath(self), util.hidepassword(srcurl)))
                other, cloned = hg.clone(self._repo._subparent.baseui, {},
                                         getpeer(), self._repo.root,
                                         update=False, shareopts=shareopts)
                self._repo = cloned.local()
            self._initrepo(parentrepo, source, create=True)
            self._cachestorehash(srcurl)
        else:
            self.ui.status(_('pulling subrepo %s from %s\n')
                           % (subrelpath(self), util.hidepassword(srcurl)))
            cleansub = self.storeclean(srcurl)
            exchange.pull(self._repo, getpeer())
            if cleansub:
                # keep the repo clean after pull
                self._cachestorehash(srcurl)
        return False

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        inrepo = self._get(state)
        source, revision, kind = state
        repo = self._repo
        repo.ui.debug("getting subrepo %s\n" % self._path)
        if inrepo:
            urepo = repo.unfiltered()
            ctx = urepo[revision]
            if ctx.hidden():
                urepo.ui.warn(
                    _('revision %s in subrepository "%s" is hidden\n')
                    % (revision[0:12], self._path))
                repo = urepo
        hg.updaterepo(repo, revision, overwrite)

    @annotatesubrepoerror
    def merge(self, state):
        self._get(state)
        cur = self._repo['.']
        dst = self._repo[state[1]]
        anc = dst.ancestor(cur)

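        # Three cases below: if the recorded revision descends from the
        # current one on the same branch, a plain update fast-forwards; if
        # the current revision already contains the recorded one, there is
        # nothing to do; otherwise a real merge is required.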
        def mergefunc():
            if anc == cur and dst.branch() == cur.branch():
                self.ui.debug('updating subrepository "%s"\n'
                              % subrelpath(self))
                hg.update(self._repo, state[1])
            elif anc == dst:
                self.ui.debug('skipping subrepository "%s"\n'
                              % subrelpath(self))
            else:
                self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
                hg.merge(self._repo, state[1], remind=False)

        wctx = self._repo[None]
        if self.dirty():
            if anc != dst:
                if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
                    mergefunc()
            else:
                mergefunc()
        else:
            mergefunc()

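    # push() pushes nested subrepos first; if any of them reports failure
    # (a result of 0), it returns False. It returns None when the store is
    # already clean with respect to the destination, and otherwise the
    # cgresult of the underlying exchange.push() call.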
    @annotatesubrepoerror
    def push(self, opts):
        force = opts.get('force')
        newbranch = opts.get('new_branch')
        ssh = opts.get('ssh')

        # push subrepos depth-first for coherent ordering
        c = self._repo['.']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if c.sub(s).push(opts) == 0:
                return False

        dsturl = _abssource(self._repo, True)
        if not force:
            if self.storeclean(dsturl):
                self.ui.status(
                    _('no changes made to subrepo %s since last push to %s\n')
                    % (subrelpath(self), util.hidepassword(dsturl)))
                return None
        self.ui.status(_('pushing subrepo %s to %s\n') %
                       (subrelpath(self), util.hidepassword(dsturl)))
        other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
        res = exchange.push(self._repo, other, force, newbranch=newbranch)

        # the repo is now clean
        self._cachestorehash(dsturl)
        return res.cgresult

    @annotatesubrepoerror
    def outgoing(self, ui, dest, opts):
        if 'rev' in opts or 'branch' in opts:
            opts = copy.copy(opts)
            opts.pop('rev', None)
            opts.pop('branch', None)
        return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)

    @annotatesubrepoerror
    def incoming(self, ui, source, opts):
        if 'rev' in opts or 'branch' in opts:
            opts = copy.copy(opts)
            opts.pop('rev', None)
            opts.pop('branch', None)
        return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)

    @annotatesubrepoerror
    def files(self):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.manifest().keys()

    def filedata(self, name, decode):
        rev = self._state[1]
        data = self._repo[rev][name].data()
        if decode:
            data = self._repo.wwritedata(name, data)
        return data

    def fileflags(self, name):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.flags(name)

    @annotatesubrepoerror
    def printfiles(self, ui, m, fm, fmt, subrepos):
        # If the parent context is a workingctx, use the workingctx here for
        # consistency.
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]
        return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)

    @annotatesubrepoerror
    def matchfileset(self, expr, badfn=None):
        repo = self._repo
        if self._ctx.rev() is None:
            ctx = repo[None]
        else:
            rev = self._state[1]
            ctx = repo[rev]

        matchers = [ctx.matchfileset(expr, badfn=badfn)]

        for subpath in ctx.substate:
            sub = ctx.sub(subpath)

            try:
                sm = sub.matchfileset(expr, badfn=badfn)
                pm = matchmod.prefixdirmatcher(repo.root, repo.getcwd(),
                                               subpath, sm, badfn=badfn)
                matchers.append(pm)
            except error.LookupError:
                self.ui.status(_("skipping missing subrepository: %s\n")
                               % self.wvfs.reljoin(reporelpath(self), subpath))
        if len(matchers) == 1:
            return matchers[0]
        return matchmod.unionmatcher(matchers)

    def walk(self, match):
        ctx = self._repo[None]
        return ctx.walk(match)

    @annotatesubrepoerror
    def forget(self, match, prefix, uipathfn, dryrun, interactive):
        return cmdutil.forget(self.ui, self._repo, match, prefix, uipathfn,
                              True, dryrun=dryrun, interactive=interactive)

    @annotatesubrepoerror
    def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos,
                    dryrun, warnings):
        return cmdutil.remove(self.ui, self._repo, matcher, prefix, uipathfn,
                              after, force, subrepos, dryrun)

    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        # reverting a subrepo is a two-step process:
        # 1. if no_backup is not set, revert all modified
        #    files inside the subrepo
        # 2. update the subrepo to the revision specified in
        #    the corresponding substate dictionary
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get(r'no_backup'):
            # Revert all files on the subrepo, creating backups
            # Note that this will not recursively revert subrepos
            # We could do it if there was a set:subrepos() predicate
            opts = opts.copy()
            opts[r'date'] = None
            opts[r'rev'] = substate[1]

            self.filerevert(*pats, **opts)

        # Update the repo to the revision specified in the given substate
        if not opts.get(r'dry_run'):
            self.get(substate, overwrite=True)

    def filerevert(self, *pats, **opts):
        ctx = self._repo[opts[r'rev']]
        parents = self._repo.dirstate.parents()
        if opts.get(r'all'):
            pats = ['set:modified()']
        else:
            pats = []
        cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)

    def shortid(self, revid):
        return revid[:12]

    @annotatesubrepoerror
    def unshare(self):
        # subrepo inherently violates our import layering rules
        # because it wants to make repo objects from deep inside the stack
        # so we manually delay the circular imports to not break
        # scripts that don't use our demand-loading
        global hg
        from . import hg as h
        hg = h

        # Nothing prevents a user from sharing in a repo, and then making that
        # a subrepo. Alternatively, the previous unshare attempt may have
        # failed part way through. So recurse whether or not this layer is
        # shared.
        if self._repo.shared():
            self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)

        hg.unshare(self.ui, self._repo)

    def verify(self):
        try:
            rev = self._state[1]
            ctx = self._repo.unfiltered()[rev]
            if ctx.hidden():
                # Since hidden revisions aren't pushed/pulled, it seems worth
                # an explicit warning.
                ui = self._repo.ui
                ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
                        (self._relpath, node.short(self._ctx.node())))
            return 0
        except error.RepoLookupError:
            # A missing subrepo revision may be a case of needing to pull it,
            # so don't treat this as an error.
            self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
                               (self._relpath, node.short(self._ctx.node())))
            return 0

    @propertycache
    def wvfs(self):
        """return own wvfs for efficiency and consistency
        """
        return self._repo.wvfs

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        # Keep consistent dir separators by avoiding vfs.join(self._path)
        return reporelpath(self._repo)

class svnsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        super(svnsubrepo, self).__init__(ctx, path)
        self._state = state
        self._exe = procutil.findexe('svn')
        if not self._exe:
            raise error.Abort(_("'svn' executable not found for subrepo '%s'")
                              % self._path)

    def _svncommand(self, commands, filename='', failok=False):
        cmd = [self._exe]
        extrakw = {}
        if not self.ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw[r'stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = self.wvfs.reljoin(self._ctx.repo().origroot,
                                     self._path, filename)
            cmd.append(path)
        env = dict(encoding.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
                             bufsize=-1, close_fds=procutil.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             env=procutil.tonativeenv(env), **extrakw)
        stdout, stderr = map(util.fromnativeeol, p.communicate())
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise error.Abort(stderr or 'exited with code %d'
                                  % p.returncode)
            if stderr:
                self.ui.warn(stderr + '\n')
        return stdout, stderr

    @propertycache
    def _svnversion(self):
        output, err = self._svncommand(['--version', '--quiet'], filename=None)
        m = re.search(br'^(\d+)\.(\d+)', output)
        if not m:
            raise error.Abort(_('cannot retrieve svn tool version'))
        return (int(m.group(1)), int(m.group(2)))

    def _svnmissing(self):
        return not self.wvfs.exists('.svn')

    def _wcrevs(self):
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName(r'entry')
        lastrev, rev = '0', '0'
        if entries:
            rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
            commits = entries[0].getElementsByTagName(r'commit')
            if commits:
                lastrev = pycompat.bytestr(
                    commits[0].getAttribute(r'revision')) or '0'
        return (lastrev, rev)

    def _wcrev(self):
        return self._wcrevs()[0]

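    # _wcchanged parses `svn status --xml`; each changed path shows up as an
    # entry element roughly like this (illustrative sample, not captured
    # output):
    #   <entry path="foo.c"><wc-status item="modified" props="none"/></entry>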
    def _wcchanged(self):
        """Return (changes, extchanges, missing) where changes is True
        if the working directory was changed, extchanges is
        True if any of these changes concern an external entry and missing
        is True if any change is a missing entry.
        """
        output, err = self._svncommand(['status', '--xml'])
        externals, changes, missing = [], [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName(r'entry'):
            s = e.getElementsByTagName(r'wc-status')
            if not s:
                continue
            item = s[0].getAttribute(r'item')
            props = s[0].getAttribute(r'props')
            path = e.getAttribute(r'path').encode('utf8')
            if item == r'external':
                externals.append(path)
            elif item == r'missing':
                missing.append(path)
            if (item not in (r'', r'normal', r'unversioned', r'external')
                or props not in (r'', r'none', r'normal')):
                changes.append(path)
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + pycompat.ossep):
                    return True, True, bool(missing)
        return bool(changes), False, bool(missing)

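    # dirty() is True when the working copy has local modifications (or,
    # with missing=True, missing entries), or when it is checked out at a
    # revision other than the one recorded in the subrepo state and
    # ignoreupdate is not set.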
    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        if self._svnmissing():
            return self._state[1] != ''
        wcchanged = self._wcchanged()
        changed = wcchanged[0] or (missing and wcchanged[2])
        if not changed:
            if self._state[1] in self._wcrevs() or ignoreupdate:
                return False
        return True

    def basestate(self):
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # The last committed rev is not the same as rev. We would
            # like to take lastrev, but we do not know if the subrepo
            # URL exists at lastrev. Test it and fall back to rev if it
            # is not there.
            try:
                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
                return lastrev
            except error.Abort:
                pass
        return rev

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise error.Abort(_('cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise error.Abort(_('cannot commit missing svn entries'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self.ui.status(commitinfo)
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn's. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, and there is no output and no error code.
                raise error.Abort(_('failed to commit svn changes'))
            raise error.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        self.ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev

    @annotatesubrepoerror
    def remove(self):
        if self.dirty():
            self.ui.warn(_('not removing repo %s because '
                           'it has changes.\n') % self._path)
            return
        self.ui.note(_('removing subrepo %s\n') % self._path)

        self.wvfs.rmtree(forcibly=True)
        try:
            pwvfs = self._ctx.repo().wvfs
            pwvfs.removedirs(pwvfs.dirname(self._path))
        except OSError:
            pass

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        if overwrite:
            self._svncommand(['revert', '--recursive'])
        args = ['checkout']
        if self._svnversion >= (1, 5):
            args.append('--force')
        # The revision must be specified at the end of the URL to properly
        # update to a directory which has since been deleted and recreated.
        args.append('%s@%s' % (state[0], state[1]))

        # SEC: check that the ssh url is safe
        util.checksafessh(state[0])

        status, err = self._svncommand(args, failok=True)
        _sanitize(self.ui, self.wvfs, '.svn')
        if not re.search('Checked out revision [0-9]+.', status):
            if ('is already a working copy for a different URL' in err
                and (self._wcchanged()[:2] == (False, False))):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise error.Abort((status or err).splitlines()[-1])
        self.ui.status(status)

    @annotatesubrepoerror
    def merge(self, state):
        old = self._state[1]
        new = state[1]
        wcrev = self._wcrev()
        if new != wcrev:
            dirty = old == wcrev or self._wcchanged()[0]
            if _updateprompt(self.ui, self, dirty, wcrev, new):
                self.get(state, False)

    def push(self, opts):
        # push is a no-op for SVN
        return True

    @annotatesubrepoerror
    def files(self):
        output = self._svncommand(['list', '--recursive', '--xml'])[0]
        doc = xml.dom.minidom.parseString(output)
        paths = []
        for e in doc.getElementsByTagName(r'entry'):
            kind = pycompat.bytestr(e.getAttribute(r'kind'))
            if kind != 'file':
                continue
            name = r''.join(c.data for c
                            in e.getElementsByTagName(r'name')[0].childNodes
                            if c.nodeType == c.TEXT_NODE)
            paths.append(name.encode('utf8'))
        return paths

    def filedata(self, name, decode):
        return self._svncommand(['cat'], name)[0]


class gitsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        super(gitsubrepo, self).__init__(ctx, path)
        self._state = state
        self._abspath = ctx.repo().wjoin(path)
        self._subparent = ctx.repo()
        self._ensuregit()

    def _ensuregit(self):
        try:
            self._gitexecutable = 'git'
            out, err = self._gitnodir(['--version'])
        except OSError as e:
            genericerror = _("error executing git for subrepo '%s': %s")
            notfoundhint = _("check git is installed and in your PATH")
            if e.errno != errno.ENOENT:
                raise error.Abort(genericerror % (
                    self._path, encoding.strtolocal(e.strerror)))
            elif pycompat.iswindows:
                try:
                    self._gitexecutable = 'git.cmd'
                    out, err = self._gitnodir(['--version'])
                except OSError as e2:
                    if e2.errno == errno.ENOENT:
                        raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
                                            " for subrepo '%s'") % self._path,
                                          hint=notfoundhint)
                    else:
                        raise error.Abort(genericerror % (self._path,
                                          encoding.strtolocal(e2.strerror)))
            else:
                raise error.Abort(_("couldn't find git for subrepo '%s'")
                                  % self._path, hint=notfoundhint)
        versionstatus = self._checkversion(out)
        if versionstatus == 'unknown':
            self.ui.warn(_('cannot retrieve git version\n'))
        elif versionstatus == 'abort':
            raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
        elif versionstatus == 'warning':
            self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))

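    # _gitversion parses `git --version` output into a comparable tuple;
    # e.g. b'git version 2.20.1' would yield (2, 20, 1) and
    # b'git version 1.9-rc0' would yield (1, 9, 0) (illustrative examples
    # derived from the regexes below). Unparseable output yields -1.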
    @staticmethod
    def _gitversion(out):
        m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
        if m:
            return (int(m.group(1)), int(m.group(2)), int(m.group(3)))

        m = re.search(br'^git version (\d+)\.(\d+)', out)
        if m:
            return (int(m.group(1)), int(m.group(2)), 0)

        return -1

    @staticmethod
    def _checkversion(out):
        '''ensure git version is new enough

        >>> _checkversion = gitsubrepo._checkversion
        >>> _checkversion(b'git version 1.6.0')
        'ok'
        >>> _checkversion(b'git version 1.8.5')
        'ok'
        >>> _checkversion(b'git version 1.4.0')
        'abort'
        >>> _checkversion(b'git version 1.5.0')
        'warning'
        >>> _checkversion(b'git version 1.9-rc0')
        'ok'
        >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
        'ok'
        >>> _checkversion(b'git version 1.9.0.GIT')
        'ok'
        >>> _checkversion(b'git version 12345')
        'unknown'
        >>> _checkversion(b'no')
        'unknown'
        '''
        version = gitsubrepo._gitversion(out)
        # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
        # despite the docstring comment. For now, error on 1.4.0, warn on
        # 1.5.0 but attempt to continue.
        if version == -1:
            return 'unknown'
        if version < (1, 5, 0):
            return 'abort'
        elif version < (1, 6, 0):
            return 'warning'
        return 'ok'

    def _gitcommand(self, commands, env=None, stream=False):
        return self._gitdir(commands, env=env, stream=stream)[0]

    def _gitdir(self, commands, env=None, stream=False):
        return self._gitnodir(commands, env=env, stream=stream,
                              cwd=self._abspath)

    def _gitnodir(self, commands, env=None, stream=False, cwd=None):
        """Calls the git command

        This method tries to call the git command. Versions prior to 1.6.0
        are not supported and will very probably fail.
        """
        self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
        if env is None:
            env = encoding.environ.copy()
        # disable localization for Git output (issue5176)
        env['LC_ALL'] = 'C'
        # fix for Git CVE-2015-7545
        if 'GIT_ALLOW_PROTOCOL' not in env:
            env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
        # unless ui.quiet is set, print git's stderr,
        # which is mostly progress and useful info
        errpipe = None
        if self.ui.quiet:
            errpipe = open(os.devnull, 'w')
        if self.ui._colormode and len(commands) and commands[0] == "diff":
            # insert the argument in the front,
            # the end of git diff arguments is used for paths
            commands.insert(1, '--color')
        p = subprocess.Popen(pycompat.rapply(procutil.tonativestr,
                                             [self._gitexecutable] + commands),
                             bufsize=-1,
                             cwd=pycompat.rapply(procutil.tonativestr, cwd),
                             env=procutil.tonativeenv(env),
                             close_fds=procutil.closefds,
                             stdout=subprocess.PIPE, stderr=errpipe)
        if stream:
            return p.stdout, None

        retdata = p.stdout.read().strip()
        # wait for the child to exit to avoid race condition.
        p.wait()

        if p.returncode != 0 and p.returncode != 1:
            # there are certain error codes that are ok
            command = commands[0]
            if command in ('cat-file', 'symbolic-ref'):
                return retdata, p.returncode
            # for all others, abort
            raise error.Abort(_('git %s error %d in %s') %
                              (command, p.returncode, self._relpath))

        return retdata, p.returncode

    def _gitmissing(self):
        return not self.wvfs.exists('.git')

    def _gitstate(self):
        return self._gitcommand(['rev-parse', 'HEAD'])

    def _gitcurrentbranch(self):
        current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
        if err:
            current = None
        return current

    def _gitremote(self, remote):
        out = self._gitcommand(['remote', 'show', '-n', remote])
        line = out.split('\n')[1]
        i = line.index('URL: ') + len('URL: ')
        return line[i:]

    def _githavelocally(self, revision):
        out, code = self._gitdir(['cat-file', '-e', revision])
        return code == 0

    def _gitisancestor(self, r1, r2):
        base = self._gitcommand(['merge-base', r1, r2])
        return base == r1

    def _gitisbare(self):
        return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'

    def _gitupdatestat(self):
        """This must be run before git diff-index.
        diff-index only looks at changes to file stat;
        this command looks at file contents and updates the stat."""
        self._gitcommand(['update-index', '-q', '--refresh'])

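    # _gitbranchmap consumes `git for-each-ref` output, one
    # '<objectname> <refname>' line per ref, keeping only local heads and
    # remote-tracking branches (illustrative line:
    # 'd670460b4b4aece5915caf5c68d12f560a9fe3e4 refs/heads/master').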
    def _gitbranchmap(self):
        '''returns 2 things:
        a map from git branch to revision
        a map from revision to branches'''
        branch2rev = {}
        rev2branch = {}

        out = self._gitcommand(['for-each-ref', '--format',
                                '%(objectname) %(refname)'])
        for line in out.split('\n'):
            revision, ref = line.split(' ')
            if (not ref.startswith('refs/heads/') and
                not ref.startswith('refs/remotes/')):
                continue
            if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
                continue # ignore remote/HEAD redirects
            branch2rev[ref] = revision
            rev2branch.setdefault(revision, []).append(ref)
        return branch2rev, rev2branch

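    # An entry in the map returned by _gittracking might look like
    # (illustrative): {'refs/remotes/origin/master': 'refs/heads/master'},
    # i.e. remote-tracking ref -> the local branch configured to track it.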
    def _gittracking(self, branches):
        'return map of remote branch to local tracking branch'
        # assumes no more than one local tracking branch for each remote
        tracking = {}
        for b in branches:
            if b.startswith('refs/remotes/'):
                continue
            bname = b.split('/', 2)[2]
            remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
            if remote:
                ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
                tracking['refs/remotes/%s/%s' %
                         (remote, ref.split('/', 2)[2])] = b
        return tracking

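    # scp-style sources such as 'user@host:path/to/repo' (illustrative) have
    # no '://' but are still absolute; _abssource detects them by a colon
    # that appears before any slash.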
    def _abssource(self, source):
        if '://' not in source:
            # recognize the scp syntax as an absolute source
            colon = source.find(':')
            if colon != -1 and '/' not in source[:colon]:
                return source
        self._subsource = source
        return _abssource(self)

    def _fetch(self, source, revision):
        if self._gitmissing():
            # SEC: check for safe ssh url
            util.checksafessh(source)

            source = self._abssource(source)
            self.ui.status(_('cloning subrepo %s from %s\n') %
                           (self._relpath, source))
            self._gitnodir(['clone', source, self._abspath])
        if self._githavelocally(revision):
            return
        self.ui.status(_('pulling subrepo %s from %s\n') %
                       (self._relpath, self._gitremote('origin')))
        # try only origin: the originally cloned repo
        self._gitcommand(['fetch'])
        if not self._githavelocally(revision):
            raise error.Abort(_('revision %s does not exist in subrepository '
                                '"%s"\n') % (revision, self._relpath))

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        if self._gitmissing():
            return self._state[1] != ''
        if self._gitisbare():
            return True
        if not ignoreupdate and self._state[1] != self._gitstate():
            # different version checked out
            return True
        # check for staged changes or modified files; ignore untracked files
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
        return code == 1

    def basestate(self):
        return self._gitstate()

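    # When updating, get() below prefers checking out a branch that points
    # at the target revision: refs/heads/master wins if present, then the
    # first local branch; with no matching branch the revision is checked
    # out as a detached HEAD.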
    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        source, revision, kind = state
        if not revision:
            self.remove()
            return
        self._fetch(source, revision)
        # if the repo was set to be bare, unbare it
        if self._gitisbare():
            self._gitcommand(['config', 'core.bare', 'false'])
            if self._gitstate() == revision:
                self._gitcommand(['reset', '--hard', 'HEAD'])
                return
        elif self._gitstate() == revision:
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # reset --hard will otherwise throw away files added for commit,
                # not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
            self._gitcommand(['reset', '--hard', 'HEAD'])
            return
        branch2rev, rev2branch = self._gitbranchmap()

        def checkout(args):
            cmd = ['checkout']
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # the -f option will otherwise throw away files added for
                # commit, not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
                cmd.append('-f')
            self._gitcommand(cmd + args)
            _sanitize(self.ui, self.wvfs, '.git')

        def rawcheckout():
            # no branch to checkout, check it out with no branch
            self.ui.warn(_('checking out detached HEAD in '
                           'subrepository "%s"\n') % self._relpath)
            self.ui.warn(_('check out a git branch if you intend '
                           'to make changes\n'))
            checkout(['-q', revision])

        if revision not in rev2branch:
            rawcheckout()
            return
        branches = rev2branch[revision]
        firstlocalbranch = None
        for b in branches:
            if b == 'refs/heads/master':
                # master trumps all other branches
                checkout(['refs/heads/master'])
                return
            if not firstlocalbranch and not b.startswith('refs/remotes/'):
                firstlocalbranch = b
        if firstlocalbranch:
            checkout([firstlocalbranch])
            return

        tracking = self._gittracking(branch2rev.keys())
        # choose a remote branch already tracked if possible
        remote = branches[0]
        if remote not in tracking:
            for b in branches:
                if b in tracking:
                    remote = b
                    break

        if remote not in tracking:
            # create a new local tracking branch
            local = remote.split('/', 3)[3]
            checkout(['-b', local, remote])
        elif self._gitisancestor(branch2rev[tracking[remote]], remote):
            # When updating to a tracked remote branch,
            # if the local tracking branch is downstream of it,
            # a normal `git pull` would have performed a "fast-forward merge"
            # which is equivalent to updating the local branch to the remote.
            # Since we are only looking at branching at update, we need to
            # detect this situation and perform this action lazily.
            if tracking[remote] != self._gitcurrentbranch():
                checkout([tracking[remote]])
            self._gitcommand(['merge', '--ff', remote])
            _sanitize(self.ui, self.wvfs, '.git')
        else:
            # a real merge would be required, just checkout the revision
            rawcheckout()
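    # Checkout preference above, in order: refs/heads/master, then the
    # first local branch at the revision, then a remote branch with a
    # local tracking branch (fast-forwarded when possible), and finally
    # a detached checkout of the bare revision.
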
    @annotatesubrepoerror
    def commit(self, text, user, date):
        if self._gitmissing():
            raise error.Abort(_("subrepo %s is missing") % self._relpath)
        cmd = ['commit', '-a', '-m', text]
        env = encoding.environ.copy()
        if user:
            cmd += ['--author', user]
        if date:
            # git's date parser silently ignores when seconds < 1e9
            # convert to ISO8601
            env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
                                                      '%Y-%m-%dT%H:%M:%S %1%2')
        self._gitcommand(cmd, env=env)
        # make sure commit works otherwise HEAD might not exist under certain
        # circumstances
        return self._gitstate()
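    # dateutil.datestr() expands '%1%2' to the timezone offset of the
    # date tuple, so e.g. (1500000000, 0) renders as
    # '2017-07-14T02:40:00 +0000' (illustrative value).
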
    @annotatesubrepoerror
    def merge(self, state):
        source, revision, kind = state
        self._fetch(source, revision)
        base = self._gitcommand(['merge-base', revision, self._state[1]])
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])

        def mergefunc():
            if base == revision:
                self.get(state) # fast forward merge
            elif base != self._state[1]:
                self._gitcommand(['merge', '--no-commit', revision])
                _sanitize(self.ui, self.wvfs, '.git')

        if self.dirty():
            if self._gitstate() != revision:
                dirty = self._gitstate() == self._state[1] or code != 0
                if _updateprompt(self.ui, self, dirty,
                                 self._state[1][:7], revision[:7]):
                    mergefunc()
        else:
            mergefunc()
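    # 'dirty' above is True when the subrepo still sits at the recorded
    # state or when diff-index reported local changes; _updateprompt
    # appears to use it only to pick the wording of the question.
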
    @annotatesubrepoerror
    def push(self, opts):
        force = opts.get('force')

        if not self._state[1]:
            return True
        if self._gitmissing():
            raise error.Abort(_("subrepo %s is missing") % self._relpath)
        # if a branch in origin contains the revision, nothing to do
        branch2rev, rev2branch = self._gitbranchmap()
        if self._state[1] in rev2branch:
            for b in rev2branch[self._state[1]]:
                if b.startswith('refs/remotes/origin/'):
                    return True
        for b, revision in branch2rev.iteritems():
            if b.startswith('refs/remotes/origin/'):
                if self._gitisancestor(self._state[1], revision):
                    return True
        # otherwise, try to push the currently checked out branch
        cmd = ['push']
        if force:
            cmd.append('--force')

        current = self._gitcurrentbranch()
        if current:
            # determine if the current branch is even useful
            if not self._gitisancestor(self._state[1], current):
                self.ui.warn(_('unrelated git branch checked out '
                               'in subrepository "%s"\n') % self._relpath)
                return False
            self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
                           (current.split('/', 2)[2], self._relpath))
            ret = self._gitdir(cmd + ['origin', current])
            return ret[1] == 0
        else:
            self.ui.warn(_('no branch checked out in subrepository "%s"\n'
                           'cannot push revision %s\n') %
                         (self._relpath, self._state[1]))
            return False
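    # The early returns above make pushing idempotent: if any
    # refs/remotes/origin/ branch already contains the recorded revision,
    # push reports success without contacting the remote at all.
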
    @annotatesubrepoerror
    def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
        if self._gitmissing():
            return []

        s = self.status(None, unknown=True, clean=True)

        tracked = set()
        # dirstates 'amn' warn, 'r' is added again
        for l in (s.modified, s.added, s.deleted, s.clean):
            tracked.update(l)

        # Unknown files not of interest will be rejected by the matcher
        files = s.unknown
        files.extend(match.files())

        rejected = []

        files = [f for f in sorted(set(files)) if match(f)]
        for f in files:
            exact = match.exact(f)
            command = ["add"]
            if exact:
                command.append("-f") # should be added, even if ignored
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % uipathfn(f))

            if f in tracked: # hg prints 'adding' even if already tracked
                if exact:
                    rejected.append(f)
                continue
            if not opts.get(r'dry_run'):
                self._gitcommand(command + [f])

        for f in rejected:
            ui.warn(_("%s already tracked!\n") % uipathfn(f))

        return rejected
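    # Exactly named files are added with 'git add -f' so that, as in hg,
    # explicitly adding a file wins over the subrepo's ignore rules.
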
    @annotatesubrepoerror
    def remove(self):
        if self._gitmissing():
            return
        if self.dirty():
            self.ui.warn(_('not removing repo %s because '
                           'it has changes.\n') % self._relpath)
            return
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_('removing subrepo %s\n') % self._relpath)
        self._gitcommand(['config', 'core.bare', 'true'])
        for f, kind in self.wvfs.readdir():
            if f == '.git':
                continue
            if kind == stat.S_IFDIR:
                self.wvfs.rmtree(f)
            else:
                self.wvfs.unlink(f)
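    # Flipping core.bare to true before emptying the working directory
    # keeps the .git directory (and any local-only commits) intact while
    # everything else is deleted.
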
    def archive(self, archiver, prefix, match=None, decode=True):
        total = 0
        source, revision = self._state
        if not revision:
            return total
        self._fetch(source, revision)

        # Parse git's native archive command.
        # This should be much faster than manually traversing the trees
        # and objects with many subprocess calls.
        tarstream = self._gitcommand(['archive', revision], stream=True)
        tar = tarfile.open(fileobj=tarstream, mode=r'r|')
        relpath = subrelpath(self)
        progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
                                        unit=_('files'))
        progress.update(0)
        for info in tar:
            if info.isdir():
                continue
            bname = pycompat.fsencode(info.name)
            if match and not match(bname):
                continue
            if info.issym():
                data = info.linkname
            else:
                data = tar.extractfile(info).read()
            archiver.addfile(prefix + bname, info.mode, info.issym(), data)
            total += 1
            progress.increment()
        progress.complete()
        return total
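    # tarfile mode r'r|' reads the tar as a non-seekable stream, which is
    # exactly what the pipe from 'git archive' provides.
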
    @annotatesubrepoerror
    def cat(self, match, fm, fntemplate, prefix, **opts):
        rev = self._state[1]
        if match.anypats():
            return 1 # No support for include/exclude yet

        if not match.files():
            return 1

        # TODO: add support for non-plain formatter (see cmdutil.cat())
        for f in match.files():
            output = self._gitcommand(["show", "%s:%s" % (rev, f)])
            fp = cmdutil.makefileobj(self._ctx, fntemplate,
                                     pathname=self.wvfs.reljoin(prefix, f))
            fp.write(output)
            fp.close()
        return 0

    @annotatesubrepoerror
    def status(self, rev2, **opts):
        rev1 = self._state[1]
        if self._gitmissing() or not rev1:
            # if the repo is missing, return no results
            return scmutil.status([], [], [], [], [], [], [])
        modified, added, removed = [], [], []
        self._gitupdatestat()
        if rev2:
            command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
        else:
            command = ['diff-index', '--no-renames', rev1]
        out = self._gitcommand(command)
        for line in out.split('\n'):
            tab = line.find('\t')
            if tab == -1:
                continue
            status, f = line[tab - 1:tab], line[tab + 1:]
            if status == 'M':
                modified.append(f)
            elif status == 'A':
                added.append(f)
            elif status == 'D':
                removed.append(f)

        deleted, unknown, ignored, clean = [], [], [], []

        command = ['status', '--porcelain', '-z']
        if opts.get(r'unknown'):
            command += ['--untracked-files=all']
        if opts.get(r'ignored'):
            command += ['--ignored']
        out = self._gitcommand(command)

        changedfiles = set()
        changedfiles.update(modified)
        changedfiles.update(added)
        changedfiles.update(removed)
        for line in out.split('\0'):
            if not line:
                continue
            st = line[0:2]
            # moves and copies show 2 files on one line
            if line.find('\0') >= 0:
                filename1, filename2 = line[3:].split('\0')
            else:
                filename1 = line[3:]
                filename2 = None

            changedfiles.add(filename1)
            if filename2:
                changedfiles.add(filename2)

            if st == '??':
                unknown.append(filename1)
            elif st == '!!':
                ignored.append(filename1)

        if opts.get(r'clean'):
            out = self._gitcommand(['ls-files'])
            for f in out.split('\n'):
                if f not in changedfiles:
                    clean.append(f)

        return scmutil.status(modified, added, removed, deleted,
                              unknown, ignored, clean)
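    # Porcelain entries look like 'XY path' ('?? f' untracked, '!! f'
    # ignored); renames and copies carry a second NUL-separated path,
    # which is why both filenames are folded into changedfiles above.
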
    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        node1 = self._state[1]
        cmd = ['diff', '--no-renames']
        if opts[r'stat']:
            cmd.append('--stat')
        else:
            # for Git, this also implies '-p'
            cmd.append('-U%d' % diffopts.context)

        if diffopts.noprefix:
            cmd.extend(['--src-prefix=%s/' % prefix,
                        '--dst-prefix=%s/' % prefix])
        else:
            cmd.extend(['--src-prefix=a/%s/' % prefix,
                        '--dst-prefix=b/%s/' % prefix])

        if diffopts.ignorews:
            cmd.append('--ignore-all-space')
        if diffopts.ignorewsamount:
            cmd.append('--ignore-space-change')
        if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
                and diffopts.ignoreblanklines:
            cmd.append('--ignore-blank-lines')

        cmd.append(node1)
        if node2:
            cmd.append(node2)

        output = ""
        if match.always():
            output += self._gitcommand(cmd) + '\n'
        else:
            st = self.status(node2)[:3]
            files = [f for sublist in st for f in sublist]
            for f in files:
                if match(f):
                    output += self._gitcommand(cmd + ['--', f]) + '\n'

        if output.strip():
            ui.write(output)
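    # git learned --ignore-blank-lines in 1.8.4, hence the version gate
    # above before appending that flag.
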
    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get(r'no_backup'):
            status = self.status(None)
            names = status.modified
            for name in names:
                # backuppath() expects a path relative to the parent repo (the
                # repo that ui.origbackuppath is relative to)
                parentname = os.path.join(self._path, name)
                bakname = scmutil.backuppath(self.ui, self._subparent,
                                             parentname)
                self.ui.note(_('saving current version of %s as %s\n') %
                             (name, os.path.relpath(bakname)))
                util.rename(self.wvfs.join(name), bakname)

        if not opts.get(r'dry_run'):
            self.get(substate, overwrite=True)
        return []

    def shortid(self, revid):
        return revid[:7]

types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
    }
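# 'types' above maps the subrepo kind recorded in .hgsub (e.g. an entry
# like 'sub = [git]url') to the class implementing it.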