##// END OF EJS Templates
hooks: fix hooks not firing if prechangegroup was set (issue4934)...
Durham Goode -
r26859:e7c618ce stable
parent child Browse files
Show More
@@ -1,948 +1,952
1 # changegroup.py - Mercurial changegroup manipulation functions
1 # changegroup.py - Mercurial changegroup manipulation functions
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import struct
11 import struct
12 import tempfile
12 import tempfile
13 import weakref
13 import weakref
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22
22
23 from . import (
23 from . import (
24 branchmap,
24 branchmap,
25 dagutil,
25 dagutil,
26 discovery,
26 discovery,
27 error,
27 error,
28 mdiff,
28 mdiff,
29 phases,
29 phases,
30 util,
30 util,
31 )
31 )
32
32
33 _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
33 _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
34 _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
34 _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
35
35
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available

    Unlike a bare stream.read(n), a short read is treated as a hard
    protocol error (truncated bundle / dropped connection) rather than
    being silently returned to the caller.
    '''
    s = stream.read(n)
    if len(s) < n:
        raise error.Abort(_("stream ended unexpectedly"
                            " (got %d bytes, expected %d)")
                          % (len(s), n))
    return s
44
44
def getchunk(stream):
    """return the next chunk from stream as a string

    A chunk is a 4-byte big-endian length (which includes the 4 header
    bytes themselves) followed by the payload. A length of 0 marks the
    end of a chunk group and yields an empty string; any other length
    <= 4 is malformed.
    """
    d = readexactly(stream, 4)
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        if l:
            raise error.Abort(_("invalid chunk length %d") % l)
        return ""
    return readexactly(stream, l - 4)
54
54
def chunkheader(length):
    """return a changegroup chunk header (string)

    The on-the-wire length includes the 4 bytes of the header itself,
    hence the +4 (the inverse of the decoding done in getchunk()).
    """
    return struct.pack(">l", length + 4)
58
58
def closechunk():
    """return a changegroup chunk header (string) for a zero-length chunk

    A zero length on the wire terminates the current chunk group.
    """
    return struct.pack(">l", 0)
62
62
def combineresults(results):
    """logic to combine 0 or more addchangegroup results into one

    Each individual result uses the addchangegroup convention:
    0 means nothing changed, 1 means heads unchanged, 1+n means n heads
    added, -1-n means n heads removed. The combined value follows the
    same convention, summing the head deltas across all results.
    """
    changedheads = 0
    result = 1
    for ret in results:
        # If any changegroup result is 0, return 0
        if ret == 0:
            result = 0
            break
        if ret < -1:
            changedheads += ret + 1
        elif ret > 1:
            changedheads += ret - 1
    if changedheads > 0:
        result = 1 + changedheads
    elif changedheads < 0:
        result = -1 + changedheads
    return result
81
81
# Map of bundle type name -> (on-disk header, compression engine name).
bundletypes = {
    "": ("", None), # only when using unbundle on ssh and old http servers
                    # since the unification ssh accepts a header but there
                    # is no capability signaling it.
    "HG20": (), # special-cased below
    "HG10UN": ("HG10UN", None),
    "HG10BZ": ("HG10", 'BZ'),
    "HG10GZ": ("HG10GZ", 'GZ'),
}

# hgweb uses this list to communicate its preferred type
bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
94
94
def writechunks(ui, chunks, filename, vfs=None):
    """Write chunks to a file and return its filename.

    The stream is assumed to be a bundle file.
    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.

    On any error during writing, the partially-written file is removed
    (the ``cleanup`` name is cleared only once all chunks are on disk).
    """
    fh = None
    cleanup = None
    try:
        if filename:
            if vfs:
                fh = vfs.open(filename, "wb")
            else:
                fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename
        for c in chunks:
            fh.write(c)
        # success: disarm the error-cleanup path
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            if filename and vfs:
                vfs.unlink(cleanup)
            else:
                os.unlink(cleanup)
126
126
def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """

    if bundletype == "HG20":
        # bundle2 handles its own framing and compression
        from . import bundle2
        bundle = bundle2.bundle20(ui)
        bundle.setcompression(compression)
        part = bundle.newpart('changegroup', data=cg.getchunks())
        part.addparam('version', cg.version)
        chunkiter = bundle.getchunks()
    else:
        # compression argument is only for the bundle2 case
        assert compression is None
        if cg.version != '01':
            raise error.Abort(_('old bundle types only supports v1 '
                                'changegroups'))
        header, comp = bundletypes[bundletype]
        if comp not in util.compressors:
            raise error.Abort(_('unknown stream compression type: %s')
                              % comp)
        z = util.compressors[comp]()
        subchunkiter = cg.getchunks()
        def chunkiter():
            yield header
            for chunk in subchunkiter:
                yield z.compress(chunk)
            yield z.flush()
        chunkiter = chunkiter()

    # parse the changegroup data, otherwise we will block
    # in case of sshrepo because we don't know the end of the stream

    # an empty chunkgroup is the end of the changegroup
    # a changegroup has at least 2 chunkgroups (changelog and manifest).
    # after that, an empty chunkgroup is the end of the changegroup
    return writechunks(ui, chunkiter, filename, vfs=vfs)
169
169
class cg1unpacker(object):
    """Unpacker for cg1 changegroup streams.

    A changegroup unpacker handles the framing of the revision data in
    the wire format. Most consumers will want to use the apply()
    method to add the changes from the changegroup to a repository.

    If you're forwarding a changegroup unmodified to another consumer,
    use getchunks(), which returns an iterator of changegroup
    chunks. This is mostly useful for cases where you need to know the
    data stream has ended by observing the end of the changegroup.

    deltachunk() is useful only if you're applying delta data. Most
    consumers should prefer apply() instead.

    A few other public methods exist. Those are used only for
    bundlerepo and some debug commands - their use is discouraged.
    """
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    version = '01'
    def __init__(self, fh, alg):
        if alg == 'UN':
            alg = None # get more modern without breaking too much
        if not alg in util.decompressors:
            raise error.Abort(_('unknown stream compression type: %s')
                              % alg)
        if alg == 'BZ':
            alg = '_truncatedBZ'
        self._stream = util.decompressors[alg](fh)
        self._type = alg
        # per-chunk progress callback, set by apply()/_unpackmanifests()
        self.callback = None

    # These methods (compressed, read, seek, tell) all appear to only
    # be used by bundlerepo, but it's a little hard to tell.
    def compressed(self):
        return self._type is not None
    def read(self, l):
        return self._stream.read(l)
    def seek(self, pos):
        return self._stream.seek(pos)
    def tell(self):
        return self._stream.tell()
    def close(self):
        return self._stream.close()

    def _chunklength(self):
        """read the next chunk header and return the payload length

        Returns 0 at the end of a chunk group; fires self.callback (if
        any) once per non-empty chunk for progress reporting.
        """
        d = readexactly(self._stream, 4)
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            if l:
                raise error.Abort(_("invalid chunk length %d") % l)
            return 0
        if self.callback:
            self.callback()
        return l - 4

    def changelogheader(self):
        """v10 does not have a changelog header chunk"""
        return {}

    def manifestheader(self):
        """v10 does not have a manifest header chunk"""
        return {}

    def filelogheader(self):
        """return the header of the filelogs chunk, v10 only has the filename"""
        l = self._chunklength()
        if not l:
            return {}
        fname = readexactly(self._stream, l)
        return {'filename': fname}

    def _deltaheader(self, headertuple, prevnode):
        # cg1 deltas are implicitly against p1, or against the previous
        # node in the stream when there is one
        node, p1, p2, cs = headertuple
        if prevnode is None:
            deltabase = p1
        else:
            deltabase = prevnode
        return node, p1, p2, deltabase, cs

    def deltachunk(self, prevnode):
        """read one delta chunk; return {} at the end of the group"""
        l = self._chunklength()
        if not l:
            return {}
        headerdata = readexactly(self._stream, self.deltaheadersize)
        header = struct.unpack(self.deltaheader, headerdata)
        delta = readexactly(self._stream, l - self.deltaheadersize)
        node, p1, p2, deltabase, cs = self._deltaheader(header, prevnode)
        return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs,
                'deltabase': deltabase, 'delta': delta}

    def getchunks(self):
        """returns all the chunks contains in the bundle

        Used when you need to forward the binary stream to a file or another
        network API. To do so, it parse the changegroup data, otherwise it will
        block in case of sshrepo because it don't know the end of the stream.
        """
        # an empty chunkgroup is the end of the changegroup
        # a changegroup has at least 2 chunkgroups (changelog and manifest).
        # after that, an empty chunkgroup is the end of the changegroup
        empty = False
        count = 0
        while not empty or count <= 2:
            empty = True
            count += 1
            while True:
                chunk = getchunk(self)
                if not chunk:
                    break
                empty = False
                yield chunkheader(len(chunk))
                pos = 0
                # re-yield large chunks in 1MB slices to bound memory use
                while pos < len(chunk):
                    next = pos + 2**20
                    yield chunk[pos:next]
                    pos = next
            yield closechunk()

    def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
        # We know that we'll never have more manifests than we had
        # changesets.
        self.callback = prog(_('manifests'), numchanges)
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifestheader()
        repo.manifest.addgroup(self, revmap, trp)
        repo.ui.progress(_('manifests'), None)

    def apply(self, repo, srctype, url, emptyok=False,
              targetphase=phases.draft, expectedtotal=None):
        """Add the changegroup returned by source.read() to this repo.
        srctype is a string like 'push', 'pull', or 'unbundle'. url is
        the URL of the repo where this changegroup is coming from.

        Return an integer summarizing the change to this repo:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        repo = repo.unfiltered()
        def csmap(x):
            repo.ui.debug("add changeset %s\n" % short(x))
            return len(cl)

        def revmap(x):
            return cl.rev(x)

        changesets = files = revisions = 0

        tr = repo.transaction("\n".join([srctype, util.hidepassword(url)]))
        # The transaction could have been created before and already
        # carries source information. In this case we use the top
        # level data. We overwrite the argument because we need to use
        # the top level value (if they exist) in this function.
        srctype = tr.hookargs.setdefault('source', srctype)
        url = tr.hookargs.setdefault('url', url)

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = repo.changelog
        cl.delayupdate(tr)
        oldheads = cl.heads()
        try:
            repo.hook('prechangegroup', throw=True, **tr.hookargs)

            trp = weakref.proxy(tr)
            # pull off the changeset group
            repo.ui.status(_("adding changesets\n"))
            clstart = len(cl)
            class prog(object):
                # simple callable progress reporter; one tick per chunk
                def __init__(self, step, total):
                    self._step = step
                    self._total = total
                    self._count = 1
                def __call__(self):
                    repo.ui.progress(self._step, self._count, unit=_('chunks'),
                                     total=self._total)
                    self._count += 1
            self.callback = prog(_('changesets'), expectedtotal)

            efiles = set()
            def onchangelog(cl, node):
                # collect the set of files touched, to size file progress
                efiles.update(cl.read(node)[3])

            self.changelogheader()
            srccontent = cl.addgroup(self, csmap, trp,
                                     addrevisioncb=onchangelog)
            efiles = len(efiles)

            if not (srccontent or emptyok):
                raise error.Abort(_("received changelog group is empty"))
            clend = len(cl)
            changesets = clend - clstart
            repo.ui.progress(_('changesets'), None)

            # pull off the manifest group
            repo.ui.status(_("adding manifests\n"))
            self._unpackmanifests(repo, revmap, trp, prog, changesets)

            needfiles = {}
            if repo.ui.configbool('server', 'validate', default=False):
                # validate incoming csets have their manifests
                for cset in xrange(clstart, clend):
                    mfnode = repo.changelog.read(repo.changelog.node(cset))[0]
                    mfest = repo.manifest.readdelta(mfnode)
                    # store file nodes we must see
                    for f, n in mfest.iteritems():
                        needfiles.setdefault(f, set()).add(n)

            # process the files
            repo.ui.status(_("adding file changes\n"))
            self.callback = None
            pr = prog(_('files'), efiles)
            newrevs, newfiles = _addchangegroupfiles(
                repo, self, revmap, trp, pr, needfiles)
            revisions += newrevs
            files += newfiles

            dh = 0
            if oldheads:
                heads = cl.heads()
                dh = len(heads) - len(oldheads)
                for h in heads:
                    if h not in oldheads and repo[h].closesbranch():
                        dh -= 1
            htext = ""
            if dh:
                htext = _(" (%+d heads)") % dh

            repo.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                           % (changesets, revisions, files, htext))
            repo.invalidatevolatilesets()

            # Call delayupdate again to ensure the transaction writepending
            # subscriptions are still in place.
            cl.delayupdate(tr)

            if changesets > 0:
                if 'node' not in tr.hookargs:
                    tr.hookargs['node'] = hex(cl.node(clstart))
                    hookargs = dict(tr.hookargs)
                else:
                    hookargs = dict(tr.hookargs)
                    hookargs['node'] = hex(cl.node(clstart))
                repo.hook('pretxnchangegroup', throw=True, **hookargs)

            added = [cl.node(r) for r in xrange(clstart, clend)]
            publishing = repo.publishing()
            if srctype in ('push', 'serve'):
                # Old servers can not push the boundary themselves.
                # New servers won't push the boundary if changeset already
                # exists locally as secret
                #
                # We should not use added here but the list of all change in
                # the bundle
                if publishing:
                    phases.advanceboundary(repo, tr, phases.public, srccontent)
                else:
                    # Those changesets have been pushed from the outside, their
                    # phases are going to be pushed alongside. Therefor
                    # `targetphase` is ignored.
                    phases.advanceboundary(repo, tr, phases.draft, srccontent)
                    phases.retractboundary(repo, tr, phases.draft, added)
            elif srctype != 'strip':
                # publishing only alter behavior during push
                #
                # strip should not touch boundary at all
                phases.retractboundary(repo, tr, targetphase, added)

            if changesets > 0:
                if srctype != 'strip':
                    # During strip, branchcache is invalid but coming call to
                    # `destroyed` will repair it.
                    # In other case we can safely update cache on disk.
                    branchmap.updatecache(repo.filtered('served'))

                def runhooks():
                    # These hooks run when the lock releases, not when the
                    # transaction closes. So it's possible for the changelog
                    # to have changed since we last saw it.
                    if clstart >= len(repo):
                        return

                    # forcefully update the on-disk branch cache
                    repo.ui.debug("updating the branch cache\n")
                    repo.hook("changegroup", **hookargs)

                    for n in added:
                        args = hookargs.copy()
                        args['node'] = hex(n)
                        repo.hook("incoming", **args)

                    newheads = [h for h in repo.heads() if h not in oldheads]
                    repo.ui.log("incoming",
                                "%s incoming changes - new heads: %s\n",
                                len(added),
                                ', '.join([hex(c[:6]) for c in newheads]))

                tr.addpostclose('changegroup-runhooks-%020i' % clstart,
                                lambda tr: repo._afterlock(runhooks))

            tr.close()

        finally:
            tr.release()
            repo.ui.flush()
        # never return 0 here:
        if dh < 0:
            return dh - 1
        else:
            return dh + 1
class cg2unpacker(cg1unpacker):
    """Unpacker for cg2 streams.

    cg2 streams add support for generaldelta, so the delta header
    format is slightly different. All other features about the data
    remain the same.
    """
    deltaheader = _CHANGEGROUPV2_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    version = '02'

    def _deltaheader(self, headertuple, prevnode):
        # cg2 carries the delta base explicitly, so prevnode is unused
        node, p1, p2, deltabase, cs = headertuple
        return node, p1, p2, deltabase, cs
498
502
class headerlessfixup(object):
    """File-like wrapper that re-prepends already-consumed header bytes.

    Used when the bundle header has been read off the stream to sniff
    its type: reads are satisfied from the saved header ``h`` first,
    then fall through to the underlying stream ``fh``.
    """
    def __init__(self, fh, h):
        self._h = h
        self._fh = fh
    def read(self, n):
        if self._h:
            d, self._h = self._h[:n], self._h[n:]
            if len(d) < n:
                # header exhausted mid-read; top up from the real stream
                d += readexactly(self._fh, n - len(d))
            return d
        return readexactly(self._fh, n)
510
514
511 class cg1packer(object):
515 class cg1packer(object):
512 deltaheader = _CHANGEGROUPV1_DELTA_HEADER
516 deltaheader = _CHANGEGROUPV1_DELTA_HEADER
513 version = '01'
517 version = '01'
514 def __init__(self, repo, bundlecaps=None):
518 def __init__(self, repo, bundlecaps=None):
515 """Given a source repo, construct a bundler.
519 """Given a source repo, construct a bundler.
516
520
517 bundlecaps is optional and can be used to specify the set of
521 bundlecaps is optional and can be used to specify the set of
518 capabilities which can be used to build the bundle.
522 capabilities which can be used to build the bundle.
519 """
523 """
520 # Set of capabilities we can use to build the bundle.
524 # Set of capabilities we can use to build the bundle.
521 if bundlecaps is None:
525 if bundlecaps is None:
522 bundlecaps = set()
526 bundlecaps = set()
523 self._bundlecaps = bundlecaps
527 self._bundlecaps = bundlecaps
524 # experimental config: bundle.reorder
528 # experimental config: bundle.reorder
525 reorder = repo.ui.config('bundle', 'reorder', 'auto')
529 reorder = repo.ui.config('bundle', 'reorder', 'auto')
526 if reorder == 'auto':
530 if reorder == 'auto':
527 reorder = None
531 reorder = None
528 else:
532 else:
529 reorder = util.parsebool(reorder)
533 reorder = util.parsebool(reorder)
530 self._repo = repo
534 self._repo = repo
531 self._reorder = reorder
535 self._reorder = reorder
532 self._progress = repo.ui.progress
536 self._progress = repo.ui.progress
533 if self._repo.ui.verbose and not self._repo.ui.debugflag:
537 if self._repo.ui.verbose and not self._repo.ui.debugflag:
534 self._verbosenote = self._repo.ui.note
538 self._verbosenote = self._repo.ui.note
535 else:
539 else:
536 self._verbosenote = lambda s: None
540 self._verbosenote = lambda s: None
537
541
538 def close(self):
542 def close(self):
539 return closechunk()
543 return closechunk()
540
544
541 def fileheader(self, fname):
545 def fileheader(self, fname):
542 return chunkheader(len(fname)) + fname
546 return chunkheader(len(fname)) + fname
543
547
544 def group(self, nodelist, revlog, lookup, units=None):
548 def group(self, nodelist, revlog, lookup, units=None):
545 """Calculate a delta group, yielding a sequence of changegroup chunks
549 """Calculate a delta group, yielding a sequence of changegroup chunks
546 (strings).
550 (strings).
547
551
548 Given a list of changeset revs, return a set of deltas and
552 Given a list of changeset revs, return a set of deltas and
549 metadata corresponding to nodes. The first delta is
553 metadata corresponding to nodes. The first delta is
550 first parent(nodelist[0]) -> nodelist[0], the receiver is
554 first parent(nodelist[0]) -> nodelist[0], the receiver is
551 guaranteed to have this parent as it has all history before
555 guaranteed to have this parent as it has all history before
552 these changesets. In the case firstparent is nullrev the
556 these changesets. In the case firstparent is nullrev the
553 changegroup starts with a full revision.
557 changegroup starts with a full revision.
554
558
555 If units is not None, progress detail will be generated, units specifies
559 If units is not None, progress detail will be generated, units specifies
556 the type of revlog that is touched (changelog, manifest, etc.).
560 the type of revlog that is touched (changelog, manifest, etc.).
557 """
561 """
558 # if we don't have any revisions touched by these changesets, bail
562 # if we don't have any revisions touched by these changesets, bail
559 if len(nodelist) == 0:
563 if len(nodelist) == 0:
560 yield self.close()
564 yield self.close()
561 return
565 return
562
566
563 # for generaldelta revlogs, we linearize the revs; this will both be
567 # for generaldelta revlogs, we linearize the revs; this will both be
564 # much quicker and generate a much smaller bundle
568 # much quicker and generate a much smaller bundle
565 if (revlog._generaldelta and self._reorder is None) or self._reorder:
569 if (revlog._generaldelta and self._reorder is None) or self._reorder:
566 dag = dagutil.revlogdag(revlog)
570 dag = dagutil.revlogdag(revlog)
567 revs = set(revlog.rev(n) for n in nodelist)
571 revs = set(revlog.rev(n) for n in nodelist)
568 revs = dag.linearize(revs)
572 revs = dag.linearize(revs)
569 else:
573 else:
570 revs = sorted([revlog.rev(n) for n in nodelist])
574 revs = sorted([revlog.rev(n) for n in nodelist])
571
575
572 # add the parent of the first rev
576 # add the parent of the first rev
573 p = revlog.parentrevs(revs[0])[0]
577 p = revlog.parentrevs(revs[0])[0]
574 revs.insert(0, p)
578 revs.insert(0, p)
575
579
576 # build deltas
580 # build deltas
577 total = len(revs) - 1
581 total = len(revs) - 1
578 msgbundling = _('bundling')
582 msgbundling = _('bundling')
579 for r in xrange(len(revs) - 1):
583 for r in xrange(len(revs) - 1):
580 if units is not None:
584 if units is not None:
581 self._progress(msgbundling, r + 1, unit=units, total=total)
585 self._progress(msgbundling, r + 1, unit=units, total=total)
582 prev, curr = revs[r], revs[r + 1]
586 prev, curr = revs[r], revs[r + 1]
583 linknode = lookup(revlog.node(curr))
587 linknode = lookup(revlog.node(curr))
584 for c in self.revchunk(revlog, curr, prev, linknode):
588 for c in self.revchunk(revlog, curr, prev, linknode):
585 yield c
589 yield c
586
590
587 if units is not None:
591 if units is not None:
588 self._progress(msgbundling, None)
592 self._progress(msgbundling, None)
589 yield self.close()
593 yield self.close()
590
594
591 # filter any nodes that claim to be part of the known set
595 # filter any nodes that claim to be part of the known set
592 def prune(self, revlog, missing, commonrevs):
596 def prune(self, revlog, missing, commonrevs):
593 rr, rl = revlog.rev, revlog.linkrev
597 rr, rl = revlog.rev, revlog.linkrev
594 return [n for n in missing if rl(rr(n)) not in commonrevs]
598 return [n for n in missing if rl(rr(n)) not in commonrevs]
595
599
596 def _packmanifests(self, mfnodes, lookuplinknode):
600 def _packmanifests(self, mfnodes, lookuplinknode):
597 """Pack flat manifests into a changegroup stream."""
601 """Pack flat manifests into a changegroup stream."""
598 ml = self._repo.manifest
602 ml = self._repo.manifest
599 size = 0
603 size = 0
600 for chunk in self.group(
604 for chunk in self.group(
601 mfnodes, ml, lookuplinknode, units=_('manifests')):
605 mfnodes, ml, lookuplinknode, units=_('manifests')):
602 size += len(chunk)
606 size += len(chunk)
603 yield chunk
607 yield chunk
604 self._verbosenote(_('%8.i (manifests)\n') % size)
608 self._verbosenote(_('%8.i (manifests)\n') % size)
605
609
606 def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
610 def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
607 '''yield a sequence of changegroup chunks (strings)'''
611 '''yield a sequence of changegroup chunks (strings)'''
608 repo = self._repo
612 repo = self._repo
609 cl = repo.changelog
613 cl = repo.changelog
610 ml = repo.manifest
614 ml = repo.manifest
611
615
612 clrevorder = {}
616 clrevorder = {}
613 mfs = {} # needed manifests
617 mfs = {} # needed manifests
614 fnodes = {} # needed file nodes
618 fnodes = {} # needed file nodes
615 changedfiles = set()
619 changedfiles = set()
616
620
617 # Callback for the changelog, used to collect changed files and manifest
621 # Callback for the changelog, used to collect changed files and manifest
618 # nodes.
622 # nodes.
619 # Returns the linkrev node (identity in the changelog case).
623 # Returns the linkrev node (identity in the changelog case).
620 def lookupcl(x):
624 def lookupcl(x):
621 c = cl.read(x)
625 c = cl.read(x)
622 clrevorder[x] = len(clrevorder)
626 clrevorder[x] = len(clrevorder)
623 changedfiles.update(c[3])
627 changedfiles.update(c[3])
624 # record the first changeset introducing this manifest version
628 # record the first changeset introducing this manifest version
625 mfs.setdefault(c[0], x)
629 mfs.setdefault(c[0], x)
626 return x
630 return x
627
631
628 self._verbosenote(_('uncompressed size of bundle content:\n'))
632 self._verbosenote(_('uncompressed size of bundle content:\n'))
629 size = 0
633 size = 0
630 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets')):
634 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets')):
631 size += len(chunk)
635 size += len(chunk)
632 yield chunk
636 yield chunk
633 self._verbosenote(_('%8.i (changelog)\n') % size)
637 self._verbosenote(_('%8.i (changelog)\n') % size)
634
638
635 # We need to make sure that the linkrev in the changegroup refers to
639 # We need to make sure that the linkrev in the changegroup refers to
636 # the first changeset that introduced the manifest or file revision.
640 # the first changeset that introduced the manifest or file revision.
637 # The fastpath is usually safer than the slowpath, because the filelogs
641 # The fastpath is usually safer than the slowpath, because the filelogs
638 # are walked in revlog order.
642 # are walked in revlog order.
639 #
643 #
640 # When taking the slowpath with reorder=None and the manifest revlog
644 # When taking the slowpath with reorder=None and the manifest revlog
641 # uses generaldelta, the manifest may be walked in the "wrong" order.
645 # uses generaldelta, the manifest may be walked in the "wrong" order.
642 # Without 'clrevorder', we would get an incorrect linkrev (see fix in
646 # Without 'clrevorder', we would get an incorrect linkrev (see fix in
643 # cc0ff93d0c0c).
647 # cc0ff93d0c0c).
644 #
648 #
645 # When taking the fastpath, we are only vulnerable to reordering
649 # When taking the fastpath, we are only vulnerable to reordering
646 # of the changelog itself. The changelog never uses generaldelta, so
650 # of the changelog itself. The changelog never uses generaldelta, so
647 # it is only reordered when reorder=True. To handle this case, we
651 # it is only reordered when reorder=True. To handle this case, we
648 # simply take the slowpath, which already has the 'clrevorder' logic.
652 # simply take the slowpath, which already has the 'clrevorder' logic.
649 # This was also fixed in cc0ff93d0c0c.
653 # This was also fixed in cc0ff93d0c0c.
650 fastpathlinkrev = fastpathlinkrev and not self._reorder
654 fastpathlinkrev = fastpathlinkrev and not self._reorder
651 # Callback for the manifest, used to collect linkrevs for filelog
655 # Callback for the manifest, used to collect linkrevs for filelog
652 # revisions.
656 # revisions.
653 # Returns the linkrev node (collected in lookupcl).
657 # Returns the linkrev node (collected in lookupcl).
654 def lookupmflinknode(x):
658 def lookupmflinknode(x):
655 clnode = mfs[x]
659 clnode = mfs[x]
656 if not fastpathlinkrev:
660 if not fastpathlinkrev:
657 mdata = ml.readfast(x)
661 mdata = ml.readfast(x)
658 for f, n in mdata.iteritems():
662 for f, n in mdata.iteritems():
659 if f in changedfiles:
663 if f in changedfiles:
660 # record the first changeset introducing this filelog
664 # record the first changeset introducing this filelog
661 # version
665 # version
662 fclnodes = fnodes.setdefault(f, {})
666 fclnodes = fnodes.setdefault(f, {})
663 fclnode = fclnodes.setdefault(n, clnode)
667 fclnode = fclnodes.setdefault(n, clnode)
664 if clrevorder[clnode] < clrevorder[fclnode]:
668 if clrevorder[clnode] < clrevorder[fclnode]:
665 fclnodes[n] = clnode
669 fclnodes[n] = clnode
666 return clnode
670 return clnode
667
671
668 mfnodes = self.prune(ml, mfs, commonrevs)
672 mfnodes = self.prune(ml, mfs, commonrevs)
669 for x in self._packmanifests(mfnodes, lookupmflinknode):
673 for x in self._packmanifests(mfnodes, lookupmflinknode):
670 yield x
674 yield x
671
675
672 mfs.clear()
676 mfs.clear()
673 clrevs = set(cl.rev(x) for x in clnodes)
677 clrevs = set(cl.rev(x) for x in clnodes)
674
678
675 def linknodes(filerevlog, fname):
679 def linknodes(filerevlog, fname):
676 if fastpathlinkrev:
680 if fastpathlinkrev:
677 llr = filerevlog.linkrev
681 llr = filerevlog.linkrev
678 def genfilenodes():
682 def genfilenodes():
679 for r in filerevlog:
683 for r in filerevlog:
680 linkrev = llr(r)
684 linkrev = llr(r)
681 if linkrev in clrevs:
685 if linkrev in clrevs:
682 yield filerevlog.node(r), cl.node(linkrev)
686 yield filerevlog.node(r), cl.node(linkrev)
683 return dict(genfilenodes())
687 return dict(genfilenodes())
684 return fnodes.get(fname, {})
688 return fnodes.get(fname, {})
685
689
686 for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
690 for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
687 source):
691 source):
688 yield chunk
692 yield chunk
689
693
690 yield self.close()
694 yield self.close()
691
695
692 if clnodes:
696 if clnodes:
693 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
697 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
694
698
695 # The 'source' parameter is useful for extensions
699 # The 'source' parameter is useful for extensions
696 def generatefiles(self, changedfiles, linknodes, commonrevs, source):
700 def generatefiles(self, changedfiles, linknodes, commonrevs, source):
697 repo = self._repo
701 repo = self._repo
698 progress = self._progress
702 progress = self._progress
699 msgbundling = _('bundling')
703 msgbundling = _('bundling')
700
704
701 total = len(changedfiles)
705 total = len(changedfiles)
702 # for progress output
706 # for progress output
703 msgfiles = _('files')
707 msgfiles = _('files')
704 for i, fname in enumerate(sorted(changedfiles)):
708 for i, fname in enumerate(sorted(changedfiles)):
705 filerevlog = repo.file(fname)
709 filerevlog = repo.file(fname)
706 if not filerevlog:
710 if not filerevlog:
707 raise error.Abort(_("empty or missing revlog for %s") % fname)
711 raise error.Abort(_("empty or missing revlog for %s") % fname)
708
712
709 linkrevnodes = linknodes(filerevlog, fname)
713 linkrevnodes = linknodes(filerevlog, fname)
710 # Lookup for filenodes, we collected the linkrev nodes above in the
714 # Lookup for filenodes, we collected the linkrev nodes above in the
711 # fastpath case and with lookupmf in the slowpath case.
715 # fastpath case and with lookupmf in the slowpath case.
712 def lookupfilelog(x):
716 def lookupfilelog(x):
713 return linkrevnodes[x]
717 return linkrevnodes[x]
714
718
715 filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
719 filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
716 if filenodes:
720 if filenodes:
717 progress(msgbundling, i + 1, item=fname, unit=msgfiles,
721 progress(msgbundling, i + 1, item=fname, unit=msgfiles,
718 total=total)
722 total=total)
719 h = self.fileheader(fname)
723 h = self.fileheader(fname)
720 size = len(h)
724 size = len(h)
721 yield h
725 yield h
722 for chunk in self.group(filenodes, filerevlog, lookupfilelog):
726 for chunk in self.group(filenodes, filerevlog, lookupfilelog):
723 size += len(chunk)
727 size += len(chunk)
724 yield chunk
728 yield chunk
725 self._verbosenote(_('%8.i %s\n') % (size, fname))
729 self._verbosenote(_('%8.i %s\n') % (size, fname))
726 progress(msgbundling, None)
730 progress(msgbundling, None)
727
731
728 def deltaparent(self, revlog, rev, p1, p2, prev):
732 def deltaparent(self, revlog, rev, p1, p2, prev):
729 return prev
733 return prev
730
734
731 def revchunk(self, revlog, rev, prev, linknode):
735 def revchunk(self, revlog, rev, prev, linknode):
732 node = revlog.node(rev)
736 node = revlog.node(rev)
733 p1, p2 = revlog.parentrevs(rev)
737 p1, p2 = revlog.parentrevs(rev)
734 base = self.deltaparent(revlog, rev, p1, p2, prev)
738 base = self.deltaparent(revlog, rev, p1, p2, prev)
735
739
736 prefix = ''
740 prefix = ''
737 if revlog.iscensored(base) or revlog.iscensored(rev):
741 if revlog.iscensored(base) or revlog.iscensored(rev):
738 try:
742 try:
739 delta = revlog.revision(node)
743 delta = revlog.revision(node)
740 except error.CensoredNodeError as e:
744 except error.CensoredNodeError as e:
741 delta = e.tombstone
745 delta = e.tombstone
742 if base == nullrev:
746 if base == nullrev:
743 prefix = mdiff.trivialdiffheader(len(delta))
747 prefix = mdiff.trivialdiffheader(len(delta))
744 else:
748 else:
745 baselen = revlog.rawsize(base)
749 baselen = revlog.rawsize(base)
746 prefix = mdiff.replacediffheader(baselen, len(delta))
750 prefix = mdiff.replacediffheader(baselen, len(delta))
747 elif base == nullrev:
751 elif base == nullrev:
748 delta = revlog.revision(node)
752 delta = revlog.revision(node)
749 prefix = mdiff.trivialdiffheader(len(delta))
753 prefix = mdiff.trivialdiffheader(len(delta))
750 else:
754 else:
751 delta = revlog.revdiff(base, rev)
755 delta = revlog.revdiff(base, rev)
752 p1n, p2n = revlog.parents(node)
756 p1n, p2n = revlog.parents(node)
753 basenode = revlog.node(base)
757 basenode = revlog.node(base)
754 meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode)
758 meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode)
755 meta += prefix
759 meta += prefix
756 l = len(meta) + len(delta)
760 l = len(meta) + len(delta)
757 yield chunkheader(l)
761 yield chunkheader(l)
758 yield meta
762 yield meta
759 yield delta
763 yield delta
760 def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
764 def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
761 # do nothing with basenode, it is implicitly the previous one in HG10
765 # do nothing with basenode, it is implicitly the previous one in HG10
762 return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
766 return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
763
767
764 class cg2packer(cg1packer):
768 class cg2packer(cg1packer):
765 version = '02'
769 version = '02'
766 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
770 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
767
771
768 def __init__(self, repo, bundlecaps=None):
772 def __init__(self, repo, bundlecaps=None):
769 super(cg2packer, self).__init__(repo, bundlecaps)
773 super(cg2packer, self).__init__(repo, bundlecaps)
770 if self._reorder is None:
774 if self._reorder is None:
771 # Since generaldelta is directly supported by cg2, reordering
775 # Since generaldelta is directly supported by cg2, reordering
772 # generally doesn't help, so we disable it by default (treating
776 # generally doesn't help, so we disable it by default (treating
773 # bundle.reorder=auto just like bundle.reorder=False).
777 # bundle.reorder=auto just like bundle.reorder=False).
774 self._reorder = False
778 self._reorder = False
775
779
776 def deltaparent(self, revlog, rev, p1, p2, prev):
780 def deltaparent(self, revlog, rev, p1, p2, prev):
777 dp = revlog.deltaparent(rev)
781 dp = revlog.deltaparent(rev)
778 # avoid storing full revisions; pick prev in those cases
782 # avoid storing full revisions; pick prev in those cases
779 # also pick prev when we can't be sure remote has dp
783 # also pick prev when we can't be sure remote has dp
780 if dp == nullrev or (dp != p1 and dp != p2 and dp != prev):
784 if dp == nullrev or (dp != p1 and dp != p2 and dp != prev):
781 return prev
785 return prev
782 return dp
786 return dp
783
787
784 def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
788 def builddeltaheader(self, node, p1n, p2n, basenode, linknode):
785 return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
789 return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
786
790
787 packermap = {'01': (cg1packer, cg1unpacker),
791 packermap = {'01': (cg1packer, cg1unpacker),
788 # cg2 adds support for exchanging generaldelta
792 # cg2 adds support for exchanging generaldelta
789 '02': (cg2packer, cg2unpacker),
793 '02': (cg2packer, cg2unpacker),
790 }
794 }
791
795
792 def _changegroupinfo(repo, nodes, source):
796 def _changegroupinfo(repo, nodes, source):
793 if repo.ui.verbose or source == 'bundle':
797 if repo.ui.verbose or source == 'bundle':
794 repo.ui.status(_("%d changesets found\n") % len(nodes))
798 repo.ui.status(_("%d changesets found\n") % len(nodes))
795 if repo.ui.debugflag:
799 if repo.ui.debugflag:
796 repo.ui.debug("list of changesets:\n")
800 repo.ui.debug("list of changesets:\n")
797 for node in nodes:
801 for node in nodes:
798 repo.ui.debug("%s\n" % hex(node))
802 repo.ui.debug("%s\n" % hex(node))
799
803
800 def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
804 def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
801 repo = repo.unfiltered()
805 repo = repo.unfiltered()
802 commonrevs = outgoing.common
806 commonrevs = outgoing.common
803 csets = outgoing.missing
807 csets = outgoing.missing
804 heads = outgoing.missingheads
808 heads = outgoing.missingheads
805 # We go through the fast path if we get told to, or if all (unfiltered
809 # We go through the fast path if we get told to, or if all (unfiltered
806 # heads have been requested (since we then know there all linkrevs will
810 # heads have been requested (since we then know there all linkrevs will
807 # be pulled by the client).
811 # be pulled by the client).
808 heads.sort()
812 heads.sort()
809 fastpathlinkrev = fastpath or (
813 fastpathlinkrev = fastpath or (
810 repo.filtername is None and heads == sorted(repo.heads()))
814 repo.filtername is None and heads == sorted(repo.heads()))
811
815
812 repo.hook('preoutgoing', throw=True, source=source)
816 repo.hook('preoutgoing', throw=True, source=source)
813 _changegroupinfo(repo, csets, source)
817 _changegroupinfo(repo, csets, source)
814 return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
818 return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
815
819
816 def getsubset(repo, outgoing, bundler, source, fastpath=False):
820 def getsubset(repo, outgoing, bundler, source, fastpath=False):
817 gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
821 gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
818 return packermap[bundler.version][1](util.chunkbuffer(gengroup), None)
822 return packermap[bundler.version][1](util.chunkbuffer(gengroup), None)
819
823
820 def changegroupsubset(repo, roots, heads, source, version='01'):
824 def changegroupsubset(repo, roots, heads, source, version='01'):
821 """Compute a changegroup consisting of all the nodes that are
825 """Compute a changegroup consisting of all the nodes that are
822 descendants of any of the roots and ancestors of any of the heads.
826 descendants of any of the roots and ancestors of any of the heads.
823 Return a chunkbuffer object whose read() method will return
827 Return a chunkbuffer object whose read() method will return
824 successive changegroup chunks.
828 successive changegroup chunks.
825
829
826 It is fairly complex as determining which filenodes and which
830 It is fairly complex as determining which filenodes and which
827 manifest nodes need to be included for the changeset to be complete
831 manifest nodes need to be included for the changeset to be complete
828 is non-trivial.
832 is non-trivial.
829
833
830 Another wrinkle is doing the reverse, figuring out which changeset in
834 Another wrinkle is doing the reverse, figuring out which changeset in
831 the changegroup a particular filenode or manifestnode belongs to.
835 the changegroup a particular filenode or manifestnode belongs to.
832 """
836 """
833 cl = repo.changelog
837 cl = repo.changelog
834 if not roots:
838 if not roots:
835 roots = [nullid]
839 roots = [nullid]
836 discbases = []
840 discbases = []
837 for n in roots:
841 for n in roots:
838 discbases.extend([p for p in cl.parents(n) if p != nullid])
842 discbases.extend([p for p in cl.parents(n) if p != nullid])
839 # TODO: remove call to nodesbetween.
843 # TODO: remove call to nodesbetween.
840 csets, roots, heads = cl.nodesbetween(roots, heads)
844 csets, roots, heads = cl.nodesbetween(roots, heads)
841 included = set(csets)
845 included = set(csets)
842 discbases = [n for n in discbases if n not in included]
846 discbases = [n for n in discbases if n not in included]
843 outgoing = discovery.outgoing(cl, discbases, heads)
847 outgoing = discovery.outgoing(cl, discbases, heads)
844 bundler = packermap[version][0](repo)
848 bundler = packermap[version][0](repo)
845 return getsubset(repo, outgoing, bundler, source)
849 return getsubset(repo, outgoing, bundler, source)
846
850
847 def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
851 def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
848 version='01'):
852 version='01'):
849 """Like getbundle, but taking a discovery.outgoing as an argument.
853 """Like getbundle, but taking a discovery.outgoing as an argument.
850
854
851 This is only implemented for local repos and reuses potentially
855 This is only implemented for local repos and reuses potentially
852 precomputed sets in outgoing. Returns a raw changegroup generator."""
856 precomputed sets in outgoing. Returns a raw changegroup generator."""
853 if not outgoing.missing:
857 if not outgoing.missing:
854 return None
858 return None
855 bundler = packermap[version][0](repo, bundlecaps)
859 bundler = packermap[version][0](repo, bundlecaps)
856 return getsubsetraw(repo, outgoing, bundler, source)
860 return getsubsetraw(repo, outgoing, bundler, source)
857
861
858 def getlocalchangegroup(repo, source, outgoing, bundlecaps=None,
862 def getlocalchangegroup(repo, source, outgoing, bundlecaps=None,
859 version='01'):
863 version='01'):
860 """Like getbundle, but taking a discovery.outgoing as an argument.
864 """Like getbundle, but taking a discovery.outgoing as an argument.
861
865
862 This is only implemented for local repos and reuses potentially
866 This is only implemented for local repos and reuses potentially
863 precomputed sets in outgoing."""
867 precomputed sets in outgoing."""
864 if not outgoing.missing:
868 if not outgoing.missing:
865 return None
869 return None
866 bundler = packermap[version][0](repo, bundlecaps)
870 bundler = packermap[version][0](repo, bundlecaps)
867 return getsubset(repo, outgoing, bundler, source)
871 return getsubset(repo, outgoing, bundler, source)
868
872
869 def computeoutgoing(repo, heads, common):
873 def computeoutgoing(repo, heads, common):
870 """Computes which revs are outgoing given a set of common
874 """Computes which revs are outgoing given a set of common
871 and a set of heads.
875 and a set of heads.
872
876
873 This is a separate function so extensions can have access to
877 This is a separate function so extensions can have access to
874 the logic.
878 the logic.
875
879
876 Returns a discovery.outgoing object.
880 Returns a discovery.outgoing object.
877 """
881 """
878 cl = repo.changelog
882 cl = repo.changelog
879 if common:
883 if common:
880 hasnode = cl.hasnode
884 hasnode = cl.hasnode
881 common = [n for n in common if hasnode(n)]
885 common = [n for n in common if hasnode(n)]
882 else:
886 else:
883 common = [nullid]
887 common = [nullid]
884 if not heads:
888 if not heads:
885 heads = cl.heads()
889 heads = cl.heads()
886 return discovery.outgoing(cl, common, heads)
890 return discovery.outgoing(cl, common, heads)
887
891
888 def getchangegroup(repo, source, heads=None, common=None, bundlecaps=None,
892 def getchangegroup(repo, source, heads=None, common=None, bundlecaps=None,
889 version='01'):
893 version='01'):
890 """Like changegroupsubset, but returns the set difference between the
894 """Like changegroupsubset, but returns the set difference between the
891 ancestors of heads and the ancestors common.
895 ancestors of heads and the ancestors common.
892
896
893 If heads is None, use the local heads. If common is None, use [nullid].
897 If heads is None, use the local heads. If common is None, use [nullid].
894
898
895 The nodes in common might not all be known locally due to the way the
899 The nodes in common might not all be known locally due to the way the
896 current discovery protocol works.
900 current discovery protocol works.
897 """
901 """
898 outgoing = computeoutgoing(repo, heads, common)
902 outgoing = computeoutgoing(repo, heads, common)
899 return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps,
903 return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps,
900 version=version)
904 version=version)
901
905
902 def changegroup(repo, basenodes, source):
906 def changegroup(repo, basenodes, source):
903 # to avoid a race we use changegroupsubset() (issue1320)
907 # to avoid a race we use changegroupsubset() (issue1320)
904 return changegroupsubset(repo, basenodes, repo.heads(), source)
908 return changegroupsubset(repo, basenodes, repo.heads(), source)
905
909
906 def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles):
910 def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles):
907 revisions = 0
911 revisions = 0
908 files = 0
912 files = 0
909 while True:
913 while True:
910 chunkdata = source.filelogheader()
914 chunkdata = source.filelogheader()
911 if not chunkdata:
915 if not chunkdata:
912 break
916 break
913 f = chunkdata["filename"]
917 f = chunkdata["filename"]
914 repo.ui.debug("adding %s revisions\n" % f)
918 repo.ui.debug("adding %s revisions\n" % f)
915 pr()
919 pr()
916 fl = repo.file(f)
920 fl = repo.file(f)
917 o = len(fl)
921 o = len(fl)
918 try:
922 try:
919 if not fl.addgroup(source, revmap, trp):
923 if not fl.addgroup(source, revmap, trp):
920 raise error.Abort(_("received file revlog group is empty"))
924 raise error.Abort(_("received file revlog group is empty"))
921 except error.CensoredBaseError as e:
925 except error.CensoredBaseError as e:
922 raise error.Abort(_("received delta base is censored: %s") % e)
926 raise error.Abort(_("received delta base is censored: %s") % e)
923 revisions += len(fl) - o
927 revisions += len(fl) - o
924 files += 1
928 files += 1
925 if f in needfiles:
929 if f in needfiles:
926 needs = needfiles[f]
930 needs = needfiles[f]
927 for new in xrange(o, len(fl)):
931 for new in xrange(o, len(fl)):
928 n = fl.node(new)
932 n = fl.node(new)
929 if n in needs:
933 if n in needs:
930 needs.remove(n)
934 needs.remove(n)
931 else:
935 else:
932 raise error.Abort(
936 raise error.Abort(
933 _("received spurious file revlog entry"))
937 _("received spurious file revlog entry"))
934 if not needs:
938 if not needs:
935 del needfiles[f]
939 del needfiles[f]
936 repo.ui.progress(_('files'), None)
940 repo.ui.progress(_('files'), None)
937
941
938 for f, needs in needfiles.iteritems():
942 for f, needs in needfiles.iteritems():
939 fl = repo.file(f)
943 fl = repo.file(f)
940 for n in needs:
944 for n in needs:
941 try:
945 try:
942 fl.rev(n)
946 fl.rev(n)
943 except error.LookupError:
947 except error.LookupError:
944 raise error.Abort(
948 raise error.Abort(
945 _('missing file data for %s:%s - run hg verify') %
949 _('missing file data for %s:%s - run hg verify') %
946 (f, hex(n)))
950 (f, hex(n)))
947
951
948 return revisions, files
952 return revisions, files
@@ -1,710 +1,717
1 commit hooks can see env vars
1 commit hooks can see env vars
2 (and post-transaction one are run unlocked)
2 (and post-transaction one are run unlocked)
3
3
4 $ cat << EOF >> $HGRCPATH
4 $ cat << EOF >> $HGRCPATH
5 > [experimental]
5 > [experimental]
6 > # drop me once bundle2 is the default,
6 > # drop me once bundle2 is the default,
7 > # added to get test change early.
7 > # added to get test change early.
8 > bundle2-exp = True
8 > bundle2-exp = True
9 > EOF
9 > EOF
10
10
11 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
11 $ cat > $TESTTMP/txnabort.checkargs.py <<EOF
12 > def showargs(ui, repo, hooktype, **kwargs):
12 > def showargs(ui, repo, hooktype, **kwargs):
13 > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
13 > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs))))
14 > EOF
14 > EOF
15
15
16 $ hg init a
16 $ hg init a
17 $ cd a
17 $ cd a
18 $ cat > .hg/hgrc <<EOF
18 $ cat > .hg/hgrc <<EOF
19 > [hooks]
19 > [hooks]
20 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit"
20 > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit"
21 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b"
21 > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b"
22 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit"
22 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit"
23 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit"
23 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit"
24 > pretxncommit.tip = hg -q tip
24 > pretxncommit.tip = hg -q tip
25 > pre-identify = printenv.py pre-identify 1
25 > pre-identify = printenv.py pre-identify 1
26 > pre-cat = printenv.py pre-cat
26 > pre-cat = printenv.py pre-cat
27 > post-cat = printenv.py post-cat
27 > post-cat = printenv.py post-cat
28 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen"
28 > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen"
29 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose"
29 > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose"
30 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose"
30 > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose"
31 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
31 > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs
32 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort"
32 > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort"
33 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
33 > txnclose.checklock = sh -c "hg debuglock > /dev/null"
34 > EOF
34 > EOF
35 $ echo a > a
35 $ echo a > a
36 $ hg add a
36 $ hg add a
37 $ hg commit -m a
37 $ hg commit -m a
38 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
38 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
39 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
39 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
40 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
40 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
41 0:cb9a9f314b8b
41 0:cb9a9f314b8b
42 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
42 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
43 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
43 txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
44 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
44 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
45 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
45 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
46
46
47 $ hg clone . ../b
47 $ hg clone . ../b
48 updating to branch default
48 updating to branch default
49 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
49 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 $ cd ../b
50 $ cd ../b
51
51
52 changegroup hooks can see env vars
52 changegroup hooks can see env vars
53
53
54 $ cat > .hg/hgrc <<EOF
54 $ cat > .hg/hgrc <<EOF
55 > [hooks]
55 > [hooks]
56 > prechangegroup = printenv.py prechangegroup
56 > prechangegroup = printenv.py prechangegroup
57 > changegroup = printenv.py changegroup
57 > changegroup = printenv.py changegroup
58 > incoming = printenv.py incoming
58 > incoming = printenv.py incoming
59 > EOF
59 > EOF
60
60
61 pretxncommit and commit hooks can see both parents of merge
61 pretxncommit and commit hooks can see both parents of merge
62
62
63 $ cd ../a
63 $ cd ../a
64 $ echo b >> a
64 $ echo b >> a
65 $ hg commit -m a1 -d "1 0"
65 $ hg commit -m a1 -d "1 0"
66 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
66 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
67 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
67 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
68 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
68 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
69 1:ab228980c14d
69 1:ab228980c14d
70 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
70 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
71 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
71 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
72 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
72 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
73 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
73 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
74 $ hg update -C 0
74 $ hg update -C 0
75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 $ echo b > b
76 $ echo b > b
77 $ hg add b
77 $ hg add b
78 $ hg commit -m b -d '1 0'
78 $ hg commit -m b -d '1 0'
79 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
79 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
80 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
80 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
81 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
81 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
82 2:ee9deb46ab31
82 2:ee9deb46ab31
83 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
83 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
84 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
84 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
85 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
85 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
86 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
86 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
87 created new head
87 created new head
88 $ hg merge 1
88 $ hg merge 1
89 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
89 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
90 (branch merge, don't forget to commit)
90 (branch merge, don't forget to commit)
91 $ hg commit -m merge -d '2 0'
91 $ hg commit -m merge -d '2 0'
92 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
92 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
93 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
93 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
94 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
94 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
95 3:07f3376c1e65
95 3:07f3376c1e65
96 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
96 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
97 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
97 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
98 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
98 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
99 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
99 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
100
100
101 test generic hooks
101 test generic hooks
102
102
103 $ hg id
103 $ hg id
104 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
104 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
105 abort: pre-identify hook exited with status 1
105 abort: pre-identify hook exited with status 1
106 [255]
106 [255]
107 $ hg cat b
107 $ hg cat b
108 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
108 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
109 b
109 b
110 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
110 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
111
111
112 $ cd ../b
112 $ cd ../b
113 $ hg pull ../a
113 $ hg pull ../a
114 pulling from ../a
114 pulling from ../a
115 searching for changes
115 searching for changes
116 prechangegroup hook: HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
116 prechangegroup hook: HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
117 adding changesets
117 adding changesets
118 adding manifests
118 adding manifests
119 adding file changes
119 adding file changes
120 added 3 changesets with 2 changes to 2 files
120 added 3 changesets with 2 changes to 2 files
121 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
121 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
122 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
122 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
123 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
123 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
124 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
124 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
125 (run 'hg update' to get a working copy)
125 (run 'hg update' to get a working copy)
126
126
127 tag hooks can see env vars
127 tag hooks can see env vars
128
128
129 $ cd ../a
129 $ cd ../a
130 $ cat >> .hg/hgrc <<EOF
130 $ cat >> .hg/hgrc <<EOF
131 > pretag = printenv.py pretag
131 > pretag = printenv.py pretag
132 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag"
132 > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag"
133 > EOF
133 > EOF
134 $ hg tag -d '3 0' a
134 $ hg tag -d '3 0' a
135 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
135 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
136 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
136 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
137 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
137 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
138 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
138 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
139 4:539e4b31b6dc
139 4:539e4b31b6dc
140 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
140 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
141 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
141 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
142 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
142 txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
143 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
143 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
144 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
144 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
145 $ hg tag -l la
145 $ hg tag -l la
146 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
146 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
147 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
147 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
148
148
149 pretag hook can forbid tagging
149 pretag hook can forbid tagging
150
150
151 $ echo "pretag.forbid = printenv.py pretag.forbid 1" >> .hg/hgrc
151 $ echo "pretag.forbid = printenv.py pretag.forbid 1" >> .hg/hgrc
152 $ hg tag -d '4 0' fa
152 $ hg tag -d '4 0' fa
153 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
153 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
154 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
154 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
155 abort: pretag.forbid hook exited with status 1
155 abort: pretag.forbid hook exited with status 1
156 [255]
156 [255]
157 $ hg tag -l fla
157 $ hg tag -l fla
158 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
158 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
159 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
159 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
160 abort: pretag.forbid hook exited with status 1
160 abort: pretag.forbid hook exited with status 1
161 [255]
161 [255]
162
162
163 pretxncommit hook can see changeset, can roll back txn, changeset no
163 pretxncommit hook can see changeset, can roll back txn, changeset no
164 more there after
164 more there after
165
165
166 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
166 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
167 $ echo "pretxncommit.forbid1 = printenv.py pretxncommit.forbid 1" >> .hg/hgrc
167 $ echo "pretxncommit.forbid1 = printenv.py pretxncommit.forbid 1" >> .hg/hgrc
168 $ echo z > z
168 $ echo z > z
169 $ hg add z
169 $ hg add z
170 $ hg -q tip
170 $ hg -q tip
171 4:539e4b31b6dc
171 4:539e4b31b6dc
172 $ hg commit -m 'fail' -d '4 0'
172 $ hg commit -m 'fail' -d '4 0'
173 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
173 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
174 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
174 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
175 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
175 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
176 5:6f611f8018c1
176 5:6f611f8018c1
177 5:6f611f8018c1
177 5:6f611f8018c1
178 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
178 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
179 transaction abort!
179 transaction abort!
180 txnabort python hook: txnid,txnname
180 txnabort python hook: txnid,txnname
181 txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
181 txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob)
182 rollback completed
182 rollback completed
183 abort: pretxncommit.forbid1 hook exited with status 1
183 abort: pretxncommit.forbid1 hook exited with status 1
184 [255]
184 [255]
185 $ hg -q tip
185 $ hg -q tip
186 4:539e4b31b6dc
186 4:539e4b31b6dc
187
187
188 (Check that no 'changelog.i.a' file were left behind)
188 (Check that no 'changelog.i.a' file were left behind)
189
189
190 $ ls -1 .hg/store/
190 $ ls -1 .hg/store/
191 00changelog.i
191 00changelog.i
192 00manifest.i
192 00manifest.i
193 data
193 data
194 fncache
194 fncache
195 journal.phaseroots
195 journal.phaseroots
196 phaseroots
196 phaseroots
197 undo
197 undo
198 undo.backup.fncache
198 undo.backup.fncache
199 undo.backupfiles
199 undo.backupfiles
200 undo.phaseroots
200 undo.phaseroots
201
201
202
202
203 precommit hook can prevent commit
203 precommit hook can prevent commit
204
204
205 $ echo "precommit.forbid = printenv.py precommit.forbid 1" >> .hg/hgrc
205 $ echo "precommit.forbid = printenv.py precommit.forbid 1" >> .hg/hgrc
206 $ hg commit -m 'fail' -d '4 0'
206 $ hg commit -m 'fail' -d '4 0'
207 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
207 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
208 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
208 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
209 abort: precommit.forbid hook exited with status 1
209 abort: precommit.forbid hook exited with status 1
210 [255]
210 [255]
211 $ hg -q tip
211 $ hg -q tip
212 4:539e4b31b6dc
212 4:539e4b31b6dc
213
213
214 preupdate hook can prevent update
214 preupdate hook can prevent update
215
215
216 $ echo "preupdate = printenv.py preupdate" >> .hg/hgrc
216 $ echo "preupdate = printenv.py preupdate" >> .hg/hgrc
217 $ hg update 1
217 $ hg update 1
218 preupdate hook: HG_PARENT1=ab228980c14d
218 preupdate hook: HG_PARENT1=ab228980c14d
219 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
219 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
220
220
221 update hook
221 update hook
222
222
223 $ echo "update = printenv.py update" >> .hg/hgrc
223 $ echo "update = printenv.py update" >> .hg/hgrc
224 $ hg update
224 $ hg update
225 preupdate hook: HG_PARENT1=539e4b31b6dc
225 preupdate hook: HG_PARENT1=539e4b31b6dc
226 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
226 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
227 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
227 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
228
228
229 pushkey hook
229 pushkey hook
230
230
231 $ echo "pushkey = printenv.py pushkey" >> .hg/hgrc
231 $ echo "pushkey = printenv.py pushkey" >> .hg/hgrc
232 $ cd ../b
232 $ cd ../b
233 $ hg bookmark -r null foo
233 $ hg bookmark -r null foo
234 $ hg push -B foo ../a
234 $ hg push -B foo ../a
235 pushing to ../a
235 pushing to ../a
236 searching for changes
236 searching for changes
237 no changes found
237 no changes found
238 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
238 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
239 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
239 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
240 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
240 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
241 txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
241 txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob)
242 exporting bookmark foo
242 exporting bookmark foo
243 [1]
243 [1]
244 $ cd ../a
244 $ cd ../a
245
245
246 listkeys hook
246 listkeys hook
247
247
248 $ echo "listkeys = printenv.py listkeys" >> .hg/hgrc
248 $ echo "listkeys = printenv.py listkeys" >> .hg/hgrc
249 $ hg bookmark -r null bar
249 $ hg bookmark -r null bar
250 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
250 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
251 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
251 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
252 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
252 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
253 $ cd ../b
253 $ cd ../b
254 $ hg pull -B bar ../a
254 $ hg pull -B bar ../a
255 pulling from ../a
255 pulling from ../a
256 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
256 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
257 no changes found
257 no changes found
258 listkeys hook: HG_NAMESPACE=phase HG_VALUES={}
258 listkeys hook: HG_NAMESPACE=phase HG_VALUES={}
259 adding remote bookmark bar
259 adding remote bookmark bar
260 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
260 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
261 $ cd ../a
261 $ cd ../a
262
262
263 test that prepushkey can prevent incoming keys
263 test that prepushkey can prevent incoming keys
264
264
265 $ echo "prepushkey = printenv.py prepushkey.forbid 1" >> .hg/hgrc
265 $ echo "prepushkey = printenv.py prepushkey.forbid 1" >> .hg/hgrc
266 $ cd ../b
266 $ cd ../b
267 $ hg bookmark -r null baz
267 $ hg bookmark -r null baz
268 $ hg push -B baz ../a
268 $ hg push -B baz ../a
269 pushing to ../a
269 pushing to ../a
270 searching for changes
270 searching for changes
271 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
271 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
272 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
272 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
273 no changes found
273 no changes found
274 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
274 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob)
275 prepushkey.forbid hook: HG_BUNDLE2=1 HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_URL=push (glob)
275 prepushkey.forbid hook: HG_BUNDLE2=1 HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_URL=push (glob)
276 pushkey-abort: prepushkey hook exited with status 1
276 pushkey-abort: prepushkey hook exited with status 1
277 abort: exporting bookmark baz failed!
277 abort: exporting bookmark baz failed!
278 [255]
278 [255]
279 $ cd ../a
279 $ cd ../a
280
280
281 test that prelistkeys can prevent listing keys
281 test that prelistkeys can prevent listing keys
282
282
283 $ echo "prelistkeys = printenv.py prelistkeys.forbid 1" >> .hg/hgrc
283 $ echo "prelistkeys = printenv.py prelistkeys.forbid 1" >> .hg/hgrc
284 $ hg bookmark -r null quux
284 $ hg bookmark -r null quux
285 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
285 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
286 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
286 pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
287 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
287 txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob)
288 $ cd ../b
288 $ cd ../b
289 $ hg pull -B quux ../a
289 $ hg pull -B quux ../a
290 pulling from ../a
290 pulling from ../a
291 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
291 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
292 abort: prelistkeys hook exited with status 1
292 abort: prelistkeys hook exited with status 1
293 [255]
293 [255]
294 $ cd ../a
294 $ cd ../a
295 $ rm .hg/hgrc
295 $ rm .hg/hgrc
296
296
297 prechangegroup hook can prevent incoming changes
297 prechangegroup hook can prevent incoming changes
298
298
299 $ cd ../b
299 $ cd ../b
300 $ hg -q tip
300 $ hg -q tip
301 3:07f3376c1e65
301 3:07f3376c1e65
302 $ cat > .hg/hgrc <<EOF
302 $ cat > .hg/hgrc <<EOF
303 > [hooks]
303 > [hooks]
304 > prechangegroup.forbid = printenv.py prechangegroup.forbid 1
304 > prechangegroup.forbid = printenv.py prechangegroup.forbid 1
305 > EOF
305 > EOF
306 $ hg pull ../a
306 $ hg pull ../a
307 pulling from ../a
307 pulling from ../a
308 searching for changes
308 searching for changes
309 prechangegroup.forbid hook: HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
309 prechangegroup.forbid hook: HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
310 abort: prechangegroup.forbid hook exited with status 1
310 abort: prechangegroup.forbid hook exited with status 1
311 [255]
311 [255]
312
312
313 pretxnchangegroup hook can see incoming changes, can roll back txn,
313 pretxnchangegroup hook can see incoming changes, can roll back txn,
314 incoming changes no longer there after
314 incoming changes no longer there after
315
315
316 $ cat > .hg/hgrc <<EOF
316 $ cat > .hg/hgrc <<EOF
317 > [hooks]
317 > [hooks]
318 > pretxnchangegroup.forbid0 = hg tip -q
318 > pretxnchangegroup.forbid0 = hg tip -q
319 > pretxnchangegroup.forbid1 = printenv.py pretxnchangegroup.forbid 1
319 > pretxnchangegroup.forbid1 = printenv.py pretxnchangegroup.forbid 1
320 > EOF
320 > EOF
321 $ hg pull ../a
321 $ hg pull ../a
322 pulling from ../a
322 pulling from ../a
323 searching for changes
323 searching for changes
324 adding changesets
324 adding changesets
325 adding manifests
325 adding manifests
326 adding file changes
326 adding file changes
327 added 1 changesets with 1 changes to 1 files
327 added 1 changesets with 1 changes to 1 files
328 4:539e4b31b6dc
328 4:539e4b31b6dc
329 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
329 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob)
330 transaction abort!
330 transaction abort!
331 rollback completed
331 rollback completed
332 abort: pretxnchangegroup.forbid1 hook exited with status 1
332 abort: pretxnchangegroup.forbid1 hook exited with status 1
333 [255]
333 [255]
334 $ hg -q tip
334 $ hg -q tip
335 3:07f3376c1e65
335 3:07f3376c1e65
336
336
337 outgoing hooks can see env vars
337 outgoing hooks can see env vars
338
338
339 $ rm .hg/hgrc
339 $ rm .hg/hgrc
340 $ cat > ../a/.hg/hgrc <<EOF
340 $ cat > ../a/.hg/hgrc <<EOF
341 > [hooks]
341 > [hooks]
342 > preoutgoing = printenv.py preoutgoing
342 > preoutgoing = printenv.py preoutgoing
343 > outgoing = printenv.py outgoing
343 > outgoing = printenv.py outgoing
344 > EOF
344 > EOF
345 $ hg pull ../a
345 $ hg pull ../a
346 pulling from ../a
346 pulling from ../a
347 searching for changes
347 searching for changes
348 preoutgoing hook: HG_SOURCE=pull
348 preoutgoing hook: HG_SOURCE=pull
349 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
349 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
350 adding changesets
350 adding changesets
351 adding manifests
351 adding manifests
352 adding file changes
352 adding file changes
353 added 1 changesets with 1 changes to 1 files
353 added 1 changesets with 1 changes to 1 files
354 adding remote bookmark quux
354 adding remote bookmark quux
355 (run 'hg update' to get a working copy)
355 (run 'hg update' to get a working copy)
356 $ hg rollback
356 $ hg rollback
357 repository tip rolled back to revision 3 (undo pull)
357 repository tip rolled back to revision 3 (undo pull)
358
358
359 preoutgoing hook can prevent outgoing changes
359 preoutgoing hook can prevent outgoing changes
360
360
361 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> ../a/.hg/hgrc
361 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> ../a/.hg/hgrc
362 $ hg pull ../a
362 $ hg pull ../a
363 pulling from ../a
363 pulling from ../a
364 searching for changes
364 searching for changes
365 preoutgoing hook: HG_SOURCE=pull
365 preoutgoing hook: HG_SOURCE=pull
366 preoutgoing.forbid hook: HG_SOURCE=pull
366 preoutgoing.forbid hook: HG_SOURCE=pull
367 abort: preoutgoing.forbid hook exited with status 1
367 abort: preoutgoing.forbid hook exited with status 1
368 [255]
368 [255]
369
369
370 outgoing hooks work for local clones
370 outgoing hooks work for local clones
371
371
372 $ cd ..
372 $ cd ..
373 $ cat > a/.hg/hgrc <<EOF
373 $ cat > a/.hg/hgrc <<EOF
374 > [hooks]
374 > [hooks]
375 > preoutgoing = printenv.py preoutgoing
375 > preoutgoing = printenv.py preoutgoing
376 > outgoing = printenv.py outgoing
376 > outgoing = printenv.py outgoing
377 > EOF
377 > EOF
378 $ hg clone a c
378 $ hg clone a c
379 preoutgoing hook: HG_SOURCE=clone
379 preoutgoing hook: HG_SOURCE=clone
380 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
380 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
381 updating to branch default
381 updating to branch default
382 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
382 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
383 $ rm -rf c
383 $ rm -rf c
384
384
385 preoutgoing hook can prevent outgoing changes for local clones
385 preoutgoing hook can prevent outgoing changes for local clones
386
386
387 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> a/.hg/hgrc
387 $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> a/.hg/hgrc
388 $ hg clone a zzz
388 $ hg clone a zzz
389 preoutgoing hook: HG_SOURCE=clone
389 preoutgoing hook: HG_SOURCE=clone
390 preoutgoing.forbid hook: HG_SOURCE=clone
390 preoutgoing.forbid hook: HG_SOURCE=clone
391 abort: preoutgoing.forbid hook exited with status 1
391 abort: preoutgoing.forbid hook exited with status 1
392 [255]
392 [255]
393
393
394 $ cd "$TESTTMP/b"
394 $ cd "$TESTTMP/b"
395
395
396 $ cat > hooktests.py <<EOF
396 $ cat > hooktests.py <<EOF
397 > from mercurial import error
397 > from mercurial import error
398 >
398 >
399 > uncallable = 0
399 > uncallable = 0
400 >
400 >
401 > def printargs(args):
401 > def printargs(args):
402 > args.pop('ui', None)
402 > args.pop('ui', None)
403 > args.pop('repo', None)
403 > args.pop('repo', None)
404 > a = list(args.items())
404 > a = list(args.items())
405 > a.sort()
405 > a.sort()
406 > print 'hook args:'
406 > print 'hook args:'
407 > for k, v in a:
407 > for k, v in a:
408 > print ' ', k, v
408 > print ' ', k, v
409 >
409 >
410 > def passhook(**args):
410 > def passhook(**args):
411 > printargs(args)
411 > printargs(args)
412 >
412 >
413 > def failhook(**args):
413 > def failhook(**args):
414 > printargs(args)
414 > printargs(args)
415 > return True
415 > return True
416 >
416 >
417 > class LocalException(Exception):
417 > class LocalException(Exception):
418 > pass
418 > pass
419 >
419 >
420 > def raisehook(**args):
420 > def raisehook(**args):
421 > raise LocalException('exception from hook')
421 > raise LocalException('exception from hook')
422 >
422 >
423 > def aborthook(**args):
423 > def aborthook(**args):
424 > raise error.Abort('raise abort from hook')
424 > raise error.Abort('raise abort from hook')
425 >
425 >
426 > def brokenhook(**args):
426 > def brokenhook(**args):
427 > return 1 + {}
427 > return 1 + {}
428 >
428 >
429 > def verbosehook(ui, **args):
429 > def verbosehook(ui, **args):
430 > ui.note('verbose output from hook\n')
430 > ui.note('verbose output from hook\n')
431 >
431 >
432 > def printtags(ui, repo, **args):
432 > def printtags(ui, repo, **args):
433 > print sorted(repo.tags())
433 > print sorted(repo.tags())
434 >
434 >
435 > class container:
435 > class container:
436 > unreachable = 1
436 > unreachable = 1
437 > EOF
437 > EOF
438
438
439 test python hooks
439 test python hooks
440
440
441 #if windows
441 #if windows
442 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
442 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
443 #else
443 #else
444 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
444 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
445 #endif
445 #endif
446 $ export PYTHONPATH
446 $ export PYTHONPATH
447
447
448 $ echo '[hooks]' > ../a/.hg/hgrc
448 $ echo '[hooks]' > ../a/.hg/hgrc
449 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
449 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
450 $ hg pull ../a 2>&1 | grep 'raised an exception'
450 $ hg pull ../a 2>&1 | grep 'raised an exception'
451 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
451 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
452
452
453 $ echo '[hooks]' > ../a/.hg/hgrc
453 $ echo '[hooks]' > ../a/.hg/hgrc
454 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
454 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
455 $ hg pull ../a 2>&1 | grep 'raised an exception'
455 $ hg pull ../a 2>&1 | grep 'raised an exception'
456 error: preoutgoing.raise hook raised an exception: exception from hook
456 error: preoutgoing.raise hook raised an exception: exception from hook
457
457
458 $ echo '[hooks]' > ../a/.hg/hgrc
458 $ echo '[hooks]' > ../a/.hg/hgrc
459 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
459 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
460 $ hg pull ../a
460 $ hg pull ../a
461 pulling from ../a
461 pulling from ../a
462 searching for changes
462 searching for changes
463 error: preoutgoing.abort hook failed: raise abort from hook
463 error: preoutgoing.abort hook failed: raise abort from hook
464 abort: raise abort from hook
464 abort: raise abort from hook
465 [255]
465 [255]
466
466
467 $ echo '[hooks]' > ../a/.hg/hgrc
467 $ echo '[hooks]' > ../a/.hg/hgrc
468 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
468 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
469 $ hg pull ../a
469 $ hg pull ../a
470 pulling from ../a
470 pulling from ../a
471 searching for changes
471 searching for changes
472 hook args:
472 hook args:
473 hooktype preoutgoing
473 hooktype preoutgoing
474 source pull
474 source pull
475 abort: preoutgoing.fail hook failed
475 abort: preoutgoing.fail hook failed
476 [255]
476 [255]
477
477
478 $ echo '[hooks]' > ../a/.hg/hgrc
478 $ echo '[hooks]' > ../a/.hg/hgrc
479 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
479 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
480 $ hg pull ../a
480 $ hg pull ../a
481 pulling from ../a
481 pulling from ../a
482 searching for changes
482 searching for changes
483 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
483 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
484 [255]
484 [255]
485
485
486 $ echo '[hooks]' > ../a/.hg/hgrc
486 $ echo '[hooks]' > ../a/.hg/hgrc
487 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
487 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
488 $ hg pull ../a
488 $ hg pull ../a
489 pulling from ../a
489 pulling from ../a
490 searching for changes
490 searching for changes
491 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
491 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
492 [255]
492 [255]
493
493
494 $ echo '[hooks]' > ../a/.hg/hgrc
494 $ echo '[hooks]' > ../a/.hg/hgrc
495 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
495 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
496 $ hg pull ../a
496 $ hg pull ../a
497 pulling from ../a
497 pulling from ../a
498 searching for changes
498 searching for changes
499 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
499 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
500 [255]
500 [255]
501
501
502 $ echo '[hooks]' > ../a/.hg/hgrc
502 $ echo '[hooks]' > ../a/.hg/hgrc
503 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
503 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
504 $ hg pull ../a
504 $ hg pull ../a
505 pulling from ../a
505 pulling from ../a
506 searching for changes
506 searching for changes
507 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
507 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
508 [255]
508 [255]
509
509
510 $ echo '[hooks]' > ../a/.hg/hgrc
510 $ echo '[hooks]' > ../a/.hg/hgrc
511 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
511 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
512 $ hg pull ../a
512 $ hg pull ../a
513 pulling from ../a
513 pulling from ../a
514 searching for changes
514 searching for changes
515 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
515 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
516 [255]
516 [255]
517
517
518 $ echo '[hooks]' > ../a/.hg/hgrc
518 $ echo '[hooks]' > ../a/.hg/hgrc
519 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
519 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
520 $ hg pull ../a
520 $ hg pull ../a
521 pulling from ../a
521 pulling from ../a
522 searching for changes
522 searching for changes
523 hook args:
523 hook args:
524 hooktype preoutgoing
524 hooktype preoutgoing
525 source pull
525 source pull
526 adding changesets
526 adding changesets
527 adding manifests
527 adding manifests
528 adding file changes
528 adding file changes
529 added 1 changesets with 1 changes to 1 files
529 added 1 changesets with 1 changes to 1 files
530 adding remote bookmark quux
530 adding remote bookmark quux
531 (run 'hg update' to get a working copy)
531 (run 'hg update' to get a working copy)
532
532
533 make sure --traceback works
533 make sure --traceback works
534
534
535 $ echo '[hooks]' > .hg/hgrc
535 $ echo '[hooks]' > .hg/hgrc
536 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
536 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
537
537
538 $ echo aa > a
538 $ echo aa > a
539 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
539 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
540 Traceback (most recent call last):
540 Traceback (most recent call last):
541
541
542 $ cd ..
542 $ cd ..
543 $ hg init c
543 $ hg init c
544 $ cd c
544 $ cd c
545
545
546 $ cat > hookext.py <<EOF
546 $ cat > hookext.py <<EOF
547 > def autohook(**args):
547 > def autohook(**args):
548 > print "Automatically installed hook"
548 > print "Automatically installed hook"
549 >
549 >
550 > def reposetup(ui, repo):
550 > def reposetup(ui, repo):
551 > repo.ui.setconfig("hooks", "commit.auto", autohook)
551 > repo.ui.setconfig("hooks", "commit.auto", autohook)
552 > EOF
552 > EOF
553 $ echo '[extensions]' >> .hg/hgrc
553 $ echo '[extensions]' >> .hg/hgrc
554 $ echo 'hookext = hookext.py' >> .hg/hgrc
554 $ echo 'hookext = hookext.py' >> .hg/hgrc
555
555
556 $ touch foo
556 $ touch foo
557 $ hg add foo
557 $ hg add foo
558 $ hg ci -d '0 0' -m 'add foo'
558 $ hg ci -d '0 0' -m 'add foo'
559 Automatically installed hook
559 Automatically installed hook
560 $ echo >> foo
560 $ echo >> foo
561 $ hg ci --debug -d '0 0' -m 'change foo'
561 $ hg ci --debug -d '0 0' -m 'change foo'
562 committing files:
562 committing files:
563 foo
563 foo
564 committing manifest
564 committing manifest
565 committing changelog
565 committing changelog
566 calling hook commit.auto: hgext_hookext.autohook
566 calling hook commit.auto: hgext_hookext.autohook
567 Automatically installed hook
567 Automatically installed hook
568 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
568 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
569
569
570 $ hg showconfig hooks
570 $ hg showconfig hooks
571 hooks.commit.auto=<function autohook at *> (glob)
571 hooks.commit.auto=<function autohook at *> (glob)
572
572
573 test python hook configured with python:[file]:[hook] syntax
573 test python hook configured with python:[file]:[hook] syntax
574
574
575 $ cd ..
575 $ cd ..
576 $ mkdir d
576 $ mkdir d
577 $ cd d
577 $ cd d
578 $ hg init repo
578 $ hg init repo
579 $ mkdir hooks
579 $ mkdir hooks
580
580
581 $ cd hooks
581 $ cd hooks
582 $ cat > testhooks.py <<EOF
582 $ cat > testhooks.py <<EOF
583 > def testhook(**args):
583 > def testhook(**args):
584 > print 'hook works'
584 > print 'hook works'
585 > EOF
585 > EOF
586 $ echo '[hooks]' > ../repo/.hg/hgrc
586 $ echo '[hooks]' > ../repo/.hg/hgrc
587 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
587 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
588
588
589 $ cd ../repo
589 $ cd ../repo
590 $ hg commit -d '0 0'
590 $ hg commit -d '0 0'
591 hook works
591 hook works
592 nothing changed
592 nothing changed
593 [1]
593 [1]
594
594
595 $ echo '[hooks]' > .hg/hgrc
595 $ echo '[hooks]' > .hg/hgrc
596 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
596 $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
597 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
597 $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
598
598
599 $ hg up null
599 $ hg up null
600 loading update.ne hook failed:
600 loading update.ne hook failed:
601 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
601 abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
602 [255]
602 [255]
603
603
604 $ hg id
604 $ hg id
605 loading pre-identify.npmd hook failed:
605 loading pre-identify.npmd hook failed:
606 abort: No module named repo!
606 abort: No module named repo!
607 [255]
607 [255]
608
608
609 $ cd ../../b
609 $ cd ../../b
610
610
611 make sure --traceback works on hook import failure
611 make sure --traceback works on hook import failure
612
612
613 $ cat > importfail.py <<EOF
613 $ cat > importfail.py <<EOF
614 > import somebogusmodule
614 > import somebogusmodule
615 > # dereference something in the module to force demandimport to load it
615 > # dereference something in the module to force demandimport to load it
616 > somebogusmodule.whatever
616 > somebogusmodule.whatever
617 > EOF
617 > EOF
618
618
619 $ echo '[hooks]' > .hg/hgrc
619 $ echo '[hooks]' > .hg/hgrc
620 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
620 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
621
621
622 $ echo a >> a
622 $ echo a >> a
623 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
623 $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])'
624 exception from first failed import attempt:
624 exception from first failed import attempt:
625 Traceback (most recent call last):
625 Traceback (most recent call last):
626 ImportError: No module named somebogusmodule
626 ImportError: No module named somebogusmodule
627 exception from second failed import attempt:
627 exception from second failed import attempt:
628 Traceback (most recent call last):
628 Traceback (most recent call last):
629 ImportError: No module named hgext_importfail
629 ImportError: No module named hgext_importfail
630 Traceback (most recent call last):
630 Traceback (most recent call last):
631 HookLoadError: precommit.importfail hook is invalid (import of "importfail" failed)
631 HookLoadError: precommit.importfail hook is invalid (import of "importfail" failed)
632 abort: precommit.importfail hook is invalid (import of "importfail" failed)
632 abort: precommit.importfail hook is invalid (import of "importfail" failed)
633
633
634 Issue1827: Hooks Update & Commit not completely post operation
634 Issue1827: Hooks Update & Commit not completely post operation
635
635
636 commit and update hooks should run after command completion. The largefiles
636 commit and update hooks should run after command completion. The largefiles
637 use demonstrates a recursive wlock, showing the hook doesn't run until the
637 use demonstrates a recursive wlock, showing the hook doesn't run until the
638 final release (and dirstate flush).
638 final release (and dirstate flush).
639
639
640 $ echo '[hooks]' > .hg/hgrc
640 $ echo '[hooks]' > .hg/hgrc
641 $ echo 'commit = hg id' >> .hg/hgrc
641 $ echo 'commit = hg id' >> .hg/hgrc
642 $ echo 'update = hg id' >> .hg/hgrc
642 $ echo 'update = hg id' >> .hg/hgrc
643 $ echo bb > a
643 $ echo bb > a
644 $ hg ci -ma
644 $ hg ci -ma
645 223eafe2750c tip
645 223eafe2750c tip
646 $ hg up 0 --config extensions.largefiles=
646 $ hg up 0 --config extensions.largefiles=
647 cb9a9f314b8b
647 cb9a9f314b8b
648 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
648 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
649
649
650 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
650 make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
651 that is passed to pre/post hooks
651 that is passed to pre/post hooks
652
652
653 $ echo '[hooks]' > .hg/hgrc
653 $ echo '[hooks]' > .hg/hgrc
654 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
654 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
655 $ hg id
655 $ hg id
656 cb9a9f314b8b
656 cb9a9f314b8b
657 $ hg id --verbose
657 $ hg id --verbose
658 calling hook pre-identify: hooktests.verbosehook
658 calling hook pre-identify: hooktests.verbosehook
659 verbose output from hook
659 verbose output from hook
660 cb9a9f314b8b
660 cb9a9f314b8b
661
661
662 Ensure hooks can be prioritized
662 Ensure hooks can be prioritized
663
663
664 $ echo '[hooks]' > .hg/hgrc
664 $ echo '[hooks]' > .hg/hgrc
665 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
665 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
666 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
666 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
667 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
667 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
668 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
668 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
669 $ hg id --verbose
669 $ hg id --verbose
670 calling hook pre-identify.b: hooktests.verbosehook
670 calling hook pre-identify.b: hooktests.verbosehook
671 verbose output from hook
671 verbose output from hook
672 calling hook pre-identify.a: hooktests.verbosehook
672 calling hook pre-identify.a: hooktests.verbosehook
673 verbose output from hook
673 verbose output from hook
674 calling hook pre-identify.c: hooktests.verbosehook
674 calling hook pre-identify.c: hooktests.verbosehook
675 verbose output from hook
675 verbose output from hook
676 cb9a9f314b8b
676 cb9a9f314b8b
677
677
678 new tags must be visible in pretxncommit (issue3210)
678 new tags must be visible in pretxncommit (issue3210)
679
679
680 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
680 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
681 $ hg tag -f foo
681 $ hg tag -f foo
682 ['a', 'foo', 'tip']
682 ['a', 'foo', 'tip']
683
683
684 new commits must be visible in pretxnchangegroup (issue3428)
684 new commits must be visible in pretxnchangegroup (issue3428)
685
685
686 $ cd ..
686 $ cd ..
687 $ hg init to
687 $ hg init to
688 $ echo '[hooks]' >> to/.hg/hgrc
688 $ echo '[hooks]' >> to/.hg/hgrc
689 $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc
689 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
690 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
690 $ echo a >> to/a
691 $ echo a >> to/a
691 $ hg --cwd to ci -Ama
692 $ hg --cwd to ci -Ama
692 adding a
693 adding a
693 $ hg clone to from
694 $ hg clone to from
694 updating to branch default
695 updating to branch default
695 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
696 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
696 $ echo aa >> from/a
697 $ echo aa >> from/a
697 $ hg --cwd from ci -mb
698 $ hg --cwd from ci -mb
698 $ hg --cwd from push
699 $ hg --cwd from push
699 pushing to $TESTTMP/to (glob)
700 pushing to $TESTTMP/to (glob)
700 searching for changes
701 searching for changes
702 changeset: 0:cb9a9f314b8b
703 tag: tip
704 user: test
705 date: Thu Jan 01 00:00:00 1970 +0000
706 summary: a
707
701 adding changesets
708 adding changesets
702 adding manifests
709 adding manifests
703 adding file changes
710 adding file changes
704 added 1 changesets with 1 changes to 1 files
711 added 1 changesets with 1 changes to 1 files
705 changeset: 1:9836a07b9b9d
712 changeset: 1:9836a07b9b9d
706 tag: tip
713 tag: tip
707 user: test
714 user: test
708 date: Thu Jan 01 00:00:00 1970 +0000
715 date: Thu Jan 01 00:00:00 1970 +0000
709 summary: b
716 summary: b
710
717
General Comments 0
You need to be logged in to leave comments. Login now