##// END OF EJS Templates
obsolete: exchange obsolete marker over pushkey...
Pierre-Yves.David@ens-lyon.org -
r17075:28ed1c45 default
parent child Browse files
Show More
@@ -1,2443 +1,2453 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
8 from node import bin, hex, nullid, nullrev, short
7 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
8 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey, obsolete
9 import repo, changegroup, subrepo, discovery, pushkey, obsolete
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 import lock, transaction, store, encoding
11 import lock, transaction, store, encoding, base85
13 import scmutil, util, extensions, hook, error, revset
12 import scmutil, util, extensions, hook, error, revset
14 import match as matchmod
13 import match as matchmod
15 import merge as mergemod
14 import merge as mergemod
16 import tags as tagsmod
15 import tags as tagsmod
17 from lock import release
16 from lock import release
18 import weakref, errno, os, time, inspect
17 import weakref, errno, os, time, inspect
propertycache = util.propertycache
filecache = scmutil.filecache

class storecache(filecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        # store-tracked files live under .hg/store, so resolve the
        # path through the store join rather than the plain .hg opener
        return obj.sjoin(fname)
26
25
class localrepository(repo.repository):
    # capabilities advertised to peers over the wire protocol
    capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
                        'known', 'getbundle'))
    # on-disk revlog formats this class can read and write
    supportedformats = set(('revlogv1', 'generaldelta'))
    # every requirement this class understands: formats plus layout features
    supported = supportedformats | set(('store', 'fncache', 'shared',
                                        'dotencode'))
33
32
34 def __init__(self, baseui, path=None, create=False):
33 def __init__(self, baseui, path=None, create=False):
35 repo.repository.__init__(self)
34 repo.repository.__init__(self)
36 self.root = os.path.realpath(util.expandpath(path))
35 self.root = os.path.realpath(util.expandpath(path))
37 self.path = os.path.join(self.root, ".hg")
36 self.path = os.path.join(self.root, ".hg")
38 self.origroot = path
37 self.origroot = path
39 self.auditor = scmutil.pathauditor(self.root, self._checknested)
38 self.auditor = scmutil.pathauditor(self.root, self._checknested)
40 self.opener = scmutil.opener(self.path)
39 self.opener = scmutil.opener(self.path)
41 self.wopener = scmutil.opener(self.root)
40 self.wopener = scmutil.opener(self.root)
42 self.baseui = baseui
41 self.baseui = baseui
43 self.ui = baseui.copy()
42 self.ui = baseui.copy()
44 # A list of callback to shape the phase if no data were found.
43 # A list of callback to shape the phase if no data were found.
45 # Callback are in the form: func(repo, roots) --> processed root.
44 # Callback are in the form: func(repo, roots) --> processed root.
46 # This list it to be filled by extension during repo setup
45 # This list it to be filled by extension during repo setup
47 self._phasedefaults = []
46 self._phasedefaults = []
48
47
49 try:
48 try:
50 self.ui.readconfig(self.join("hgrc"), self.root)
49 self.ui.readconfig(self.join("hgrc"), self.root)
51 extensions.loadall(self.ui)
50 extensions.loadall(self.ui)
52 except IOError:
51 except IOError:
53 pass
52 pass
54
53
55 if not os.path.isdir(self.path):
54 if not os.path.isdir(self.path):
56 if create:
55 if create:
57 if not os.path.exists(path):
56 if not os.path.exists(path):
58 util.makedirs(path)
57 util.makedirs(path)
59 util.makedir(self.path, notindexed=True)
58 util.makedir(self.path, notindexed=True)
60 requirements = ["revlogv1"]
59 requirements = ["revlogv1"]
61 if self.ui.configbool('format', 'usestore', True):
60 if self.ui.configbool('format', 'usestore', True):
62 os.mkdir(os.path.join(self.path, "store"))
61 os.mkdir(os.path.join(self.path, "store"))
63 requirements.append("store")
62 requirements.append("store")
64 if self.ui.configbool('format', 'usefncache', True):
63 if self.ui.configbool('format', 'usefncache', True):
65 requirements.append("fncache")
64 requirements.append("fncache")
66 if self.ui.configbool('format', 'dotencode', True):
65 if self.ui.configbool('format', 'dotencode', True):
67 requirements.append('dotencode')
66 requirements.append('dotencode')
68 # create an invalid changelog
67 # create an invalid changelog
69 self.opener.append(
68 self.opener.append(
70 "00changelog.i",
69 "00changelog.i",
71 '\0\0\0\2' # represents revlogv2
70 '\0\0\0\2' # represents revlogv2
72 ' dummy changelog to prevent using the old repo layout'
71 ' dummy changelog to prevent using the old repo layout'
73 )
72 )
74 if self.ui.configbool('format', 'generaldelta', False):
73 if self.ui.configbool('format', 'generaldelta', False):
75 requirements.append("generaldelta")
74 requirements.append("generaldelta")
76 requirements = set(requirements)
75 requirements = set(requirements)
77 else:
76 else:
78 raise error.RepoError(_("repository %s not found") % path)
77 raise error.RepoError(_("repository %s not found") % path)
79 elif create:
78 elif create:
80 raise error.RepoError(_("repository %s already exists") % path)
79 raise error.RepoError(_("repository %s already exists") % path)
81 else:
80 else:
82 try:
81 try:
83 requirements = scmutil.readrequires(self.opener, self.supported)
82 requirements = scmutil.readrequires(self.opener, self.supported)
84 except IOError, inst:
83 except IOError, inst:
85 if inst.errno != errno.ENOENT:
84 if inst.errno != errno.ENOENT:
86 raise
85 raise
87 requirements = set()
86 requirements = set()
88
87
89 self.sharedpath = self.path
88 self.sharedpath = self.path
90 try:
89 try:
91 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
90 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
92 if not os.path.exists(s):
91 if not os.path.exists(s):
93 raise error.RepoError(
92 raise error.RepoError(
94 _('.hg/sharedpath points to nonexistent directory %s') % s)
93 _('.hg/sharedpath points to nonexistent directory %s') % s)
95 self.sharedpath = s
94 self.sharedpath = s
96 except IOError, inst:
95 except IOError, inst:
97 if inst.errno != errno.ENOENT:
96 if inst.errno != errno.ENOENT:
98 raise
97 raise
99
98
100 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
99 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
101 self.spath = self.store.path
100 self.spath = self.store.path
102 self.sopener = self.store.opener
101 self.sopener = self.store.opener
103 self.sjoin = self.store.join
102 self.sjoin = self.store.join
104 self.opener.createmode = self.store.createmode
103 self.opener.createmode = self.store.createmode
105 self._applyrequirements(requirements)
104 self._applyrequirements(requirements)
106 if create:
105 if create:
107 self._writerequirements()
106 self._writerequirements()
108
107
109
108
110 self._branchcache = None
109 self._branchcache = None
111 self._branchcachetip = None
110 self._branchcachetip = None
112 self.filterpats = {}
111 self.filterpats = {}
113 self._datafilters = {}
112 self._datafilters = {}
114 self._transref = self._lockref = self._wlockref = None
113 self._transref = self._lockref = self._wlockref = None
115
114
116 # A cache for various files under .hg/ that tracks file changes,
115 # A cache for various files under .hg/ that tracks file changes,
117 # (used by the filecache decorator)
116 # (used by the filecache decorator)
118 #
117 #
119 # Maps a property name to its util.filecacheentry
118 # Maps a property name to its util.filecacheentry
120 self._filecache = {}
119 self._filecache = {}
121
120
122 def _applyrequirements(self, requirements):
121 def _applyrequirements(self, requirements):
123 self.requirements = requirements
122 self.requirements = requirements
124 openerreqs = set(('revlogv1', 'generaldelta'))
123 openerreqs = set(('revlogv1', 'generaldelta'))
125 self.sopener.options = dict((r, 1) for r in requirements
124 self.sopener.options = dict((r, 1) for r in requirements
126 if r in openerreqs)
125 if r in openerreqs)
127
126
128 def _writerequirements(self):
127 def _writerequirements(self):
129 reqfile = self.opener("requires", "w")
128 reqfile = self.opener("requires", "w")
130 for r in self.requirements:
129 for r in self.requirements:
131 reqfile.write("%s\n" % r)
130 reqfile.write("%s\n" % r)
132 reqfile.close()
131 reqfile.close()
133
132
134 def _checknested(self, path):
133 def _checknested(self, path):
135 """Determine if path is a legal nested repository."""
134 """Determine if path is a legal nested repository."""
136 if not path.startswith(self.root):
135 if not path.startswith(self.root):
137 return False
136 return False
138 subpath = path[len(self.root) + 1:]
137 subpath = path[len(self.root) + 1:]
139 normsubpath = util.pconvert(subpath)
138 normsubpath = util.pconvert(subpath)
140
139
141 # XXX: Checking against the current working copy is wrong in
140 # XXX: Checking against the current working copy is wrong in
142 # the sense that it can reject things like
141 # the sense that it can reject things like
143 #
142 #
144 # $ hg cat -r 10 sub/x.txt
143 # $ hg cat -r 10 sub/x.txt
145 #
144 #
146 # if sub/ is no longer a subrepository in the working copy
145 # if sub/ is no longer a subrepository in the working copy
147 # parent revision.
146 # parent revision.
148 #
147 #
149 # However, it can of course also allow things that would have
148 # However, it can of course also allow things that would have
150 # been rejected before, such as the above cat command if sub/
149 # been rejected before, such as the above cat command if sub/
151 # is a subrepository now, but was a normal directory before.
150 # is a subrepository now, but was a normal directory before.
152 # The old path auditor would have rejected by mistake since it
151 # The old path auditor would have rejected by mistake since it
153 # panics when it sees sub/.hg/.
152 # panics when it sees sub/.hg/.
154 #
153 #
155 # All in all, checking against the working copy seems sensible
154 # All in all, checking against the working copy seems sensible
156 # since we want to prevent access to nested repositories on
155 # since we want to prevent access to nested repositories on
157 # the filesystem *now*.
156 # the filesystem *now*.
158 ctx = self[None]
157 ctx = self[None]
159 parts = util.splitpath(subpath)
158 parts = util.splitpath(subpath)
160 while parts:
159 while parts:
161 prefix = '/'.join(parts)
160 prefix = '/'.join(parts)
162 if prefix in ctx.substate:
161 if prefix in ctx.substate:
163 if prefix == normsubpath:
162 if prefix == normsubpath:
164 return True
163 return True
165 else:
164 else:
166 sub = ctx.sub(prefix)
165 sub = ctx.sub(prefix)
167 return sub.checknested(subpath[len(prefix) + 1:])
166 return sub.checknested(subpath[len(prefix) + 1:])
168 else:
167 else:
169 parts.pop()
168 parts.pop()
170 return False
169 return False
171
170
172 @filecache('bookmarks')
171 @filecache('bookmarks')
173 def _bookmarks(self):
172 def _bookmarks(self):
174 return bookmarks.read(self)
173 return bookmarks.read(self)
175
174
176 @filecache('bookmarks.current')
175 @filecache('bookmarks.current')
177 def _bookmarkcurrent(self):
176 def _bookmarkcurrent(self):
178 return bookmarks.readcurrent(self)
177 return bookmarks.readcurrent(self)
179
178
180 def _writebookmarks(self, marks):
179 def _writebookmarks(self, marks):
181 bookmarks.write(self)
180 bookmarks.write(self)
182
181
183 def bookmarkheads(self, bookmark):
182 def bookmarkheads(self, bookmark):
184 name = bookmark.split('@', 1)[0]
183 name = bookmark.split('@', 1)[0]
185 heads = []
184 heads = []
186 for mark, n in self._bookmarks.iteritems():
185 for mark, n in self._bookmarks.iteritems():
187 if mark.split('@', 1)[0] == name:
186 if mark.split('@', 1)[0] == name:
188 heads.append(n)
187 heads.append(n)
189 return heads
188 return heads
190
189
191 @storecache('phaseroots')
190 @storecache('phaseroots')
192 def _phasecache(self):
191 def _phasecache(self):
193 return phases.phasecache(self, self._phasedefaults)
192 return phases.phasecache(self, self._phasedefaults)
194
193
195 @storecache('obsstore')
194 @storecache('obsstore')
196 def obsstore(self):
195 def obsstore(self):
197 store = obsolete.obsstore()
196 store = obsolete.obsstore()
198 data = self.sopener.tryread('obsstore')
197 data = self.sopener.tryread('obsstore')
199 if data:
198 if data:
200 store.loadmarkers(data)
199 store.loadmarkers(data)
201 return store
200 return store
202
201
203 @storecache('00changelog.i')
202 @storecache('00changelog.i')
204 def changelog(self):
203 def changelog(self):
205 c = changelog.changelog(self.sopener)
204 c = changelog.changelog(self.sopener)
206 if 'HG_PENDING' in os.environ:
205 if 'HG_PENDING' in os.environ:
207 p = os.environ['HG_PENDING']
206 p = os.environ['HG_PENDING']
208 if p.startswith(self.root):
207 if p.startswith(self.root):
209 c.readpending('00changelog.i.a')
208 c.readpending('00changelog.i.a')
210 return c
209 return c
211
210
212 @storecache('00manifest.i')
211 @storecache('00manifest.i')
213 def manifest(self):
212 def manifest(self):
214 return manifest.manifest(self.sopener)
213 return manifest.manifest(self.sopener)
215
214
216 @filecache('dirstate')
215 @filecache('dirstate')
217 def dirstate(self):
216 def dirstate(self):
218 warned = [0]
217 warned = [0]
219 def validate(node):
218 def validate(node):
220 try:
219 try:
221 self.changelog.rev(node)
220 self.changelog.rev(node)
222 return node
221 return node
223 except error.LookupError:
222 except error.LookupError:
224 if not warned[0]:
223 if not warned[0]:
225 warned[0] = True
224 warned[0] = True
226 self.ui.warn(_("warning: ignoring unknown"
225 self.ui.warn(_("warning: ignoring unknown"
227 " working parent %s!\n") % short(node))
226 " working parent %s!\n") % short(node))
228 return nullid
227 return nullid
229
228
230 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
229 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
231
230
232 def __getitem__(self, changeid):
231 def __getitem__(self, changeid):
233 if changeid is None:
232 if changeid is None:
234 return context.workingctx(self)
233 return context.workingctx(self)
235 return context.changectx(self, changeid)
234 return context.changectx(self, changeid)
236
235
237 def __contains__(self, changeid):
236 def __contains__(self, changeid):
238 try:
237 try:
239 return bool(self.lookup(changeid))
238 return bool(self.lookup(changeid))
240 except error.RepoLookupError:
239 except error.RepoLookupError:
241 return False
240 return False
242
241
243 def __nonzero__(self):
242 def __nonzero__(self):
244 return True
243 return True
245
244
246 def __len__(self):
245 def __len__(self):
247 return len(self.changelog)
246 return len(self.changelog)
248
247
249 def __iter__(self):
248 def __iter__(self):
250 for i in xrange(len(self)):
249 for i in xrange(len(self)):
251 yield i
250 yield i
252
251
253 def revs(self, expr, *args):
252 def revs(self, expr, *args):
254 '''Return a list of revisions matching the given revset'''
253 '''Return a list of revisions matching the given revset'''
255 expr = revset.formatspec(expr, *args)
254 expr = revset.formatspec(expr, *args)
256 m = revset.match(None, expr)
255 m = revset.match(None, expr)
257 return [r for r in m(self, range(len(self)))]
256 return [r for r in m(self, range(len(self)))]
258
257
259 def set(self, expr, *args):
258 def set(self, expr, *args):
260 '''
259 '''
261 Yield a context for each matching revision, after doing arg
260 Yield a context for each matching revision, after doing arg
262 replacement via revset.formatspec
261 replacement via revset.formatspec
263 '''
262 '''
264 for r in self.revs(expr, *args):
263 for r in self.revs(expr, *args):
265 yield self[r]
264 yield self[r]
266
265
267 def url(self):
266 def url(self):
268 return 'file:' + self.root
267 return 'file:' + self.root
269
268
270 def hook(self, name, throw=False, **args):
269 def hook(self, name, throw=False, **args):
271 return hook.hook(self.ui, self, name, throw, **args)
270 return hook.hook(self.ui, self, name, throw, **args)
272
271
273 tag_disallowed = ':\r\n'
272 tag_disallowed = ':\r\n'
274
273
275 def _tag(self, names, node, message, local, user, date, extra={}):
274 def _tag(self, names, node, message, local, user, date, extra={}):
276 if isinstance(names, str):
275 if isinstance(names, str):
277 allchars = names
276 allchars = names
278 names = (names,)
277 names = (names,)
279 else:
278 else:
280 allchars = ''.join(names)
279 allchars = ''.join(names)
281 for c in self.tag_disallowed:
280 for c in self.tag_disallowed:
282 if c in allchars:
281 if c in allchars:
283 raise util.Abort(_('%r cannot be used in a tag name') % c)
282 raise util.Abort(_('%r cannot be used in a tag name') % c)
284
283
285 branches = self.branchmap()
284 branches = self.branchmap()
286 for name in names:
285 for name in names:
287 self.hook('pretag', throw=True, node=hex(node), tag=name,
286 self.hook('pretag', throw=True, node=hex(node), tag=name,
288 local=local)
287 local=local)
289 if name in branches:
288 if name in branches:
290 self.ui.warn(_("warning: tag %s conflicts with existing"
289 self.ui.warn(_("warning: tag %s conflicts with existing"
291 " branch name\n") % name)
290 " branch name\n") % name)
292
291
293 def writetags(fp, names, munge, prevtags):
292 def writetags(fp, names, munge, prevtags):
294 fp.seek(0, 2)
293 fp.seek(0, 2)
295 if prevtags and prevtags[-1] != '\n':
294 if prevtags and prevtags[-1] != '\n':
296 fp.write('\n')
295 fp.write('\n')
297 for name in names:
296 for name in names:
298 m = munge and munge(name) or name
297 m = munge and munge(name) or name
299 if (self._tagscache.tagtypes and
298 if (self._tagscache.tagtypes and
300 name in self._tagscache.tagtypes):
299 name in self._tagscache.tagtypes):
301 old = self.tags().get(name, nullid)
300 old = self.tags().get(name, nullid)
302 fp.write('%s %s\n' % (hex(old), m))
301 fp.write('%s %s\n' % (hex(old), m))
303 fp.write('%s %s\n' % (hex(node), m))
302 fp.write('%s %s\n' % (hex(node), m))
304 fp.close()
303 fp.close()
305
304
306 prevtags = ''
305 prevtags = ''
307 if local:
306 if local:
308 try:
307 try:
309 fp = self.opener('localtags', 'r+')
308 fp = self.opener('localtags', 'r+')
310 except IOError:
309 except IOError:
311 fp = self.opener('localtags', 'a')
310 fp = self.opener('localtags', 'a')
312 else:
311 else:
313 prevtags = fp.read()
312 prevtags = fp.read()
314
313
315 # local tags are stored in the current charset
314 # local tags are stored in the current charset
316 writetags(fp, names, None, prevtags)
315 writetags(fp, names, None, prevtags)
317 for name in names:
316 for name in names:
318 self.hook('tag', node=hex(node), tag=name, local=local)
317 self.hook('tag', node=hex(node), tag=name, local=local)
319 return
318 return
320
319
321 try:
320 try:
322 fp = self.wfile('.hgtags', 'rb+')
321 fp = self.wfile('.hgtags', 'rb+')
323 except IOError, e:
322 except IOError, e:
324 if e.errno != errno.ENOENT:
323 if e.errno != errno.ENOENT:
325 raise
324 raise
326 fp = self.wfile('.hgtags', 'ab')
325 fp = self.wfile('.hgtags', 'ab')
327 else:
326 else:
328 prevtags = fp.read()
327 prevtags = fp.read()
329
328
330 # committed tags are stored in UTF-8
329 # committed tags are stored in UTF-8
331 writetags(fp, names, encoding.fromlocal, prevtags)
330 writetags(fp, names, encoding.fromlocal, prevtags)
332
331
333 fp.close()
332 fp.close()
334
333
335 self.invalidatecaches()
334 self.invalidatecaches()
336
335
337 if '.hgtags' not in self.dirstate:
336 if '.hgtags' not in self.dirstate:
338 self[None].add(['.hgtags'])
337 self[None].add(['.hgtags'])
339
338
340 m = matchmod.exact(self.root, '', ['.hgtags'])
339 m = matchmod.exact(self.root, '', ['.hgtags'])
341 tagnode = self.commit(message, user, date, extra=extra, match=m)
340 tagnode = self.commit(message, user, date, extra=extra, match=m)
342
341
343 for name in names:
342 for name in names:
344 self.hook('tag', node=hex(node), tag=name, local=local)
343 self.hook('tag', node=hex(node), tag=name, local=local)
345
344
346 return tagnode
345 return tagnode
347
346
348 def tag(self, names, node, message, local, user, date):
347 def tag(self, names, node, message, local, user, date):
349 '''tag a revision with one or more symbolic names.
348 '''tag a revision with one or more symbolic names.
350
349
351 names is a list of strings or, when adding a single tag, names may be a
350 names is a list of strings or, when adding a single tag, names may be a
352 string.
351 string.
353
352
354 if local is True, the tags are stored in a per-repository file.
353 if local is True, the tags are stored in a per-repository file.
355 otherwise, they are stored in the .hgtags file, and a new
354 otherwise, they are stored in the .hgtags file, and a new
356 changeset is committed with the change.
355 changeset is committed with the change.
357
356
358 keyword arguments:
357 keyword arguments:
359
358
360 local: whether to store tags in non-version-controlled file
359 local: whether to store tags in non-version-controlled file
361 (default False)
360 (default False)
362
361
363 message: commit message to use if committing
362 message: commit message to use if committing
364
363
365 user: name of user to use if committing
364 user: name of user to use if committing
366
365
367 date: date tuple to use if committing'''
366 date: date tuple to use if committing'''
368
367
369 if not local:
368 if not local:
370 for x in self.status()[:5]:
369 for x in self.status()[:5]:
371 if '.hgtags' in x:
370 if '.hgtags' in x:
372 raise util.Abort(_('working copy of .hgtags is changed '
371 raise util.Abort(_('working copy of .hgtags is changed '
373 '(please commit .hgtags manually)'))
372 '(please commit .hgtags manually)'))
374
373
375 self.tags() # instantiate the cache
374 self.tags() # instantiate the cache
376 self._tag(names, node, message, local, user, date)
375 self._tag(names, node, message, local, user, date)
377
376
378 @propertycache
377 @propertycache
379 def _tagscache(self):
378 def _tagscache(self):
380 '''Returns a tagscache object that contains various tags related
379 '''Returns a tagscache object that contains various tags related
381 caches.'''
380 caches.'''
382
381
383 # This simplifies its cache management by having one decorated
382 # This simplifies its cache management by having one decorated
384 # function (this one) and the rest simply fetch things from it.
383 # function (this one) and the rest simply fetch things from it.
385 class tagscache(object):
384 class tagscache(object):
386 def __init__(self):
385 def __init__(self):
387 # These two define the set of tags for this repository. tags
386 # These two define the set of tags for this repository. tags
388 # maps tag name to node; tagtypes maps tag name to 'global' or
387 # maps tag name to node; tagtypes maps tag name to 'global' or
389 # 'local'. (Global tags are defined by .hgtags across all
388 # 'local'. (Global tags are defined by .hgtags across all
390 # heads, and local tags are defined in .hg/localtags.)
389 # heads, and local tags are defined in .hg/localtags.)
391 # They constitute the in-memory cache of tags.
390 # They constitute the in-memory cache of tags.
392 self.tags = self.tagtypes = None
391 self.tags = self.tagtypes = None
393
392
394 self.nodetagscache = self.tagslist = None
393 self.nodetagscache = self.tagslist = None
395
394
396 cache = tagscache()
395 cache = tagscache()
397 cache.tags, cache.tagtypes = self._findtags()
396 cache.tags, cache.tagtypes = self._findtags()
398
397
399 return cache
398 return cache
400
399
401 def tags(self):
400 def tags(self):
402 '''return a mapping of tag to node'''
401 '''return a mapping of tag to node'''
403 t = {}
402 t = {}
404 for k, v in self._tagscache.tags.iteritems():
403 for k, v in self._tagscache.tags.iteritems():
405 try:
404 try:
406 # ignore tags to unknown nodes
405 # ignore tags to unknown nodes
407 self.changelog.rev(v)
406 self.changelog.rev(v)
408 t[k] = v
407 t[k] = v
409 except (error.LookupError, ValueError):
408 except (error.LookupError, ValueError):
410 pass
409 pass
411 return t
410 return t
412
411
413 def _findtags(self):
412 def _findtags(self):
414 '''Do the hard work of finding tags. Return a pair of dicts
413 '''Do the hard work of finding tags. Return a pair of dicts
415 (tags, tagtypes) where tags maps tag name to node, and tagtypes
414 (tags, tagtypes) where tags maps tag name to node, and tagtypes
416 maps tag name to a string like \'global\' or \'local\'.
415 maps tag name to a string like \'global\' or \'local\'.
417 Subclasses or extensions are free to add their own tags, but
416 Subclasses or extensions are free to add their own tags, but
418 should be aware that the returned dicts will be retained for the
417 should be aware that the returned dicts will be retained for the
419 duration of the localrepo object.'''
418 duration of the localrepo object.'''
420
419
421 # XXX what tagtype should subclasses/extensions use? Currently
420 # XXX what tagtype should subclasses/extensions use? Currently
422 # mq and bookmarks add tags, but do not set the tagtype at all.
421 # mq and bookmarks add tags, but do not set the tagtype at all.
423 # Should each extension invent its own tag type? Should there
422 # Should each extension invent its own tag type? Should there
424 # be one tagtype for all such "virtual" tags? Or is the status
423 # be one tagtype for all such "virtual" tags? Or is the status
425 # quo fine?
424 # quo fine?
426
425
427 alltags = {} # map tag name to (node, hist)
426 alltags = {} # map tag name to (node, hist)
428 tagtypes = {}
427 tagtypes = {}
429
428
430 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
429 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
431 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
430 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
432
431
433 # Build the return dicts. Have to re-encode tag names because
432 # Build the return dicts. Have to re-encode tag names because
434 # the tags module always uses UTF-8 (in order not to lose info
433 # the tags module always uses UTF-8 (in order not to lose info
435 # writing to the cache), but the rest of Mercurial wants them in
434 # writing to the cache), but the rest of Mercurial wants them in
436 # local encoding.
435 # local encoding.
437 tags = {}
436 tags = {}
438 for (name, (node, hist)) in alltags.iteritems():
437 for (name, (node, hist)) in alltags.iteritems():
439 if node != nullid:
438 if node != nullid:
440 tags[encoding.tolocal(name)] = node
439 tags[encoding.tolocal(name)] = node
441 tags['tip'] = self.changelog.tip()
440 tags['tip'] = self.changelog.tip()
442 tagtypes = dict([(encoding.tolocal(name), value)
441 tagtypes = dict([(encoding.tolocal(name), value)
443 for (name, value) in tagtypes.iteritems()])
442 for (name, value) in tagtypes.iteritems()])
444 return (tags, tagtypes)
443 return (tags, tagtypes)
445
444
446 def tagtype(self, tagname):
445 def tagtype(self, tagname):
447 '''
446 '''
448 return the type of the given tag. result can be:
447 return the type of the given tag. result can be:
449
448
450 'local' : a local tag
449 'local' : a local tag
451 'global' : a global tag
450 'global' : a global tag
452 None : tag does not exist
451 None : tag does not exist
453 '''
452 '''
454
453
455 return self._tagscache.tagtypes.get(tagname)
454 return self._tagscache.tagtypes.get(tagname)
456
455
457 def tagslist(self):
456 def tagslist(self):
458 '''return a list of tags ordered by revision'''
457 '''return a list of tags ordered by revision'''
459 if not self._tagscache.tagslist:
458 if not self._tagscache.tagslist:
460 l = []
459 l = []
461 for t, n in self.tags().iteritems():
460 for t, n in self.tags().iteritems():
462 r = self.changelog.rev(n)
461 r = self.changelog.rev(n)
463 l.append((r, t, n))
462 l.append((r, t, n))
464 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
463 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
465
464
466 return self._tagscache.tagslist
465 return self._tagscache.tagslist
467
466
468 def nodetags(self, node):
467 def nodetags(self, node):
469 '''return the tags associated with a node'''
468 '''return the tags associated with a node'''
470 if not self._tagscache.nodetagscache:
469 if not self._tagscache.nodetagscache:
471 nodetagscache = {}
470 nodetagscache = {}
472 for t, n in self._tagscache.tags.iteritems():
471 for t, n in self._tagscache.tags.iteritems():
473 nodetagscache.setdefault(n, []).append(t)
472 nodetagscache.setdefault(n, []).append(t)
474 for tags in nodetagscache.itervalues():
473 for tags in nodetagscache.itervalues():
475 tags.sort()
474 tags.sort()
476 self._tagscache.nodetagscache = nodetagscache
475 self._tagscache.nodetagscache = nodetagscache
477 return self._tagscache.nodetagscache.get(node, [])
476 return self._tagscache.nodetagscache.get(node, [])
478
477
479 def nodebookmarks(self, node):
478 def nodebookmarks(self, node):
480 marks = []
479 marks = []
481 for bookmark, n in self._bookmarks.iteritems():
480 for bookmark, n in self._bookmarks.iteritems():
482 if n == node:
481 if n == node:
483 marks.append(bookmark)
482 marks.append(bookmark)
484 return sorted(marks)
483 return sorted(marks)
485
484
486 def _branchtags(self, partial, lrev):
485 def _branchtags(self, partial, lrev):
487 # TODO: rename this function?
486 # TODO: rename this function?
488 tiprev = len(self) - 1
487 tiprev = len(self) - 1
489 if lrev != tiprev:
488 if lrev != tiprev:
490 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
489 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
491 self._updatebranchcache(partial, ctxgen)
490 self._updatebranchcache(partial, ctxgen)
492 self._writebranchcache(partial, self.changelog.tip(), tiprev)
491 self._writebranchcache(partial, self.changelog.tip(), tiprev)
493
492
494 return partial
493 return partial
495
494
496 def updatebranchcache(self):
495 def updatebranchcache(self):
497 tip = self.changelog.tip()
496 tip = self.changelog.tip()
498 if self._branchcache is not None and self._branchcachetip == tip:
497 if self._branchcache is not None and self._branchcachetip == tip:
499 return
498 return
500
499
501 oldtip = self._branchcachetip
500 oldtip = self._branchcachetip
502 self._branchcachetip = tip
501 self._branchcachetip = tip
503 if oldtip is None or oldtip not in self.changelog.nodemap:
502 if oldtip is None or oldtip not in self.changelog.nodemap:
504 partial, last, lrev = self._readbranchcache()
503 partial, last, lrev = self._readbranchcache()
505 else:
504 else:
506 lrev = self.changelog.rev(oldtip)
505 lrev = self.changelog.rev(oldtip)
507 partial = self._branchcache
506 partial = self._branchcache
508
507
509 self._branchtags(partial, lrev)
508 self._branchtags(partial, lrev)
510 # this private cache holds all heads (not just the branch tips)
509 # this private cache holds all heads (not just the branch tips)
511 self._branchcache = partial
510 self._branchcache = partial
512
511
    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]}'''
        # refresh the cache first so callers always see current data
        self.updatebranchcache()
        return self._branchcache
517
516
518 def _branchtip(self, heads):
517 def _branchtip(self, heads):
519 '''return the tipmost branch head in heads'''
518 '''return the tipmost branch head in heads'''
520 tip = heads[-1]
519 tip = heads[-1]
521 for h in reversed(heads):
520 for h in reversed(heads):
522 if not self[h].closesbranch():
521 if not self[h].closesbranch():
523 tip = h
522 tip = h
524 break
523 break
525 return tip
524 return tip
526
525
527 def branchtip(self, branch):
526 def branchtip(self, branch):
528 '''return the tip node for a given branch'''
527 '''return the tip node for a given branch'''
529 if branch not in self.branchmap():
528 if branch not in self.branchmap():
530 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
529 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
531 return self._branchtip(self.branchmap()[branch])
530 return self._branchtip(self.branchmap()[branch])
532
531
533 def branchtags(self):
532 def branchtags(self):
534 '''return a dict where branch names map to the tipmost head of
533 '''return a dict where branch names map to the tipmost head of
535 the branch, open heads come before closed'''
534 the branch, open heads come before closed'''
536 bt = {}
535 bt = {}
537 for bn, heads in self.branchmap().iteritems():
536 for bn, heads in self.branchmap().iteritems():
538 bt[bn] = self._branchtip(heads)
537 bt[bn] = self._branchtip(heads)
539 return bt
538 return bt
540
539
    def _readbranchcache(self):
        """Load the branch head cache from .hg/cache/branchheads.

        Returns (partial, last, lrev) where 'partial' maps branch name ->
        list of head nodes, and 'last'/'lrev' are the node/rev the cache
        was valid at.  Any problem (missing file, stale tip, unknown
        node, parse error) yields the empty cache ({}, nullid, nullrev).
        """
        partial = {}
        try:
            f = self.opener("cache/branchheads")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            return {}, nullid, nullrev

        try:
            # first line records the tip the cache was written against
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            for l in lines:
                if not l:
                    continue
                # each remaining line is "<hex node> <branch label>"
                node, label = l.split(" ", 1)
                label = encoding.tolocal(label.strip())
                if not node in self:
                    raise ValueError('invalidating branch cache because node '+
                                     '%s does not exist' % node)
                partial.setdefault(label, []).append(bin(node))
        except KeyboardInterrupt:
            raise
        except Exception, inst:
            # the cache is advisory: on any error, fall back to empty
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
572
571
573 def _writebranchcache(self, branches, tip, tiprev):
572 def _writebranchcache(self, branches, tip, tiprev):
574 try:
573 try:
575 f = self.opener("cache/branchheads", "w", atomictemp=True)
574 f = self.opener("cache/branchheads", "w", atomictemp=True)
576 f.write("%s %s\n" % (hex(tip), tiprev))
575 f.write("%s %s\n" % (hex(tip), tiprev))
577 for label, nodes in branches.iteritems():
576 for label, nodes in branches.iteritems():
578 for node in nodes:
577 for node in nodes:
579 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
578 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
580 f.close()
579 f.close()
581 except (IOError, OSError):
580 except (IOError, OSError):
582 pass
581 pass
583
582
    def _updatebranchcache(self, partial, ctxgen):
        """Given a branchhead cache, partial, that may have extra nodes or be
        missing heads, and a generator of nodes that are at least a superset of
        heads missing, this function updates partial to be correct.
        """
        # collect new branch entries
        newbranches = {}
        for c in ctxgen:
            newbranches.setdefault(c.branch(), []).append(c.node())
        # if older branchheads are reachable from new ones, they aren't
        # really branchheads. Note checking parents is insufficient:
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newnodes in newbranches.iteritems():
            bheads = partial.setdefault(branch, [])
            # Remove candidate heads that no longer are in the repo (e.g., as
            # the result of a strip that just happened). Avoid using 'node in
            # self' here because that dives down into branchcache code somewhat
            # recursively.
            bheadrevs = [self.changelog.rev(node) for node in bheads
                         if self.changelog.hasnode(node)]
            newheadrevs = [self.changelog.rev(node) for node in newnodes
                           if self.changelog.hasnode(node)]
            ctxisnew = bheadrevs and min(newheadrevs) > max(bheadrevs)
            # Remove duplicates - nodes that are in newheadrevs and are already
            # in bheadrevs. This can happen if you strip a node whose parent
            # was already a head (because they're on different branches).
            bheadrevs = sorted(set(bheadrevs).union(newheadrevs))

            # Starting from tip means fewer passes over reachable. If we know
            # the new candidates are not ancestors of existing heads, we don't
            # have to examine ancestors of existing heads
            if ctxisnew:
                iterrevs = sorted(newheadrevs)
            else:
                iterrevs = list(bheadrevs)

            # This loop prunes out two kinds of heads - heads that are
            # superseded by a head in newheadrevs, and newheadrevs that are not
            # heads because an existing head is their descendant.
            while iterrevs:
                latest = iterrevs.pop()
                if latest not in bheadrevs:
                    continue
                ancestors = set(self.changelog.ancestors([latest],
                                                         bheadrevs[0]))
                if ancestors:
                    bheadrevs = [b for b in bheadrevs if b not in ancestors]
            partial[branch] = [self.changelog.node(rev) for rev in bheadrevs]

        # There may be branches that cease to exist when the last commit in the
        # branch was stripped. This code filters them out. Note that the
        # branch that ceased to exist may not be in newbranches because
        # newbranches is the set of candidate heads, which when you strip the
        # last commit in a branch will be the parent branch.
        for branch in partial:
            nodes = [head for head in partial[branch]
                     if self.changelog.hasnode(head)]
            if not nodes:
                del partial[branch]
643
642
644 def lookup(self, key):
643 def lookup(self, key):
645 return self[key].node()
644 return self[key].node()
646
645
647 def lookupbranch(self, key, remote=None):
646 def lookupbranch(self, key, remote=None):
648 repo = remote or self
647 repo = remote or self
649 if key in repo.branchmap():
648 if key in repo.branchmap():
650 return key
649 return key
651
650
652 repo = (remote and remote.local()) and remote or self
651 repo = (remote and remote.local()) and remote or self
653 return repo[key].branch()
652 return repo[key].branch()
654
653
655 def known(self, nodes):
654 def known(self, nodes):
656 nm = self.changelog.nodemap
655 nm = self.changelog.nodemap
657 pc = self._phasecache
656 pc = self._phasecache
658 result = []
657 result = []
659 for n in nodes:
658 for n in nodes:
660 r = nm.get(n)
659 r = nm.get(n)
661 resp = not (r is None or pc.phase(self, r) >= phases.secret)
660 resp = not (r is None or pc.phase(self, r) >= phases.secret)
662 result.append(resp)
661 result.append(resp)
663 return result
662 return result
664
663
    def local(self):
        # part of the peer API: a local repository returns itself
        return self
667
666
    def join(self, f):
        """Return 'f' joined to self.path (the repository metadata dir)."""
        return os.path.join(self.path, f)
670
669
    def wjoin(self, f):
        """Return 'f' joined to self.root (the working directory)."""
        return os.path.join(self.root, f)
673
672
674 def file(self, f):
673 def file(self, f):
675 if f[0] == '/':
674 if f[0] == '/':
676 f = f[1:]
675 f = f[1:]
677 return filelog.filelog(self.sopener, f)
676 return filelog.filelog(self.sopener, f)
678
677
    def changectx(self, changeid):
        """Return the changectx for 'changeid' (alias for self[changeid])."""
        return self[changeid]
681
680
    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()
685
684
    def setparents(self, p1, p2=nullid):
        """Set the working directory parents to nodes p1 and p2.

        Copy records the dirstate can no longer attribute are re-applied
        here, but only when they can be resolved against the first
        parent's manifest.
        """
        copies = self.dirstate.setparents(p1, p2)
        if copies:
            # Adjust copy records, the dirstate cannot do it, it
            # requires access to parents manifests. Preserve them
            # only for entries added to first parent.
            pctx = self[p1]
            for f in copies:
                if f not in pctx and copies[f] in pctx:
                    self.dirstate.copy(copies[f], f)
696
695
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
701
700
    def getcwd(self):
        """Return the current working directory as seen by the dirstate."""
        return self.dirstate.getcwd()
704
703
    def pathto(self, f, cwd=None):
        """Return repo-tracked path 'f' expressed relative to 'cwd'."""
        return self.dirstate.pathto(f, cwd)
707
706
    def wfile(self, f, mode='r'):
        """Open working-directory file 'f' with the given mode."""
        return self.wopener(f, mode)
710
709
    def _link(self, f):
        """Return True if working-directory file 'f' is a symlink."""
        return os.path.islink(self.wjoin(f))
713
712
    def _loadfilter(self, filter):
        """Load and cache the filter patterns for config section 'filter'
        (e.g. 'encode' or 'decode').

        Returns a list of (matcher, filterfn, params) tuples.  The result
        is memoized in self.filterpats keyed by section name.
        """
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    # '!' disables filtering for this pattern
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                # prefer a registered data filter whose name prefixes cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # fall back to running cmd as an external filter
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]
737
736
738 def _filter(self, filterpats, filename, data):
737 def _filter(self, filterpats, filename, data):
739 for mf, fn, cmd in filterpats:
738 for mf, fn, cmd in filterpats:
740 if mf(filename):
739 if mf(filename):
741 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
740 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
742 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
741 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
743 break
742 break
744
743
745 return data
744 return data
746
745
    @propertycache
    def _encodefilterpats(self):
        # lazily loaded, cached 'encode' filter patterns
        return self._loadfilter('encode')
750
749
    @propertycache
    def _decodefilterpats(self):
        # lazily loaded, cached 'decode' filter patterns
        return self._loadfilter('decode')
754
753
    def adddatafilter(self, name, filter):
        """Register data filter 'filter' under 'name' for use in
        encode/decode filter commands."""
        self._datafilters[name] = filter
757
756
758 def wread(self, filename):
757 def wread(self, filename):
759 if self._link(filename):
758 if self._link(filename):
760 data = os.readlink(self.wjoin(filename))
759 data = os.readlink(self.wjoin(filename))
761 else:
760 else:
762 data = self.wopener.read(filename)
761 data = self.wopener.read(filename)
763 return self._filter(self._encodefilterpats, filename, data)
762 return self._filter(self._encodefilterpats, filename, data)
764
763
765 def wwrite(self, filename, data, flags):
764 def wwrite(self, filename, data, flags):
766 data = self._filter(self._decodefilterpats, filename, data)
765 data = self._filter(self._decodefilterpats, filename, data)
767 if 'l' in flags:
766 if 'l' in flags:
768 self.wopener.symlink(data, filename)
767 self.wopener.symlink(data, filename)
769 else:
768 else:
770 self.wopener.write(filename, data)
769 self.wopener.write(filename, data)
771 if 'x' in flags:
770 if 'x' in flags:
772 util.setflags(self.wjoin(filename), False, True)
771 util.setflags(self.wjoin(filename), False, True)
773
772
    def wwritedata(self, filename, data):
        """Return 'data' run through the decode filters for 'filename',
        without writing anything."""
        return self._filter(self._decodefilterpats, filename, data)
776
775
    def transaction(self, desc):
        """Open (or nest into) a store transaction described by 'desc'.

        Returns a transaction object; on success the journal files are
        renamed to their undo counterparts so the transaction can be
        rolled back later.  Raises RepoError if an abandoned journal is
        found.
        """
        # nest inside an already-running transaction when there is one
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(
                _("abandoned transaction found - run hg recover"))

        self._writejournal(desc)
        renames = [(x, undoname(x)) for x in self._journalfiles()]

        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        # keep only a weak reference so the transaction can be collected
        self._transref = weakref.ref(tr)
        return tr
796
795
    def _journalfiles(self):
        """Return the tuple of journal file paths written for a
        transaction (store and non-store)."""
        return (self.sjoin('journal'), self.join('journal.dirstate'),
                self.join('journal.branch'), self.join('journal.desc'),
                self.join('journal.bookmarks'),
                self.sjoin('journal.phaseroots'))
802
801
803 def undofiles(self):
802 def undofiles(self):
804 return [undoname(x) for x in self._journalfiles()]
803 return [undoname(x) for x in self._journalfiles()]
805
804
    def _writejournal(self, desc):
        """Snapshot pre-transaction state into the journal files so the
        transaction can be undone.  'desc' is recorded in journal.desc."""
        self.opener.write("journal.dirstate",
                          self.opener.tryread("dirstate"))
        self.opener.write("journal.branch",
                          encoding.fromlocal(self.dirstate.branch()))
        # journal.desc records the pre-transaction length and description
        self.opener.write("journal.desc",
                          "%d\n%s\n" % (len(self), desc))
        self.opener.write("journal.bookmarks",
                          self.opener.tryread("bookmarks"))
        self.sopener.write("journal.phaseroots",
                           self.sopener.tryread("phaseroots"))
817
816
    def recover(self):
        """Roll back an interrupted transaction, if any.

        Returns True when a journal was found and rolled back, False
        otherwise.
        """
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"),
                                     self.ui.warn)
                # caches may reference rolled-back data: drop them
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
832
831
833 def rollback(self, dryrun=False, force=False):
832 def rollback(self, dryrun=False, force=False):
834 wlock = lock = None
833 wlock = lock = None
835 try:
834 try:
836 wlock = self.wlock()
835 wlock = self.wlock()
837 lock = self.lock()
836 lock = self.lock()
838 if os.path.exists(self.sjoin("undo")):
837 if os.path.exists(self.sjoin("undo")):
839 return self._rollback(dryrun, force)
838 return self._rollback(dryrun, force)
840 else:
839 else:
841 self.ui.warn(_("no rollback information available\n"))
840 self.ui.warn(_("no rollback information available\n"))
842 return 1
841 return 1
843 finally:
842 finally:
844 release(lock, wlock)
843 release(lock, wlock)
845
844
    def _rollback(self, dryrun, force):
        """Perform the actual rollback of the last transaction.

        Refuses to roll back a commit while not checked out at tip unless
        'force' is set.  With 'dryrun' only the status message is printed.
        Returns 0.
        """
        ui = self.ui
        try:
            # undo.desc holds the pre-transaction length and description
            args = self.opener.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
        # restore bookmarks and phase roots saved before the transaction
        if os.path.exists(self.join('undo.bookmarks')):
            util.rename(self.join('undo.bookmarks'),
                        self.join('bookmarks'))
        if os.path.exists(self.sjoin('undo.phaseroots')):
            util.rename(self.sjoin('undo.phaseroots'),
                        self.sjoin('phaseroots'))
        self.invalidate()

        # if the rollback removed a dirstate parent, restore the saved
        # dirstate and branch as well
        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            util.rename(self.join('undo.dirstate'), self.join('dirstate'))
            try:
                branch = self.opener.read('undo.branch')
                self.dirstate.setbranch(branch)
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0
911
910
912 def invalidatecaches(self):
911 def invalidatecaches(self):
913 def delcache(name):
912 def delcache(name):
914 try:
913 try:
915 delattr(self, name)
914 delattr(self, name)
916 except AttributeError:
915 except AttributeError:
917 pass
916 pass
918
917
919 delcache('_tagscache')
918 delcache('_tagscache')
920
919
921 self._branchcache = None # in UTF-8
920 self._branchcache = None # in UTF-8
922 self._branchcachetip = None
921 self._branchcachetip = None
923
922
    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This is different to dirstate.invalidate() that it doesn't always
        rereads the dirstate. Use dirstate.invalidate() if you want to
        explicitly read the dirstate again (i.e. restoring it to a previous
        known good state).'''
        if 'dirstate' in self.__dict__:
            # clear the dirstate's own filecache entries before dropping
            # the cached dirstate object itself
            for k in self.dirstate._filecache:
                try:
                    delattr(self.dirstate, k)
                except AttributeError:
                    pass
            delattr(self, 'dirstate')
940
939
941 def invalidate(self):
940 def invalidate(self):
942 for k in self._filecache:
941 for k in self._filecache:
943 # dirstate is invalidated separately in invalidatedirstate()
942 # dirstate is invalidated separately in invalidatedirstate()
944 if k == 'dirstate':
943 if k == 'dirstate':
945 continue
944 continue
946
945
947 try:
946 try:
948 delattr(self, k)
947 delattr(self, k)
949 except AttributeError:
948 except AttributeError:
950 pass
949 pass
951 self.invalidatecaches()
950 self.invalidatecaches()
952
951
953 # Discard all cache entries to force reloading everything.
952 # Discard all cache entries to force reloading everything.
954 self._filecache.clear()
953 self._filecache.clear()
955
954
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock file 'lockname' and return the lock object.

        When the lock is held elsewhere: raise immediately if 'wait' is
        false, otherwise warn and retry with the configured timeout.
        'releasefn' runs on release, 'acquirefn' right after acquisition.
        """
        try:
            # first attempt: do not block
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
970
969
971 def _afterlock(self, callback):
970 def _afterlock(self, callback):
972 """add a callback to the current repository lock.
971 """add a callback to the current repository lock.
973
972
974 The callback will be executed on lock release."""
973 The callback will be executed on lock release."""
975 l = self._lockref and self._lockref()
974 l = self._lockref and self._lockref()
976 if l:
975 if l:
977 l.postrelease.append(callback)
976 l.postrelease.append(callback)
978 else:
977 else:
979 callback()
978 callback()
980
979
    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.)'''
        # reuse the existing lock if it is still held
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            l.lock()
            return l

        def unlock():
            # flush pending store writes, phase data and new obsolescence
            # markers when the lock is released
            self.store.write()
            if '_phasecache' in vars(self):
                self._phasecache.write()
            if 'obsstore' in vars(self) and self.obsstore._new:
                # XXX: transaction logic should be used here. But for
                # now rewriting the whole file is good enough.
                f = self.sopener('obsstore', 'wb', atomictemp=True)
                try:
                    self.obsstore.flushmarkers(f)
                    f.close()
                except: # re-raises
                    f.discard()
                    raise
            for k, ce in self._filecache.items():
                if k == 'dirstate':
                    continue
                ce.refresh()

        l = self._lock(self.sjoin("lock"), wait, unlock,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
1013
1012
    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        # reuse the existing working-dir lock if it is still held
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            l.lock()
            return l

        def unlock():
            # flush the dirstate and refresh its filecache entry on release
            self.dirstate.write()
            ce = self._filecache.get('dirstate')
            if ce:
                ce.refresh()

        l = self._lock(self.join("wlock"), wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
1034
1033
1035 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1034 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1036 """
1035 """
1037 commit an individual file as part of a larger transaction
1036 commit an individual file as part of a larger transaction
1038 """
1037 """
1039
1038
1040 fname = fctx.path()
1039 fname = fctx.path()
1041 text = fctx.data()
1040 text = fctx.data()
1042 flog = self.file(fname)
1041 flog = self.file(fname)
1043 fparent1 = manifest1.get(fname, nullid)
1042 fparent1 = manifest1.get(fname, nullid)
1044 fparent2 = fparent2o = manifest2.get(fname, nullid)
1043 fparent2 = fparent2o = manifest2.get(fname, nullid)
1045
1044
1046 meta = {}
1045 meta = {}
1047 copy = fctx.renamed()
1046 copy = fctx.renamed()
1048 if copy and copy[0] != fname:
1047 if copy and copy[0] != fname:
1049 # Mark the new revision of this file as a copy of another
1048 # Mark the new revision of this file as a copy of another
1050 # file. This copy data will effectively act as a parent
1049 # file. This copy data will effectively act as a parent
1051 # of this new revision. If this is a merge, the first
1050 # of this new revision. If this is a merge, the first
1052 # parent will be the nullid (meaning "look up the copy data")
1051 # parent will be the nullid (meaning "look up the copy data")
1053 # and the second one will be the other parent. For example:
1052 # and the second one will be the other parent. For example:
1054 #
1053 #
1055 # 0 --- 1 --- 3 rev1 changes file foo
1054 # 0 --- 1 --- 3 rev1 changes file foo
1056 # \ / rev2 renames foo to bar and changes it
1055 # \ / rev2 renames foo to bar and changes it
1057 # \- 2 -/ rev3 should have bar with all changes and
1056 # \- 2 -/ rev3 should have bar with all changes and
1058 # should record that bar descends from
1057 # should record that bar descends from
1059 # bar in rev2 and foo in rev1
1058 # bar in rev2 and foo in rev1
1060 #
1059 #
1061 # this allows this merge to succeed:
1060 # this allows this merge to succeed:
1062 #
1061 #
1063 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1062 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1064 # \ / merging rev3 and rev4 should use bar@rev2
1063 # \ / merging rev3 and rev4 should use bar@rev2
1065 # \- 2 --- 4 as the merge base
1064 # \- 2 --- 4 as the merge base
1066 #
1065 #
1067
1066
1068 cfname = copy[0]
1067 cfname = copy[0]
1069 crev = manifest1.get(cfname)
1068 crev = manifest1.get(cfname)
1070 newfparent = fparent2
1069 newfparent = fparent2
1071
1070
1072 if manifest2: # branch merge
1071 if manifest2: # branch merge
1073 if fparent2 == nullid or crev is None: # copied on remote side
1072 if fparent2 == nullid or crev is None: # copied on remote side
1074 if cfname in manifest2:
1073 if cfname in manifest2:
1075 crev = manifest2[cfname]
1074 crev = manifest2[cfname]
1076 newfparent = fparent1
1075 newfparent = fparent1
1077
1076
1078 # find source in nearest ancestor if we've lost track
1077 # find source in nearest ancestor if we've lost track
1079 if not crev:
1078 if not crev:
1080 self.ui.debug(" %s: searching for copy revision for %s\n" %
1079 self.ui.debug(" %s: searching for copy revision for %s\n" %
1081 (fname, cfname))
1080 (fname, cfname))
1082 for ancestor in self[None].ancestors():
1081 for ancestor in self[None].ancestors():
1083 if cfname in ancestor:
1082 if cfname in ancestor:
1084 crev = ancestor[cfname].filenode()
1083 crev = ancestor[cfname].filenode()
1085 break
1084 break
1086
1085
1087 if crev:
1086 if crev:
1088 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1087 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1089 meta["copy"] = cfname
1088 meta["copy"] = cfname
1090 meta["copyrev"] = hex(crev)
1089 meta["copyrev"] = hex(crev)
1091 fparent1, fparent2 = nullid, newfparent
1090 fparent1, fparent2 = nullid, newfparent
1092 else:
1091 else:
1093 self.ui.warn(_("warning: can't find ancestor for '%s' "
1092 self.ui.warn(_("warning: can't find ancestor for '%s' "
1094 "copied from '%s'!\n") % (fname, cfname))
1093 "copied from '%s'!\n") % (fname, cfname))
1095
1094
1096 elif fparent2 != nullid:
1095 elif fparent2 != nullid:
1097 # is one parent an ancestor of the other?
1096 # is one parent an ancestor of the other?
1098 fparentancestor = flog.ancestor(fparent1, fparent2)
1097 fparentancestor = flog.ancestor(fparent1, fparent2)
1099 if fparentancestor == fparent1:
1098 if fparentancestor == fparent1:
1100 fparent1, fparent2 = fparent2, nullid
1099 fparent1, fparent2 = fparent2, nullid
1101 elif fparentancestor == fparent2:
1100 elif fparentancestor == fparent2:
1102 fparent2 = nullid
1101 fparent2 = nullid
1103
1102
1104 # is the file changed?
1103 # is the file changed?
1105 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1104 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1106 changelist.append(fname)
1105 changelist.append(fname)
1107 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1106 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1108
1107
1109 # are just the flags changed during merge?
1108 # are just the flags changed during merge?
1110 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1109 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1111 changelist.append(fname)
1110 changelist.append(fname)
1112
1111
1113 return fparent1
1112 return fparent1
1114
1113
1115 def commit(self, text="", user=None, date=None, match=None, force=False,
1114 def commit(self, text="", user=None, date=None, match=None, force=False,
1116 editor=False, extra={}):
1115 editor=False, extra={}):
1117 """Add a new revision to current repository.
1116 """Add a new revision to current repository.
1118
1117
1119 Revision information is gathered from the working directory,
1118 Revision information is gathered from the working directory,
1120 match can be used to filter the committed files. If editor is
1119 match can be used to filter the committed files. If editor is
1121 supplied, it is called to get a commit message.
1120 supplied, it is called to get a commit message.
1122 """
1121 """
1123
1122
1124 def fail(f, msg):
1123 def fail(f, msg):
1125 raise util.Abort('%s: %s' % (f, msg))
1124 raise util.Abort('%s: %s' % (f, msg))
1126
1125
1127 if not match:
1126 if not match:
1128 match = matchmod.always(self.root, '')
1127 match = matchmod.always(self.root, '')
1129
1128
1130 if not force:
1129 if not force:
1131 vdirs = []
1130 vdirs = []
1132 match.dir = vdirs.append
1131 match.dir = vdirs.append
1133 match.bad = fail
1132 match.bad = fail
1134
1133
1135 wlock = self.wlock()
1134 wlock = self.wlock()
1136 try:
1135 try:
1137 wctx = self[None]
1136 wctx = self[None]
1138 merge = len(wctx.parents()) > 1
1137 merge = len(wctx.parents()) > 1
1139
1138
1140 if (not force and merge and match and
1139 if (not force and merge and match and
1141 (match.files() or match.anypats())):
1140 (match.files() or match.anypats())):
1142 raise util.Abort(_('cannot partially commit a merge '
1141 raise util.Abort(_('cannot partially commit a merge '
1143 '(do not specify files or patterns)'))
1142 '(do not specify files or patterns)'))
1144
1143
1145 changes = self.status(match=match, clean=force)
1144 changes = self.status(match=match, clean=force)
1146 if force:
1145 if force:
1147 changes[0].extend(changes[6]) # mq may commit unchanged files
1146 changes[0].extend(changes[6]) # mq may commit unchanged files
1148
1147
1149 # check subrepos
1148 # check subrepos
1150 subs = []
1149 subs = []
1151 commitsubs = set()
1150 commitsubs = set()
1152 newstate = wctx.substate.copy()
1151 newstate = wctx.substate.copy()
1153 # only manage subrepos and .hgsubstate if .hgsub is present
1152 # only manage subrepos and .hgsubstate if .hgsub is present
1154 if '.hgsub' in wctx:
1153 if '.hgsub' in wctx:
1155 # we'll decide whether to track this ourselves, thanks
1154 # we'll decide whether to track this ourselves, thanks
1156 if '.hgsubstate' in changes[0]:
1155 if '.hgsubstate' in changes[0]:
1157 changes[0].remove('.hgsubstate')
1156 changes[0].remove('.hgsubstate')
1158 if '.hgsubstate' in changes[2]:
1157 if '.hgsubstate' in changes[2]:
1159 changes[2].remove('.hgsubstate')
1158 changes[2].remove('.hgsubstate')
1160
1159
1161 # compare current state to last committed state
1160 # compare current state to last committed state
1162 # build new substate based on last committed state
1161 # build new substate based on last committed state
1163 oldstate = wctx.p1().substate
1162 oldstate = wctx.p1().substate
1164 for s in sorted(newstate.keys()):
1163 for s in sorted(newstate.keys()):
1165 if not match(s):
1164 if not match(s):
1166 # ignore working copy, use old state if present
1165 # ignore working copy, use old state if present
1167 if s in oldstate:
1166 if s in oldstate:
1168 newstate[s] = oldstate[s]
1167 newstate[s] = oldstate[s]
1169 continue
1168 continue
1170 if not force:
1169 if not force:
1171 raise util.Abort(
1170 raise util.Abort(
1172 _("commit with new subrepo %s excluded") % s)
1171 _("commit with new subrepo %s excluded") % s)
1173 if wctx.sub(s).dirty(True):
1172 if wctx.sub(s).dirty(True):
1174 if not self.ui.configbool('ui', 'commitsubrepos'):
1173 if not self.ui.configbool('ui', 'commitsubrepos'):
1175 raise util.Abort(
1174 raise util.Abort(
1176 _("uncommitted changes in subrepo %s") % s,
1175 _("uncommitted changes in subrepo %s") % s,
1177 hint=_("use --subrepos for recursive commit"))
1176 hint=_("use --subrepos for recursive commit"))
1178 subs.append(s)
1177 subs.append(s)
1179 commitsubs.add(s)
1178 commitsubs.add(s)
1180 else:
1179 else:
1181 bs = wctx.sub(s).basestate()
1180 bs = wctx.sub(s).basestate()
1182 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1181 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1183 if oldstate.get(s, (None, None, None))[1] != bs:
1182 if oldstate.get(s, (None, None, None))[1] != bs:
1184 subs.append(s)
1183 subs.append(s)
1185
1184
1186 # check for removed subrepos
1185 # check for removed subrepos
1187 for p in wctx.parents():
1186 for p in wctx.parents():
1188 r = [s for s in p.substate if s not in newstate]
1187 r = [s for s in p.substate if s not in newstate]
1189 subs += [s for s in r if match(s)]
1188 subs += [s for s in r if match(s)]
1190 if subs:
1189 if subs:
1191 if (not match('.hgsub') and
1190 if (not match('.hgsub') and
1192 '.hgsub' in (wctx.modified() + wctx.added())):
1191 '.hgsub' in (wctx.modified() + wctx.added())):
1193 raise util.Abort(
1192 raise util.Abort(
1194 _("can't commit subrepos without .hgsub"))
1193 _("can't commit subrepos without .hgsub"))
1195 changes[0].insert(0, '.hgsubstate')
1194 changes[0].insert(0, '.hgsubstate')
1196
1195
1197 elif '.hgsub' in changes[2]:
1196 elif '.hgsub' in changes[2]:
1198 # clean up .hgsubstate when .hgsub is removed
1197 # clean up .hgsubstate when .hgsub is removed
1199 if ('.hgsubstate' in wctx and
1198 if ('.hgsubstate' in wctx and
1200 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1199 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1201 changes[2].insert(0, '.hgsubstate')
1200 changes[2].insert(0, '.hgsubstate')
1202
1201
1203 # make sure all explicit patterns are matched
1202 # make sure all explicit patterns are matched
1204 if not force and match.files():
1203 if not force and match.files():
1205 matched = set(changes[0] + changes[1] + changes[2])
1204 matched = set(changes[0] + changes[1] + changes[2])
1206
1205
1207 for f in match.files():
1206 for f in match.files():
1208 if f == '.' or f in matched or f in wctx.substate:
1207 if f == '.' or f in matched or f in wctx.substate:
1209 continue
1208 continue
1210 if f in changes[3]: # missing
1209 if f in changes[3]: # missing
1211 fail(f, _('file not found!'))
1210 fail(f, _('file not found!'))
1212 if f in vdirs: # visited directory
1211 if f in vdirs: # visited directory
1213 d = f + '/'
1212 d = f + '/'
1214 for mf in matched:
1213 for mf in matched:
1215 if mf.startswith(d):
1214 if mf.startswith(d):
1216 break
1215 break
1217 else:
1216 else:
1218 fail(f, _("no match under directory!"))
1217 fail(f, _("no match under directory!"))
1219 elif f not in self.dirstate:
1218 elif f not in self.dirstate:
1220 fail(f, _("file not tracked!"))
1219 fail(f, _("file not tracked!"))
1221
1220
1222 if (not force and not extra.get("close") and not merge
1221 if (not force and not extra.get("close") and not merge
1223 and not (changes[0] or changes[1] or changes[2])
1222 and not (changes[0] or changes[1] or changes[2])
1224 and wctx.branch() == wctx.p1().branch()):
1223 and wctx.branch() == wctx.p1().branch()):
1225 return None
1224 return None
1226
1225
1227 if merge and changes[3]:
1226 if merge and changes[3]:
1228 raise util.Abort(_("cannot commit merge with missing files"))
1227 raise util.Abort(_("cannot commit merge with missing files"))
1229
1228
1230 ms = mergemod.mergestate(self)
1229 ms = mergemod.mergestate(self)
1231 for f in changes[0]:
1230 for f in changes[0]:
1232 if f in ms and ms[f] == 'u':
1231 if f in ms and ms[f] == 'u':
1233 raise util.Abort(_("unresolved merge conflicts "
1232 raise util.Abort(_("unresolved merge conflicts "
1234 "(see hg help resolve)"))
1233 "(see hg help resolve)"))
1235
1234
1236 cctx = context.workingctx(self, text, user, date, extra, changes)
1235 cctx = context.workingctx(self, text, user, date, extra, changes)
1237 if editor:
1236 if editor:
1238 cctx._text = editor(self, cctx, subs)
1237 cctx._text = editor(self, cctx, subs)
1239 edited = (text != cctx._text)
1238 edited = (text != cctx._text)
1240
1239
1241 # commit subs and write new state
1240 # commit subs and write new state
1242 if subs:
1241 if subs:
1243 for s in sorted(commitsubs):
1242 for s in sorted(commitsubs):
1244 sub = wctx.sub(s)
1243 sub = wctx.sub(s)
1245 self.ui.status(_('committing subrepository %s\n') %
1244 self.ui.status(_('committing subrepository %s\n') %
1246 subrepo.subrelpath(sub))
1245 subrepo.subrelpath(sub))
1247 sr = sub.commit(cctx._text, user, date)
1246 sr = sub.commit(cctx._text, user, date)
1248 newstate[s] = (newstate[s][0], sr)
1247 newstate[s] = (newstate[s][0], sr)
1249 subrepo.writestate(self, newstate)
1248 subrepo.writestate(self, newstate)
1250
1249
1251 # Save commit message in case this transaction gets rolled back
1250 # Save commit message in case this transaction gets rolled back
1252 # (e.g. by a pretxncommit hook). Leave the content alone on
1251 # (e.g. by a pretxncommit hook). Leave the content alone on
1253 # the assumption that the user will use the same editor again.
1252 # the assumption that the user will use the same editor again.
1254 msgfn = self.savecommitmessage(cctx._text)
1253 msgfn = self.savecommitmessage(cctx._text)
1255
1254
1256 p1, p2 = self.dirstate.parents()
1255 p1, p2 = self.dirstate.parents()
1257 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1256 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1258 try:
1257 try:
1259 self.hook("precommit", throw=True, parent1=hookp1,
1258 self.hook("precommit", throw=True, parent1=hookp1,
1260 parent2=hookp2)
1259 parent2=hookp2)
1261 ret = self.commitctx(cctx, True)
1260 ret = self.commitctx(cctx, True)
1262 except: # re-raises
1261 except: # re-raises
1263 if edited:
1262 if edited:
1264 self.ui.write(
1263 self.ui.write(
1265 _('note: commit message saved in %s\n') % msgfn)
1264 _('note: commit message saved in %s\n') % msgfn)
1266 raise
1265 raise
1267
1266
1268 # update bookmarks, dirstate and mergestate
1267 # update bookmarks, dirstate and mergestate
1269 bookmarks.update(self, [p1, p2], ret)
1268 bookmarks.update(self, [p1, p2], ret)
1270 for f in changes[0] + changes[1]:
1269 for f in changes[0] + changes[1]:
1271 self.dirstate.normal(f)
1270 self.dirstate.normal(f)
1272 for f in changes[2]:
1271 for f in changes[2]:
1273 self.dirstate.drop(f)
1272 self.dirstate.drop(f)
1274 self.dirstate.setparents(ret)
1273 self.dirstate.setparents(ret)
1275 ms.reset()
1274 ms.reset()
1276 finally:
1275 finally:
1277 wlock.release()
1276 wlock.release()
1278
1277
1279 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1278 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1280 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1279 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1281 self._afterlock(commithook)
1280 self._afterlock(commithook)
1282 return ret
1281 return ret
1283
1282
1284 def commitctx(self, ctx, error=False):
1283 def commitctx(self, ctx, error=False):
1285 """Add a new revision to current repository.
1284 """Add a new revision to current repository.
1286 Revision information is passed via the context argument.
1285 Revision information is passed via the context argument.
1287 """
1286 """
1288
1287
1289 tr = lock = None
1288 tr = lock = None
1290 removed = list(ctx.removed())
1289 removed = list(ctx.removed())
1291 p1, p2 = ctx.p1(), ctx.p2()
1290 p1, p2 = ctx.p1(), ctx.p2()
1292 user = ctx.user()
1291 user = ctx.user()
1293
1292
1294 lock = self.lock()
1293 lock = self.lock()
1295 try:
1294 try:
1296 tr = self.transaction("commit")
1295 tr = self.transaction("commit")
1297 trp = weakref.proxy(tr)
1296 trp = weakref.proxy(tr)
1298
1297
1299 if ctx.files():
1298 if ctx.files():
1300 m1 = p1.manifest().copy()
1299 m1 = p1.manifest().copy()
1301 m2 = p2.manifest()
1300 m2 = p2.manifest()
1302
1301
1303 # check in files
1302 # check in files
1304 new = {}
1303 new = {}
1305 changed = []
1304 changed = []
1306 linkrev = len(self)
1305 linkrev = len(self)
1307 for f in sorted(ctx.modified() + ctx.added()):
1306 for f in sorted(ctx.modified() + ctx.added()):
1308 self.ui.note(f + "\n")
1307 self.ui.note(f + "\n")
1309 try:
1308 try:
1310 fctx = ctx[f]
1309 fctx = ctx[f]
1311 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1310 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1312 changed)
1311 changed)
1313 m1.set(f, fctx.flags())
1312 m1.set(f, fctx.flags())
1314 except OSError, inst:
1313 except OSError, inst:
1315 self.ui.warn(_("trouble committing %s!\n") % f)
1314 self.ui.warn(_("trouble committing %s!\n") % f)
1316 raise
1315 raise
1317 except IOError, inst:
1316 except IOError, inst:
1318 errcode = getattr(inst, 'errno', errno.ENOENT)
1317 errcode = getattr(inst, 'errno', errno.ENOENT)
1319 if error or errcode and errcode != errno.ENOENT:
1318 if error or errcode and errcode != errno.ENOENT:
1320 self.ui.warn(_("trouble committing %s!\n") % f)
1319 self.ui.warn(_("trouble committing %s!\n") % f)
1321 raise
1320 raise
1322 else:
1321 else:
1323 removed.append(f)
1322 removed.append(f)
1324
1323
1325 # update manifest
1324 # update manifest
1326 m1.update(new)
1325 m1.update(new)
1327 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1326 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1328 drop = [f for f in removed if f in m1]
1327 drop = [f for f in removed if f in m1]
1329 for f in drop:
1328 for f in drop:
1330 del m1[f]
1329 del m1[f]
1331 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1330 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1332 p2.manifestnode(), (new, drop))
1331 p2.manifestnode(), (new, drop))
1333 files = changed + removed
1332 files = changed + removed
1334 else:
1333 else:
1335 mn = p1.manifestnode()
1334 mn = p1.manifestnode()
1336 files = []
1335 files = []
1337
1336
1338 # update changelog
1337 # update changelog
1339 self.changelog.delayupdate()
1338 self.changelog.delayupdate()
1340 n = self.changelog.add(mn, files, ctx.description(),
1339 n = self.changelog.add(mn, files, ctx.description(),
1341 trp, p1.node(), p2.node(),
1340 trp, p1.node(), p2.node(),
1342 user, ctx.date(), ctx.extra().copy())
1341 user, ctx.date(), ctx.extra().copy())
1343 p = lambda: self.changelog.writepending() and self.root or ""
1342 p = lambda: self.changelog.writepending() and self.root or ""
1344 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1343 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1345 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1344 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1346 parent2=xp2, pending=p)
1345 parent2=xp2, pending=p)
1347 self.changelog.finalize(trp)
1346 self.changelog.finalize(trp)
1348 # set the new commit is proper phase
1347 # set the new commit is proper phase
1349 targetphase = phases.newcommitphase(self.ui)
1348 targetphase = phases.newcommitphase(self.ui)
1350 if targetphase:
1349 if targetphase:
1351 # retract boundary do not alter parent changeset.
1350 # retract boundary do not alter parent changeset.
1352 # if a parent have higher the resulting phase will
1351 # if a parent have higher the resulting phase will
1353 # be compliant anyway
1352 # be compliant anyway
1354 #
1353 #
1355 # if minimal phase was 0 we don't need to retract anything
1354 # if minimal phase was 0 we don't need to retract anything
1356 phases.retractboundary(self, targetphase, [n])
1355 phases.retractboundary(self, targetphase, [n])
1357 tr.close()
1356 tr.close()
1358 self.updatebranchcache()
1357 self.updatebranchcache()
1359 return n
1358 return n
1360 finally:
1359 finally:
1361 if tr:
1360 if tr:
1362 tr.release()
1361 tr.release()
1363 lock.release()
1362 lock.release()
1364
1363
1365 def destroyed(self, newheadnodes=None):
1364 def destroyed(self, newheadnodes=None):
1366 '''Inform the repository that nodes have been destroyed.
1365 '''Inform the repository that nodes have been destroyed.
1367 Intended for use by strip and rollback, so there's a common
1366 Intended for use by strip and rollback, so there's a common
1368 place for anything that has to be done after destroying history.
1367 place for anything that has to be done after destroying history.
1369
1368
1370 If you know the branchheadcache was uptodate before nodes were removed
1369 If you know the branchheadcache was uptodate before nodes were removed
1371 and you also know the set of candidate new heads that may have resulted
1370 and you also know the set of candidate new heads that may have resulted
1372 from the destruction, you can set newheadnodes. This will enable the
1371 from the destruction, you can set newheadnodes. This will enable the
1373 code to update the branchheads cache, rather than having future code
1372 code to update the branchheads cache, rather than having future code
1374 decide it's invalid and regenrating it from scratch.
1373 decide it's invalid and regenrating it from scratch.
1375 '''
1374 '''
1376 # If we have info, newheadnodes, on how to update the branch cache, do
1375 # If we have info, newheadnodes, on how to update the branch cache, do
1377 # it, Otherwise, since nodes were destroyed, the cache is stale and this
1376 # it, Otherwise, since nodes were destroyed, the cache is stale and this
1378 # will be caught the next time it is read.
1377 # will be caught the next time it is read.
1379 if newheadnodes:
1378 if newheadnodes:
1380 tiprev = len(self) - 1
1379 tiprev = len(self) - 1
1381 ctxgen = (self[node] for node in newheadnodes
1380 ctxgen = (self[node] for node in newheadnodes
1382 if self.changelog.hasnode(node))
1381 if self.changelog.hasnode(node))
1383 self._updatebranchcache(self._branchcache, ctxgen)
1382 self._updatebranchcache(self._branchcache, ctxgen)
1384 self._writebranchcache(self._branchcache, self.changelog.tip(),
1383 self._writebranchcache(self._branchcache, self.changelog.tip(),
1385 tiprev)
1384 tiprev)
1386
1385
1387 # Ensure the persistent tag cache is updated. Doing it now
1386 # Ensure the persistent tag cache is updated. Doing it now
1388 # means that the tag cache only has to worry about destroyed
1387 # means that the tag cache only has to worry about destroyed
1389 # heads immediately after a strip/rollback. That in turn
1388 # heads immediately after a strip/rollback. That in turn
1390 # guarantees that "cachetip == currenttip" (comparing both rev
1389 # guarantees that "cachetip == currenttip" (comparing both rev
1391 # and node) always means no nodes have been added or destroyed.
1390 # and node) always means no nodes have been added or destroyed.
1392
1391
1393 # XXX this is suboptimal when qrefresh'ing: we strip the current
1392 # XXX this is suboptimal when qrefresh'ing: we strip the current
1394 # head, refresh the tag cache, then immediately add a new head.
1393 # head, refresh the tag cache, then immediately add a new head.
1395 # But I think doing it this way is necessary for the "instant
1394 # But I think doing it this way is necessary for the "instant
1396 # tag cache retrieval" case to work.
1395 # tag cache retrieval" case to work.
1397 self.invalidatecaches()
1396 self.invalidatecaches()
1398
1397
1399 def walk(self, match, node=None):
1398 def walk(self, match, node=None):
1400 '''
1399 '''
1401 walk recursively through the directory tree or a given
1400 walk recursively through the directory tree or a given
1402 changeset, finding all files matched by the match
1401 changeset, finding all files matched by the match
1403 function
1402 function
1404 '''
1403 '''
1405 return self[node].walk(match)
1404 return self[node].walk(match)
1406
1405
1407 def status(self, node1='.', node2=None, match=None,
1406 def status(self, node1='.', node2=None, match=None,
1408 ignored=False, clean=False, unknown=False,
1407 ignored=False, clean=False, unknown=False,
1409 listsubrepos=False):
1408 listsubrepos=False):
1410 """return status of files between two nodes or node and working
1409 """return status of files between two nodes or node and working
1411 directory.
1410 directory.
1412
1411
1413 If node1 is None, use the first dirstate parent instead.
1412 If node1 is None, use the first dirstate parent instead.
1414 If node2 is None, compare node1 with working directory.
1413 If node2 is None, compare node1 with working directory.
1415 """
1414 """
1416
1415
1417 def mfmatches(ctx):
1416 def mfmatches(ctx):
1418 mf = ctx.manifest().copy()
1417 mf = ctx.manifest().copy()
1419 if match.always():
1418 if match.always():
1420 return mf
1419 return mf
1421 for fn in mf.keys():
1420 for fn in mf.keys():
1422 if not match(fn):
1421 if not match(fn):
1423 del mf[fn]
1422 del mf[fn]
1424 return mf
1423 return mf
1425
1424
1426 if isinstance(node1, context.changectx):
1425 if isinstance(node1, context.changectx):
1427 ctx1 = node1
1426 ctx1 = node1
1428 else:
1427 else:
1429 ctx1 = self[node1]
1428 ctx1 = self[node1]
1430 if isinstance(node2, context.changectx):
1429 if isinstance(node2, context.changectx):
1431 ctx2 = node2
1430 ctx2 = node2
1432 else:
1431 else:
1433 ctx2 = self[node2]
1432 ctx2 = self[node2]
1434
1433
1435 working = ctx2.rev() is None
1434 working = ctx2.rev() is None
1436 parentworking = working and ctx1 == self['.']
1435 parentworking = working and ctx1 == self['.']
1437 match = match or matchmod.always(self.root, self.getcwd())
1436 match = match or matchmod.always(self.root, self.getcwd())
1438 listignored, listclean, listunknown = ignored, clean, unknown
1437 listignored, listclean, listunknown = ignored, clean, unknown
1439
1438
1440 # load earliest manifest first for caching reasons
1439 # load earliest manifest first for caching reasons
1441 if not working and ctx2.rev() < ctx1.rev():
1440 if not working and ctx2.rev() < ctx1.rev():
1442 ctx2.manifest()
1441 ctx2.manifest()
1443
1442
1444 if not parentworking:
1443 if not parentworking:
1445 def bad(f, msg):
1444 def bad(f, msg):
1446 # 'f' may be a directory pattern from 'match.files()',
1445 # 'f' may be a directory pattern from 'match.files()',
1447 # so 'f not in ctx1' is not enough
1446 # so 'f not in ctx1' is not enough
1448 if f not in ctx1 and f not in ctx1.dirs():
1447 if f not in ctx1 and f not in ctx1.dirs():
1449 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1448 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1450 match.bad = bad
1449 match.bad = bad
1451
1450
1452 if working: # we need to scan the working dir
1451 if working: # we need to scan the working dir
1453 subrepos = []
1452 subrepos = []
1454 if '.hgsub' in self.dirstate:
1453 if '.hgsub' in self.dirstate:
1455 subrepos = ctx2.substate.keys()
1454 subrepos = ctx2.substate.keys()
1456 s = self.dirstate.status(match, subrepos, listignored,
1455 s = self.dirstate.status(match, subrepos, listignored,
1457 listclean, listunknown)
1456 listclean, listunknown)
1458 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1457 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1459
1458
1460 # check for any possibly clean files
1459 # check for any possibly clean files
1461 if parentworking and cmp:
1460 if parentworking and cmp:
1462 fixup = []
1461 fixup = []
1463 # do a full compare of any files that might have changed
1462 # do a full compare of any files that might have changed
1464 for f in sorted(cmp):
1463 for f in sorted(cmp):
1465 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1464 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1466 or ctx1[f].cmp(ctx2[f])):
1465 or ctx1[f].cmp(ctx2[f])):
1467 modified.append(f)
1466 modified.append(f)
1468 else:
1467 else:
1469 fixup.append(f)
1468 fixup.append(f)
1470
1469
1471 # update dirstate for files that are actually clean
1470 # update dirstate for files that are actually clean
1472 if fixup:
1471 if fixup:
1473 if listclean:
1472 if listclean:
1474 clean += fixup
1473 clean += fixup
1475
1474
1476 try:
1475 try:
1477 # updating the dirstate is optional
1476 # updating the dirstate is optional
1478 # so we don't wait on the lock
1477 # so we don't wait on the lock
1479 wlock = self.wlock(False)
1478 wlock = self.wlock(False)
1480 try:
1479 try:
1481 for f in fixup:
1480 for f in fixup:
1482 self.dirstate.normal(f)
1481 self.dirstate.normal(f)
1483 finally:
1482 finally:
1484 wlock.release()
1483 wlock.release()
1485 except error.LockError:
1484 except error.LockError:
1486 pass
1485 pass
1487
1486
1488 if not parentworking:
1487 if not parentworking:
1489 mf1 = mfmatches(ctx1)
1488 mf1 = mfmatches(ctx1)
1490 if working:
1489 if working:
1491 # we are comparing working dir against non-parent
1490 # we are comparing working dir against non-parent
1492 # generate a pseudo-manifest for the working dir
1491 # generate a pseudo-manifest for the working dir
1493 mf2 = mfmatches(self['.'])
1492 mf2 = mfmatches(self['.'])
1494 for f in cmp + modified + added:
1493 for f in cmp + modified + added:
1495 mf2[f] = None
1494 mf2[f] = None
1496 mf2.set(f, ctx2.flags(f))
1495 mf2.set(f, ctx2.flags(f))
1497 for f in removed:
1496 for f in removed:
1498 if f in mf2:
1497 if f in mf2:
1499 del mf2[f]
1498 del mf2[f]
1500 else:
1499 else:
1501 # we are comparing two revisions
1500 # we are comparing two revisions
1502 deleted, unknown, ignored = [], [], []
1501 deleted, unknown, ignored = [], [], []
1503 mf2 = mfmatches(ctx2)
1502 mf2 = mfmatches(ctx2)
1504
1503
1505 modified, added, clean = [], [], []
1504 modified, added, clean = [], [], []
1506 withflags = mf1.withflags() | mf2.withflags()
1505 withflags = mf1.withflags() | mf2.withflags()
1507 for fn in mf2:
1506 for fn in mf2:
1508 if fn in mf1:
1507 if fn in mf1:
1509 if (fn not in deleted and
1508 if (fn not in deleted and
1510 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1509 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1511 (mf1[fn] != mf2[fn] and
1510 (mf1[fn] != mf2[fn] and
1512 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1511 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1513 modified.append(fn)
1512 modified.append(fn)
1514 elif listclean:
1513 elif listclean:
1515 clean.append(fn)
1514 clean.append(fn)
1516 del mf1[fn]
1515 del mf1[fn]
1517 elif fn not in deleted:
1516 elif fn not in deleted:
1518 added.append(fn)
1517 added.append(fn)
1519 removed = mf1.keys()
1518 removed = mf1.keys()
1520
1519
1521 if working and modified and not self.dirstate._checklink:
1520 if working and modified and not self.dirstate._checklink:
1522 # Symlink placeholders may get non-symlink-like contents
1521 # Symlink placeholders may get non-symlink-like contents
1523 # via user error or dereferencing by NFS or Samba servers,
1522 # via user error or dereferencing by NFS or Samba servers,
1524 # so we filter out any placeholders that don't look like a
1523 # so we filter out any placeholders that don't look like a
1525 # symlink
1524 # symlink
1526 sane = []
1525 sane = []
1527 for f in modified:
1526 for f in modified:
1528 if ctx2.flags(f) == 'l':
1527 if ctx2.flags(f) == 'l':
1529 d = ctx2[f].data()
1528 d = ctx2[f].data()
1530 if len(d) >= 1024 or '\n' in d or util.binary(d):
1529 if len(d) >= 1024 or '\n' in d or util.binary(d):
1531 self.ui.debug('ignoring suspect symlink placeholder'
1530 self.ui.debug('ignoring suspect symlink placeholder'
1532 ' "%s"\n' % f)
1531 ' "%s"\n' % f)
1533 continue
1532 continue
1534 sane.append(f)
1533 sane.append(f)
1535 modified = sane
1534 modified = sane
1536
1535
1537 r = modified, added, removed, deleted, unknown, ignored, clean
1536 r = modified, added, removed, deleted, unknown, ignored, clean
1538
1537
1539 if listsubrepos:
1538 if listsubrepos:
1540 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1539 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1541 if working:
1540 if working:
1542 rev2 = None
1541 rev2 = None
1543 else:
1542 else:
1544 rev2 = ctx2.substate[subpath][1]
1543 rev2 = ctx2.substate[subpath][1]
1545 try:
1544 try:
1546 submatch = matchmod.narrowmatcher(subpath, match)
1545 submatch = matchmod.narrowmatcher(subpath, match)
1547 s = sub.status(rev2, match=submatch, ignored=listignored,
1546 s = sub.status(rev2, match=submatch, ignored=listignored,
1548 clean=listclean, unknown=listunknown,
1547 clean=listclean, unknown=listunknown,
1549 listsubrepos=True)
1548 listsubrepos=True)
1550 for rfiles, sfiles in zip(r, s):
1549 for rfiles, sfiles in zip(r, s):
1551 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1550 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1552 except error.LookupError:
1551 except error.LookupError:
1553 self.ui.status(_("skipping missing subrepository: %s\n")
1552 self.ui.status(_("skipping missing subrepository: %s\n")
1554 % subpath)
1553 % subpath)
1555
1554
1556 for l in r:
1555 for l in r:
1557 l.sort()
1556 l.sort()
1558 return r
1557 return r
1559
1558
1560 def heads(self, start=None):
1559 def heads(self, start=None):
1561 heads = self.changelog.heads(start)
1560 heads = self.changelog.heads(start)
1562 # sort the output in rev descending order
1561 # sort the output in rev descending order
1563 return sorted(heads, key=self.changelog.rev, reverse=True)
1562 return sorted(heads, key=self.changelog.rev, reverse=True)
1564
1563
1565 def branchheads(self, branch=None, start=None, closed=False):
1564 def branchheads(self, branch=None, start=None, closed=False):
1566 '''return a (possibly filtered) list of heads for the given branch
1565 '''return a (possibly filtered) list of heads for the given branch
1567
1566
1568 Heads are returned in topological order, from newest to oldest.
1567 Heads are returned in topological order, from newest to oldest.
1569 If branch is None, use the dirstate branch.
1568 If branch is None, use the dirstate branch.
1570 If start is not None, return only heads reachable from start.
1569 If start is not None, return only heads reachable from start.
1571 If closed is True, return heads that are marked as closed as well.
1570 If closed is True, return heads that are marked as closed as well.
1572 '''
1571 '''
1573 if branch is None:
1572 if branch is None:
1574 branch = self[None].branch()
1573 branch = self[None].branch()
1575 branches = self.branchmap()
1574 branches = self.branchmap()
1576 if branch not in branches:
1575 if branch not in branches:
1577 return []
1576 return []
1578 # the cache returns heads ordered lowest to highest
1577 # the cache returns heads ordered lowest to highest
1579 bheads = list(reversed(branches[branch]))
1578 bheads = list(reversed(branches[branch]))
1580 if start is not None:
1579 if start is not None:
1581 # filter out the heads that cannot be reached from startrev
1580 # filter out the heads that cannot be reached from startrev
1582 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1581 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1583 bheads = [h for h in bheads if h in fbheads]
1582 bheads = [h for h in bheads if h in fbheads]
1584 if not closed:
1583 if not closed:
1585 bheads = [h for h in bheads if not self[h].closesbranch()]
1584 bheads = [h for h in bheads if not self[h].closesbranch()]
1586 return bheads
1585 return bheads
1587
1586
1588 def branches(self, nodes):
1587 def branches(self, nodes):
1589 if not nodes:
1588 if not nodes:
1590 nodes = [self.changelog.tip()]
1589 nodes = [self.changelog.tip()]
1591 b = []
1590 b = []
1592 for n in nodes:
1591 for n in nodes:
1593 t = n
1592 t = n
1594 while True:
1593 while True:
1595 p = self.changelog.parents(n)
1594 p = self.changelog.parents(n)
1596 if p[1] != nullid or p[0] == nullid:
1595 if p[1] != nullid or p[0] == nullid:
1597 b.append((t, n, p[0], p[1]))
1596 b.append((t, n, p[0], p[1]))
1598 break
1597 break
1599 n = p[0]
1598 n = p[0]
1600 return b
1599 return b
1601
1600
1602 def between(self, pairs):
1601 def between(self, pairs):
1603 r = []
1602 r = []
1604
1603
1605 for top, bottom in pairs:
1604 for top, bottom in pairs:
1606 n, l, i = top, [], 0
1605 n, l, i = top, [], 0
1607 f = 1
1606 f = 1
1608
1607
1609 while n != bottom and n != nullid:
1608 while n != bottom and n != nullid:
1610 p = self.changelog.parents(n)[0]
1609 p = self.changelog.parents(n)[0]
1611 if i == f:
1610 if i == f:
1612 l.append(n)
1611 l.append(n)
1613 f = f * 2
1612 f = f * 2
1614 n = p
1613 n = p
1615 i += 1
1614 i += 1
1616
1615
1617 r.append(l)
1616 r.append(l)
1618
1617
1619 return r
1618 return r
1620
1619
    def pull(self, remote, heads=None, force=False):
        """Pull changes from *remote* into this repository.

        heads: optional list of remote heads to limit the pull to.
        force: proceed even when the repositories appear unrelated.

        Returns the result of addchangegroup() (or 0 when no changes were
        found).  As a side effect also synchronizes phase information and,
        when the remote exposes them, obsolescence markers.
        """
        lock = self.lock()
        try:
            tmp = discovery.findcommonincoming(self, remote, heads=heads,
                                               force=force)
            common, fetch, rheads = tmp
            if not fetch:
                self.ui.status(_("no changes found\n"))
                added = []
                result = 0
            else:
                if heads is None and list(common) == [nullid]:
                    self.ui.status(_("requesting all changes\n"))
                elif heads is None and remote.capable('changegroupsubset'):
                    # issue1320, avoid a race if remote changed after discovery
                    heads = rheads

                # pick the richest transfer method the remote supports
                if remote.capable('getbundle'):
                    cg = remote.getbundle('pull', common=common,
                                          heads=heads or rheads)
                elif heads is None:
                    cg = remote.changegroup(fetch, 'pull')
                elif not remote.capable('changegroupsubset'):
                    raise util.Abort(_("partial pull cannot be done because "
                                       "other repository doesn't support "
                                       "changegroupsubset."))
                else:
                    cg = remote.changegroupsubset(fetch, heads, 'pull')
                clstart = len(self.changelog)
                result = self.addchangegroup(cg, 'pull', remote.url())
                clend = len(self.changelog)
                # nodes added to the local changelog by this pull
                added = [self.changelog.node(r) for r in xrange(clstart, clend)]

            # compute target subset
            if heads is None:
                # We pulled every thing possible
                # sync on everything common
                subset = common + added
            else:
                # We pulled a specific subset
                # sync on this subset
                subset = heads

            # Get remote phases data from remote
            remotephases = remote.listkeys('phases')
            publishing = bool(remotephases.get('publishing', False))
            if remotephases and not publishing:
                # remote is new and unpublishing
                pheads, _dr = phases.analyzeremotephases(self, subset,
                                                         remotephases)
                phases.advanceboundary(self, phases.public, pheads)
                phases.advanceboundary(self, phases.draft, subset)
            else:
                # Remote is old or publishing all common changesets
                # should be seen as public
                phases.advanceboundary(self, phases.public, subset)

            # obsolescence markers are exchanged through the 'obsolete'
            # pushkey namespace as a single base85-encoded 'dump' value
            remoteobs = remote.listkeys('obsolete')
            if 'dump' in remoteobs:
                data = base85.b85decode(remoteobs['dump'])
                self.obsstore.mergemarkers(data)
        finally:
            lock.release()

        return result
1681
1685
1682 def checkpush(self, force, revs):
1686 def checkpush(self, force, revs):
1683 """Extensions can override this function if additional checks have
1687 """Extensions can override this function if additional checks have
1684 to be performed before pushing, or call it if they override push
1688 to be performed before pushing, or call it if they override push
1685 command.
1689 command.
1686 """
1690 """
1687 pass
1691 pass
1688
1692
    def push(self, remote, force=False, revs=None, newbranch=False):
        '''Push outgoing changesets (limited by revs) from the current
        repository to remote. Return an integer:
          - None means nothing to push
          - 0 means HTTP error
          - 1 means we pushed and remote head count is unchanged *or*
            we have outgoing changesets but refused to push
          - other values as described by addchangegroup()
        '''
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        # get local lock as we might write phase data
        locallock = self.lock()
        try:
            self.checkpush(force, revs)
            lock = None
            unbundle = remote.capable('unbundle')
            if not unbundle:
                lock = remote.lock()
            try:
                # discovery
                fci = discovery.findcommonincoming
                commoninc = fci(self, remote, force=force)
                common, inc, remoteheads = commoninc
                fco = discovery.findcommonoutgoing
                outgoing = fco(self, remote, onlyheads=revs,
                               commoninc=commoninc, force=force)


                if not outgoing.missing:
                    # nothing to push
                    scmutil.nochangesfound(self.ui, outgoing.excluded)
                    ret = None
                else:
                    # something to push
                    if not force:
                        # refuse to create new remote heads unless forced
                        discovery.checkheads(self, remote, outgoing,
                                             remoteheads, newbranch,
                                             bool(inc))

                    # create a changegroup from local
                    if revs is None and not outgoing.excluded:
                        # push everything,
                        # use the fast path, no race possible on push
                        cg = self._changegroup(outgoing.missing, 'push')
                    else:
                        cg = self.getlocalbundle('push', outgoing)

                    # apply changegroup to remote
                    if unbundle:
                        # local repo finds heads on server, finds out what
                        # revs it must push. once revs transferred, if server
                        # finds it has different heads (someone else won
                        # commit/push race), server aborts.
                        if force:
                            remoteheads = ['force']
                        # ssh: return remote's addchangegroup()
                        # http: return remote's addchangegroup() or 0 for error
                        ret = remote.unbundle(cg, remoteheads, 'push')
                    else:
                        # we return an integer indicating remote head count
                        # change
                        ret = remote.addchangegroup(cg, 'push', self.url())

                if ret:
                    # push succeed, synchonize target of the push
                    cheads = outgoing.missingheads
                elif revs is None:
                    # All out push fails. synchronize all common
                    cheads = outgoing.commonheads
                else:
                    # I want cheads = heads(::missingheads and ::commonheads)
                    # (missingheads is revs with secret changeset filtered out)
                    #
                    # This can be expressed as:
                    #     cheads = ( (missingheads and ::commonheads)
                    #              + (commonheads and ::missingheads))"
                    #              )
                    #
                    # while trying to push we already computed the following:
                    #     common = (::commonheads)
                    #     missing = ((commonheads::missingheads) - commonheads)
                    #
                    # We can pick:
                    # * missingheads part of comon (::commonheads)
                    common = set(outgoing.common)
                    cheads = [node for node in revs if node in common]
                    # and
                    # * commonheads parents on missing
                    revset = self.set('%ln and parents(roots(%ln))',
                                     outgoing.commonheads,
                                     outgoing.missing)
                    cheads.extend(c.node() for c in revset)
                # even when we don't push, exchanging phase data is useful
                remotephases = remote.listkeys('phases')
                if not remotephases: # old server or public only repo
                    phases.advanceboundary(self, phases.public, cheads)
                    # don't push any phase data as there is nothing to push
                else:
                    ana = phases.analyzeremotephases(self, cheads, remotephases)
                    pheads, droots = ana
                    ### Apply remote phase on local
                    if remotephases.get('publishing', False):
                        phases.advanceboundary(self, phases.public, cheads)
                    else: # publish = False
                        phases.advanceboundary(self, phases.public, pheads)
                        phases.advanceboundary(self, phases.draft, cheads)
                    ### Apply local phase on remote

                    # Get the list of all revs draft on remote by public here.
                    # XXX Beware that revset break if droots is not strictly
                    # XXX root we may want to ensure it is but it is costly
                    outdated = self.set('heads((%ln::%ln) and public())',
                                        droots, cheads)
                    for newremotehead in outdated:
                        r = remote.pushkey('phases',
                                           newremotehead.hex(),
                                           str(phases.draft),
                                           str(phases.public))
                        if not r:
                            self.ui.warn(_('updating %s to public failed!\n')
                                            % newremotehead)
                # push obsolescence markers via the 'obsolete' pushkey
                # namespace, as a single base85-encoded dump (best effort:
                # failure only warns, it does not abort the push)
                if 'obsolete' in self.listkeys('namespaces') and self.obsstore:
                    data = self.obsstore._writemarkers()
                    r = remote.pushkey('obsolete', 'dump', '',
                                       base85.b85encode(data))
                    if not r:
                        self.ui.warn(_('failed to push obsolete markers!\n'))
            finally:
                if lock is not None:
                    lock.release()
        finally:
            locallock.release()

        # bookmark exchange happens outside the locks on purpose
        self.ui.debug("checking for updated bookmarks\n")
        rb = remote.listkeys('bookmarks')
        for k in rb.keys():
            if k in self._bookmarks:
                nr, nl = rb[k], hex(self._bookmarks[k])
                if nr in self:
                    cr = self[nr]
                    cl = self[nl]
                    # only fast-forward a bookmark the remote already has
                    if cl in cr.descendants():
                        r = remote.pushkey('bookmarks', k, nr, nl)
                        if r:
                            self.ui.status(_("updating bookmark %s\n") % k)
                        else:
                            self.ui.warn(_('updating bookmark %s'
                                           ' failed!\n') % k)

        return ret
1840
1850
1841 def changegroupinfo(self, nodes, source):
1851 def changegroupinfo(self, nodes, source):
1842 if self.ui.verbose or source == 'bundle':
1852 if self.ui.verbose or source == 'bundle':
1843 self.ui.status(_("%d changesets found\n") % len(nodes))
1853 self.ui.status(_("%d changesets found\n") % len(nodes))
1844 if self.ui.debugflag:
1854 if self.ui.debugflag:
1845 self.ui.debug("list of changesets:\n")
1855 self.ui.debug("list of changesets:\n")
1846 for node in nodes:
1856 for node in nodes:
1847 self.ui.debug("%s\n" % hex(node))
1857 self.ui.debug("%s\n" % hex(node))
1848
1858
1849 def changegroupsubset(self, bases, heads, source):
1859 def changegroupsubset(self, bases, heads, source):
1850 """Compute a changegroup consisting of all the nodes that are
1860 """Compute a changegroup consisting of all the nodes that are
1851 descendants of any of the bases and ancestors of any of the heads.
1861 descendants of any of the bases and ancestors of any of the heads.
1852 Return a chunkbuffer object whose read() method will return
1862 Return a chunkbuffer object whose read() method will return
1853 successive changegroup chunks.
1863 successive changegroup chunks.
1854
1864
1855 It is fairly complex as determining which filenodes and which
1865 It is fairly complex as determining which filenodes and which
1856 manifest nodes need to be included for the changeset to be complete
1866 manifest nodes need to be included for the changeset to be complete
1857 is non-trivial.
1867 is non-trivial.
1858
1868
1859 Another wrinkle is doing the reverse, figuring out which changeset in
1869 Another wrinkle is doing the reverse, figuring out which changeset in
1860 the changegroup a particular filenode or manifestnode belongs to.
1870 the changegroup a particular filenode or manifestnode belongs to.
1861 """
1871 """
1862 cl = self.changelog
1872 cl = self.changelog
1863 if not bases:
1873 if not bases:
1864 bases = [nullid]
1874 bases = [nullid]
1865 csets, bases, heads = cl.nodesbetween(bases, heads)
1875 csets, bases, heads = cl.nodesbetween(bases, heads)
1866 # We assume that all ancestors of bases are known
1876 # We assume that all ancestors of bases are known
1867 common = set(cl.ancestors([cl.rev(n) for n in bases]))
1877 common = set(cl.ancestors([cl.rev(n) for n in bases]))
1868 return self._changegroupsubset(common, csets, heads, source)
1878 return self._changegroupsubset(common, csets, heads, source)
1869
1879
1870 def getlocalbundle(self, source, outgoing):
1880 def getlocalbundle(self, source, outgoing):
1871 """Like getbundle, but taking a discovery.outgoing as an argument.
1881 """Like getbundle, but taking a discovery.outgoing as an argument.
1872
1882
1873 This is only implemented for local repos and reuses potentially
1883 This is only implemented for local repos and reuses potentially
1874 precomputed sets in outgoing."""
1884 precomputed sets in outgoing."""
1875 if not outgoing.missing:
1885 if not outgoing.missing:
1876 return None
1886 return None
1877 return self._changegroupsubset(outgoing.common,
1887 return self._changegroupsubset(outgoing.common,
1878 outgoing.missing,
1888 outgoing.missing,
1879 outgoing.missingheads,
1889 outgoing.missingheads,
1880 source)
1890 source)
1881
1891
1882 def getbundle(self, source, heads=None, common=None):
1892 def getbundle(self, source, heads=None, common=None):
1883 """Like changegroupsubset, but returns the set difference between the
1893 """Like changegroupsubset, but returns the set difference between the
1884 ancestors of heads and the ancestors common.
1894 ancestors of heads and the ancestors common.
1885
1895
1886 If heads is None, use the local heads. If common is None, use [nullid].
1896 If heads is None, use the local heads. If common is None, use [nullid].
1887
1897
1888 The nodes in common might not all be known locally due to the way the
1898 The nodes in common might not all be known locally due to the way the
1889 current discovery protocol works.
1899 current discovery protocol works.
1890 """
1900 """
1891 cl = self.changelog
1901 cl = self.changelog
1892 if common:
1902 if common:
1893 nm = cl.nodemap
1903 nm = cl.nodemap
1894 common = [n for n in common if n in nm]
1904 common = [n for n in common if n in nm]
1895 else:
1905 else:
1896 common = [nullid]
1906 common = [nullid]
1897 if not heads:
1907 if not heads:
1898 heads = cl.heads()
1908 heads = cl.heads()
1899 return self.getlocalbundle(source,
1909 return self.getlocalbundle(source,
1900 discovery.outgoing(cl, common, heads))
1910 discovery.outgoing(cl, common, heads))
1901
1911
    def _changegroupsubset(self, commonrevs, csets, heads, source):
        """Build the changegroup stream for ``csets``.

        commonrevs: revs assumed known on the other side (used to prune)
        csets: changeset nodes to bundle, heads: their heads
        Returns an unbundle10 reader over a lazily generated stream.
        """

        cl = self.changelog
        mf = self.manifest
        mfs = {} # needed manifests
        fnodes = {} # needed file nodes
        changedfiles = set()
        # mutable cells shared with the lookup/gengroup closures below:
        # fstate = [current filename, {filenode -> linked changeset node}]
        # count = [progress position, progress total]
        fstate = ['', {}]
        count = [0, 0]

        # can we go through the fast path ?
        heads.sort()
        if heads == sorted(self.heads()):
            return self._changegroup(csets, source)

        # slow path
        self.hook('preoutgoing', throw=True, source=source)
        self.changegroupinfo(csets, source)

        # filter any nodes that claim to be part of the known set
        def prune(revlog, missing):
            rr, rl = revlog.rev, revlog.linkrev
            return [n for n in missing
                    if rl(rr(n)) not in commonrevs]

        progress = self.ui.progress
        _bundling = _('bundling')
        _changesets = _('changesets')
        _manifests = _('manifests')
        _files = _('files')

        # called back by the bundler for every node; besides resolving the
        # linked changeset node it records which manifests/filenodes will
        # be needed in the later phases
        def lookup(revlog, x):
            if revlog == cl:
                c = cl.read(x)
                changedfiles.update(c[3])
                mfs.setdefault(c[0], x)
                count[0] += 1
                progress(_bundling, count[0],
                         unit=_changesets, total=count[1])
                return x
            elif revlog == mf:
                clnode = mfs[x]
                mdata = mf.readfast(x)
                for f, n in mdata.iteritems():
                    if f in changedfiles:
                        fnodes[f].setdefault(n, clnode)
                count[0] += 1
                progress(_bundling, count[0],
                         unit=_manifests, total=count[1])
                return clnode
            else:
                progress(_bundling, count[0], item=fstate[0],
                         unit=_files, total=count[1])
                return fstate[1][x]

        bundler = changegroup.bundle10(lookup)
        reorder = self.ui.config('bundle', 'reorder', 'auto')
        if reorder == 'auto':
            reorder = None
        else:
            reorder = util.parsebool(reorder)

        def gengroup():
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            count[:] = [0, len(csets)]
            for chunk in cl.group(csets, bundler, reorder=reorder):
                yield chunk
            progress(_bundling, None)

            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            for f in changedfiles:
                fnodes[f] = {}
            count[:] = [0, len(mfs)]
            for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
                yield chunk
            progress(_bundling, None)

            mfs.clear()

            # Go through all our files in order sorted by name.
            count[:] = [0, len(changedfiles)]
            for fname in sorted(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s")
                                     % fname)
                fstate[0] = fname
                fstate[1] = fnodes.pop(fname, {})

                nodelist = prune(filerevlog, fstate[1])
                if nodelist:
                    count[0] += 1
                    yield bundler.fileheader(fname)
                    for chunk in filerevlog.group(nodelist, bundler, reorder):
                        yield chunk

            # Signal that no more groups are left.
            yield bundler.close()
            progress(_bundling, None)

        if csets:
            self.hook('outgoing', node=hex(csets[0]), source=source)

        return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2008
2018
2009 def changegroup(self, basenodes, source):
2019 def changegroup(self, basenodes, source):
2010 # to avoid a race we use changegroupsubset() (issue1320)
2020 # to avoid a race we use changegroupsubset() (issue1320)
2011 return self.changegroupsubset(basenodes, self.heads(), source)
2021 return self.changegroupsubset(basenodes, self.heads(), source)
2012
2022
2013 def _changegroup(self, nodes, source):
2023 def _changegroup(self, nodes, source):
2014 """Compute the changegroup of all nodes that we have that a recipient
2024 """Compute the changegroup of all nodes that we have that a recipient
2015 doesn't. Return a chunkbuffer object whose read() method will return
2025 doesn't. Return a chunkbuffer object whose read() method will return
2016 successive changegroup chunks.
2026 successive changegroup chunks.
2017
2027
2018 This is much easier than the previous function as we can assume that
2028 This is much easier than the previous function as we can assume that
2019 the recipient has any changenode we aren't sending them.
2029 the recipient has any changenode we aren't sending them.
2020
2030
2021 nodes is the set of nodes to send"""
2031 nodes is the set of nodes to send"""
2022
2032
2023 cl = self.changelog
2033 cl = self.changelog
2024 mf = self.manifest
2034 mf = self.manifest
2025 mfs = {}
2035 mfs = {}
2026 changedfiles = set()
2036 changedfiles = set()
2027 fstate = ['']
2037 fstate = ['']
2028 count = [0, 0]
2038 count = [0, 0]
2029
2039
2030 self.hook('preoutgoing', throw=True, source=source)
2040 self.hook('preoutgoing', throw=True, source=source)
2031 self.changegroupinfo(nodes, source)
2041 self.changegroupinfo(nodes, source)
2032
2042
2033 revset = set([cl.rev(n) for n in nodes])
2043 revset = set([cl.rev(n) for n in nodes])
2034
2044
2035 def gennodelst(log):
2045 def gennodelst(log):
2036 ln, llr = log.node, log.linkrev
2046 ln, llr = log.node, log.linkrev
2037 return [ln(r) for r in log if llr(r) in revset]
2047 return [ln(r) for r in log if llr(r) in revset]
2038
2048
2039 progress = self.ui.progress
2049 progress = self.ui.progress
2040 _bundling = _('bundling')
2050 _bundling = _('bundling')
2041 _changesets = _('changesets')
2051 _changesets = _('changesets')
2042 _manifests = _('manifests')
2052 _manifests = _('manifests')
2043 _files = _('files')
2053 _files = _('files')
2044
2054
2045 def lookup(revlog, x):
2055 def lookup(revlog, x):
2046 if revlog == cl:
2056 if revlog == cl:
2047 c = cl.read(x)
2057 c = cl.read(x)
2048 changedfiles.update(c[3])
2058 changedfiles.update(c[3])
2049 mfs.setdefault(c[0], x)
2059 mfs.setdefault(c[0], x)
2050 count[0] += 1
2060 count[0] += 1
2051 progress(_bundling, count[0],
2061 progress(_bundling, count[0],
2052 unit=_changesets, total=count[1])
2062 unit=_changesets, total=count[1])
2053 return x
2063 return x
2054 elif revlog == mf:
2064 elif revlog == mf:
2055 count[0] += 1
2065 count[0] += 1
2056 progress(_bundling, count[0],
2066 progress(_bundling, count[0],
2057 unit=_manifests, total=count[1])
2067 unit=_manifests, total=count[1])
2058 return cl.node(revlog.linkrev(revlog.rev(x)))
2068 return cl.node(revlog.linkrev(revlog.rev(x)))
2059 else:
2069 else:
2060 progress(_bundling, count[0], item=fstate[0],
2070 progress(_bundling, count[0], item=fstate[0],
2061 total=count[1], unit=_files)
2071 total=count[1], unit=_files)
2062 return cl.node(revlog.linkrev(revlog.rev(x)))
2072 return cl.node(revlog.linkrev(revlog.rev(x)))
2063
2073
2064 bundler = changegroup.bundle10(lookup)
2074 bundler = changegroup.bundle10(lookup)
2065 reorder = self.ui.config('bundle', 'reorder', 'auto')
2075 reorder = self.ui.config('bundle', 'reorder', 'auto')
2066 if reorder == 'auto':
2076 if reorder == 'auto':
2067 reorder = None
2077 reorder = None
2068 else:
2078 else:
2069 reorder = util.parsebool(reorder)
2079 reorder = util.parsebool(reorder)
2070
2080
2071 def gengroup():
2081 def gengroup():
2072 '''yield a sequence of changegroup chunks (strings)'''
2082 '''yield a sequence of changegroup chunks (strings)'''
2073 # construct a list of all changed files
2083 # construct a list of all changed files
2074
2084
2075 count[:] = [0, len(nodes)]
2085 count[:] = [0, len(nodes)]
2076 for chunk in cl.group(nodes, bundler, reorder=reorder):
2086 for chunk in cl.group(nodes, bundler, reorder=reorder):
2077 yield chunk
2087 yield chunk
2078 progress(_bundling, None)
2088 progress(_bundling, None)
2079
2089
2080 count[:] = [0, len(mfs)]
2090 count[:] = [0, len(mfs)]
2081 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
2091 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
2082 yield chunk
2092 yield chunk
2083 progress(_bundling, None)
2093 progress(_bundling, None)
2084
2094
2085 count[:] = [0, len(changedfiles)]
2095 count[:] = [0, len(changedfiles)]
2086 for fname in sorted(changedfiles):
2096 for fname in sorted(changedfiles):
2087 filerevlog = self.file(fname)
2097 filerevlog = self.file(fname)
2088 if not len(filerevlog):
2098 if not len(filerevlog):
2089 raise util.Abort(_("empty or missing revlog for %s")
2099 raise util.Abort(_("empty or missing revlog for %s")
2090 % fname)
2100 % fname)
2091 fstate[0] = fname
2101 fstate[0] = fname
2092 nodelist = gennodelst(filerevlog)
2102 nodelist = gennodelst(filerevlog)
2093 if nodelist:
2103 if nodelist:
2094 count[0] += 1
2104 count[0] += 1
2095 yield bundler.fileheader(fname)
2105 yield bundler.fileheader(fname)
2096 for chunk in filerevlog.group(nodelist, bundler, reorder):
2106 for chunk in filerevlog.group(nodelist, bundler, reorder):
2097 yield chunk
2107 yield chunk
2098 yield bundler.close()
2108 yield bundler.close()
2099 progress(_bundling, None)
2109 progress(_bundling, None)
2100
2110
2101 if nodes:
2111 if nodes:
2102 self.hook('outgoing', node=hex(nodes[0]), source=source)
2112 self.hook('outgoing', node=hex(nodes[0]), source=source)
2103
2113
2104 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2114 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2105
2115
2106 def addchangegroup(self, source, srctype, url, emptyok=False):
2116 def addchangegroup(self, source, srctype, url, emptyok=False):
2107 """Add the changegroup returned by source.read() to this repo.
2117 """Add the changegroup returned by source.read() to this repo.
2108 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2118 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2109 the URL of the repo where this changegroup is coming from.
2119 the URL of the repo where this changegroup is coming from.
2110
2120
2111 Return an integer summarizing the change to this repo:
2121 Return an integer summarizing the change to this repo:
2112 - nothing changed or no source: 0
2122 - nothing changed or no source: 0
2113 - more heads than before: 1+added heads (2..n)
2123 - more heads than before: 1+added heads (2..n)
2114 - fewer heads than before: -1-removed heads (-2..-n)
2124 - fewer heads than before: -1-removed heads (-2..-n)
2115 - number of heads stays the same: 1
2125 - number of heads stays the same: 1
2116 """
2126 """
2117 def csmap(x):
2127 def csmap(x):
2118 self.ui.debug("add changeset %s\n" % short(x))
2128 self.ui.debug("add changeset %s\n" % short(x))
2119 return len(cl)
2129 return len(cl)
2120
2130
2121 def revmap(x):
2131 def revmap(x):
2122 return cl.rev(x)
2132 return cl.rev(x)
2123
2133
2124 if not source:
2134 if not source:
2125 return 0
2135 return 0
2126
2136
2127 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2137 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2128
2138
2129 changesets = files = revisions = 0
2139 changesets = files = revisions = 0
2130 efiles = set()
2140 efiles = set()
2131
2141
2132 # write changelog data to temp files so concurrent readers will not see
2142 # write changelog data to temp files so concurrent readers will not see
2133 # inconsistent view
2143 # inconsistent view
2134 cl = self.changelog
2144 cl = self.changelog
2135 cl.delayupdate()
2145 cl.delayupdate()
2136 oldheads = cl.heads()
2146 oldheads = cl.heads()
2137
2147
2138 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2148 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2139 try:
2149 try:
2140 trp = weakref.proxy(tr)
2150 trp = weakref.proxy(tr)
2141 # pull off the changeset group
2151 # pull off the changeset group
2142 self.ui.status(_("adding changesets\n"))
2152 self.ui.status(_("adding changesets\n"))
2143 clstart = len(cl)
2153 clstart = len(cl)
2144 class prog(object):
2154 class prog(object):
2145 step = _('changesets')
2155 step = _('changesets')
2146 count = 1
2156 count = 1
2147 ui = self.ui
2157 ui = self.ui
2148 total = None
2158 total = None
2149 def __call__(self):
2159 def __call__(self):
2150 self.ui.progress(self.step, self.count, unit=_('chunks'),
2160 self.ui.progress(self.step, self.count, unit=_('chunks'),
2151 total=self.total)
2161 total=self.total)
2152 self.count += 1
2162 self.count += 1
2153 pr = prog()
2163 pr = prog()
2154 source.callback = pr
2164 source.callback = pr
2155
2165
2156 source.changelogheader()
2166 source.changelogheader()
2157 srccontent = cl.addgroup(source, csmap, trp)
2167 srccontent = cl.addgroup(source, csmap, trp)
2158 if not (srccontent or emptyok):
2168 if not (srccontent or emptyok):
2159 raise util.Abort(_("received changelog group is empty"))
2169 raise util.Abort(_("received changelog group is empty"))
2160 clend = len(cl)
2170 clend = len(cl)
2161 changesets = clend - clstart
2171 changesets = clend - clstart
2162 for c in xrange(clstart, clend):
2172 for c in xrange(clstart, clend):
2163 efiles.update(self[c].files())
2173 efiles.update(self[c].files())
2164 efiles = len(efiles)
2174 efiles = len(efiles)
2165 self.ui.progress(_('changesets'), None)
2175 self.ui.progress(_('changesets'), None)
2166
2176
2167 # pull off the manifest group
2177 # pull off the manifest group
2168 self.ui.status(_("adding manifests\n"))
2178 self.ui.status(_("adding manifests\n"))
2169 pr.step = _('manifests')
2179 pr.step = _('manifests')
2170 pr.count = 1
2180 pr.count = 1
2171 pr.total = changesets # manifests <= changesets
2181 pr.total = changesets # manifests <= changesets
2172 # no need to check for empty manifest group here:
2182 # no need to check for empty manifest group here:
2173 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2183 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2174 # no new manifest will be created and the manifest group will
2184 # no new manifest will be created and the manifest group will
2175 # be empty during the pull
2185 # be empty during the pull
2176 source.manifestheader()
2186 source.manifestheader()
2177 self.manifest.addgroup(source, revmap, trp)
2187 self.manifest.addgroup(source, revmap, trp)
2178 self.ui.progress(_('manifests'), None)
2188 self.ui.progress(_('manifests'), None)
2179
2189
2180 needfiles = {}
2190 needfiles = {}
2181 if self.ui.configbool('server', 'validate', default=False):
2191 if self.ui.configbool('server', 'validate', default=False):
2182 # validate incoming csets have their manifests
2192 # validate incoming csets have their manifests
2183 for cset in xrange(clstart, clend):
2193 for cset in xrange(clstart, clend):
2184 mfest = self.changelog.read(self.changelog.node(cset))[0]
2194 mfest = self.changelog.read(self.changelog.node(cset))[0]
2185 mfest = self.manifest.readdelta(mfest)
2195 mfest = self.manifest.readdelta(mfest)
2186 # store file nodes we must see
2196 # store file nodes we must see
2187 for f, n in mfest.iteritems():
2197 for f, n in mfest.iteritems():
2188 needfiles.setdefault(f, set()).add(n)
2198 needfiles.setdefault(f, set()).add(n)
2189
2199
2190 # process the files
2200 # process the files
2191 self.ui.status(_("adding file changes\n"))
2201 self.ui.status(_("adding file changes\n"))
2192 pr.step = _('files')
2202 pr.step = _('files')
2193 pr.count = 1
2203 pr.count = 1
2194 pr.total = efiles
2204 pr.total = efiles
2195 source.callback = None
2205 source.callback = None
2196
2206
2197 while True:
2207 while True:
2198 chunkdata = source.filelogheader()
2208 chunkdata = source.filelogheader()
2199 if not chunkdata:
2209 if not chunkdata:
2200 break
2210 break
2201 f = chunkdata["filename"]
2211 f = chunkdata["filename"]
2202 self.ui.debug("adding %s revisions\n" % f)
2212 self.ui.debug("adding %s revisions\n" % f)
2203 pr()
2213 pr()
2204 fl = self.file(f)
2214 fl = self.file(f)
2205 o = len(fl)
2215 o = len(fl)
2206 if not fl.addgroup(source, revmap, trp):
2216 if not fl.addgroup(source, revmap, trp):
2207 raise util.Abort(_("received file revlog group is empty"))
2217 raise util.Abort(_("received file revlog group is empty"))
2208 revisions += len(fl) - o
2218 revisions += len(fl) - o
2209 files += 1
2219 files += 1
2210 if f in needfiles:
2220 if f in needfiles:
2211 needs = needfiles[f]
2221 needs = needfiles[f]
2212 for new in xrange(o, len(fl)):
2222 for new in xrange(o, len(fl)):
2213 n = fl.node(new)
2223 n = fl.node(new)
2214 if n in needs:
2224 if n in needs:
2215 needs.remove(n)
2225 needs.remove(n)
2216 if not needs:
2226 if not needs:
2217 del needfiles[f]
2227 del needfiles[f]
2218 self.ui.progress(_('files'), None)
2228 self.ui.progress(_('files'), None)
2219
2229
2220 for f, needs in needfiles.iteritems():
2230 for f, needs in needfiles.iteritems():
2221 fl = self.file(f)
2231 fl = self.file(f)
2222 for n in needs:
2232 for n in needs:
2223 try:
2233 try:
2224 fl.rev(n)
2234 fl.rev(n)
2225 except error.LookupError:
2235 except error.LookupError:
2226 raise util.Abort(
2236 raise util.Abort(
2227 _('missing file data for %s:%s - run hg verify') %
2237 _('missing file data for %s:%s - run hg verify') %
2228 (f, hex(n)))
2238 (f, hex(n)))
2229
2239
2230 dh = 0
2240 dh = 0
2231 if oldheads:
2241 if oldheads:
2232 heads = cl.heads()
2242 heads = cl.heads()
2233 dh = len(heads) - len(oldheads)
2243 dh = len(heads) - len(oldheads)
2234 for h in heads:
2244 for h in heads:
2235 if h not in oldheads and self[h].closesbranch():
2245 if h not in oldheads and self[h].closesbranch():
2236 dh -= 1
2246 dh -= 1
2237 htext = ""
2247 htext = ""
2238 if dh:
2248 if dh:
2239 htext = _(" (%+d heads)") % dh
2249 htext = _(" (%+d heads)") % dh
2240
2250
2241 self.ui.status(_("added %d changesets"
2251 self.ui.status(_("added %d changesets"
2242 " with %d changes to %d files%s\n")
2252 " with %d changes to %d files%s\n")
2243 % (changesets, revisions, files, htext))
2253 % (changesets, revisions, files, htext))
2244
2254
2245 if changesets > 0:
2255 if changesets > 0:
2246 p = lambda: cl.writepending() and self.root or ""
2256 p = lambda: cl.writepending() and self.root or ""
2247 self.hook('pretxnchangegroup', throw=True,
2257 self.hook('pretxnchangegroup', throw=True,
2248 node=hex(cl.node(clstart)), source=srctype,
2258 node=hex(cl.node(clstart)), source=srctype,
2249 url=url, pending=p)
2259 url=url, pending=p)
2250
2260
2251 added = [cl.node(r) for r in xrange(clstart, clend)]
2261 added = [cl.node(r) for r in xrange(clstart, clend)]
2252 publishing = self.ui.configbool('phases', 'publish', True)
2262 publishing = self.ui.configbool('phases', 'publish', True)
2253 if srctype == 'push':
2263 if srctype == 'push':
2254 # Old server can not push the boundary themself.
2264 # Old server can not push the boundary themself.
2255 # New server won't push the boundary if changeset already
2265 # New server won't push the boundary if changeset already
2256 # existed locally as secrete
2266 # existed locally as secrete
2257 #
2267 #
2258 # We should not use added here but the list of all change in
2268 # We should not use added here but the list of all change in
2259 # the bundle
2269 # the bundle
2260 if publishing:
2270 if publishing:
2261 phases.advanceboundary(self, phases.public, srccontent)
2271 phases.advanceboundary(self, phases.public, srccontent)
2262 else:
2272 else:
2263 phases.advanceboundary(self, phases.draft, srccontent)
2273 phases.advanceboundary(self, phases.draft, srccontent)
2264 phases.retractboundary(self, phases.draft, added)
2274 phases.retractboundary(self, phases.draft, added)
2265 elif srctype != 'strip':
2275 elif srctype != 'strip':
2266 # publishing only alter behavior during push
2276 # publishing only alter behavior during push
2267 #
2277 #
2268 # strip should not touch boundary at all
2278 # strip should not touch boundary at all
2269 phases.retractboundary(self, phases.draft, added)
2279 phases.retractboundary(self, phases.draft, added)
2270
2280
2271 # make changelog see real files again
2281 # make changelog see real files again
2272 cl.finalize(trp)
2282 cl.finalize(trp)
2273
2283
2274 tr.close()
2284 tr.close()
2275
2285
2276 if changesets > 0:
2286 if changesets > 0:
2277 def runhooks():
2287 def runhooks():
2278 # forcefully update the on-disk branch cache
2288 # forcefully update the on-disk branch cache
2279 self.ui.debug("updating the branch cache\n")
2289 self.ui.debug("updating the branch cache\n")
2280 self.updatebranchcache()
2290 self.updatebranchcache()
2281 self.hook("changegroup", node=hex(cl.node(clstart)),
2291 self.hook("changegroup", node=hex(cl.node(clstart)),
2282 source=srctype, url=url)
2292 source=srctype, url=url)
2283
2293
2284 for n in added:
2294 for n in added:
2285 self.hook("incoming", node=hex(n), source=srctype,
2295 self.hook("incoming", node=hex(n), source=srctype,
2286 url=url)
2296 url=url)
2287 self._afterlock(runhooks)
2297 self._afterlock(runhooks)
2288
2298
2289 finally:
2299 finally:
2290 tr.release()
2300 tr.release()
2291 # never return 0 here:
2301 # never return 0 here:
2292 if dh < 0:
2302 if dh < 0:
2293 return dh - 1
2303 return dh - 1
2294 else:
2304 else:
2295 return dh + 1
2305 return dh + 1
2296
2306
2297 def stream_in(self, remote, requirements):
2307 def stream_in(self, remote, requirements):
2298 lock = self.lock()
2308 lock = self.lock()
2299 try:
2309 try:
2300 fp = remote.stream_out()
2310 fp = remote.stream_out()
2301 l = fp.readline()
2311 l = fp.readline()
2302 try:
2312 try:
2303 resp = int(l)
2313 resp = int(l)
2304 except ValueError:
2314 except ValueError:
2305 raise error.ResponseError(
2315 raise error.ResponseError(
2306 _('unexpected response from remote server:'), l)
2316 _('unexpected response from remote server:'), l)
2307 if resp == 1:
2317 if resp == 1:
2308 raise util.Abort(_('operation forbidden by server'))
2318 raise util.Abort(_('operation forbidden by server'))
2309 elif resp == 2:
2319 elif resp == 2:
2310 raise util.Abort(_('locking the remote repository failed'))
2320 raise util.Abort(_('locking the remote repository failed'))
2311 elif resp != 0:
2321 elif resp != 0:
2312 raise util.Abort(_('the server sent an unknown error code'))
2322 raise util.Abort(_('the server sent an unknown error code'))
2313 self.ui.status(_('streaming all changes\n'))
2323 self.ui.status(_('streaming all changes\n'))
2314 l = fp.readline()
2324 l = fp.readline()
2315 try:
2325 try:
2316 total_files, total_bytes = map(int, l.split(' ', 1))
2326 total_files, total_bytes = map(int, l.split(' ', 1))
2317 except (ValueError, TypeError):
2327 except (ValueError, TypeError):
2318 raise error.ResponseError(
2328 raise error.ResponseError(
2319 _('unexpected response from remote server:'), l)
2329 _('unexpected response from remote server:'), l)
2320 self.ui.status(_('%d files to transfer, %s of data\n') %
2330 self.ui.status(_('%d files to transfer, %s of data\n') %
2321 (total_files, util.bytecount(total_bytes)))
2331 (total_files, util.bytecount(total_bytes)))
2322 handled_bytes = 0
2332 handled_bytes = 0
2323 self.ui.progress(_('clone'), 0, total=total_bytes)
2333 self.ui.progress(_('clone'), 0, total=total_bytes)
2324 start = time.time()
2334 start = time.time()
2325 for i in xrange(total_files):
2335 for i in xrange(total_files):
2326 # XXX doesn't support '\n' or '\r' in filenames
2336 # XXX doesn't support '\n' or '\r' in filenames
2327 l = fp.readline()
2337 l = fp.readline()
2328 try:
2338 try:
2329 name, size = l.split('\0', 1)
2339 name, size = l.split('\0', 1)
2330 size = int(size)
2340 size = int(size)
2331 except (ValueError, TypeError):
2341 except (ValueError, TypeError):
2332 raise error.ResponseError(
2342 raise error.ResponseError(
2333 _('unexpected response from remote server:'), l)
2343 _('unexpected response from remote server:'), l)
2334 if self.ui.debugflag:
2344 if self.ui.debugflag:
2335 self.ui.debug('adding %s (%s)\n' %
2345 self.ui.debug('adding %s (%s)\n' %
2336 (name, util.bytecount(size)))
2346 (name, util.bytecount(size)))
2337 # for backwards compat, name was partially encoded
2347 # for backwards compat, name was partially encoded
2338 ofp = self.sopener(store.decodedir(name), 'w')
2348 ofp = self.sopener(store.decodedir(name), 'w')
2339 for chunk in util.filechunkiter(fp, limit=size):
2349 for chunk in util.filechunkiter(fp, limit=size):
2340 handled_bytes += len(chunk)
2350 handled_bytes += len(chunk)
2341 self.ui.progress(_('clone'), handled_bytes,
2351 self.ui.progress(_('clone'), handled_bytes,
2342 total=total_bytes)
2352 total=total_bytes)
2343 ofp.write(chunk)
2353 ofp.write(chunk)
2344 ofp.close()
2354 ofp.close()
2345 elapsed = time.time() - start
2355 elapsed = time.time() - start
2346 if elapsed <= 0:
2356 if elapsed <= 0:
2347 elapsed = 0.001
2357 elapsed = 0.001
2348 self.ui.progress(_('clone'), None)
2358 self.ui.progress(_('clone'), None)
2349 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2359 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2350 (util.bytecount(total_bytes), elapsed,
2360 (util.bytecount(total_bytes), elapsed,
2351 util.bytecount(total_bytes / elapsed)))
2361 util.bytecount(total_bytes / elapsed)))
2352
2362
2353 # new requirements = old non-format requirements +
2363 # new requirements = old non-format requirements +
2354 # new format-related
2364 # new format-related
2355 # requirements from the streamed-in repository
2365 # requirements from the streamed-in repository
2356 requirements.update(set(self.requirements) - self.supportedformats)
2366 requirements.update(set(self.requirements) - self.supportedformats)
2357 self._applyrequirements(requirements)
2367 self._applyrequirements(requirements)
2358 self._writerequirements()
2368 self._writerequirements()
2359
2369
2360 self.invalidate()
2370 self.invalidate()
2361 return len(self.heads()) + 1
2371 return len(self.heads()) + 1
2362 finally:
2372 finally:
2363 lock.release()
2373 lock.release()
2364
2374
2365 def clone(self, remote, heads=[], stream=False):
2375 def clone(self, remote, heads=[], stream=False):
2366 '''clone remote repository.
2376 '''clone remote repository.
2367
2377
2368 keyword arguments:
2378 keyword arguments:
2369 heads: list of revs to clone (forces use of pull)
2379 heads: list of revs to clone (forces use of pull)
2370 stream: use streaming clone if possible'''
2380 stream: use streaming clone if possible'''
2371
2381
2372 # now, all clients that can request uncompressed clones can
2382 # now, all clients that can request uncompressed clones can
2373 # read repo formats supported by all servers that can serve
2383 # read repo formats supported by all servers that can serve
2374 # them.
2384 # them.
2375
2385
2376 # if revlog format changes, client will have to check version
2386 # if revlog format changes, client will have to check version
2377 # and format flags on "stream" capability, and use
2387 # and format flags on "stream" capability, and use
2378 # uncompressed only if compatible.
2388 # uncompressed only if compatible.
2379
2389
2380 if not stream:
2390 if not stream:
2381 # if the server explicitely prefer to stream (for fast LANs)
2391 # if the server explicitely prefer to stream (for fast LANs)
2382 stream = remote.capable('stream-preferred')
2392 stream = remote.capable('stream-preferred')
2383
2393
2384 if stream and not heads:
2394 if stream and not heads:
2385 # 'stream' means remote revlog format is revlogv1 only
2395 # 'stream' means remote revlog format is revlogv1 only
2386 if remote.capable('stream'):
2396 if remote.capable('stream'):
2387 return self.stream_in(remote, set(('revlogv1',)))
2397 return self.stream_in(remote, set(('revlogv1',)))
2388 # otherwise, 'streamreqs' contains the remote revlog format
2398 # otherwise, 'streamreqs' contains the remote revlog format
2389 streamreqs = remote.capable('streamreqs')
2399 streamreqs = remote.capable('streamreqs')
2390 if streamreqs:
2400 if streamreqs:
2391 streamreqs = set(streamreqs.split(','))
2401 streamreqs = set(streamreqs.split(','))
2392 # if we support it, stream in and adjust our requirements
2402 # if we support it, stream in and adjust our requirements
2393 if not streamreqs - self.supportedformats:
2403 if not streamreqs - self.supportedformats:
2394 return self.stream_in(remote, streamreqs)
2404 return self.stream_in(remote, streamreqs)
2395 return self.pull(remote, heads)
2405 return self.pull(remote, heads)
2396
2406
2397 def pushkey(self, namespace, key, old, new):
2407 def pushkey(self, namespace, key, old, new):
2398 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2408 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2399 old=old, new=new)
2409 old=old, new=new)
2400 ret = pushkey.push(self, namespace, key, old, new)
2410 ret = pushkey.push(self, namespace, key, old, new)
2401 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2411 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2402 ret=ret)
2412 ret=ret)
2403 return ret
2413 return ret
2404
2414
2405 def listkeys(self, namespace):
2415 def listkeys(self, namespace):
2406 self.hook('prelistkeys', throw=True, namespace=namespace)
2416 self.hook('prelistkeys', throw=True, namespace=namespace)
2407 values = pushkey.list(self, namespace)
2417 values = pushkey.list(self, namespace)
2408 self.hook('listkeys', namespace=namespace, values=values)
2418 self.hook('listkeys', namespace=namespace, values=values)
2409 return values
2419 return values
2410
2420
2411 def debugwireargs(self, one, two, three=None, four=None, five=None):
2421 def debugwireargs(self, one, two, three=None, four=None, five=None):
2412 '''used to test argument passing over the wire'''
2422 '''used to test argument passing over the wire'''
2413 return "%s %s %s %s %s" % (one, two, three, four, five)
2423 return "%s %s %s %s %s" % (one, two, three, four, five)
2414
2424
2415 def savecommitmessage(self, text):
2425 def savecommitmessage(self, text):
2416 fp = self.opener('last-message.txt', 'wb')
2426 fp = self.opener('last-message.txt', 'wb')
2417 try:
2427 try:
2418 fp.write(text)
2428 fp.write(text)
2419 finally:
2429 finally:
2420 fp.close()
2430 fp.close()
2421 return self.pathto(fp.name[len(self.root)+1:])
2431 return self.pathto(fp.name[len(self.root)+1:])
2422
2432
2423 # used to avoid circular references so destructors work
2433 # used to avoid circular references so destructors work
2424 def aftertrans(files):
2434 def aftertrans(files):
2425 renamefiles = [tuple(t) for t in files]
2435 renamefiles = [tuple(t) for t in files]
2426 def a():
2436 def a():
2427 for src, dest in renamefiles:
2437 for src, dest in renamefiles:
2428 try:
2438 try:
2429 util.rename(src, dest)
2439 util.rename(src, dest)
2430 except OSError: # journal file does not yet exist
2440 except OSError: # journal file does not yet exist
2431 pass
2441 pass
2432 return a
2442 return a
2433
2443
2434 def undoname(fn):
2444 def undoname(fn):
2435 base, name = os.path.split(fn)
2445 base, name = os.path.split(fn)
2436 assert name.startswith('journal')
2446 assert name.startswith('journal')
2437 return os.path.join(base, name.replace('journal', 'undo', 1))
2447 return os.path.join(base, name.replace('journal', 'undo', 1))
2438
2448
2439 def instance(ui, path, create):
2449 def instance(ui, path, create):
2440 return localrepository(ui, util.urllocalpath(path), create)
2450 return localrepository(ui, util.urllocalpath(path), create)
2441
2451
2442 def islocal(path):
2452 def islocal(path):
2443 return True
2453 return True
@@ -1,231 +1,269 b''
1 # obsolete.py - obsolete markers handling
1 # obsolete.py - obsolete markers handling
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """Obsolete markers handling
9 """Obsolete markers handling
10
10
11 An obsolete marker maps an old changeset to a list of new
11 An obsolete marker maps an old changeset to a list of new
12 changesets. If the list of new changesets is empty, the old changeset
12 changesets. If the list of new changesets is empty, the old changeset
13 is said to be "killed". Otherwise, the old changeset is being
13 is said to be "killed". Otherwise, the old changeset is being
14 "replaced" by the new changesets.
14 "replaced" by the new changesets.
15
15
16 Obsolete markers can be used to record and distribute changeset graph
16 Obsolete markers can be used to record and distribute changeset graph
17 transformations performed by history rewriting operations, and help
17 transformations performed by history rewriting operations, and help
18 building new tools to reconciliate conflicting rewriting actions. To
18 building new tools to reconciliate conflicting rewriting actions. To
19 facilitate conflicts resolution, markers include various annotations
19 facilitate conflicts resolution, markers include various annotations
20 besides old and news changeset identifiers, such as creation date or
20 besides old and news changeset identifiers, such as creation date or
21 author name.
21 author name.
22
22
23
23
24 Format
24 Format
25 ------
25 ------
26
26
27 Markers are stored in an append-only file stored in
27 Markers are stored in an append-only file stored in
28 '.hg/store/obsstore'.
28 '.hg/store/obsstore'.
29
29
30 The file starts with a version header:
30 The file starts with a version header:
31
31
32 - 1 unsigned byte: version number, starting at zero.
32 - 1 unsigned byte: version number, starting at zero.
33
33
34
34
35 The header is followed by the markers. Each marker is made of:
35 The header is followed by the markers. Each marker is made of:
36
36
37 - 1 unsigned byte: number of new changesets "R", could be zero.
37 - 1 unsigned byte: number of new changesets "R", could be zero.
38
38
39 - 1 unsigned 32-bits integer: metadata size "M" in bytes.
39 - 1 unsigned 32-bits integer: metadata size "M" in bytes.
40
40
41 - 1 byte: a bit field. It is reserved for flags used in obsolete
41 - 1 byte: a bit field. It is reserved for flags used in obsolete
42 markers common operations, to avoid repeated decoding of metadata
42 markers common operations, to avoid repeated decoding of metadata
43 entries.
43 entries.
44
44
45 - 20 bytes: obsoleted changeset identifier.
45 - 20 bytes: obsoleted changeset identifier.
46
46
47 - N*20 bytes: new changesets identifiers.
47 - N*20 bytes: new changesets identifiers.
48
48
49 - M bytes: metadata as a sequence of nul-terminated strings. Each
49 - M bytes: metadata as a sequence of nul-terminated strings. Each
50 string contains a key and a value, separated by a color ':', without
50 string contains a key and a value, separated by a color ':', without
51 additional encoding. Keys cannot contain '\0' or ':' and values
51 additional encoding. Keys cannot contain '\0' or ':' and values
52 cannot contain '\0'.
52 cannot contain '\0'.
53 """
53 """
54 import struct
54 import struct
55 from mercurial import util
55 from mercurial import util, base85
56 from i18n import _
56 from i18n import _
57
57
58 _pack = struct.pack
58 _pack = struct.pack
59 _unpack = struct.unpack
59 _unpack = struct.unpack
60
60
61
61
62
62
63 # data used for parsing and writing
63 # data used for parsing and writing
64 _fmversion = 0
64 _fmversion = 0
65 _fmfixed = '>BIB20s'
65 _fmfixed = '>BIB20s'
66 _fmnode = '20s'
66 _fmnode = '20s'
67 _fmfsize = struct.calcsize(_fmfixed)
67 _fmfsize = struct.calcsize(_fmfixed)
68 _fnodesize = struct.calcsize(_fmnode)
68 _fnodesize = struct.calcsize(_fmnode)
69
69
70 def _readmarkers(data):
70 def _readmarkers(data):
71 """Read and enumerate markers from raw data"""
71 """Read and enumerate markers from raw data"""
72 off = 0
72 off = 0
73 diskversion = _unpack('>B', data[off:off + 1])[0]
73 diskversion = _unpack('>B', data[off:off + 1])[0]
74 off += 1
74 off += 1
75 if diskversion != _fmversion:
75 if diskversion != _fmversion:
76 raise util.Abort(_('parsing obsolete marker: unknown version %r')
76 raise util.Abort(_('parsing obsolete marker: unknown version %r')
77 % diskversion)
77 % diskversion)
78
78
79 # Loop on markers
79 # Loop on markers
80 l = len(data)
80 l = len(data)
81 while off + _fmfsize <= l:
81 while off + _fmfsize <= l:
82 # read fixed part
82 # read fixed part
83 cur = data[off:off + _fmfsize]
83 cur = data[off:off + _fmfsize]
84 off += _fmfsize
84 off += _fmfsize
85 nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
85 nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
86 # read replacement
86 # read replacement
87 sucs = ()
87 sucs = ()
88 if nbsuc:
88 if nbsuc:
89 s = (_fnodesize * nbsuc)
89 s = (_fnodesize * nbsuc)
90 cur = data[off:off + s]
90 cur = data[off:off + s]
91 sucs = _unpack(_fmnode * nbsuc, cur)
91 sucs = _unpack(_fmnode * nbsuc, cur)
92 off += s
92 off += s
93 # read metadata
93 # read metadata
94 # (metadata will be decoded on demand)
94 # (metadata will be decoded on demand)
95 metadata = data[off:off + mdsize]
95 metadata = data[off:off + mdsize]
96 if len(metadata) != mdsize:
96 if len(metadata) != mdsize:
97 raise util.Abort(_('parsing obsolete marker: metadata is too '
97 raise util.Abort(_('parsing obsolete marker: metadata is too '
98 'short, %d bytes expected, got %d')
98 'short, %d bytes expected, got %d')
99 % (len(metadata), mdsize))
99 % (len(metadata), mdsize))
100 off += mdsize
100 off += mdsize
101 yield (pre, sucs, flags, metadata)
101 yield (pre, sucs, flags, metadata)
102
102
103 def encodemeta(meta):
103 def encodemeta(meta):
104 """Return encoded metadata string to string mapping.
104 """Return encoded metadata string to string mapping.
105
105
106 Assume no ':' in key and no '\0' in both key and value."""
106 Assume no ':' in key and no '\0' in both key and value."""
107 for key, value in meta.iteritems():
107 for key, value in meta.iteritems():
108 if ':' in key or '\0' in key:
108 if ':' in key or '\0' in key:
109 raise ValueError("':' and '\0' are forbidden in metadata key'")
109 raise ValueError("':' and '\0' are forbidden in metadata key'")
110 if '\0' in value:
110 if '\0' in value:
111 raise ValueError("':' are forbidden in metadata value'")
111 raise ValueError("':' are forbidden in metadata value'")
112 return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
112 return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
113
113
114 def decodemeta(data):
114 def decodemeta(data):
115 """Return string to string dictionary from encoded version."""
115 """Return string to string dictionary from encoded version."""
116 d = {}
116 d = {}
117 for l in data.split('\0'):
117 for l in data.split('\0'):
118 if l:
118 if l:
119 key, value = l.split(':')
119 key, value = l.split(':')
120 d[key] = value
120 d[key] = value
121 return d
121 return d
122
122
123 class marker(object):
123 class marker(object):
124 """Wrap obsolete marker raw data"""
124 """Wrap obsolete marker raw data"""
125
125
126 def __init__(self, repo, data):
126 def __init__(self, repo, data):
127 # the repo argument will be used to create changectx in later version
127 # the repo argument will be used to create changectx in later version
128 self._repo = repo
128 self._repo = repo
129 self._data = data
129 self._data = data
130 self._decodedmeta = None
130 self._decodedmeta = None
131
131
132 def precnode(self):
132 def precnode(self):
133 """Precursor changeset node identifier"""
133 """Precursor changeset node identifier"""
134 return self._data[0]
134 return self._data[0]
135
135
136 def succnodes(self):
136 def succnodes(self):
137 """List of successor changesets node identifiers"""
137 """List of successor changesets node identifiers"""
138 return self._data[1]
138 return self._data[1]
139
139
140 def metadata(self):
140 def metadata(self):
141 """Decoded metadata dictionary"""
141 """Decoded metadata dictionary"""
142 if self._decodedmeta is None:
142 if self._decodedmeta is None:
143 self._decodedmeta = decodemeta(self._data[3])
143 self._decodedmeta = decodemeta(self._data[3])
144 return self._decodedmeta
144 return self._decodedmeta
145
145
146 def date(self):
146 def date(self):
147 """Creation date as (unixtime, offset)"""
147 """Creation date as (unixtime, offset)"""
148 parts = self.metadata()['date'].split(' ')
148 parts = self.metadata()['date'].split(' ')
149 return (float(parts[0]), int(parts[1]))
149 return (float(parts[0]), int(parts[1]))
150
150
151 class obsstore(object):
151 class obsstore(object):
152 """Store obsolete markers
152 """Store obsolete markers
153
153
154 Markers can be accessed with two mappings:
154 Markers can be accessed with two mappings:
155 - precursors: old -> set(new)
155 - precursors: old -> set(new)
156 - successors: new -> set(old)
156 - successors: new -> set(old)
157 """
157 """
158
158
159 def __init__(self):
159 def __init__(self):
160 self._all = []
160 self._all = []
161 # new markers to serialize
161 # new markers to serialize
162 self._new = []
162 self._new = []
163 self.precursors = {}
163 self.precursors = {}
164 self.successors = {}
164 self.successors = {}
165
165
166 def __iter__(self):
166 def __iter__(self):
167 return iter(self._all)
167 return iter(self._all)
168
168
169 def __nonzero__(self):
170 return bool(self._all)
171
169 def create(self, prec, succs=(), flag=0, metadata=None):
172 def create(self, prec, succs=(), flag=0, metadata=None):
170 """obsolete: add a new obsolete marker
173 """obsolete: add a new obsolete marker
171
174
172 * ensuring it is hashable
175 * ensuring it is hashable
173 * check mandatory metadata
176 * check mandatory metadata
174 * encode metadata
177 * encode metadata
175 """
178 """
176 if metadata is None:
179 if metadata is None:
177 metadata = {}
180 metadata = {}
178 if len(prec) != 20:
181 if len(prec) != 20:
179 raise ValueError(prec)
182 raise ValueError(prec)
180 for succ in succs:
183 for succ in succs:
181 if len(succ) != 20:
184 if len(succ) != 20:
182 raise ValueError(prec)
185 raise ValueError(prec)
183 marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
186 marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
184 self.add(marker)
187 self.add(marker)
185
188
186 def add(self, marker):
189 def add(self, marker):
187 """Add a new marker to the store
190 """Add a new marker to the store
188
191
189 This marker still needs to be written to disk"""
192 This marker still needs to be written to disk"""
190 self._new.append(marker)
193 self._new.append(marker)
191 self._load(marker)
194 self._load(marker)
192
195
193 def loadmarkers(self, data):
196 def loadmarkers(self, data):
194 """Load all markers in data, mark them as known."""
197 """Load all markers in data, mark them as known."""
195 for marker in _readmarkers(data):
198 for marker in _readmarkers(data):
196 self._load(marker)
199 self._load(marker)
197
200
201 def mergemarkers(self, data):
202 other = set(_readmarkers(data))
203 local = set(self._all)
204 new = other - local
205 for marker in new:
206 self.add(marker)
207
198 def flushmarkers(self, stream):
208 def flushmarkers(self, stream):
199 """Write all markers to a stream
209 """Write all markers to a stream
200
210
201 After this operation, "new" markers are considered "known"."""
211 After this operation, "new" markers are considered "known"."""
202 self._writemarkers(stream)
212 self._writemarkers(stream)
203 self._new[:] = []
213 self._new[:] = []
204
214
205 def _load(self, marker):
215 def _load(self, marker):
206 self._all.append(marker)
216 self._all.append(marker)
207 pre, sucs = marker[:2]
217 pre, sucs = marker[:2]
208 self.precursors.setdefault(pre, set()).add(marker)
218 self.precursors.setdefault(pre, set()).add(marker)
209 for suc in sucs:
219 for suc in sucs:
210 self.successors.setdefault(suc, set()).add(marker)
220 self.successors.setdefault(suc, set()).add(marker)
211
221
212 def _writemarkers(self, stream):
222 def _writemarkers(self, stream=None):
213 # Kept separate from flushmarkers(), it will be reused for
223 # Kept separate from flushmarkers(), it will be reused for
214 # markers exchange.
224 # markers exchange.
215 stream.write(_pack('>B', _fmversion))
225 if stream is None:
226 final = []
227 w = final.append
228 else:
229 w = stream.write
230 w(_pack('>B', _fmversion))
216 for marker in self._all:
231 for marker in self._all:
217 pre, sucs, flags, metadata = marker
232 pre, sucs, flags, metadata = marker
218 nbsuc = len(sucs)
233 nbsuc = len(sucs)
219 format = _fmfixed + (_fmnode * nbsuc)
234 format = _fmfixed + (_fmnode * nbsuc)
220 data = [nbsuc, len(metadata), flags, pre]
235 data = [nbsuc, len(metadata), flags, pre]
221 data.extend(sucs)
236 data.extend(sucs)
222 stream.write(_pack(format, *data))
237 w(_pack(format, *data))
223 stream.write(metadata)
238 w(metadata)
239 if stream is None:
240 return ''.join(final)
241
242 def listmarkers(repo):
243 """List markers over pushkey"""
244 if not repo.obsstore:
245 return {}
246 data = repo.obsstore._writemarkers()
247 return {'dump': base85.b85encode(data)}
224
248
225
249 def pushmarker(repo, key, old, new):
250 """Push markers over pushkey"""
251 if key != 'dump':
252 repo.ui.warn(_('unknown key: %r') % key)
253 return 0
254 if old:
255 repo.ui.warn(_('unexpected old value') % key)
256 return 0
257 data = base85.b85decode(new)
258 lock = repo.lock()
259 try:
260 repo.obsstore.mergemarkers(data)
261 return 1
262 finally:
263 lock.release()
226
264
227 def allmarkers(repo):
265 def allmarkers(repo):
228 """all obsolete markers known in a repository"""
266 """all obsolete markers known in a repository"""
229 for markerdata in repo.obsstore:
267 for markerdata in repo.obsstore:
230 yield marker(repo, markerdata)
268 yield marker(repo, markerdata)
231
269
@@ -1,36 +1,37 b''
1 # pushkey.py - dispatching for pushing and pulling keys
1 # pushkey.py - dispatching for pushing and pulling keys
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import bookmarks, phases
8 import bookmarks, phases, obsolete
9
9
10 def _nslist(repo):
10 def _nslist(repo):
11 n = {}
11 n = {}
12 for k in _namespaces:
12 for k in _namespaces:
13 n[k] = ""
13 n[k] = ""
14 return n
14 return n
15
15
16 _namespaces = {"namespaces": (lambda *x: False, _nslist),
16 _namespaces = {"namespaces": (lambda *x: False, _nslist),
17 "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks),
17 "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks),
18 "phases": (phases.pushphase, phases.listphases),
18 "phases": (phases.pushphase, phases.listphases),
19 "obsolete": (obsolete.pushmarker, obsolete.listmarkers),
19 }
20 }
20
21
21 def register(namespace, pushkey, listkeys):
22 def register(namespace, pushkey, listkeys):
22 _namespaces[namespace] = (pushkey, listkeys)
23 _namespaces[namespace] = (pushkey, listkeys)
23
24
24 def _get(namespace):
25 def _get(namespace):
25 return _namespaces.get(namespace, (lambda *x: False, lambda *x: {}))
26 return _namespaces.get(namespace, (lambda *x: False, lambda *x: {}))
26
27
27 def push(repo, namespace, key, old, new):
28 def push(repo, namespace, key, old, new):
28 '''should succeed iff value was old'''
29 '''should succeed iff value was old'''
29 pk = _get(namespace)[0]
30 pk = _get(namespace)[0]
30 return pk(repo, key, old, new)
31 return pk(repo, key, old, new)
31
32
32 def list(repo, namespace):
33 def list(repo, namespace):
33 '''return a dict'''
34 '''return a dict'''
34 lk = _get(namespace)[1]
35 lk = _get(namespace)[1]
35 return lk(repo)
36 return lk(repo)
36
37
@@ -1,264 +1,266 b''
1 $ "$TESTDIR/hghave" serve || exit 80
1 $ "$TESTDIR/hghave" serve || exit 80
2
2
3 initialize
3 initialize
4
4
5 $ hg init a
5 $ hg init a
6 $ cd a
6 $ cd a
7 $ echo 'test' > test
7 $ echo 'test' > test
8 $ hg commit -Am'test'
8 $ hg commit -Am'test'
9 adding test
9 adding test
10
10
11 set bookmarks
11 set bookmarks
12
12
13 $ hg bookmark X
13 $ hg bookmark X
14 $ hg bookmark Y
14 $ hg bookmark Y
15 $ hg bookmark Z
15 $ hg bookmark Z
16
16
17 import bookmark by name
17 import bookmark by name
18
18
19 $ hg init ../b
19 $ hg init ../b
20 $ cd ../b
20 $ cd ../b
21 $ hg book Y
21 $ hg book Y
22 $ hg book
22 $ hg book
23 * Y -1:000000000000
23 * Y -1:000000000000
24 $ hg pull ../a
24 $ hg pull ../a
25 pulling from ../a
25 pulling from ../a
26 requesting all changes
26 requesting all changes
27 adding changesets
27 adding changesets
28 adding manifests
28 adding manifests
29 adding file changes
29 adding file changes
30 added 1 changesets with 1 changes to 1 files
30 added 1 changesets with 1 changes to 1 files
31 updating bookmark Y
31 updating bookmark Y
32 adding remote bookmark X
32 adding remote bookmark X
33 adding remote bookmark Z
33 adding remote bookmark Z
34 (run 'hg update' to get a working copy)
34 (run 'hg update' to get a working copy)
35 $ hg bookmarks
35 $ hg bookmarks
36 X 0:4e3505fd9583
36 X 0:4e3505fd9583
37 Y 0:4e3505fd9583
37 Y 0:4e3505fd9583
38 Z 0:4e3505fd9583
38 Z 0:4e3505fd9583
39 $ hg debugpushkey ../a namespaces
39 $ hg debugpushkey ../a namespaces
40 bookmarks
40 bookmarks
41 phases
41 phases
42 namespaces
42 namespaces
43 obsolete
43 $ hg debugpushkey ../a bookmarks
44 $ hg debugpushkey ../a bookmarks
44 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
45 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
45 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
46 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
46 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
47 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
47 $ hg pull -B X ../a
48 $ hg pull -B X ../a
48 pulling from ../a
49 pulling from ../a
49 no changes found
50 no changes found
50 importing bookmark X
51 importing bookmark X
51 $ hg bookmark
52 $ hg bookmark
52 X 0:4e3505fd9583
53 X 0:4e3505fd9583
53 Y 0:4e3505fd9583
54 Y 0:4e3505fd9583
54 Z 0:4e3505fd9583
55 Z 0:4e3505fd9583
55
56
56 export bookmark by name
57 export bookmark by name
57
58
58 $ hg bookmark W
59 $ hg bookmark W
59 $ hg bookmark foo
60 $ hg bookmark foo
60 $ hg bookmark foobar
61 $ hg bookmark foobar
61 $ hg push -B W ../a
62 $ hg push -B W ../a
62 pushing to ../a
63 pushing to ../a
63 searching for changes
64 searching for changes
64 no changes found
65 no changes found
65 exporting bookmark W
66 exporting bookmark W
66 [1]
67 [1]
67 $ hg -R ../a bookmarks
68 $ hg -R ../a bookmarks
68 W -1:000000000000
69 W -1:000000000000
69 X 0:4e3505fd9583
70 X 0:4e3505fd9583
70 Y 0:4e3505fd9583
71 Y 0:4e3505fd9583
71 * Z 0:4e3505fd9583
72 * Z 0:4e3505fd9583
72
73
73 delete a remote bookmark
74 delete a remote bookmark
74
75
75 $ hg book -d W
76 $ hg book -d W
76 $ hg push -B W ../a
77 $ hg push -B W ../a
77 pushing to ../a
78 pushing to ../a
78 searching for changes
79 searching for changes
79 no changes found
80 no changes found
80 deleting remote bookmark W
81 deleting remote bookmark W
81 [1]
82 [1]
82
83
83 push/pull name that doesn't exist
84 push/pull name that doesn't exist
84
85
85 $ hg push -B badname ../a
86 $ hg push -B badname ../a
86 pushing to ../a
87 pushing to ../a
87 searching for changes
88 searching for changes
88 no changes found
89 no changes found
89 bookmark badname does not exist on the local or remote repository!
90 bookmark badname does not exist on the local or remote repository!
90 [2]
91 [2]
91 $ hg pull -B anotherbadname ../a
92 $ hg pull -B anotherbadname ../a
92 pulling from ../a
93 pulling from ../a
93 abort: remote bookmark anotherbadname not found!
94 abort: remote bookmark anotherbadname not found!
94 [255]
95 [255]
95
96
96 divergent bookmarks
97 divergent bookmarks
97
98
98 $ cd ../a
99 $ cd ../a
99 $ echo c1 > f1
100 $ echo c1 > f1
100 $ hg ci -Am1
101 $ hg ci -Am1
101 adding f1
102 adding f1
102 $ hg book -f X
103 $ hg book -f X
103 $ hg book
104 $ hg book
104 * X 1:0d2164f0ce0d
105 * X 1:0d2164f0ce0d
105 Y 0:4e3505fd9583
106 Y 0:4e3505fd9583
106 Z 1:0d2164f0ce0d
107 Z 1:0d2164f0ce0d
107
108
108 $ cd ../b
109 $ cd ../b
109 $ hg up
110 $ hg up
110 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 updating bookmark foobar
112 updating bookmark foobar
112 $ echo c2 > f2
113 $ echo c2 > f2
113 $ hg ci -Am2
114 $ hg ci -Am2
114 adding f2
115 adding f2
115 $ hg book -f X
116 $ hg book -f X
116 $ hg book
117 $ hg book
117 * X 1:9b140be10808
118 * X 1:9b140be10808
118 Y 0:4e3505fd9583
119 Y 0:4e3505fd9583
119 Z 0:4e3505fd9583
120 Z 0:4e3505fd9583
120 foo -1:000000000000
121 foo -1:000000000000
121 foobar 1:9b140be10808
122 foobar 1:9b140be10808
122
123
123 $ hg pull --config paths.foo=../a foo
124 $ hg pull --config paths.foo=../a foo
124 pulling from $TESTTMP/a (glob)
125 pulling from $TESTTMP/a (glob)
125 searching for changes
126 searching for changes
126 adding changesets
127 adding changesets
127 adding manifests
128 adding manifests
128 adding file changes
129 adding file changes
129 added 1 changesets with 1 changes to 1 files (+1 heads)
130 added 1 changesets with 1 changes to 1 files (+1 heads)
130 divergent bookmark X stored as X@foo
131 divergent bookmark X stored as X@foo
131 updating bookmark Z
132 updating bookmark Z
132 (run 'hg heads' to see heads, 'hg merge' to merge)
133 (run 'hg heads' to see heads, 'hg merge' to merge)
133 $ hg book
134 $ hg book
134 * X 1:9b140be10808
135 * X 1:9b140be10808
135 X@foo 2:0d2164f0ce0d
136 X@foo 2:0d2164f0ce0d
136 Y 0:4e3505fd9583
137 Y 0:4e3505fd9583
137 Z 2:0d2164f0ce0d
138 Z 2:0d2164f0ce0d
138 foo -1:000000000000
139 foo -1:000000000000
139 foobar 1:9b140be10808
140 foobar 1:9b140be10808
140 $ hg push -f ../a
141 $ hg push -f ../a
141 pushing to ../a
142 pushing to ../a
142 searching for changes
143 searching for changes
143 adding changesets
144 adding changesets
144 adding manifests
145 adding manifests
145 adding file changes
146 adding file changes
146 added 1 changesets with 1 changes to 1 files (+1 heads)
147 added 1 changesets with 1 changes to 1 files (+1 heads)
147 $ hg -R ../a book
148 $ hg -R ../a book
148 * X 1:0d2164f0ce0d
149 * X 1:0d2164f0ce0d
149 Y 0:4e3505fd9583
150 Y 0:4e3505fd9583
150 Z 1:0d2164f0ce0d
151 Z 1:0d2164f0ce0d
151
152
152 update a remote bookmark from a non-head to a head
153 update a remote bookmark from a non-head to a head
153
154
154 $ hg up -q Y
155 $ hg up -q Y
155 $ echo c3 > f2
156 $ echo c3 > f2
156 $ hg ci -Am3
157 $ hg ci -Am3
157 adding f2
158 adding f2
158 created new head
159 created new head
159 $ hg push ../a
160 $ hg push ../a
160 pushing to ../a
161 pushing to ../a
161 searching for changes
162 searching for changes
162 adding changesets
163 adding changesets
163 adding manifests
164 adding manifests
164 adding file changes
165 adding file changes
165 added 1 changesets with 1 changes to 1 files (+1 heads)
166 added 1 changesets with 1 changes to 1 files (+1 heads)
166 updating bookmark Y
167 updating bookmark Y
167 $ hg -R ../a book
168 $ hg -R ../a book
168 * X 1:0d2164f0ce0d
169 * X 1:0d2164f0ce0d
169 Y 3:f6fc62dde3c0
170 Y 3:f6fc62dde3c0
170 Z 1:0d2164f0ce0d
171 Z 1:0d2164f0ce0d
171
172
172 diverging a remote bookmark fails
173 diverging a remote bookmark fails
173
174
174 $ hg up -q 4e3505fd9583
175 $ hg up -q 4e3505fd9583
175 $ echo c4 > f2
176 $ echo c4 > f2
176 $ hg ci -Am4
177 $ hg ci -Am4
177 adding f2
178 adding f2
178 created new head
179 created new head
179 $ hg book -f Y
180 $ hg book -f Y
180
181
181 $ cat <<EOF > ../a/.hg/hgrc
182 $ cat <<EOF > ../a/.hg/hgrc
182 > [web]
183 > [web]
183 > push_ssl = false
184 > push_ssl = false
184 > allow_push = *
185 > allow_push = *
185 > EOF
186 > EOF
186
187
187 $ hg -R ../a serve -p $HGPORT2 -d --pid-file=../hg2.pid
188 $ hg -R ../a serve -p $HGPORT2 -d --pid-file=../hg2.pid
188 $ cat ../hg2.pid >> $DAEMON_PIDS
189 $ cat ../hg2.pid >> $DAEMON_PIDS
189
190
190 $ hg push http://localhost:$HGPORT2/
191 $ hg push http://localhost:$HGPORT2/
191 pushing to http://localhost:$HGPORT2/
192 pushing to http://localhost:$HGPORT2/
192 searching for changes
193 searching for changes
193 abort: push creates new remote head 4efff6d98829!
194 abort: push creates new remote head 4efff6d98829!
194 (did you forget to merge? use push -f to force)
195 (did you forget to merge? use push -f to force)
195 [255]
196 [255]
196 $ hg -R ../a book
197 $ hg -R ../a book
197 * X 1:0d2164f0ce0d
198 * X 1:0d2164f0ce0d
198 Y 3:f6fc62dde3c0
199 Y 3:f6fc62dde3c0
199 Z 1:0d2164f0ce0d
200 Z 1:0d2164f0ce0d
200
201
201 hgweb
202 hgweb
202
203
203 $ cat <<EOF > .hg/hgrc
204 $ cat <<EOF > .hg/hgrc
204 > [web]
205 > [web]
205 > push_ssl = false
206 > push_ssl = false
206 > allow_push = *
207 > allow_push = *
207 > EOF
208 > EOF
208
209
209 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
210 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
210 $ cat ../hg.pid >> $DAEMON_PIDS
211 $ cat ../hg.pid >> $DAEMON_PIDS
211 $ cd ../a
212 $ cd ../a
212
213
213 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
214 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
214 bookmarks
215 bookmarks
215 phases
216 phases
216 namespaces
217 namespaces
218 obsolete
217 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
219 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
218 Y 4efff6d98829d9c824c621afd6e3f01865f5439f
220 Y 4efff6d98829d9c824c621afd6e3f01865f5439f
219 foobar 9b140be1080824d768c5a4691a564088eede71f9
221 foobar 9b140be1080824d768c5a4691a564088eede71f9
220 Z 0d2164f0ce0d8f1d6f94351eba04b794909be66c
222 Z 0d2164f0ce0d8f1d6f94351eba04b794909be66c
221 foo 0000000000000000000000000000000000000000
223 foo 0000000000000000000000000000000000000000
222 X 9b140be1080824d768c5a4691a564088eede71f9
224 X 9b140be1080824d768c5a4691a564088eede71f9
223 $ hg out -B http://localhost:$HGPORT/
225 $ hg out -B http://localhost:$HGPORT/
224 comparing with http://localhost:$HGPORT/
226 comparing with http://localhost:$HGPORT/
225 searching for changed bookmarks
227 searching for changed bookmarks
226 no changed bookmarks found
228 no changed bookmarks found
227 [1]
229 [1]
228 $ hg push -B Z http://localhost:$HGPORT/
230 $ hg push -B Z http://localhost:$HGPORT/
229 pushing to http://localhost:$HGPORT/
231 pushing to http://localhost:$HGPORT/
230 searching for changes
232 searching for changes
231 no changes found
233 no changes found
232 exporting bookmark Z
234 exporting bookmark Z
233 [1]
235 [1]
234 $ hg book -d Z
236 $ hg book -d Z
235 $ hg in -B http://localhost:$HGPORT/
237 $ hg in -B http://localhost:$HGPORT/
236 comparing with http://localhost:$HGPORT/
238 comparing with http://localhost:$HGPORT/
237 searching for changed bookmarks
239 searching for changed bookmarks
238 Z 0d2164f0ce0d
240 Z 0d2164f0ce0d
239 foo 000000000000
241 foo 000000000000
240 foobar 9b140be10808
242 foobar 9b140be10808
241 $ hg pull -B Z http://localhost:$HGPORT/
243 $ hg pull -B Z http://localhost:$HGPORT/
242 pulling from http://localhost:$HGPORT/
244 pulling from http://localhost:$HGPORT/
243 no changes found
245 no changes found
244 adding remote bookmark foobar
246 adding remote bookmark foobar
245 adding remote bookmark Z
247 adding remote bookmark Z
246 adding remote bookmark foo
248 adding remote bookmark foo
247 divergent bookmark X stored as X@1
249 divergent bookmark X stored as X@1
248 importing bookmark Z
250 importing bookmark Z
249 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
251 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
250 requesting all changes
252 requesting all changes
251 adding changesets
253 adding changesets
252 adding manifests
254 adding manifests
253 adding file changes
255 adding file changes
254 added 5 changesets with 5 changes to 3 files (+3 heads)
256 added 5 changesets with 5 changes to 3 files (+3 heads)
255 updating to branch default
257 updating to branch default
256 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
258 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
257 $ hg -R cloned-bookmarks bookmarks
259 $ hg -R cloned-bookmarks bookmarks
258 X 1:9b140be10808
260 X 1:9b140be10808
259 Y 4:4efff6d98829
261 Y 4:4efff6d98829
260 Z 2:0d2164f0ce0d
262 Z 2:0d2164f0ce0d
261 foo -1:000000000000
263 foo -1:000000000000
262 foobar 1:9b140be10808
264 foobar 1:9b140be10808
263
265
264 $ cd ..
266 $ cd ..
@@ -1,628 +1,629 b''
1 commit hooks can see env vars
1 commit hooks can see env vars
2
2
3 $ hg init a
3 $ hg init a
4 $ cd a
4 $ cd a
5 $ cat > .hg/hgrc <<EOF
5 $ cat > .hg/hgrc <<EOF
6 > [hooks]
6 > [hooks]
7 > commit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit"
7 > commit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit"
8 > commit.b = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit.b"
8 > commit.b = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" commit.b"
9 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= python \"$TESTDIR/printenv.py\" precommit"
9 > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= python \"$TESTDIR/printenv.py\" precommit"
10 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" pretxncommit"
10 > pretxncommit = sh -c "HG_LOCAL= HG_TAG= python \"$TESTDIR/printenv.py\" pretxncommit"
11 > pretxncommit.tip = hg -q tip
11 > pretxncommit.tip = hg -q tip
12 > pre-identify = python "$TESTDIR/printenv.py" pre-identify 1
12 > pre-identify = python "$TESTDIR/printenv.py" pre-identify 1
13 > pre-cat = python "$TESTDIR/printenv.py" pre-cat
13 > pre-cat = python "$TESTDIR/printenv.py" pre-cat
14 > post-cat = python "$TESTDIR/printenv.py" post-cat
14 > post-cat = python "$TESTDIR/printenv.py" post-cat
15 > EOF
15 > EOF
16 $ echo a > a
16 $ echo a > a
17 $ hg add a
17 $ hg add a
18 $ hg commit -m a
18 $ hg commit -m a
19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
19 precommit hook: HG_PARENT1=0000000000000000000000000000000000000000
20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
20 pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a
21 0:cb9a9f314b8b
21 0:cb9a9f314b8b
22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
22 commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
23 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000
24
24
25 $ hg clone . ../b
25 $ hg clone . ../b
26 updating to branch default
26 updating to branch default
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 $ cd ../b
28 $ cd ../b
29
29
30 changegroup hooks can see env vars
30 changegroup hooks can see env vars
31
31
32 $ cat > .hg/hgrc <<EOF
32 $ cat > .hg/hgrc <<EOF
33 > [hooks]
33 > [hooks]
34 > prechangegroup = python "$TESTDIR/printenv.py" prechangegroup
34 > prechangegroup = python "$TESTDIR/printenv.py" prechangegroup
35 > changegroup = python "$TESTDIR/printenv.py" changegroup
35 > changegroup = python "$TESTDIR/printenv.py" changegroup
36 > incoming = python "$TESTDIR/printenv.py" incoming
36 > incoming = python "$TESTDIR/printenv.py" incoming
37 > EOF
37 > EOF
38
38
39 pretxncommit and commit hooks can see both parents of merge
39 pretxncommit and commit hooks can see both parents of merge
40
40
41 $ cd ../a
41 $ cd ../a
42 $ echo b >> a
42 $ echo b >> a
43 $ hg commit -m a1 -d "1 0"
43 $ hg commit -m a1 -d "1 0"
44 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
44 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
45 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
45 pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
46 1:ab228980c14d
46 1:ab228980c14d
47 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
47 commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
48 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
48 commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
49 $ hg update -C 0
49 $ hg update -C 0
50 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
51 $ echo b > b
51 $ echo b > b
52 $ hg add b
52 $ hg add b
53 $ hg commit -m b -d '1 0'
53 $ hg commit -m b -d '1 0'
54 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
54 precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
55 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
55 pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a
56 2:ee9deb46ab31
56 2:ee9deb46ab31
57 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
57 commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
58 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
58 commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b
59 created new head
59 created new head
60 $ hg merge 1
60 $ hg merge 1
61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 (branch merge, don't forget to commit)
62 (branch merge, don't forget to commit)
63 $ hg commit -m merge -d '2 0'
63 $ hg commit -m merge -d '2 0'
64 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
64 precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
65 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
65 pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a
66 3:07f3376c1e65
66 3:07f3376c1e65
67 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
67 commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
68 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
68 commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd
69
69
70 test generic hooks
70 test generic hooks
71
71
72 $ hg id
72 $ hg id
73 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
73 pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[]
74 warning: pre-identify hook exited with status 1
74 warning: pre-identify hook exited with status 1
75 [1]
75 [1]
76 $ hg cat b
76 $ hg cat b
77 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
77 pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b']
78 b
78 b
79 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
79 post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0
80
80
81 $ cd ../b
81 $ cd ../b
82 $ hg pull ../a
82 $ hg pull ../a
83 pulling from ../a
83 pulling from ../a
84 searching for changes
84 searching for changes
85 prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
85 prechangegroup hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
86 adding changesets
86 adding changesets
87 adding manifests
87 adding manifests
88 adding file changes
88 adding file changes
89 added 3 changesets with 2 changes to 2 files
89 added 3 changesets with 2 changes to 2 files
90 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
90 changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
91 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
91 incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_URL=file:$TESTTMP/a
92 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
92 incoming hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
93 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
93 incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_URL=file:$TESTTMP/a
94 (run 'hg update' to get a working copy)
94 (run 'hg update' to get a working copy)
95
95
96 tag hooks can see env vars
96 tag hooks can see env vars
97
97
98 $ cd ../a
98 $ cd ../a
99 $ cat >> .hg/hgrc <<EOF
99 $ cat >> .hg/hgrc <<EOF
100 > pretag = python "$TESTDIR/printenv.py" pretag
100 > pretag = python "$TESTDIR/printenv.py" pretag
101 > tag = sh -c "HG_PARENT1= HG_PARENT2= python \"$TESTDIR/printenv.py\" tag"
101 > tag = sh -c "HG_PARENT1= HG_PARENT2= python \"$TESTDIR/printenv.py\" tag"
102 > EOF
102 > EOF
103 $ hg tag -d '3 0' a
103 $ hg tag -d '3 0' a
104 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
104 pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
105 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
105 precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
106 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
106 pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a
107 4:539e4b31b6dc
107 4:539e4b31b6dc
108 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
108 tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a
109 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
109 commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
110 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
110 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2
111 $ hg tag -l la
111 $ hg tag -l la
112 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
112 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
113 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
113 tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la
114
114
115 pretag hook can forbid tagging
115 pretag hook can forbid tagging
116
116
117 $ echo "pretag.forbid = python \"$TESTDIR/printenv.py\" pretag.forbid 1" >> .hg/hgrc
117 $ echo "pretag.forbid = python \"$TESTDIR/printenv.py\" pretag.forbid 1" >> .hg/hgrc
118 $ hg tag -d '4 0' fa
118 $ hg tag -d '4 0' fa
119 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
119 pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
120 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
120 pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa
121 abort: pretag.forbid hook exited with status 1
121 abort: pretag.forbid hook exited with status 1
122 [255]
122 [255]
123 $ hg tag -l fla
123 $ hg tag -l fla
124 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
124 pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
125 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
125 pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla
126 abort: pretag.forbid hook exited with status 1
126 abort: pretag.forbid hook exited with status 1
127 [255]
127 [255]
128
128
129 pretxncommit hook can see changeset, can roll back txn, changeset no
129 pretxncommit hook can see changeset, can roll back txn, changeset no
130 more there after
130 more there after
131
131
132 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
132 $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc
133 $ echo "pretxncommit.forbid1 = python \"$TESTDIR/printenv.py\" pretxncommit.forbid 1" >> .hg/hgrc
133 $ echo "pretxncommit.forbid1 = python \"$TESTDIR/printenv.py\" pretxncommit.forbid 1" >> .hg/hgrc
134 $ echo z > z
134 $ echo z > z
135 $ hg add z
135 $ hg add z
136 $ hg -q tip
136 $ hg -q tip
137 4:539e4b31b6dc
137 4:539e4b31b6dc
138 $ hg commit -m 'fail' -d '4 0'
138 $ hg commit -m 'fail' -d '4 0'
139 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
139 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
140 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
140 pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
141 5:6f611f8018c1
141 5:6f611f8018c1
142 5:6f611f8018c1
142 5:6f611f8018c1
143 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
143 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a
144 transaction abort!
144 transaction abort!
145 rollback completed
145 rollback completed
146 abort: pretxncommit.forbid1 hook exited with status 1
146 abort: pretxncommit.forbid1 hook exited with status 1
147 [255]
147 [255]
148 $ hg -q tip
148 $ hg -q tip
149 4:539e4b31b6dc
149 4:539e4b31b6dc
150
150
151 precommit hook can prevent commit
151 precommit hook can prevent commit
152
152
153 $ echo "precommit.forbid = python \"$TESTDIR/printenv.py\" precommit.forbid 1" >> .hg/hgrc
153 $ echo "precommit.forbid = python \"$TESTDIR/printenv.py\" precommit.forbid 1" >> .hg/hgrc
154 $ hg commit -m 'fail' -d '4 0'
154 $ hg commit -m 'fail' -d '4 0'
155 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
155 precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
156 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
156 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10
157 abort: precommit.forbid hook exited with status 1
157 abort: precommit.forbid hook exited with status 1
158 [255]
158 [255]
159 $ hg -q tip
159 $ hg -q tip
160 4:539e4b31b6dc
160 4:539e4b31b6dc
161
161
162 preupdate hook can prevent update
162 preupdate hook can prevent update
163
163
164 $ echo "preupdate = python \"$TESTDIR/printenv.py\" preupdate" >> .hg/hgrc
164 $ echo "preupdate = python \"$TESTDIR/printenv.py\" preupdate" >> .hg/hgrc
165 $ hg update 1
165 $ hg update 1
166 preupdate hook: HG_PARENT1=ab228980c14d
166 preupdate hook: HG_PARENT1=ab228980c14d
167 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
167 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
168
168
169 update hook
169 update hook
170
170
171 $ echo "update = python \"$TESTDIR/printenv.py\" update" >> .hg/hgrc
171 $ echo "update = python \"$TESTDIR/printenv.py\" update" >> .hg/hgrc
172 $ hg update
172 $ hg update
173 preupdate hook: HG_PARENT1=539e4b31b6dc
173 preupdate hook: HG_PARENT1=539e4b31b6dc
174 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
174 update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc
175 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
175 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
176
176
177 pushkey hook
177 pushkey hook
178
178
179 $ echo "pushkey = python \"$TESTDIR/printenv.py\" pushkey" >> .hg/hgrc
179 $ echo "pushkey = python \"$TESTDIR/printenv.py\" pushkey" >> .hg/hgrc
180 $ cd ../b
180 $ cd ../b
181 $ hg bookmark -r null foo
181 $ hg bookmark -r null foo
182 $ hg push -B foo ../a
182 $ hg push -B foo ../a
183 pushing to ../a
183 pushing to ../a
184 searching for changes
184 searching for changes
185 no changes found
185 no changes found
186 exporting bookmark foo
186 exporting bookmark foo
187 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
187 pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1
188 [1]
188 [1]
189 $ cd ../a
189 $ cd ../a
190
190
191 listkeys hook
191 listkeys hook
192
192
193 $ echo "listkeys = python \"$TESTDIR/printenv.py\" listkeys" >> .hg/hgrc
193 $ echo "listkeys = python \"$TESTDIR/printenv.py\" listkeys" >> .hg/hgrc
194 $ hg bookmark -r null bar
194 $ hg bookmark -r null bar
195 $ cd ../b
195 $ cd ../b
196 $ hg pull -B bar ../a
196 $ hg pull -B bar ../a
197 pulling from ../a
197 pulling from ../a
198 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
198 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
199 no changes found
199 no changes found
200 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
200 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
201 listkeys hook: HG_NAMESPACE=obsolete HG_VALUES={}
201 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
202 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
202 adding remote bookmark bar
203 adding remote bookmark bar
203 importing bookmark bar
204 importing bookmark bar
204 $ cd ../a
205 $ cd ../a
205
206
206 test that prepushkey can prevent incoming keys
207 test that prepushkey can prevent incoming keys
207
208
208 $ echo "prepushkey = python \"$TESTDIR/printenv.py\" prepushkey.forbid 1" >> .hg/hgrc
209 $ echo "prepushkey = python \"$TESTDIR/printenv.py\" prepushkey.forbid 1" >> .hg/hgrc
209 $ cd ../b
210 $ cd ../b
210 $ hg bookmark -r null baz
211 $ hg bookmark -r null baz
211 $ hg push -B baz ../a
212 $ hg push -B baz ../a
212 pushing to ../a
213 pushing to ../a
213 searching for changes
214 searching for changes
214 no changes found
215 no changes found
215 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
216 listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'}
216 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
217 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
217 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
218 listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'}
218 exporting bookmark baz
219 exporting bookmark baz
219 prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
220 prepushkey.forbid hook: HG_KEY=baz HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000
220 abort: prepushkey hook exited with status 1
221 abort: prepushkey hook exited with status 1
221 [255]
222 [255]
222 $ cd ../a
223 $ cd ../a
223
224
224 test that prelistkeys can prevent listing keys
225 test that prelistkeys can prevent listing keys
225
226
226 $ echo "prelistkeys = python \"$TESTDIR/printenv.py\" prelistkeys.forbid 1" >> .hg/hgrc
227 $ echo "prelistkeys = python \"$TESTDIR/printenv.py\" prelistkeys.forbid 1" >> .hg/hgrc
227 $ hg bookmark -r null quux
228 $ hg bookmark -r null quux
228 $ cd ../b
229 $ cd ../b
229 $ hg pull -B quux ../a
230 $ hg pull -B quux ../a
230 pulling from ../a
231 pulling from ../a
231 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
232 prelistkeys.forbid hook: HG_NAMESPACE=bookmarks
232 abort: prelistkeys hook exited with status 1
233 abort: prelistkeys hook exited with status 1
233 [255]
234 [255]
234 $ cd ../a
235 $ cd ../a
235
236
236 prechangegroup hook can prevent incoming changes
237 prechangegroup hook can prevent incoming changes
237
238
238 $ cd ../b
239 $ cd ../b
239 $ hg -q tip
240 $ hg -q tip
240 3:07f3376c1e65
241 3:07f3376c1e65
241 $ cat > .hg/hgrc <<EOF
242 $ cat > .hg/hgrc <<EOF
242 > [hooks]
243 > [hooks]
243 > prechangegroup.forbid = python "$TESTDIR/printenv.py" prechangegroup.forbid 1
244 > prechangegroup.forbid = python "$TESTDIR/printenv.py" prechangegroup.forbid 1
244 > EOF
245 > EOF
245 $ hg pull ../a
246 $ hg pull ../a
246 pulling from ../a
247 pulling from ../a
247 searching for changes
248 searching for changes
248 prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
249 prechangegroup.forbid hook: HG_SOURCE=pull HG_URL=file:$TESTTMP/a
249 abort: prechangegroup.forbid hook exited with status 1
250 abort: prechangegroup.forbid hook exited with status 1
250 [255]
251 [255]
251
252
252 pretxnchangegroup hook can see incoming changes, can roll back txn,
253 pretxnchangegroup hook can see incoming changes, can roll back txn,
253 incoming changes no longer there after
254 incoming changes no longer there after
254
255
255 $ cat > .hg/hgrc <<EOF
256 $ cat > .hg/hgrc <<EOF
256 > [hooks]
257 > [hooks]
257 > pretxnchangegroup.forbid0 = hg tip -q
258 > pretxnchangegroup.forbid0 = hg tip -q
258 > pretxnchangegroup.forbid1 = python "$TESTDIR/printenv.py" pretxnchangegroup.forbid 1
259 > pretxnchangegroup.forbid1 = python "$TESTDIR/printenv.py" pretxnchangegroup.forbid 1
259 > EOF
260 > EOF
260 $ hg pull ../a
261 $ hg pull ../a
261 pulling from ../a
262 pulling from ../a
262 searching for changes
263 searching for changes
263 adding changesets
264 adding changesets
264 adding manifests
265 adding manifests
265 adding file changes
266 adding file changes
266 added 1 changesets with 1 changes to 1 files
267 added 1 changesets with 1 changes to 1 files
267 4:539e4b31b6dc
268 4:539e4b31b6dc
268 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
269 pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_URL=file:$TESTTMP/a
269 transaction abort!
270 transaction abort!
270 rollback completed
271 rollback completed
271 abort: pretxnchangegroup.forbid1 hook exited with status 1
272 abort: pretxnchangegroup.forbid1 hook exited with status 1
272 [255]
273 [255]
273 $ hg -q tip
274 $ hg -q tip
274 3:07f3376c1e65
275 3:07f3376c1e65
275
276
276 outgoing hooks can see env vars
277 outgoing hooks can see env vars
277
278
278 $ rm .hg/hgrc
279 $ rm .hg/hgrc
279 $ cat > ../a/.hg/hgrc <<EOF
280 $ cat > ../a/.hg/hgrc <<EOF
280 > [hooks]
281 > [hooks]
281 > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
282 > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
282 > outgoing = python "$TESTDIR/printenv.py" outgoing
283 > outgoing = python "$TESTDIR/printenv.py" outgoing
283 > EOF
284 > EOF
284 $ hg pull ../a
285 $ hg pull ../a
285 pulling from ../a
286 pulling from ../a
286 searching for changes
287 searching for changes
287 preoutgoing hook: HG_SOURCE=pull
288 preoutgoing hook: HG_SOURCE=pull
288 adding changesets
289 adding changesets
289 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
290 outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull
290 adding manifests
291 adding manifests
291 adding file changes
292 adding file changes
292 added 1 changesets with 1 changes to 1 files
293 added 1 changesets with 1 changes to 1 files
293 adding remote bookmark quux
294 adding remote bookmark quux
294 (run 'hg update' to get a working copy)
295 (run 'hg update' to get a working copy)
295 $ hg rollback
296 $ hg rollback
296 repository tip rolled back to revision 3 (undo pull)
297 repository tip rolled back to revision 3 (undo pull)
297
298
298 preoutgoing hook can prevent outgoing changes
299 preoutgoing hook can prevent outgoing changes
299
300
300 $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> ../a/.hg/hgrc
301 $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> ../a/.hg/hgrc
301 $ hg pull ../a
302 $ hg pull ../a
302 pulling from ../a
303 pulling from ../a
303 searching for changes
304 searching for changes
304 preoutgoing hook: HG_SOURCE=pull
305 preoutgoing hook: HG_SOURCE=pull
305 preoutgoing.forbid hook: HG_SOURCE=pull
306 preoutgoing.forbid hook: HG_SOURCE=pull
306 abort: preoutgoing.forbid hook exited with status 1
307 abort: preoutgoing.forbid hook exited with status 1
307 [255]
308 [255]
308
309
309 outgoing hooks work for local clones
310 outgoing hooks work for local clones
310
311
311 $ cd ..
312 $ cd ..
312 $ cat > a/.hg/hgrc <<EOF
313 $ cat > a/.hg/hgrc <<EOF
313 > [hooks]
314 > [hooks]
314 > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
315 > preoutgoing = python "$TESTDIR/printenv.py" preoutgoing
315 > outgoing = python "$TESTDIR/printenv.py" outgoing
316 > outgoing = python "$TESTDIR/printenv.py" outgoing
316 > EOF
317 > EOF
317 $ hg clone a c
318 $ hg clone a c
318 preoutgoing hook: HG_SOURCE=clone
319 preoutgoing hook: HG_SOURCE=clone
319 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
320 outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone
320 updating to branch default
321 updating to branch default
321 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
322 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
322 $ rm -rf c
323 $ rm -rf c
323
324
324 preoutgoing hook can prevent outgoing changes for local clones
325 preoutgoing hook can prevent outgoing changes for local clones
325
326
326 $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> a/.hg/hgrc
327 $ echo "preoutgoing.forbid = python \"$TESTDIR/printenv.py\" preoutgoing.forbid 1" >> a/.hg/hgrc
327 $ hg clone a zzz
328 $ hg clone a zzz
328 preoutgoing hook: HG_SOURCE=clone
329 preoutgoing hook: HG_SOURCE=clone
329 preoutgoing.forbid hook: HG_SOURCE=clone
330 preoutgoing.forbid hook: HG_SOURCE=clone
330 abort: preoutgoing.forbid hook exited with status 1
331 abort: preoutgoing.forbid hook exited with status 1
331 [255]
332 [255]
332
333
333 $ cd "$TESTTMP/b"
334 $ cd "$TESTTMP/b"
334
335
335 $ cat > hooktests.py <<EOF
336 $ cat > hooktests.py <<EOF
336 > from mercurial import util
337 > from mercurial import util
337 >
338 >
338 > uncallable = 0
339 > uncallable = 0
339 >
340 >
340 > def printargs(args):
341 > def printargs(args):
341 > args.pop('ui', None)
342 > args.pop('ui', None)
342 > args.pop('repo', None)
343 > args.pop('repo', None)
343 > a = list(args.items())
344 > a = list(args.items())
344 > a.sort()
345 > a.sort()
345 > print 'hook args:'
346 > print 'hook args:'
346 > for k, v in a:
347 > for k, v in a:
347 > print ' ', k, v
348 > print ' ', k, v
348 >
349 >
349 > def passhook(**args):
350 > def passhook(**args):
350 > printargs(args)
351 > printargs(args)
351 >
352 >
352 > def failhook(**args):
353 > def failhook(**args):
353 > printargs(args)
354 > printargs(args)
354 > return True
355 > return True
355 >
356 >
356 > class LocalException(Exception):
357 > class LocalException(Exception):
357 > pass
358 > pass
358 >
359 >
359 > def raisehook(**args):
360 > def raisehook(**args):
360 > raise LocalException('exception from hook')
361 > raise LocalException('exception from hook')
361 >
362 >
362 > def aborthook(**args):
363 > def aborthook(**args):
363 > raise util.Abort('raise abort from hook')
364 > raise util.Abort('raise abort from hook')
364 >
365 >
365 > def brokenhook(**args):
366 > def brokenhook(**args):
366 > return 1 + {}
367 > return 1 + {}
367 >
368 >
368 > def verbosehook(ui, **args):
369 > def verbosehook(ui, **args):
369 > ui.note('verbose output from hook\n')
370 > ui.note('verbose output from hook\n')
370 >
371 >
371 > def printtags(ui, repo, **args):
372 > def printtags(ui, repo, **args):
372 > print repo.tags().keys()
373 > print repo.tags().keys()
373 >
374 >
374 > class container:
375 > class container:
375 > unreachable = 1
376 > unreachable = 1
376 > EOF
377 > EOF
377
378
378 test python hooks
379 test python hooks
379
380
380 #if windows
381 #if windows
381 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
382 $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH"
382 #else
383 #else
383 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
384 $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH"
384 #endif
385 #endif
385 $ export PYTHONPATH
386 $ export PYTHONPATH
386
387
387 $ echo '[hooks]' > ../a/.hg/hgrc
388 $ echo '[hooks]' > ../a/.hg/hgrc
388 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
389 $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
389 $ hg pull ../a 2>&1 | grep 'raised an exception'
390 $ hg pull ../a 2>&1 | grep 'raised an exception'
390 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
391 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
391
392
392 $ echo '[hooks]' > ../a/.hg/hgrc
393 $ echo '[hooks]' > ../a/.hg/hgrc
393 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
394 $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
394 $ hg pull ../a 2>&1 | grep 'raised an exception'
395 $ hg pull ../a 2>&1 | grep 'raised an exception'
395 error: preoutgoing.raise hook raised an exception: exception from hook
396 error: preoutgoing.raise hook raised an exception: exception from hook
396
397
397 $ echo '[hooks]' > ../a/.hg/hgrc
398 $ echo '[hooks]' > ../a/.hg/hgrc
398 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
399 $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
399 $ hg pull ../a
400 $ hg pull ../a
400 pulling from ../a
401 pulling from ../a
401 searching for changes
402 searching for changes
402 error: preoutgoing.abort hook failed: raise abort from hook
403 error: preoutgoing.abort hook failed: raise abort from hook
403 abort: raise abort from hook
404 abort: raise abort from hook
404 [255]
405 [255]
405
406
406 $ echo '[hooks]' > ../a/.hg/hgrc
407 $ echo '[hooks]' > ../a/.hg/hgrc
407 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
408 $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
408 $ hg pull ../a
409 $ hg pull ../a
409 pulling from ../a
410 pulling from ../a
410 searching for changes
411 searching for changes
411 hook args:
412 hook args:
412 hooktype preoutgoing
413 hooktype preoutgoing
413 source pull
414 source pull
414 abort: preoutgoing.fail hook failed
415 abort: preoutgoing.fail hook failed
415 [255]
416 [255]
416
417
417 $ echo '[hooks]' > ../a/.hg/hgrc
418 $ echo '[hooks]' > ../a/.hg/hgrc
418 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
419 $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
419 $ hg pull ../a
420 $ hg pull ../a
420 pulling from ../a
421 pulling from ../a
421 searching for changes
422 searching for changes
422 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
423 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
423 [255]
424 [255]
424
425
425 $ echo '[hooks]' > ../a/.hg/hgrc
426 $ echo '[hooks]' > ../a/.hg/hgrc
426 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
427 $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
427 $ hg pull ../a
428 $ hg pull ../a
428 pulling from ../a
429 pulling from ../a
429 searching for changes
430 searching for changes
430 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
431 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
431 [255]
432 [255]
432
433
433 $ echo '[hooks]' > ../a/.hg/hgrc
434 $ echo '[hooks]' > ../a/.hg/hgrc
434 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
435 $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
435 $ hg pull ../a
436 $ hg pull ../a
436 pulling from ../a
437 pulling from ../a
437 searching for changes
438 searching for changes
438 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
439 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
439 [255]
440 [255]
440
441
441 $ echo '[hooks]' > ../a/.hg/hgrc
442 $ echo '[hooks]' > ../a/.hg/hgrc
442 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
443 $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
443 $ hg pull ../a
444 $ hg pull ../a
444 pulling from ../a
445 pulling from ../a
445 searching for changes
446 searching for changes
446 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
447 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
447 [255]
448 [255]
448
449
449 $ echo '[hooks]' > ../a/.hg/hgrc
450 $ echo '[hooks]' > ../a/.hg/hgrc
450 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
451 $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
451 $ hg pull ../a
452 $ hg pull ../a
452 pulling from ../a
453 pulling from ../a
453 searching for changes
454 searching for changes
454 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
455 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
455 [255]
456 [255]
456
457
457 $ echo '[hooks]' > ../a/.hg/hgrc
458 $ echo '[hooks]' > ../a/.hg/hgrc
458 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
459 $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
459 $ hg pull ../a
460 $ hg pull ../a
460 pulling from ../a
461 pulling from ../a
461 searching for changes
462 searching for changes
462 hook args:
463 hook args:
463 hooktype preoutgoing
464 hooktype preoutgoing
464 source pull
465 source pull
465 adding changesets
466 adding changesets
466 adding manifests
467 adding manifests
467 adding file changes
468 adding file changes
468 added 1 changesets with 1 changes to 1 files
469 added 1 changesets with 1 changes to 1 files
469 adding remote bookmark quux
470 adding remote bookmark quux
470 (run 'hg update' to get a working copy)
471 (run 'hg update' to get a working copy)
471
472
472 make sure --traceback works
473 make sure --traceback works
473
474
474 $ echo '[hooks]' > .hg/hgrc
475 $ echo '[hooks]' > .hg/hgrc
475 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
476 $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
476
477
477 $ echo aa > a
478 $ echo aa > a
478 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
479 $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback'
479 Traceback (most recent call last):
480 Traceback (most recent call last):
480
481
481 $ cd ..
482 $ cd ..
482 $ hg init c
483 $ hg init c
483 $ cd c
484 $ cd c
484
485
485 $ cat > hookext.py <<EOF
486 $ cat > hookext.py <<EOF
486 > def autohook(**args):
487 > def autohook(**args):
487 > print "Automatically installed hook"
488 > print "Automatically installed hook"
488 >
489 >
489 > def reposetup(ui, repo):
490 > def reposetup(ui, repo):
490 > repo.ui.setconfig("hooks", "commit.auto", autohook)
491 > repo.ui.setconfig("hooks", "commit.auto", autohook)
491 > EOF
492 > EOF
492 $ echo '[extensions]' >> .hg/hgrc
493 $ echo '[extensions]' >> .hg/hgrc
493 $ echo 'hookext = hookext.py' >> .hg/hgrc
494 $ echo 'hookext = hookext.py' >> .hg/hgrc
494
495
495 $ touch foo
496 $ touch foo
496 $ hg add foo
497 $ hg add foo
497 $ hg ci -d '0 0' -m 'add foo'
498 $ hg ci -d '0 0' -m 'add foo'
498 Automatically installed hook
499 Automatically installed hook
499 $ echo >> foo
500 $ echo >> foo
500 $ hg ci --debug -d '0 0' -m 'change foo'
501 $ hg ci --debug -d '0 0' -m 'change foo'
501 foo
502 foo
502 calling hook commit.auto: <function autohook at *> (glob)
503 calling hook commit.auto: <function autohook at *> (glob)
503 Automatically installed hook
504 Automatically installed hook
504 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
505 committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708
505
506
506 $ hg showconfig hooks
507 $ hg showconfig hooks
507 hooks.commit.auto=<function autohook at *> (glob)
508 hooks.commit.auto=<function autohook at *> (glob)
508
509
509 test python hook configured with python:[file]:[hook] syntax
510 test python hook configured with python:[file]:[hook] syntax
510
511
511 $ cd ..
512 $ cd ..
512 $ mkdir d
513 $ mkdir d
513 $ cd d
514 $ cd d
514 $ hg init repo
515 $ hg init repo
515 $ mkdir hooks
516 $ mkdir hooks
516
517
517 $ cd hooks
518 $ cd hooks
518 $ cat > testhooks.py <<EOF
519 $ cat > testhooks.py <<EOF
519 > def testhook(**args):
520 > def testhook(**args):
520 > print 'hook works'
521 > print 'hook works'
521 > EOF
522 > EOF
522 $ echo '[hooks]' > ../repo/.hg/hgrc
523 $ echo '[hooks]' > ../repo/.hg/hgrc
523 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
524 $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc
524
525
525 $ cd ../repo
526 $ cd ../repo
526 $ hg commit -d '0 0'
527 $ hg commit -d '0 0'
527 hook works
528 hook works
528 nothing changed
529 nothing changed
529 [1]
530 [1]
530
531
531 $ cd ../../b
532 $ cd ../../b
532
533
533 make sure --traceback works on hook import failure
534 make sure --traceback works on hook import failure
534
535
535 $ cat > importfail.py <<EOF
536 $ cat > importfail.py <<EOF
536 > import somebogusmodule
537 > import somebogusmodule
537 > # dereference something in the module to force demandimport to load it
538 > # dereference something in the module to force demandimport to load it
538 > somebogusmodule.whatever
539 > somebogusmodule.whatever
539 > EOF
540 > EOF
540
541
541 $ echo '[hooks]' > .hg/hgrc
542 $ echo '[hooks]' > .hg/hgrc
542 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
543 $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc
543
544
544 $ echo a >> a
545 $ echo a >> a
545 $ hg --traceback commit -ma 2>&1 | egrep '^(exception|Traceback|ImportError)'
546 $ hg --traceback commit -ma 2>&1 | egrep '^(exception|Traceback|ImportError)'
546 exception from first failed import attempt:
547 exception from first failed import attempt:
547 Traceback (most recent call last):
548 Traceback (most recent call last):
548 ImportError: No module named somebogusmodule
549 ImportError: No module named somebogusmodule
549 exception from second failed import attempt:
550 exception from second failed import attempt:
550 Traceback (most recent call last):
551 Traceback (most recent call last):
551 ImportError: No module named hgext_importfail
552 ImportError: No module named hgext_importfail
552 Traceback (most recent call last):
553 Traceback (most recent call last):
553
554
554 Issue1827: Hooks Update & Commit not completely post operation
555 Issue1827: Hooks Update & Commit not completely post operation
555
556
556 commit and update hooks should run after command completion
557 commit and update hooks should run after command completion
557
558
558 $ echo '[hooks]' > .hg/hgrc
559 $ echo '[hooks]' > .hg/hgrc
559 $ echo 'commit = hg id' >> .hg/hgrc
560 $ echo 'commit = hg id' >> .hg/hgrc
560 $ echo 'update = hg id' >> .hg/hgrc
561 $ echo 'update = hg id' >> .hg/hgrc
561 $ echo bb > a
562 $ echo bb > a
562 $ hg ci -ma
563 $ hg ci -ma
563 223eafe2750c tip
564 223eafe2750c tip
564 $ hg up 0
565 $ hg up 0
565 cb9a9f314b8b
566 cb9a9f314b8b
566 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
567 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
567
568
568 make sure --verbose (and --quiet/--debug etc.) are propogated to the local ui
569 make sure --verbose (and --quiet/--debug etc.) are propogated to the local ui
569 that is passed to pre/post hooks
570 that is passed to pre/post hooks
570
571
571 $ echo '[hooks]' > .hg/hgrc
572 $ echo '[hooks]' > .hg/hgrc
572 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
573 $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc
573 $ hg id
574 $ hg id
574 cb9a9f314b8b
575 cb9a9f314b8b
575 $ hg id --verbose
576 $ hg id --verbose
576 calling hook pre-identify: hooktests.verbosehook
577 calling hook pre-identify: hooktests.verbosehook
577 verbose output from hook
578 verbose output from hook
578 cb9a9f314b8b
579 cb9a9f314b8b
579
580
580 Ensure hooks can be prioritized
581 Ensure hooks can be prioritized
581
582
582 $ echo '[hooks]' > .hg/hgrc
583 $ echo '[hooks]' > .hg/hgrc
583 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
584 $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc
584 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
585 $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc
585 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
586 $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc
586 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
587 $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc
587 $ hg id --verbose
588 $ hg id --verbose
588 calling hook pre-identify.b: hooktests.verbosehook
589 calling hook pre-identify.b: hooktests.verbosehook
589 verbose output from hook
590 verbose output from hook
590 calling hook pre-identify.a: hooktests.verbosehook
591 calling hook pre-identify.a: hooktests.verbosehook
591 verbose output from hook
592 verbose output from hook
592 calling hook pre-identify.c: hooktests.verbosehook
593 calling hook pre-identify.c: hooktests.verbosehook
593 verbose output from hook
594 verbose output from hook
594 cb9a9f314b8b
595 cb9a9f314b8b
595
596
596 new tags must be visible in pretxncommit (issue3210)
597 new tags must be visible in pretxncommit (issue3210)
597
598
598 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
599 $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc
599 $ hg tag -f foo
600 $ hg tag -f foo
600 ['a', 'foo', 'tip']
601 ['a', 'foo', 'tip']
601
602
602 new commits must be visible in pretxnchangegroup (issue3428)
603 new commits must be visible in pretxnchangegroup (issue3428)
603
604
604 $ cd ..
605 $ cd ..
605 $ hg init to
606 $ hg init to
606 $ echo '[hooks]' >> to/.hg/hgrc
607 $ echo '[hooks]' >> to/.hg/hgrc
607 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
608 $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc
608 $ echo a >> to/a
609 $ echo a >> to/a
609 $ hg --cwd to ci -Ama
610 $ hg --cwd to ci -Ama
610 adding a
611 adding a
611 $ hg clone to from
612 $ hg clone to from
612 updating to branch default
613 updating to branch default
613 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
614 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
614 $ echo aa >> from/a
615 $ echo aa >> from/a
615 $ hg --cwd from ci -mb
616 $ hg --cwd from ci -mb
616 $ hg --cwd from push
617 $ hg --cwd from push
617 pushing to $TESTTMP/to (glob)
618 pushing to $TESTTMP/to (glob)
618 searching for changes
619 searching for changes
619 adding changesets
620 adding changesets
620 adding manifests
621 adding manifests
621 adding file changes
622 adding file changes
622 added 1 changesets with 1 changes to 1 files
623 added 1 changesets with 1 changes to 1 files
623 changeset: 1:9836a07b9b9d
624 changeset: 1:9836a07b9b9d
624 tag: tip
625 tag: tip
625 user: test
626 user: test
626 date: Thu Jan 01 00:00:00 1970 +0000
627 date: Thu Jan 01 00:00:00 1970 +0000
627 summary: b
628 summary: b
628
629
@@ -1,124 +1,128 b''
1 $ "$TESTDIR/hghave" serve || exit 80
1 $ "$TESTDIR/hghave" serve || exit 80
2
2
3 $ hg init a
3 $ hg init a
4 $ cd a
4 $ cd a
5 $ echo a > a
5 $ echo a > a
6 $ hg ci -Ama -d '1123456789 0'
6 $ hg ci -Ama -d '1123456789 0'
7 adding a
7 adding a
8 $ hg --config server.uncompressed=True serve -p $HGPORT -d --pid-file=hg.pid
8 $ hg --config server.uncompressed=True serve -p $HGPORT -d --pid-file=hg.pid
9 $ cat hg.pid >> $DAEMON_PIDS
9 $ cat hg.pid >> $DAEMON_PIDS
10 $ cd ..
10 $ cd ..
11 $ "$TESTDIR/tinyproxy.py" $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
11 $ "$TESTDIR/tinyproxy.py" $HGPORT1 localhost >proxy.log 2>&1 </dev/null &
12 $ while [ ! -f proxy.pid ]; do sleep 0; done
12 $ while [ ! -f proxy.pid ]; do sleep 0; done
13 $ cat proxy.pid >> $DAEMON_PIDS
13 $ cat proxy.pid >> $DAEMON_PIDS
14
14
15 url for proxy, stream
15 url for proxy, stream
16
16
17 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --uncompressed http://localhost:$HGPORT/ b
17 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --uncompressed http://localhost:$HGPORT/ b
18 streaming all changes
18 streaming all changes
19 3 files to transfer, 303 bytes of data
19 3 files to transfer, 303 bytes of data
20 transferred * bytes in * seconds (*/sec) (glob)
20 transferred * bytes in * seconds (*/sec) (glob)
21 updating to branch default
21 updating to branch default
22 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
22 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
23 $ cd b
23 $ cd b
24 $ hg verify
24 $ hg verify
25 checking changesets
25 checking changesets
26 checking manifests
26 checking manifests
27 crosschecking files in changesets and manifests
27 crosschecking files in changesets and manifests
28 checking files
28 checking files
29 1 files, 1 changesets, 1 total revisions
29 1 files, 1 changesets, 1 total revisions
30 $ cd ..
30 $ cd ..
31
31
32 url for proxy, pull
32 url for proxy, pull
33
33
34 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
34 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
35 requesting all changes
35 requesting all changes
36 adding changesets
36 adding changesets
37 adding manifests
37 adding manifests
38 adding file changes
38 adding file changes
39 added 1 changesets with 1 changes to 1 files
39 added 1 changesets with 1 changes to 1 files
40 updating to branch default
40 updating to branch default
41 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
41 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
42 $ cd b-pull
42 $ cd b-pull
43 $ hg verify
43 $ hg verify
44 checking changesets
44 checking changesets
45 checking manifests
45 checking manifests
46 crosschecking files in changesets and manifests
46 crosschecking files in changesets and manifests
47 checking files
47 checking files
48 1 files, 1 changesets, 1 total revisions
48 1 files, 1 changesets, 1 total revisions
49 $ cd ..
49 $ cd ..
50
50
51 host:port for proxy
51 host:port for proxy
52
52
53 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
53 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
54 requesting all changes
54 requesting all changes
55 adding changesets
55 adding changesets
56 adding manifests
56 adding manifests
57 adding file changes
57 adding file changes
58 added 1 changesets with 1 changes to 1 files
58 added 1 changesets with 1 changes to 1 files
59 updating to branch default
59 updating to branch default
60 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
60 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
61
61
62 proxy url with user name and password
62 proxy url with user name and password
63
63
64 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
64 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
65 requesting all changes
65 requesting all changes
66 adding changesets
66 adding changesets
67 adding manifests
67 adding manifests
68 adding file changes
68 adding file changes
69 added 1 changesets with 1 changes to 1 files
69 added 1 changesets with 1 changes to 1 files
70 updating to branch default
70 updating to branch default
71 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
71 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
72
72
73 url with user name and password
73 url with user name and password
74
74
75 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
75 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
76 requesting all changes
76 requesting all changes
77 adding changesets
77 adding changesets
78 adding manifests
78 adding manifests
79 adding file changes
79 adding file changes
80 added 1 changesets with 1 changes to 1 files
80 added 1 changesets with 1 changes to 1 files
81 updating to branch default
81 updating to branch default
82 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
82 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
83
83
84 bad host:port for proxy
84 bad host:port for proxy
85
85
86 $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
86 $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
87 abort: error: Connection refused
87 abort: error: Connection refused
88 [255]
88 [255]
89
89
90 do not use the proxy if it is in the no list
90 do not use the proxy if it is in the no list
91
91
92 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g
92 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g
93 requesting all changes
93 requesting all changes
94 adding changesets
94 adding changesets
95 adding manifests
95 adding manifests
96 adding file changes
96 adding file changes
97 added 1 changesets with 1 changes to 1 files
97 added 1 changesets with 1 changes to 1 files
98 updating to branch default
98 updating to branch default
99 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
99 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
100 $ cat proxy.log
100 $ cat proxy.log
101 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
101 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
102 * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - (glob)
102 * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - (glob)
103 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
103 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
104 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
104 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
105 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
105 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
106 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
106 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
107 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
107 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
108 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=obsolete (glob)
108 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
109 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
109 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
110 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
110 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
111 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
111 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
112 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
112 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
113 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
114 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=obsolete (glob)
113 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
115 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
114 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
116 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
115 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
117 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
116 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
118 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
117 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
119 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
120 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=obsolete (glob)
118 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
121 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
119 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
122 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
120 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
123 * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob)
121 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
124 * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629 (glob)
122 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
125 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob)
126 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=obsolete (glob)
123 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
127 * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
124
128
@@ -1,281 +1,287 b''
1 Proper https client requires the built-in ssl from Python 2.6.
1 Proper https client requires the built-in ssl from Python 2.6.
2
2
3 $ "$TESTDIR/hghave" serve ssl || exit 80
3 $ "$TESTDIR/hghave" serve ssl || exit 80
4
4
5 Certificates created with:
5 Certificates created with:
6 printf '.\n.\n.\n.\n.\nlocalhost\nhg@localhost\n' | \
6 printf '.\n.\n.\n.\n.\nlocalhost\nhg@localhost\n' | \
7 openssl req -newkey rsa:512 -keyout priv.pem -nodes -x509 -days 9000 -out pub.pem
7 openssl req -newkey rsa:512 -keyout priv.pem -nodes -x509 -days 9000 -out pub.pem
8 Can be dumped with:
8 Can be dumped with:
9 openssl x509 -in pub.pem -text
9 openssl x509 -in pub.pem -text
10
10
11 $ cat << EOT > priv.pem
11 $ cat << EOT > priv.pem
12 > -----BEGIN PRIVATE KEY-----
12 > -----BEGIN PRIVATE KEY-----
13 > MIIBVAIBADANBgkqhkiG9w0BAQEFAASCAT4wggE6AgEAAkEApjCWeYGrIa/Vo7LH
13 > MIIBVAIBADANBgkqhkiG9w0BAQEFAASCAT4wggE6AgEAAkEApjCWeYGrIa/Vo7LH
14 > aRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8
14 > aRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8
15 > j/xgSwIDAQABAkBxHC6+Qlf0VJXGlb6NL16yEVVTQxqDS6hA9zqu6TZjrr0YMfzc
15 > j/xgSwIDAQABAkBxHC6+Qlf0VJXGlb6NL16yEVVTQxqDS6hA9zqu6TZjrr0YMfzc
16 > EGNIiZGt7HCBL0zO+cPDg/LeCZc6HQhf0KrhAiEAzlJq4hWWzvguWFIJWSoBeBUG
16 > EGNIiZGt7HCBL0zO+cPDg/LeCZc6HQhf0KrhAiEAzlJq4hWWzvguWFIJWSoBeBUG
17 > MF1ACazQO7PYE8M0qfECIQDONHHP0SKZzz/ZwBZcAveC5K61f/v9hONFwbeYulzR
17 > MF1ACazQO7PYE8M0qfECIQDONHHP0SKZzz/ZwBZcAveC5K61f/v9hONFwbeYulzR
18 > +wIgc9SvbtgB/5Yzpp//4ZAEnR7oh5SClCvyB+KSx52K3nECICbhQphhoXmI10wy
18 > +wIgc9SvbtgB/5Yzpp//4ZAEnR7oh5SClCvyB+KSx52K3nECICbhQphhoXmI10wy
19 > aMTellaq0bpNMHFDziqH9RsqAHhjAiEAgYGxfzkftt5IUUn/iFK89aaIpyrpuaAh
19 > aMTellaq0bpNMHFDziqH9RsqAHhjAiEAgYGxfzkftt5IUUn/iFK89aaIpyrpuaAh
20 > HY8gUVkVRVs=
20 > HY8gUVkVRVs=
21 > -----END PRIVATE KEY-----
21 > -----END PRIVATE KEY-----
22 > EOT
22 > EOT
23
23
24 $ cat << EOT > pub.pem
24 $ cat << EOT > pub.pem
25 > -----BEGIN CERTIFICATE-----
25 > -----BEGIN CERTIFICATE-----
26 > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
26 > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
27 > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
27 > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
28 > MTAxNDIwMzAxNFoXDTM1MDYwNTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0
28 > MTAxNDIwMzAxNFoXDTM1MDYwNTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0
29 > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
29 > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
30 > ADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX
30 > ADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX
31 > 6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA+amm
31 > 6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA+amm
32 > r24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQw
32 > r24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQw
33 > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAFArvQFiAZJgQczRsbYlG1xl
33 > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAFArvQFiAZJgQczRsbYlG1xl
34 > t+truk37w5B3m3Ick1ntRcQrqs+hf0CO1q6Squ144geYaQ8CDirSR92fICELI1c=
34 > t+truk37w5B3m3Ick1ntRcQrqs+hf0CO1q6Squ144geYaQ8CDirSR92fICELI1c=
35 > -----END CERTIFICATE-----
35 > -----END CERTIFICATE-----
36 > EOT
36 > EOT
37 $ cat priv.pem pub.pem >> server.pem
37 $ cat priv.pem pub.pem >> server.pem
38 $ PRIV=`pwd`/server.pem
38 $ PRIV=`pwd`/server.pem
39
39
40 $ cat << EOT > pub-other.pem
40 $ cat << EOT > pub-other.pem
41 > -----BEGIN CERTIFICATE-----
41 > -----BEGIN CERTIFICATE-----
42 > MIIBqzCCAVWgAwIBAgIJALwZS731c/ORMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
42 > MIIBqzCCAVWgAwIBAgIJALwZS731c/ORMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV
43 > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
43 > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw
44 > MTAxNDIwNDUxNloXDTM1MDYwNTIwNDUxNlowMTESMBAGA1UEAwwJbG9jYWxob3N0
44 > MTAxNDIwNDUxNloXDTM1MDYwNTIwNDUxNlowMTESMBAGA1UEAwwJbG9jYWxob3N0
45 > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
45 > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL
46 > ADBIAkEAsxsapLbHrqqUKuQBxdpK4G3m2LjtyrTSdpzzzFlecxd5yhNP6AyWrufo
46 > ADBIAkEAsxsapLbHrqqUKuQBxdpK4G3m2LjtyrTSdpzzzFlecxd5yhNP6AyWrufo
47 > K4VMGo2xlu9xOo88nDSUNSKPuD09MwIDAQABo1AwTjAdBgNVHQ4EFgQUoIB1iMhN
47 > K4VMGo2xlu9xOo88nDSUNSKPuD09MwIDAQABo1AwTjAdBgNVHQ4EFgQUoIB1iMhN
48 > y868rpQ2qk9dHnU6ebswHwYDVR0jBBgwFoAUoIB1iMhNy868rpQ2qk9dHnU6ebsw
48 > y868rpQ2qk9dHnU6ebswHwYDVR0jBBgwFoAUoIB1iMhNy868rpQ2qk9dHnU6ebsw
49 > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJ544f125CsE7J2t55PdFaF6
49 > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJ544f125CsE7J2t55PdFaF6
50 > bBlNBb91FCywBgSjhBjf+GG3TNPwrPdc3yqeq+hzJiuInqbOBv9abmMyq8Wsoig=
50 > bBlNBb91FCywBgSjhBjf+GG3TNPwrPdc3yqeq+hzJiuInqbOBv9abmMyq8Wsoig=
51 > -----END CERTIFICATE-----
51 > -----END CERTIFICATE-----
52 > EOT
52 > EOT
53
53
54 pub.pem patched with other notBefore / notAfter:
54 pub.pem patched with other notBefore / notAfter:
55
55
56 $ cat << EOT > pub-not-yet.pem
56 $ cat << EOT > pub-not-yet.pem
57 > -----BEGIN CERTIFICATE-----
57 > -----BEGIN CERTIFICATE-----
58 > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
58 > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
59 > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTM1MDYwNTIwMzAxNFoXDTM1MDYw
59 > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTM1MDYwNTIwMzAxNFoXDTM1MDYw
60 > NTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
60 > NTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
61 > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
61 > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
62 > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
62 > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
63 > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
63 > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
64 > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJXV41gWnkgC7jcpPpFRSUSZaxyzrXmD1CIqQf0WgVDb
64 > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJXV41gWnkgC7jcpPpFRSUSZaxyzrXmD1CIqQf0WgVDb
65 > /12E0vR2DuZitgzUYtBaofM81aTtc0a2/YsrmqePGm0=
65 > /12E0vR2DuZitgzUYtBaofM81aTtc0a2/YsrmqePGm0=
66 > -----END CERTIFICATE-----
66 > -----END CERTIFICATE-----
67 > EOT
67 > EOT
68 $ cat priv.pem pub-not-yet.pem > server-not-yet.pem
68 $ cat priv.pem pub-not-yet.pem > server-not-yet.pem
69
69
70 $ cat << EOT > pub-expired.pem
70 $ cat << EOT > pub-expired.pem
71 > -----BEGIN CERTIFICATE-----
71 > -----BEGIN CERTIFICATE-----
72 > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
72 > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs
73 > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEwMTAxNDIwMzAxNFoXDTEwMTAx
73 > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEwMTAxNDIwMzAxNFoXDTEwMTAx
74 > NDIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
74 > NDIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv
75 > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
75 > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK
76 > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
76 > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA
77 > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
77 > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T
78 > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJfk57DTRf2nUbYaMSlVAARxMNbFGOjQhAUtY400GhKt
78 > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJfk57DTRf2nUbYaMSlVAARxMNbFGOjQhAUtY400GhKt
79 > 2uiKCNGKXVXD3AHWe13yHc5KttzbHQStE5Nm/DlWBWQ=
79 > 2uiKCNGKXVXD3AHWe13yHc5KttzbHQStE5Nm/DlWBWQ=
80 > -----END CERTIFICATE-----
80 > -----END CERTIFICATE-----
81 > EOT
81 > EOT
82 $ cat priv.pem pub-expired.pem > server-expired.pem
82 $ cat priv.pem pub-expired.pem > server-expired.pem
83
83
84 $ hg init test
84 $ hg init test
85 $ cd test
85 $ cd test
86 $ echo foo>foo
86 $ echo foo>foo
87 $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg
87 $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg
88 $ echo foo>foo.d/foo
88 $ echo foo>foo.d/foo
89 $ echo bar>foo.d/bAr.hg.d/BaR
89 $ echo bar>foo.d/bAr.hg.d/BaR
90 $ echo bar>foo.d/baR.d.hg/bAR
90 $ echo bar>foo.d/baR.d.hg/bAR
91 $ hg commit -A -m 1
91 $ hg commit -A -m 1
92 adding foo
92 adding foo
93 adding foo.d/bAr.hg.d/BaR
93 adding foo.d/bAr.hg.d/BaR
94 adding foo.d/baR.d.hg/bAR
94 adding foo.d/baR.d.hg/bAR
95 adding foo.d/foo
95 adding foo.d/foo
96 $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV
96 $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV
97 $ cat ../hg0.pid >> $DAEMON_PIDS
97 $ cat ../hg0.pid >> $DAEMON_PIDS
98
98
99 cacert not found
99 cacert not found
100
100
101 $ hg in --config web.cacerts=no-such.pem https://localhost:$HGPORT/
101 $ hg in --config web.cacerts=no-such.pem https://localhost:$HGPORT/
102 abort: could not find web.cacerts: no-such.pem
102 abort: could not find web.cacerts: no-such.pem
103 [255]
103 [255]
104
104
105 Test server address cannot be reused
105 Test server address cannot be reused
106
106
107 #if windows
107 #if windows
108 $ hg serve -p $HGPORT --certificate=$PRIV 2>&1
108 $ hg serve -p $HGPORT --certificate=$PRIV 2>&1
109 abort: cannot start server at ':$HGPORT': (glob)
109 abort: cannot start server at ':$HGPORT': (glob)
110 [255]
110 [255]
111 #else
111 #else
112 $ hg serve -p $HGPORT --certificate=$PRIV 2>&1
112 $ hg serve -p $HGPORT --certificate=$PRIV 2>&1
113 abort: cannot start server at ':$HGPORT': Address already in use
113 abort: cannot start server at ':$HGPORT': Address already in use
114 [255]
114 [255]
115 #endif
115 #endif
116 $ cd ..
116 $ cd ..
117
117
118 clone via pull
118 clone via pull
119
119
120 $ hg clone https://localhost:$HGPORT/ copy-pull
120 $ hg clone https://localhost:$HGPORT/ copy-pull
121 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
121 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
122 requesting all changes
122 requesting all changes
123 adding changesets
123 adding changesets
124 adding manifests
124 adding manifests
125 adding file changes
125 adding file changes
126 added 1 changesets with 4 changes to 4 files
126 added 1 changesets with 4 changes to 4 files
127 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
127 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
128 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
128 updating to branch default
129 updating to branch default
129 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 $ hg verify -R copy-pull
131 $ hg verify -R copy-pull
131 checking changesets
132 checking changesets
132 checking manifests
133 checking manifests
133 crosschecking files in changesets and manifests
134 crosschecking files in changesets and manifests
134 checking files
135 checking files
135 4 files, 1 changesets, 4 total revisions
136 4 files, 1 changesets, 4 total revisions
136 $ cd test
137 $ cd test
137 $ echo bar > bar
138 $ echo bar > bar
138 $ hg commit -A -d '1 0' -m 2
139 $ hg commit -A -d '1 0' -m 2
139 adding bar
140 adding bar
140 $ cd ..
141 $ cd ..
141
142
142 pull without cacert
143 pull without cacert
143
144
144 $ cd copy-pull
145 $ cd copy-pull
145 $ echo '[hooks]' >> .hg/hgrc
146 $ echo '[hooks]' >> .hg/hgrc
146 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup" >> .hg/hgrc
147 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup" >> .hg/hgrc
147 $ hg pull
148 $ hg pull
148 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
149 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
149 pulling from https://localhost:$HGPORT/
150 pulling from https://localhost:$HGPORT/
150 searching for changes
151 searching for changes
151 adding changesets
152 adding changesets
152 adding manifests
153 adding manifests
153 adding file changes
154 adding file changes
154 added 1 changesets with 1 changes to 1 files
155 added 1 changesets with 1 changes to 1 files
155 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
156 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
156 changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_URL=https://localhost:$HGPORT/
157 changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_URL=https://localhost:$HGPORT/
158 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
157 (run 'hg update' to get a working copy)
159 (run 'hg update' to get a working copy)
158 $ cd ..
160 $ cd ..
159
161
160 cacert configured in local repo
162 cacert configured in local repo
161
163
162 $ cp copy-pull/.hg/hgrc copy-pull/.hg/hgrc.bu
164 $ cp copy-pull/.hg/hgrc copy-pull/.hg/hgrc.bu
163 $ echo "[web]" >> copy-pull/.hg/hgrc
165 $ echo "[web]" >> copy-pull/.hg/hgrc
164 $ echo "cacerts=`pwd`/pub.pem" >> copy-pull/.hg/hgrc
166 $ echo "cacerts=`pwd`/pub.pem" >> copy-pull/.hg/hgrc
165 $ hg -R copy-pull pull --traceback
167 $ hg -R copy-pull pull --traceback
166 pulling from https://localhost:$HGPORT/
168 pulling from https://localhost:$HGPORT/
167 searching for changes
169 searching for changes
168 no changes found
170 no changes found
169 $ mv copy-pull/.hg/hgrc.bu copy-pull/.hg/hgrc
171 $ mv copy-pull/.hg/hgrc.bu copy-pull/.hg/hgrc
170
172
171 cacert configured globally, also testing expansion of environment
173 cacert configured globally, also testing expansion of environment
172 variables in the filename
174 variables in the filename
173
175
174 $ echo "[web]" >> $HGRCPATH
176 $ echo "[web]" >> $HGRCPATH
175 $ echo 'cacerts=$P/pub.pem' >> $HGRCPATH
177 $ echo 'cacerts=$P/pub.pem' >> $HGRCPATH
176 $ P=`pwd` hg -R copy-pull pull
178 $ P=`pwd` hg -R copy-pull pull
177 pulling from https://localhost:$HGPORT/
179 pulling from https://localhost:$HGPORT/
178 searching for changes
180 searching for changes
179 no changes found
181 no changes found
180 $ P=`pwd` hg -R copy-pull pull --insecure
182 $ P=`pwd` hg -R copy-pull pull --insecure
181 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
183 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
182 pulling from https://localhost:$HGPORT/
184 pulling from https://localhost:$HGPORT/
183 searching for changes
185 searching for changes
184 no changes found
186 no changes found
187 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
185
188
186 cacert mismatch
189 cacert mismatch
187
190
188 $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/
191 $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/
189 abort: 127.0.0.1 certificate error: certificate is for localhost
192 abort: 127.0.0.1 certificate error: certificate is for localhost
190 (configure hostfingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca or use --insecure to connect insecurely)
193 (configure hostfingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca or use --insecure to connect insecurely)
191 [255]
194 [255]
192 $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/ --insecure
195 $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/ --insecure
193 warning: 127.0.0.1 certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
196 warning: 127.0.0.1 certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
194 pulling from https://127.0.0.1:$HGPORT/
197 pulling from https://127.0.0.1:$HGPORT/
195 searching for changes
198 searching for changes
196 no changes found
199 no changes found
200 warning: 127.0.0.1 certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
197 $ hg -R copy-pull pull --config web.cacerts=pub-other.pem
201 $ hg -R copy-pull pull --config web.cacerts=pub-other.pem
198 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
202 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
199 [255]
203 [255]
200 $ hg -R copy-pull pull --config web.cacerts=pub-other.pem --insecure
204 $ hg -R copy-pull pull --config web.cacerts=pub-other.pem --insecure
201 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
205 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
202 pulling from https://localhost:$HGPORT/
206 pulling from https://localhost:$HGPORT/
203 searching for changes
207 searching for changes
204 no changes found
208 no changes found
209 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
205
210
206 Test server cert which isn't valid yet
211 Test server cert which isn't valid yet
207
212
208 $ hg -R test serve -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem
213 $ hg -R test serve -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem
209 $ cat hg1.pid >> $DAEMON_PIDS
214 $ cat hg1.pid >> $DAEMON_PIDS
210 $ hg -R copy-pull pull --config web.cacerts=pub-not-yet.pem https://localhost:$HGPORT1/
215 $ hg -R copy-pull pull --config web.cacerts=pub-not-yet.pem https://localhost:$HGPORT1/
211 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
216 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
212 [255]
217 [255]
213
218
214 Test server cert which no longer is valid
219 Test server cert which no longer is valid
215
220
216 $ hg -R test serve -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
221 $ hg -R test serve -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
217 $ cat hg2.pid >> $DAEMON_PIDS
222 $ cat hg2.pid >> $DAEMON_PIDS
218 $ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
223 $ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
219 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
224 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
220 [255]
225 [255]
221
226
222 Fingerprints
227 Fingerprints
223
228
224 $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc
229 $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc
225 $ echo "localhost = 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca" >> copy-pull/.hg/hgrc
230 $ echo "localhost = 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca" >> copy-pull/.hg/hgrc
226 $ echo "127.0.0.1 = 914f1aff87249c09b6859b88b1906d30756491ca" >> copy-pull/.hg/hgrc
231 $ echo "127.0.0.1 = 914f1aff87249c09b6859b88b1906d30756491ca" >> copy-pull/.hg/hgrc
227
232
228 - works without cacerts
233 - works without cacerts
229 $ hg -R copy-pull id https://localhost:$HGPORT/ --config web.cacerts=
234 $ hg -R copy-pull id https://localhost:$HGPORT/ --config web.cacerts=
230 5fed3813f7f5
235 5fed3813f7f5
231
236
232 - fails when cert doesn't match hostname (port is ignored)
237 - fails when cert doesn't match hostname (port is ignored)
233 $ hg -R copy-pull id https://localhost:$HGPORT1/
238 $ hg -R copy-pull id https://localhost:$HGPORT1/
234 abort: certificate for localhost has unexpected fingerprint 28:ff:71:bf:65:31:14:23:ad:62:92:b4:0e:31:99:18:fc:83:e3:9b
239 abort: certificate for localhost has unexpected fingerprint 28:ff:71:bf:65:31:14:23:ad:62:92:b4:0e:31:99:18:fc:83:e3:9b
235 (check hostfingerprint configuration)
240 (check hostfingerprint configuration)
236 [255]
241 [255]
237
242
238 - ignores that certificate doesn't match hostname
243 - ignores that certificate doesn't match hostname
239 $ hg -R copy-pull id https://127.0.0.1:$HGPORT/
244 $ hg -R copy-pull id https://127.0.0.1:$HGPORT/
240 5fed3813f7f5
245 5fed3813f7f5
241
246
242 $ while kill `cat hg1.pid` 2>/dev/null; do sleep 0; done
247 $ while kill `cat hg1.pid` 2>/dev/null; do sleep 0; done
243
248
244 Prepare for connecting through proxy
249 Prepare for connecting through proxy
245
250
246 $ "$TESTDIR/tinyproxy.py" $HGPORT1 localhost >proxy.log </dev/null 2>&1 &
251 $ "$TESTDIR/tinyproxy.py" $HGPORT1 localhost >proxy.log </dev/null 2>&1 &
247 $ while [ ! -f proxy.pid ]; do sleep 0; done
252 $ while [ ! -f proxy.pid ]; do sleep 0; done
248 $ cat proxy.pid >> $DAEMON_PIDS
253 $ cat proxy.pid >> $DAEMON_PIDS
249
254
250 $ echo "[http_proxy]" >> copy-pull/.hg/hgrc
255 $ echo "[http_proxy]" >> copy-pull/.hg/hgrc
251 $ echo "always=True" >> copy-pull/.hg/hgrc
256 $ echo "always=True" >> copy-pull/.hg/hgrc
252 $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc
257 $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc
253 $ echo "localhost =" >> copy-pull/.hg/hgrc
258 $ echo "localhost =" >> copy-pull/.hg/hgrc
254
259
255 Test unvalidated https through proxy
260 Test unvalidated https through proxy
256
261
257 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure --traceback
262 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure --traceback
258 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
263 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
259 pulling from https://localhost:$HGPORT/
264 pulling from https://localhost:$HGPORT/
260 searching for changes
265 searching for changes
261 no changes found
266 no changes found
267 warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting)
262
268
263 Test https with cacert and fingerprint through proxy
269 Test https with cacert and fingerprint through proxy
264
270
265 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub.pem
271 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub.pem
266 pulling from https://localhost:$HGPORT/
272 pulling from https://localhost:$HGPORT/
267 searching for changes
273 searching for changes
268 no changes found
274 no changes found
269 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://127.0.0.1:$HGPORT/
275 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://127.0.0.1:$HGPORT/
270 pulling from https://127.0.0.1:$HGPORT/
276 pulling from https://127.0.0.1:$HGPORT/
271 searching for changes
277 searching for changes
272 no changes found
278 no changes found
273
279
274 Test https with cert problems through proxy
280 Test https with cert problems through proxy
275
281
276 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-other.pem
282 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-other.pem
277 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
283 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
278 [255]
284 [255]
279 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
285 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/
280 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
286 abort: error: *:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed (glob)
281 [255]
287 [255]
@@ -1,61 +1,143 b''
1
1
2 $ mkcommit() {
2 $ mkcommit() {
3 > echo "$1" > "$1"
3 > echo "$1" > "$1"
4 > hg add "$1"
4 > hg add "$1"
5 > hg ci -m "add $1"
5 > hg ci -m "add $1"
6 > }
6 > }
7 $ getid() {
7 $ getid() {
8 > hg id --debug -ir "desc('$1')"
8 > hg id --debug -ir "desc('$1')"
9 > }
9 > }
10
10
11
11
12 $ hg init tmpa
12 $ hg init tmpa
13 $ cd tmpa
13 $ cd tmpa
14
14
15 Killing a single changeset without replacement
15 Killing a single changeset without replacement
16
16
17 $ mkcommit kill_me
17 $ mkcommit kill_me
18 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
18 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
19 $ hg debugobsolete
19 $ hg debugobsolete
20 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 {'date': '0 0', 'user': 'babar'}
20 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 {'date': '0 0', 'user': 'babar'}
21 $ cd ..
21 $ cd ..
22
22
23 Killing a single changeset with replacement
23 Killing a single changeset with replacement
24
24
25 $ hg init tmpb
25 $ hg init tmpb
26 $ cd tmpb
26 $ cd tmpb
27 $ mkcommit a
27 $ mkcommit a
28 $ mkcommit b
28 $ mkcommit b
29 $ mkcommit original_c
29 $ mkcommit original_c
30 $ hg up "desc('b')"
30 $ hg up "desc('b')"
31 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
31 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
32 $ mkcommit new_c
32 $ mkcommit new_c
33 created new head
33 created new head
34 $ hg debugobsolete `getid original_c` `getid new_c` -d '56 12'
34 $ hg debugobsolete `getid original_c` `getid new_c` -d '56 12'
35 $ hg debugobsolete
35 $ hg debugobsolete
36 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
36 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
37
37
38 do it again (it read the obsstore before adding new changeset)
38 do it again (it read the obsstore before adding new changeset)
39
39
40 $ hg up '.^'
40 $ hg up '.^'
41 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
41 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
42 $ mkcommit new_2_c
42 $ mkcommit new_2_c
43 created new head
43 created new head
44 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
44 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
45 $ hg debugobsolete
45 $ hg debugobsolete
46 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
46 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
47 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
47 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
48
48
49 Register two markers with a missing node
49 Register two markers with a missing node
50
50
51 $ hg up '.^'
51 $ hg up '.^'
52 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
52 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
53 $ mkcommit new_3_c
53 $ mkcommit new_3_c
54 created new head
54 created new head
55 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
55 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
56 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
56 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
57 $ hg debugobsolete
57 $ hg debugobsolete
58 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
58 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
59 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
59 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
60 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
60 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
61 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
61 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
62
63 $ cd ..
64
65 Exchange Test
66 ============================
67
68 Destination repo does not have any data
69 ---------------------------------------
70
71 Try to pull markers
72
73 $ hg init tmpc
74 $ cd tmpc
75 $ hg pull ../tmpb
76 pulling from ../tmpb
77 requesting all changes
78 adding changesets
79 adding manifests
80 adding file changes
81 added 6 changesets with 6 changes to 6 files (+3 heads)
82 (run 'hg heads' to see heads, 'hg merge' to merge)
83 $ hg debugobsolete
84 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
85 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
86 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
87 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
88
89 $ cd ..
90
91 Try to pull markers
92
93 $ hg init tmpd
94 $ hg -R tmpb push tmpd
95 pushing to tmpd
96 searching for changes
97 adding changesets
98 adding manifests
99 adding file changes
100 added 6 changesets with 6 changes to 6 files (+3 heads)
101 $ hg -R tmpd debugobsolete
102 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
103 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
104 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
105 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
106
107
108 Destination repo have existing data
109 ---------------------------------------
110
111 On pull
112
113 $ hg init tmpe
114 $ cd tmpe
115 $ hg debugobsolete -d '1339 0' 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339
116 $ hg pull ../tmpb
117 pulling from ../tmpb
118 requesting all changes
119 adding changesets
120 adding manifests
121 adding file changes
122 added 6 changesets with 6 changes to 6 files (+3 heads)
123 (run 'hg heads' to see heads, 'hg merge' to merge)
124 $ hg debugobsolete
125 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
126 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
127 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
128 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
129 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
130
131 On push
132
133 $ hg push ../tmpc
134 pushing to ../tmpc
135 searching for changes
136 no changes found
137 [1]
138 $ hg -R ../tmpc debugobsolete
139 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
140 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
141 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
142 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
143 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
@@ -1,373 +1,374 b''
1
1
2
2
3 This test tries to exercise the ssh functionality with a dummy script
3 This test tries to exercise the ssh functionality with a dummy script
4
4
5 creating 'remote' repo
5 creating 'remote' repo
6
6
7 $ hg init remote
7 $ hg init remote
8 $ cd remote
8 $ cd remote
9 $ echo this > foo
9 $ echo this > foo
10 $ echo this > fooO
10 $ echo this > fooO
11 $ hg ci -A -m "init" foo fooO
11 $ hg ci -A -m "init" foo fooO
12 $ cat <<EOF > .hg/hgrc
12 $ cat <<EOF > .hg/hgrc
13 > [server]
13 > [server]
14 > uncompressed = True
14 > uncompressed = True
15 >
15 >
16 > [hooks]
16 > [hooks]
17 > changegroup = python "$TESTDIR/printenv.py" changegroup-in-remote 0 ../dummylog
17 > changegroup = python "$TESTDIR/printenv.py" changegroup-in-remote 0 ../dummylog
18 > EOF
18 > EOF
19 $ cd ..
19 $ cd ..
20
20
21 repo not found error
21 repo not found error
22
22
23 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
23 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
24 remote: abort: there is no Mercurial repository here (.hg not found)!
24 remote: abort: there is no Mercurial repository here (.hg not found)!
25 abort: no suitable response from remote hg!
25 abort: no suitable response from remote hg!
26 [255]
26 [255]
27
27
28 non-existent absolute path
28 non-existent absolute path
29
29
30 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
30 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
31 remote: abort: there is no Mercurial repository here (.hg not found)!
31 remote: abort: there is no Mercurial repository here (.hg not found)!
32 abort: no suitable response from remote hg!
32 abort: no suitable response from remote hg!
33 [255]
33 [255]
34
34
35 clone remote via stream
35 clone remote via stream
36
36
37 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
37 $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
38 streaming all changes
38 streaming all changes
39 4 files to transfer, 392 bytes of data
39 4 files to transfer, 392 bytes of data
40 transferred 392 bytes in * seconds (*/sec) (glob)
40 transferred 392 bytes in * seconds (*/sec) (glob)
41 updating to branch default
41 updating to branch default
42 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
42 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
43 $ cd local-stream
43 $ cd local-stream
44 $ hg verify
44 $ hg verify
45 checking changesets
45 checking changesets
46 checking manifests
46 checking manifests
47 crosschecking files in changesets and manifests
47 crosschecking files in changesets and manifests
48 checking files
48 checking files
49 2 files, 1 changesets, 2 total revisions
49 2 files, 1 changesets, 2 total revisions
50 $ cd ..
50 $ cd ..
51
51
52 clone remote via pull
52 clone remote via pull
53
53
54 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
54 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
55 requesting all changes
55 requesting all changes
56 adding changesets
56 adding changesets
57 adding manifests
57 adding manifests
58 adding file changes
58 adding file changes
59 added 1 changesets with 2 changes to 2 files
59 added 1 changesets with 2 changes to 2 files
60 updating to branch default
60 updating to branch default
61 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
61 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
62
62
63 verify
63 verify
64
64
65 $ cd local
65 $ cd local
66 $ hg verify
66 $ hg verify
67 checking changesets
67 checking changesets
68 checking manifests
68 checking manifests
69 crosschecking files in changesets and manifests
69 crosschecking files in changesets and manifests
70 checking files
70 checking files
71 2 files, 1 changesets, 2 total revisions
71 2 files, 1 changesets, 2 total revisions
72 $ echo '[hooks]' >> .hg/hgrc
72 $ echo '[hooks]' >> .hg/hgrc
73 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup-in-local 0 ../dummylog" >> .hg/hgrc
73 $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup-in-local 0 ../dummylog" >> .hg/hgrc
74
74
75 empty default pull
75 empty default pull
76
76
77 $ hg paths
77 $ hg paths
78 default = ssh://user@dummy/remote
78 default = ssh://user@dummy/remote
79 $ hg pull -e "python \"$TESTDIR/dummyssh\""
79 $ hg pull -e "python \"$TESTDIR/dummyssh\""
80 pulling from ssh://user@dummy/remote
80 pulling from ssh://user@dummy/remote
81 searching for changes
81 searching for changes
82 no changes found
82 no changes found
83
83
84 local change
84 local change
85
85
86 $ echo bleah > foo
86 $ echo bleah > foo
87 $ hg ci -m "add"
87 $ hg ci -m "add"
88
88
89 updating rc
89 updating rc
90
90
91 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
91 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
92 $ echo "[ui]" >> .hg/hgrc
92 $ echo "[ui]" >> .hg/hgrc
93 $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
93 $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc
94
94
95 find outgoing
95 find outgoing
96
96
97 $ hg out ssh://user@dummy/remote
97 $ hg out ssh://user@dummy/remote
98 comparing with ssh://user@dummy/remote
98 comparing with ssh://user@dummy/remote
99 searching for changes
99 searching for changes
100 changeset: 1:a28a9d1a809c
100 changeset: 1:a28a9d1a809c
101 tag: tip
101 tag: tip
102 user: test
102 user: test
103 date: Thu Jan 01 00:00:00 1970 +0000
103 date: Thu Jan 01 00:00:00 1970 +0000
104 summary: add
104 summary: add
105
105
106
106
107 find incoming on the remote side
107 find incoming on the remote side
108
108
109 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
109 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
110 comparing with ssh://user@dummy/local
110 comparing with ssh://user@dummy/local
111 searching for changes
111 searching for changes
112 changeset: 1:a28a9d1a809c
112 changeset: 1:a28a9d1a809c
113 tag: tip
113 tag: tip
114 user: test
114 user: test
115 date: Thu Jan 01 00:00:00 1970 +0000
115 date: Thu Jan 01 00:00:00 1970 +0000
116 summary: add
116 summary: add
117
117
118
118
119 find incoming on the remote side (using absolute path)
119 find incoming on the remote side (using absolute path)
120
120
121 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
121 $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
122 comparing with ssh://user@dummy/$TESTTMP/local
122 comparing with ssh://user@dummy/$TESTTMP/local
123 searching for changes
123 searching for changes
124 changeset: 1:a28a9d1a809c
124 changeset: 1:a28a9d1a809c
125 tag: tip
125 tag: tip
126 user: test
126 user: test
127 date: Thu Jan 01 00:00:00 1970 +0000
127 date: Thu Jan 01 00:00:00 1970 +0000
128 summary: add
128 summary: add
129
129
130
130
131 push
131 push
132
132
133 $ hg push
133 $ hg push
134 pushing to ssh://user@dummy/remote
134 pushing to ssh://user@dummy/remote
135 searching for changes
135 searching for changes
136 remote: adding changesets
136 remote: adding changesets
137 remote: adding manifests
137 remote: adding manifests
138 remote: adding file changes
138 remote: adding file changes
139 remote: added 1 changesets with 1 changes to 1 files
139 remote: added 1 changesets with 1 changes to 1 files
140 $ cd ../remote
140 $ cd ../remote
141
141
142 check remote tip
142 check remote tip
143
143
144 $ hg tip
144 $ hg tip
145 changeset: 1:a28a9d1a809c
145 changeset: 1:a28a9d1a809c
146 tag: tip
146 tag: tip
147 user: test
147 user: test
148 date: Thu Jan 01 00:00:00 1970 +0000
148 date: Thu Jan 01 00:00:00 1970 +0000
149 summary: add
149 summary: add
150
150
151 $ hg verify
151 $ hg verify
152 checking changesets
152 checking changesets
153 checking manifests
153 checking manifests
154 crosschecking files in changesets and manifests
154 crosschecking files in changesets and manifests
155 checking files
155 checking files
156 2 files, 2 changesets, 3 total revisions
156 2 files, 2 changesets, 3 total revisions
157 $ hg cat -r tip foo
157 $ hg cat -r tip foo
158 bleah
158 bleah
159 $ echo z > z
159 $ echo z > z
160 $ hg ci -A -m z z
160 $ hg ci -A -m z z
161 created new head
161 created new head
162
162
163 test pushkeys and bookmarks
163 test pushkeys and bookmarks
164
164
165 $ cd ../local
165 $ cd ../local
166 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
166 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
167 bookmarks
167 bookmarks
168 phases
168 phases
169 namespaces
169 namespaces
170 obsolete
170 $ hg book foo -r 0
171 $ hg book foo -r 0
171 $ hg out -B
172 $ hg out -B
172 comparing with ssh://user@dummy/remote
173 comparing with ssh://user@dummy/remote
173 searching for changed bookmarks
174 searching for changed bookmarks
174 foo 1160648e36ce
175 foo 1160648e36ce
175 $ hg push -B foo
176 $ hg push -B foo
176 pushing to ssh://user@dummy/remote
177 pushing to ssh://user@dummy/remote
177 searching for changes
178 searching for changes
178 no changes found
179 no changes found
179 exporting bookmark foo
180 exporting bookmark foo
180 [1]
181 [1]
181 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
182 $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
182 foo 1160648e36cec0054048a7edc4110c6f84fde594
183 foo 1160648e36cec0054048a7edc4110c6f84fde594
183 $ hg book -f foo
184 $ hg book -f foo
184 $ hg push --traceback
185 $ hg push --traceback
185 pushing to ssh://user@dummy/remote
186 pushing to ssh://user@dummy/remote
186 searching for changes
187 searching for changes
187 no changes found
188 no changes found
188 updating bookmark foo
189 updating bookmark foo
189 [1]
190 [1]
190 $ hg book -d foo
191 $ hg book -d foo
191 $ hg in -B
192 $ hg in -B
192 comparing with ssh://user@dummy/remote
193 comparing with ssh://user@dummy/remote
193 searching for changed bookmarks
194 searching for changed bookmarks
194 foo a28a9d1a809c
195 foo a28a9d1a809c
195 $ hg book -f -r 0 foo
196 $ hg book -f -r 0 foo
196 $ hg pull -B foo
197 $ hg pull -B foo
197 pulling from ssh://user@dummy/remote
198 pulling from ssh://user@dummy/remote
198 no changes found
199 no changes found
199 updating bookmark foo
200 updating bookmark foo
200 importing bookmark foo
201 importing bookmark foo
201 $ hg book -d foo
202 $ hg book -d foo
202 $ hg push -B foo
203 $ hg push -B foo
203 pushing to ssh://user@dummy/remote
204 pushing to ssh://user@dummy/remote
204 searching for changes
205 searching for changes
205 no changes found
206 no changes found
206 deleting remote bookmark foo
207 deleting remote bookmark foo
207 [1]
208 [1]
208
209
209 a bad, evil hook that prints to stdout
210 a bad, evil hook that prints to stdout
210
211
211 $ cat <<EOF > $TESTTMP/badhook
212 $ cat <<EOF > $TESTTMP/badhook
212 > import sys
213 > import sys
213 > sys.stdout.write("KABOOM\n")
214 > sys.stdout.write("KABOOM\n")
214 > EOF
215 > EOF
215
216
216 $ echo '[hooks]' >> ../remote/.hg/hgrc
217 $ echo '[hooks]' >> ../remote/.hg/hgrc
217 $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
218 $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc
218 $ echo r > r
219 $ echo r > r
219 $ hg ci -A -m z r
220 $ hg ci -A -m z r
220
221
221 push should succeed even though it has an unexpected response
222 push should succeed even though it has an unexpected response
222
223
223 $ hg push
224 $ hg push
224 pushing to ssh://user@dummy/remote
225 pushing to ssh://user@dummy/remote
225 searching for changes
226 searching for changes
226 note: unsynced remote changes!
227 note: unsynced remote changes!
227 remote: adding changesets
228 remote: adding changesets
228 remote: adding manifests
229 remote: adding manifests
229 remote: adding file changes
230 remote: adding file changes
230 remote: added 1 changesets with 1 changes to 1 files
231 remote: added 1 changesets with 1 changes to 1 files
231 remote: KABOOM
232 remote: KABOOM
232 $ hg -R ../remote heads
233 $ hg -R ../remote heads
233 changeset: 3:1383141674ec
234 changeset: 3:1383141674ec
234 tag: tip
235 tag: tip
235 parent: 1:a28a9d1a809c
236 parent: 1:a28a9d1a809c
236 user: test
237 user: test
237 date: Thu Jan 01 00:00:00 1970 +0000
238 date: Thu Jan 01 00:00:00 1970 +0000
238 summary: z
239 summary: z
239
240
240 changeset: 2:6c0482d977a3
241 changeset: 2:6c0482d977a3
241 parent: 0:1160648e36ce
242 parent: 0:1160648e36ce
242 user: test
243 user: test
243 date: Thu Jan 01 00:00:00 1970 +0000
244 date: Thu Jan 01 00:00:00 1970 +0000
244 summary: z
245 summary: z
245
246
246
247
247 clone bookmarks
248 clone bookmarks
248
249
249 $ hg -R ../remote bookmark test
250 $ hg -R ../remote bookmark test
250 $ hg -R ../remote bookmarks
251 $ hg -R ../remote bookmarks
251 * test 2:6c0482d977a3
252 * test 2:6c0482d977a3
252 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
253 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
253 requesting all changes
254 requesting all changes
254 adding changesets
255 adding changesets
255 adding manifests
256 adding manifests
256 adding file changes
257 adding file changes
257 added 4 changesets with 5 changes to 4 files (+1 heads)
258 added 4 changesets with 5 changes to 4 files (+1 heads)
258 updating to branch default
259 updating to branch default
259 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
260 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
260 $ hg -R local-bookmarks bookmarks
261 $ hg -R local-bookmarks bookmarks
261 test 2:6c0482d977a3
262 test 2:6c0482d977a3
262
263
263 passwords in ssh urls are not supported
264 passwords in ssh urls are not supported
264 (we use a glob here because different Python versions give different
265 (we use a glob here because different Python versions give different
265 results here)
266 results here)
266
267
267 $ hg push ssh://user:erroneouspwd@dummy/remote
268 $ hg push ssh://user:erroneouspwd@dummy/remote
268 pushing to ssh://user:*@dummy/remote (glob)
269 pushing to ssh://user:*@dummy/remote (glob)
269 abort: password in URL not supported!
270 abort: password in URL not supported!
270 [255]
271 [255]
271
272
272 $ cd ..
273 $ cd ..
273
274
274 hide outer repo
275 hide outer repo
275 $ hg init
276 $ hg init
276
277
277 Test remote paths with spaces (issue2983):
278 Test remote paths with spaces (issue2983):
278
279
279 $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
280 $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
280 $ hg -R 'a repo' tag tag
281 $ hg -R 'a repo' tag tag
281 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
282 $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
282 3fb238f49e8c
283 3fb238f49e8c
283
284
284 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
285 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
285 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
286 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
286 parameters:
287 parameters:
287
288
288 $ cat > ssh.sh << EOF
289 $ cat > ssh.sh << EOF
289 > userhost="\$1"
290 > userhost="\$1"
290 > SSH_ORIGINAL_COMMAND="\$2"
291 > SSH_ORIGINAL_COMMAND="\$2"
291 > export SSH_ORIGINAL_COMMAND
292 > export SSH_ORIGINAL_COMMAND
292 > PYTHONPATH="$PYTHONPATH"
293 > PYTHONPATH="$PYTHONPATH"
293 > export PYTHONPATH
294 > export PYTHONPATH
294 > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
295 > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
295 > EOF
296 > EOF
296
297
297 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
298 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
298 3fb238f49e8c
299 3fb238f49e8c
299
300
300 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
301 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
301 remote: Illegal repository "$TESTTMP/a'repo" (glob)
302 remote: Illegal repository "$TESTTMP/a'repo" (glob)
302 abort: no suitable response from remote hg!
303 abort: no suitable response from remote hg!
303 [255]
304 [255]
304
305
305 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
306 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
306 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
307 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
307 abort: no suitable response from remote hg!
308 abort: no suitable response from remote hg!
308 [255]
309 [255]
309
310
310 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh"
311 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh"
311 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
312 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
312 [255]
313 [255]
313
314
314 Test hg-ssh in read-only mode:
315 Test hg-ssh in read-only mode:
315
316
316 $ cat > ssh.sh << EOF
317 $ cat > ssh.sh << EOF
317 > userhost="\$1"
318 > userhost="\$1"
318 > SSH_ORIGINAL_COMMAND="\$2"
319 > SSH_ORIGINAL_COMMAND="\$2"
319 > export SSH_ORIGINAL_COMMAND
320 > export SSH_ORIGINAL_COMMAND
320 > PYTHONPATH="$PYTHONPATH"
321 > PYTHONPATH="$PYTHONPATH"
321 > export PYTHONPATH
322 > export PYTHONPATH
322 > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
323 > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
323 > EOF
324 > EOF
324
325
325 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
326 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
326 requesting all changes
327 requesting all changes
327 adding changesets
328 adding changesets
328 adding manifests
329 adding manifests
329 adding file changes
330 adding file changes
330 added 4 changesets with 5 changes to 4 files (+1 heads)
331 added 4 changesets with 5 changes to 4 files (+1 heads)
331 updating to branch default
332 updating to branch default
332 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
333 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
333
334
334 $ cd read-only-local
335 $ cd read-only-local
335 $ echo "baz" > bar
336 $ echo "baz" > bar
336 $ hg ci -A -m "unpushable commit" bar
337 $ hg ci -A -m "unpushable commit" bar
337 $ hg push --ssh "sh ../ssh.sh"
338 $ hg push --ssh "sh ../ssh.sh"
338 pushing to ssh://user@dummy/*/remote (glob)
339 pushing to ssh://user@dummy/*/remote (glob)
339 searching for changes
340 searching for changes
340 remote: Permission denied
341 remote: Permission denied
341 remote: abort: prechangegroup.hg-ssh hook failed
342 remote: abort: prechangegroup.hg-ssh hook failed
342 remote: Permission denied
343 remote: Permission denied
343 remote: abort: prepushkey.hg-ssh hook failed
344 remote: abort: prepushkey.hg-ssh hook failed
344 abort: unexpected response: empty string
345 abort: unexpected response: empty string
345 [255]
346 [255]
346
347
347 $ cd ..
348 $ cd ..
348
349
349 $ cat dummylog
350 $ cat dummylog
350 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
351 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
351 Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
352 Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
352 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
353 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
353 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
354 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
354 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
355 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
355 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
356 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
356 Got arguments 1:user@dummy 2:hg -R local serve --stdio
357 Got arguments 1:user@dummy 2:hg -R local serve --stdio
357 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
358 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
358 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
359 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
359 changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
360 changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
360 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
361 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
361 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
362 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
362 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
363 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
363 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
364 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
364 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
365 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
365 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
366 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
366 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
367 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
367 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
368 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
368 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
369 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
369 changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
370 changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_URL=remote:ssh:127.0.0.1
370 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
371 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
371 Got arguments 1:user@dummy 2:hg init 'a repo'
372 Got arguments 1:user@dummy 2:hg init 'a repo'
372 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
373 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
373 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
374 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
General Comments 0
You need to be logged in to leave comments. Login now