##// END OF EJS Templates
localrepo: use absolute_import
Gregory Szorc -
r27522:79853585 default
parent child Browse files
Show More
@@ -1,1922 +1,1961 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import hex, nullid, wdirrev, short
7
8 from i18n import _
8 from __future__ import absolute_import
9
10 import errno
11 import inspect
12 import os
13 import random
14 import time
9 import urllib
15 import urllib
10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
16 import weakref
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
17
12 import lock as lockmod
18 from .i18n import _
13 import transaction, store, encoding, exchange, bundle2
19 from .node import (
14 import scmutil, util, extensions, hook, error, revset, cmdutil
20 hex,
15 import match as matchmod
21 nullid,
16 import merge as mergemod
22 short,
17 import tags as tagsmod
23 wdirrev,
18 from lock import release
24 )
19 import weakref, errno, os, time, inspect, random
25 from . import (
20 import branchmap, pathutil
26 bookmarks,
21 import namespaces
27 branchmap,
28 bundle2,
29 changegroup,
30 changelog,
31 cmdutil,
32 context,
33 dirstate,
34 encoding,
35 error,
36 exchange,
37 extensions,
38 filelog,
39 hook,
40 lock as lockmod,
41 manifest,
42 match as matchmod,
43 merge as mergemod,
44 namespaces,
45 obsolete,
46 pathutil,
47 peer,
48 phases,
49 pushkey,
50 repoview,
51 revset,
52 scmutil,
53 store,
54 subrepo,
55 tags as tagsmod,
56 transaction,
57 util,
58 )
59
60 release = lockmod.release
22 propertycache = util.propertycache
61 propertycache = util.propertycache
23 filecache = scmutil.filecache
62 filecache = scmutil.filecache
24
63
25 class repofilecache(filecache):
64 class repofilecache(filecache):
26 """All filecache usage on repo are done for logic that should be unfiltered
65 """All filecache usage on repo are done for logic that should be unfiltered
27 """
66 """
28
67
29 def __get__(self, repo, type=None):
68 def __get__(self, repo, type=None):
30 return super(repofilecache, self).__get__(repo.unfiltered(), type)
69 return super(repofilecache, self).__get__(repo.unfiltered(), type)
31 def __set__(self, repo, value):
70 def __set__(self, repo, value):
32 return super(repofilecache, self).__set__(repo.unfiltered(), value)
71 return super(repofilecache, self).__set__(repo.unfiltered(), value)
33 def __delete__(self, repo):
72 def __delete__(self, repo):
34 return super(repofilecache, self).__delete__(repo.unfiltered())
73 return super(repofilecache, self).__delete__(repo.unfiltered())
35
74
36 class storecache(repofilecache):
75 class storecache(repofilecache):
37 """filecache for files in the store"""
76 """filecache for files in the store"""
38 def join(self, obj, fname):
77 def join(self, obj, fname):
39 return obj.sjoin(fname)
78 return obj.sjoin(fname)
40
79
41 class unfilteredpropertycache(propertycache):
80 class unfilteredpropertycache(propertycache):
42 """propertycache that apply to unfiltered repo only"""
81 """propertycache that apply to unfiltered repo only"""
43
82
44 def __get__(self, repo, type=None):
83 def __get__(self, repo, type=None):
45 unfi = repo.unfiltered()
84 unfi = repo.unfiltered()
46 if unfi is repo:
85 if unfi is repo:
47 return super(unfilteredpropertycache, self).__get__(unfi)
86 return super(unfilteredpropertycache, self).__get__(unfi)
48 return getattr(unfi, self.name)
87 return getattr(unfi, self.name)
49
88
50 class filteredpropertycache(propertycache):
89 class filteredpropertycache(propertycache):
51 """propertycache that must take filtering in account"""
90 """propertycache that must take filtering in account"""
52
91
53 def cachevalue(self, obj, value):
92 def cachevalue(self, obj, value):
54 object.__setattr__(obj, self.name, value)
93 object.__setattr__(obj, self.name, value)
55
94
56
95
57 def hasunfilteredcache(repo, name):
96 def hasunfilteredcache(repo, name):
58 """check if a repo has an unfilteredpropertycache value for <name>"""
97 """check if a repo has an unfilteredpropertycache value for <name>"""
59 return name in vars(repo.unfiltered())
98 return name in vars(repo.unfiltered())
60
99
61 def unfilteredmethod(orig):
100 def unfilteredmethod(orig):
62 """decorate method that always need to be run on unfiltered version"""
101 """decorate method that always need to be run on unfiltered version"""
63 def wrapper(repo, *args, **kwargs):
102 def wrapper(repo, *args, **kwargs):
64 return orig(repo.unfiltered(), *args, **kwargs)
103 return orig(repo.unfiltered(), *args, **kwargs)
65 return wrapper
104 return wrapper
66
105
67 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
106 moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
68 'unbundle'))
107 'unbundle'))
69 legacycaps = moderncaps.union(set(['changegroupsubset']))
108 legacycaps = moderncaps.union(set(['changegroupsubset']))
70
109
71 class localpeer(peer.peerrepository):
110 class localpeer(peer.peerrepository):
72 '''peer for a local repo; reflects only the most recent API'''
111 '''peer for a local repo; reflects only the most recent API'''
73
112
74 def __init__(self, repo, caps=moderncaps):
113 def __init__(self, repo, caps=moderncaps):
75 peer.peerrepository.__init__(self)
114 peer.peerrepository.__init__(self)
76 self._repo = repo.filtered('served')
115 self._repo = repo.filtered('served')
77 self.ui = repo.ui
116 self.ui = repo.ui
78 self._caps = repo._restrictcapabilities(caps)
117 self._caps = repo._restrictcapabilities(caps)
79 self.requirements = repo.requirements
118 self.requirements = repo.requirements
80 self.supportedformats = repo.supportedformats
119 self.supportedformats = repo.supportedformats
81
120
82 def close(self):
121 def close(self):
83 self._repo.close()
122 self._repo.close()
84
123
85 def _capabilities(self):
124 def _capabilities(self):
86 return self._caps
125 return self._caps
87
126
88 def local(self):
127 def local(self):
89 return self._repo
128 return self._repo
90
129
91 def canpush(self):
130 def canpush(self):
92 return True
131 return True
93
132
94 def url(self):
133 def url(self):
95 return self._repo.url()
134 return self._repo.url()
96
135
97 def lookup(self, key):
136 def lookup(self, key):
98 return self._repo.lookup(key)
137 return self._repo.lookup(key)
99
138
100 def branchmap(self):
139 def branchmap(self):
101 return self._repo.branchmap()
140 return self._repo.branchmap()
102
141
103 def heads(self):
142 def heads(self):
104 return self._repo.heads()
143 return self._repo.heads()
105
144
106 def known(self, nodes):
145 def known(self, nodes):
107 return self._repo.known(nodes)
146 return self._repo.known(nodes)
108
147
109 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
148 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
110 **kwargs):
149 **kwargs):
111 cg = exchange.getbundle(self._repo, source, heads=heads,
150 cg = exchange.getbundle(self._repo, source, heads=heads,
112 common=common, bundlecaps=bundlecaps, **kwargs)
151 common=common, bundlecaps=bundlecaps, **kwargs)
113 if bundlecaps is not None and 'HG20' in bundlecaps:
152 if bundlecaps is not None and 'HG20' in bundlecaps:
114 # When requesting a bundle2, getbundle returns a stream to make the
153 # When requesting a bundle2, getbundle returns a stream to make the
115 # wire level function happier. We need to build a proper object
154 # wire level function happier. We need to build a proper object
116 # from it in local peer.
155 # from it in local peer.
117 cg = bundle2.getunbundler(self.ui, cg)
156 cg = bundle2.getunbundler(self.ui, cg)
118 return cg
157 return cg
119
158
120 # TODO We might want to move the next two calls into legacypeer and add
159 # TODO We might want to move the next two calls into legacypeer and add
121 # unbundle instead.
160 # unbundle instead.
122
161
123 def unbundle(self, cg, heads, url):
162 def unbundle(self, cg, heads, url):
124 """apply a bundle on a repo
163 """apply a bundle on a repo
125
164
126 This function handles the repo locking itself."""
165 This function handles the repo locking itself."""
127 try:
166 try:
128 try:
167 try:
129 cg = exchange.readbundle(self.ui, cg, None)
168 cg = exchange.readbundle(self.ui, cg, None)
130 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
169 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
131 if util.safehasattr(ret, 'getchunks'):
170 if util.safehasattr(ret, 'getchunks'):
132 # This is a bundle20 object, turn it into an unbundler.
171 # This is a bundle20 object, turn it into an unbundler.
133 # This little dance should be dropped eventually when the
172 # This little dance should be dropped eventually when the
134 # API is finally improved.
173 # API is finally improved.
135 stream = util.chunkbuffer(ret.getchunks())
174 stream = util.chunkbuffer(ret.getchunks())
136 ret = bundle2.getunbundler(self.ui, stream)
175 ret = bundle2.getunbundler(self.ui, stream)
137 return ret
176 return ret
138 except Exception as exc:
177 except Exception as exc:
139 # If the exception contains output salvaged from a bundle2
178 # If the exception contains output salvaged from a bundle2
140 # reply, we need to make sure it is printed before continuing
179 # reply, we need to make sure it is printed before continuing
141 # to fail. So we build a bundle2 with such output and consume
180 # to fail. So we build a bundle2 with such output and consume
142 # it directly.
181 # it directly.
143 #
182 #
144 # This is not very elegant but allows a "simple" solution for
183 # This is not very elegant but allows a "simple" solution for
145 # issue4594
184 # issue4594
146 output = getattr(exc, '_bundle2salvagedoutput', ())
185 output = getattr(exc, '_bundle2salvagedoutput', ())
147 if output:
186 if output:
148 bundler = bundle2.bundle20(self._repo.ui)
187 bundler = bundle2.bundle20(self._repo.ui)
149 for out in output:
188 for out in output:
150 bundler.addpart(out)
189 bundler.addpart(out)
151 stream = util.chunkbuffer(bundler.getchunks())
190 stream = util.chunkbuffer(bundler.getchunks())
152 b = bundle2.getunbundler(self.ui, stream)
191 b = bundle2.getunbundler(self.ui, stream)
153 bundle2.processbundle(self._repo, b)
192 bundle2.processbundle(self._repo, b)
154 raise
193 raise
155 except error.PushRaced as exc:
194 except error.PushRaced as exc:
156 raise error.ResponseError(_('push failed:'), str(exc))
195 raise error.ResponseError(_('push failed:'), str(exc))
157
196
158 def lock(self):
197 def lock(self):
159 return self._repo.lock()
198 return self._repo.lock()
160
199
161 def addchangegroup(self, cg, source, url):
200 def addchangegroup(self, cg, source, url):
162 return cg.apply(self._repo, source, url)
201 return cg.apply(self._repo, source, url)
163
202
164 def pushkey(self, namespace, key, old, new):
203 def pushkey(self, namespace, key, old, new):
165 return self._repo.pushkey(namespace, key, old, new)
204 return self._repo.pushkey(namespace, key, old, new)
166
205
167 def listkeys(self, namespace):
206 def listkeys(self, namespace):
168 return self._repo.listkeys(namespace)
207 return self._repo.listkeys(namespace)
169
208
170 def debugwireargs(self, one, two, three=None, four=None, five=None):
209 def debugwireargs(self, one, two, three=None, four=None, five=None):
171 '''used to test argument passing over the wire'''
210 '''used to test argument passing over the wire'''
172 return "%s %s %s %s %s" % (one, two, three, four, five)
211 return "%s %s %s %s %s" % (one, two, three, four, five)
173
212
174 class locallegacypeer(localpeer):
213 class locallegacypeer(localpeer):
175 '''peer extension which implements legacy methods too; used for tests with
214 '''peer extension which implements legacy methods too; used for tests with
176 restricted capabilities'''
215 restricted capabilities'''
177
216
178 def __init__(self, repo):
217 def __init__(self, repo):
179 localpeer.__init__(self, repo, caps=legacycaps)
218 localpeer.__init__(self, repo, caps=legacycaps)
180
219
181 def branches(self, nodes):
220 def branches(self, nodes):
182 return self._repo.branches(nodes)
221 return self._repo.branches(nodes)
183
222
184 def between(self, pairs):
223 def between(self, pairs):
185 return self._repo.between(pairs)
224 return self._repo.between(pairs)
186
225
187 def changegroup(self, basenodes, source):
226 def changegroup(self, basenodes, source):
188 return changegroup.changegroup(self._repo, basenodes, source)
227 return changegroup.changegroup(self._repo, basenodes, source)
189
228
190 def changegroupsubset(self, bases, heads, source):
229 def changegroupsubset(self, bases, heads, source):
191 return changegroup.changegroupsubset(self._repo, bases, heads, source)
230 return changegroup.changegroupsubset(self._repo, bases, heads, source)
192
231
193 class localrepository(object):
232 class localrepository(object):
194
233
195 supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
234 supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
196 'manifestv2'))
235 'manifestv2'))
197 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
236 _basesupported = supportedformats | set(('store', 'fncache', 'shared',
198 'dotencode'))
237 'dotencode'))
199 openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
238 openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
200 filtername = None
239 filtername = None
201
240
202 # a list of (ui, featureset) functions.
241 # a list of (ui, featureset) functions.
203 # only functions defined in module of enabled extensions are invoked
242 # only functions defined in module of enabled extensions are invoked
204 featuresetupfuncs = set()
243 featuresetupfuncs = set()
205
244
206 def _baserequirements(self, create):
245 def _baserequirements(self, create):
207 return ['revlogv1']
246 return ['revlogv1']
208
247
209 def __init__(self, baseui, path=None, create=False):
248 def __init__(self, baseui, path=None, create=False):
210 self.requirements = set()
249 self.requirements = set()
211 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
250 self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
212 self.wopener = self.wvfs
251 self.wopener = self.wvfs
213 self.root = self.wvfs.base
252 self.root = self.wvfs.base
214 self.path = self.wvfs.join(".hg")
253 self.path = self.wvfs.join(".hg")
215 self.origroot = path
254 self.origroot = path
216 self.auditor = pathutil.pathauditor(self.root, self._checknested)
255 self.auditor = pathutil.pathauditor(self.root, self._checknested)
217 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
256 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
218 realfs=False)
257 realfs=False)
219 self.vfs = scmutil.vfs(self.path)
258 self.vfs = scmutil.vfs(self.path)
220 self.opener = self.vfs
259 self.opener = self.vfs
221 self.baseui = baseui
260 self.baseui = baseui
222 self.ui = baseui.copy()
261 self.ui = baseui.copy()
223 self.ui.copy = baseui.copy # prevent copying repo configuration
262 self.ui.copy = baseui.copy # prevent copying repo configuration
224 # A list of callback to shape the phase if no data were found.
263 # A list of callback to shape the phase if no data were found.
225 # Callback are in the form: func(repo, roots) --> processed root.
264 # Callback are in the form: func(repo, roots) --> processed root.
226 # This list it to be filled by extension during repo setup
265 # This list it to be filled by extension during repo setup
227 self._phasedefaults = []
266 self._phasedefaults = []
228 try:
267 try:
229 self.ui.readconfig(self.join("hgrc"), self.root)
268 self.ui.readconfig(self.join("hgrc"), self.root)
230 extensions.loadall(self.ui)
269 extensions.loadall(self.ui)
231 except IOError:
270 except IOError:
232 pass
271 pass
233
272
234 if self.featuresetupfuncs:
273 if self.featuresetupfuncs:
235 self.supported = set(self._basesupported) # use private copy
274 self.supported = set(self._basesupported) # use private copy
236 extmods = set(m.__name__ for n, m
275 extmods = set(m.__name__ for n, m
237 in extensions.extensions(self.ui))
276 in extensions.extensions(self.ui))
238 for setupfunc in self.featuresetupfuncs:
277 for setupfunc in self.featuresetupfuncs:
239 if setupfunc.__module__ in extmods:
278 if setupfunc.__module__ in extmods:
240 setupfunc(self.ui, self.supported)
279 setupfunc(self.ui, self.supported)
241 else:
280 else:
242 self.supported = self._basesupported
281 self.supported = self._basesupported
243
282
244 if not self.vfs.isdir():
283 if not self.vfs.isdir():
245 if create:
284 if create:
246 if not self.wvfs.exists():
285 if not self.wvfs.exists():
247 self.wvfs.makedirs()
286 self.wvfs.makedirs()
248 self.vfs.makedir(notindexed=True)
287 self.vfs.makedir(notindexed=True)
249 self.requirements.update(self._baserequirements(create))
288 self.requirements.update(self._baserequirements(create))
250 if self.ui.configbool('format', 'usestore', True):
289 if self.ui.configbool('format', 'usestore', True):
251 self.vfs.mkdir("store")
290 self.vfs.mkdir("store")
252 self.requirements.add("store")
291 self.requirements.add("store")
253 if self.ui.configbool('format', 'usefncache', True):
292 if self.ui.configbool('format', 'usefncache', True):
254 self.requirements.add("fncache")
293 self.requirements.add("fncache")
255 if self.ui.configbool('format', 'dotencode', True):
294 if self.ui.configbool('format', 'dotencode', True):
256 self.requirements.add('dotencode')
295 self.requirements.add('dotencode')
257 # create an invalid changelog
296 # create an invalid changelog
258 self.vfs.append(
297 self.vfs.append(
259 "00changelog.i",
298 "00changelog.i",
260 '\0\0\0\2' # represents revlogv2
299 '\0\0\0\2' # represents revlogv2
261 ' dummy changelog to prevent using the old repo layout'
300 ' dummy changelog to prevent using the old repo layout'
262 )
301 )
263 if scmutil.gdinitconfig(self.ui):
302 if scmutil.gdinitconfig(self.ui):
264 self.requirements.add("generaldelta")
303 self.requirements.add("generaldelta")
265 if self.ui.configbool('experimental', 'treemanifest', False):
304 if self.ui.configbool('experimental', 'treemanifest', False):
266 self.requirements.add("treemanifest")
305 self.requirements.add("treemanifest")
267 if self.ui.configbool('experimental', 'manifestv2', False):
306 if self.ui.configbool('experimental', 'manifestv2', False):
268 self.requirements.add("manifestv2")
307 self.requirements.add("manifestv2")
269 else:
308 else:
270 raise error.RepoError(_("repository %s not found") % path)
309 raise error.RepoError(_("repository %s not found") % path)
271 elif create:
310 elif create:
272 raise error.RepoError(_("repository %s already exists") % path)
311 raise error.RepoError(_("repository %s already exists") % path)
273 else:
312 else:
274 try:
313 try:
275 self.requirements = scmutil.readrequires(
314 self.requirements = scmutil.readrequires(
276 self.vfs, self.supported)
315 self.vfs, self.supported)
277 except IOError as inst:
316 except IOError as inst:
278 if inst.errno != errno.ENOENT:
317 if inst.errno != errno.ENOENT:
279 raise
318 raise
280
319
281 self.sharedpath = self.path
320 self.sharedpath = self.path
282 try:
321 try:
283 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
322 vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
284 realpath=True)
323 realpath=True)
285 s = vfs.base
324 s = vfs.base
286 if not vfs.exists():
325 if not vfs.exists():
287 raise error.RepoError(
326 raise error.RepoError(
288 _('.hg/sharedpath points to nonexistent directory %s') % s)
327 _('.hg/sharedpath points to nonexistent directory %s') % s)
289 self.sharedpath = s
328 self.sharedpath = s
290 except IOError as inst:
329 except IOError as inst:
291 if inst.errno != errno.ENOENT:
330 if inst.errno != errno.ENOENT:
292 raise
331 raise
293
332
294 self.store = store.store(
333 self.store = store.store(
295 self.requirements, self.sharedpath, scmutil.vfs)
334 self.requirements, self.sharedpath, scmutil.vfs)
296 self.spath = self.store.path
335 self.spath = self.store.path
297 self.svfs = self.store.vfs
336 self.svfs = self.store.vfs
298 self.sjoin = self.store.join
337 self.sjoin = self.store.join
299 self.vfs.createmode = self.store.createmode
338 self.vfs.createmode = self.store.createmode
300 self._applyopenerreqs()
339 self._applyopenerreqs()
301 if create:
340 if create:
302 self._writerequirements()
341 self._writerequirements()
303
342
304 self._dirstatevalidatewarned = False
343 self._dirstatevalidatewarned = False
305
344
306 self._branchcaches = {}
345 self._branchcaches = {}
307 self._revbranchcache = None
346 self._revbranchcache = None
308 self.filterpats = {}
347 self.filterpats = {}
309 self._datafilters = {}
348 self._datafilters = {}
310 self._transref = self._lockref = self._wlockref = None
349 self._transref = self._lockref = self._wlockref = None
311
350
312 # A cache for various files under .hg/ that tracks file changes,
351 # A cache for various files under .hg/ that tracks file changes,
313 # (used by the filecache decorator)
352 # (used by the filecache decorator)
314 #
353 #
315 # Maps a property name to its util.filecacheentry
354 # Maps a property name to its util.filecacheentry
316 self._filecache = {}
355 self._filecache = {}
317
356
318 # hold sets of revision to be filtered
357 # hold sets of revision to be filtered
319 # should be cleared when something might have changed the filter value:
358 # should be cleared when something might have changed the filter value:
320 # - new changesets,
359 # - new changesets,
321 # - phase change,
360 # - phase change,
322 # - new obsolescence marker,
361 # - new obsolescence marker,
323 # - working directory parent change,
362 # - working directory parent change,
324 # - bookmark changes
363 # - bookmark changes
325 self.filteredrevcache = {}
364 self.filteredrevcache = {}
326
365
327 # generic mapping between names and nodes
366 # generic mapping between names and nodes
328 self.names = namespaces.namespaces()
367 self.names = namespaces.namespaces()
329
368
330 def close(self):
369 def close(self):
331 self._writecaches()
370 self._writecaches()
332
371
333 def _writecaches(self):
372 def _writecaches(self):
334 if self._revbranchcache:
373 if self._revbranchcache:
335 self._revbranchcache.write()
374 self._revbranchcache.write()
336
375
337 def _restrictcapabilities(self, caps):
376 def _restrictcapabilities(self, caps):
338 if self.ui.configbool('experimental', 'bundle2-advertise', True):
377 if self.ui.configbool('experimental', 'bundle2-advertise', True):
339 caps = set(caps)
378 caps = set(caps)
340 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
379 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
341 caps.add('bundle2=' + urllib.quote(capsblob))
380 caps.add('bundle2=' + urllib.quote(capsblob))
342 return caps
381 return caps
343
382
344 def _applyopenerreqs(self):
383 def _applyopenerreqs(self):
345 self.svfs.options = dict((r, 1) for r in self.requirements
384 self.svfs.options = dict((r, 1) for r in self.requirements
346 if r in self.openerreqs)
385 if r in self.openerreqs)
347 # experimental config: format.chunkcachesize
386 # experimental config: format.chunkcachesize
348 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
387 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
349 if chunkcachesize is not None:
388 if chunkcachesize is not None:
350 self.svfs.options['chunkcachesize'] = chunkcachesize
389 self.svfs.options['chunkcachesize'] = chunkcachesize
351 # experimental config: format.maxchainlen
390 # experimental config: format.maxchainlen
352 maxchainlen = self.ui.configint('format', 'maxchainlen')
391 maxchainlen = self.ui.configint('format', 'maxchainlen')
353 if maxchainlen is not None:
392 if maxchainlen is not None:
354 self.svfs.options['maxchainlen'] = maxchainlen
393 self.svfs.options['maxchainlen'] = maxchainlen
355 # experimental config: format.manifestcachesize
394 # experimental config: format.manifestcachesize
356 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
395 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
357 if manifestcachesize is not None:
396 if manifestcachesize is not None:
358 self.svfs.options['manifestcachesize'] = manifestcachesize
397 self.svfs.options['manifestcachesize'] = manifestcachesize
359 # experimental config: format.aggressivemergedeltas
398 # experimental config: format.aggressivemergedeltas
360 aggressivemergedeltas = self.ui.configbool('format',
399 aggressivemergedeltas = self.ui.configbool('format',
361 'aggressivemergedeltas', False)
400 'aggressivemergedeltas', False)
362 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
401 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
363 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
402 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
364
403
365 def _writerequirements(self):
404 def _writerequirements(self):
366 scmutil.writerequires(self.vfs, self.requirements)
405 scmutil.writerequires(self.vfs, self.requirements)
367
406
368 def _checknested(self, path):
407 def _checknested(self, path):
369 """Determine if path is a legal nested repository."""
408 """Determine if path is a legal nested repository."""
370 if not path.startswith(self.root):
409 if not path.startswith(self.root):
371 return False
410 return False
372 subpath = path[len(self.root) + 1:]
411 subpath = path[len(self.root) + 1:]
373 normsubpath = util.pconvert(subpath)
412 normsubpath = util.pconvert(subpath)
374
413
375 # XXX: Checking against the current working copy is wrong in
414 # XXX: Checking against the current working copy is wrong in
376 # the sense that it can reject things like
415 # the sense that it can reject things like
377 #
416 #
378 # $ hg cat -r 10 sub/x.txt
417 # $ hg cat -r 10 sub/x.txt
379 #
418 #
380 # if sub/ is no longer a subrepository in the working copy
419 # if sub/ is no longer a subrepository in the working copy
381 # parent revision.
420 # parent revision.
382 #
421 #
383 # However, it can of course also allow things that would have
422 # However, it can of course also allow things that would have
384 # been rejected before, such as the above cat command if sub/
423 # been rejected before, such as the above cat command if sub/
385 # is a subrepository now, but was a normal directory before.
424 # is a subrepository now, but was a normal directory before.
386 # The old path auditor would have rejected by mistake since it
425 # The old path auditor would have rejected by mistake since it
387 # panics when it sees sub/.hg/.
426 # panics when it sees sub/.hg/.
388 #
427 #
389 # All in all, checking against the working copy seems sensible
428 # All in all, checking against the working copy seems sensible
390 # since we want to prevent access to nested repositories on
429 # since we want to prevent access to nested repositories on
391 # the filesystem *now*.
430 # the filesystem *now*.
392 ctx = self[None]
431 ctx = self[None]
393 parts = util.splitpath(subpath)
432 parts = util.splitpath(subpath)
394 while parts:
433 while parts:
395 prefix = '/'.join(parts)
434 prefix = '/'.join(parts)
396 if prefix in ctx.substate:
435 if prefix in ctx.substate:
397 if prefix == normsubpath:
436 if prefix == normsubpath:
398 return True
437 return True
399 else:
438 else:
400 sub = ctx.sub(prefix)
439 sub = ctx.sub(prefix)
401 return sub.checknested(subpath[len(prefix) + 1:])
440 return sub.checknested(subpath[len(prefix) + 1:])
402 else:
441 else:
403 parts.pop()
442 parts.pop()
404 return False
443 return False
405
444
406 def peer(self):
445 def peer(self):
407 return localpeer(self) # not cached to avoid reference cycle
446 return localpeer(self) # not cached to avoid reference cycle
408
447
409 def unfiltered(self):
448 def unfiltered(self):
410 """Return unfiltered version of the repository
449 """Return unfiltered version of the repository
411
450
412 Intended to be overwritten by filtered repo."""
451 Intended to be overwritten by filtered repo."""
413 return self
452 return self
414
453
415 def filtered(self, name):
454 def filtered(self, name):
416 """Return a filtered version of a repository"""
455 """Return a filtered version of a repository"""
417 # build a new class with the mixin and the current class
456 # build a new class with the mixin and the current class
418 # (possibly subclass of the repo)
457 # (possibly subclass of the repo)
419 class proxycls(repoview.repoview, self.unfiltered().__class__):
458 class proxycls(repoview.repoview, self.unfiltered().__class__):
420 pass
459 pass
421 return proxycls(self, name)
460 return proxycls(self, name)
422
461
423 @repofilecache('bookmarks')
462 @repofilecache('bookmarks')
424 def _bookmarks(self):
463 def _bookmarks(self):
425 return bookmarks.bmstore(self)
464 return bookmarks.bmstore(self)
426
465
427 @repofilecache('bookmarks.current')
466 @repofilecache('bookmarks.current')
428 def _activebookmark(self):
467 def _activebookmark(self):
429 return bookmarks.readactive(self)
468 return bookmarks.readactive(self)
430
469
431 def bookmarkheads(self, bookmark):
470 def bookmarkheads(self, bookmark):
432 name = bookmark.split('@', 1)[0]
471 name = bookmark.split('@', 1)[0]
433 heads = []
472 heads = []
434 for mark, n in self._bookmarks.iteritems():
473 for mark, n in self._bookmarks.iteritems():
435 if mark.split('@', 1)[0] == name:
474 if mark.split('@', 1)[0] == name:
436 heads.append(n)
475 heads.append(n)
437 return heads
476 return heads
438
477
439 # _phaserevs and _phasesets depend on changelog. what we need is to
478 # _phaserevs and _phasesets depend on changelog. what we need is to
440 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
479 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
441 # can't be easily expressed in filecache mechanism.
480 # can't be easily expressed in filecache mechanism.
442 @storecache('phaseroots', '00changelog.i')
481 @storecache('phaseroots', '00changelog.i')
443 def _phasecache(self):
482 def _phasecache(self):
444 return phases.phasecache(self, self._phasedefaults)
483 return phases.phasecache(self, self._phasedefaults)
445
484
    @storecache('obsstore')
    def obsstore(self):
        """The obsolescence-marker store for this repository.

        Opened read-only unless the 'createmarkers' obsolete option is
        enabled; warns if markers exist while the feature is disabled.
        """
        # read default format for new obsstore.
        # developer config: format.obsstore-version
        defaultformat = self.ui.configint('format', 'obsstore-version', None)
        # rely on obsstore class default when possible.
        kwargs = {}
        if defaultformat is not None:
            kwargs['defaultformat'] = defaultformat
        readonly = not obsolete.isenabled(self, obsolete.createmarkersopt)
        store = obsolete.obsstore(self.svfs, readonly=readonly,
                                  **kwargs)
        if store and readonly:
            # markers present but the feature is off: surface it to the user
            self.ui.warn(
                _('obsolete feature not enabled but %i markers found!\n')
                % len(list(store)))
        return store
463
502
    @storecache('00changelog.i')
    def changelog(self):
        """The changelog revlog, including pending (uncommitted transaction)
        data when HG_PENDING points at this repository's root."""
        c = changelog.changelog(self.svfs)
        if 'HG_PENDING' in os.environ:
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                # a transaction is in progress for this repo: expose the
                # not-yet-committed revisions written to 00changelog.i.a
                c.readpending('00changelog.i.a')
        return c
472
511
473 @storecache('00manifest.i')
512 @storecache('00manifest.i')
474 def manifest(self):
513 def manifest(self):
475 return manifest.manifest(self.svfs)
514 return manifest.manifest(self.svfs)
476
515
477 def dirlog(self, dir):
516 def dirlog(self, dir):
478 return self.manifest.dirlog(dir)
517 return self.manifest.dirlog(dir)
479
518
    @repofilecache('dirstate')
    def dirstate(self):
        """The working-directory state (tracked files, parents, copies).

        Parent nodes read from disk are validated against the changelog
        via _dirstatevalidate.
        """
        return dirstate.dirstate(self.vfs, self.ui, self.root,
                                 self._dirstatevalidate)
484
523
    def _dirstatevalidate(self, node):
        """Validate a dirstate parent *node* against the changelog.

        Returns *node* unchanged when it is a known changeset; otherwise
        warns once and returns nullid so an unknown working parent does
        not propagate.
        """
        try:
            self.changelog.rev(node)
            return node
        except error.LookupError:
            if not self._dirstatevalidatewarned:
                # warn only once per repo instance to avoid spamming
                self._dirstatevalidatewarned = True
                self.ui.warn(_("warning: ignoring unknown"
                               " working parent %s!\n") % short(node))
            return nullid
495
534
    def __getitem__(self, changeid):
        """Return the context for *changeid*.

        None or the working-directory revision yields a workingctx; a
        slice yields a list of changectxs, skipping filtered revisions;
        anything else is resolved to a single changectx.
        """
        if changeid is None or changeid == wdirrev:
            return context.workingctx(self)
        if isinstance(changeid, slice):
            return [context.changectx(self, i)
                    for i in xrange(*changeid.indices(len(self)))
                    if i not in self.changelog.filteredrevs]
        return context.changectx(self, changeid)
504
543
505 def __contains__(self, changeid):
544 def __contains__(self, changeid):
506 try:
545 try:
507 self[changeid]
546 self[changeid]
508 return True
547 return True
509 except error.RepoLookupError:
548 except error.RepoLookupError:
510 return False
549 return False
511
550
    def __nonzero__(self):
        # a repository object is always truthy, even when empty
        return True
514
553
    def __len__(self):
        # number of revisions in the (possibly filtered) changelog
        return len(self.changelog)
517
556
    def __iter__(self):
        # iterate revision numbers, delegating filtering to the changelog
        return iter(self.changelog)
520
559
521 def revs(self, expr, *args):
560 def revs(self, expr, *args):
522 '''Find revisions matching a revset.
561 '''Find revisions matching a revset.
523
562
524 The revset is specified as a string ``expr`` that may contain
563 The revset is specified as a string ``expr`` that may contain
525 %-formatting to escape certain types. See ``revset.formatspec``.
564 %-formatting to escape certain types. See ``revset.formatspec``.
526
565
527 Return a revset.abstractsmartset, which is a list-like interface
566 Return a revset.abstractsmartset, which is a list-like interface
528 that contains integer revisions.
567 that contains integer revisions.
529 '''
568 '''
530 expr = revset.formatspec(expr, *args)
569 expr = revset.formatspec(expr, *args)
531 m = revset.match(None, expr)
570 m = revset.match(None, expr)
532 return m(self)
571 return m(self)
533
572
534 def set(self, expr, *args):
573 def set(self, expr, *args):
535 '''Find revisions matching a revset and emit changectx instances.
574 '''Find revisions matching a revset and emit changectx instances.
536
575
537 This is a convenience wrapper around ``revs()`` that iterates the
576 This is a convenience wrapper around ``revs()`` that iterates the
538 result and is a generator of changectx instances.
577 result and is a generator of changectx instances.
539 '''
578 '''
540 for r in self.revs(expr, *args):
579 for r in self.revs(expr, *args):
541 yield self[r]
580 yield self[r]
542
581
543 def url(self):
582 def url(self):
544 return 'file:' + self.root
583 return 'file:' + self.root
545
584
    def hook(self, name, throw=False, **args):
        """Call a hook, passing this repo instance.

        This a convenience method to aid invoking hooks. Extensions likely
        won't call this unless they have registered a custom hook or are
        replacing code that is expected to call a hook.

        name: hook name as configured in [hooks]
        throw: when True, hook failure raises instead of returning status
        args: extra keyword arguments exposed to the hook environment
        """
        return hook.hook(self.ui, self, name, throw, **args)
554
593
    @unfilteredmethod
    def _tag(self, names, node, message, local, user, date, extra=None,
             editor=False):
        """Low-level tagging: write tag entries and (if not local) commit.

        names may be a single string or a sequence of tag names. Runs the
        'pretag' hook per name, writes .hg/localtags (local) or .hgtags
        (global, followed by a commit), then runs the 'tag' hook per name.
        Returns the node of the tagging commit, or None for local tags.
        """
        if isinstance(names, str):
            names = (names,)

        branches = self.branchmap()
        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                # a tag shadowing a branch name is legal but confusing
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            # append entries at EOF, ensuring the previous content ends
            # with a newline; fp is closed when done
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                if munge:
                    m = munge(name)
                else:
                    m = name

                if (self._tagscache.tagtypes and
                    name in self._tagscache.tagtypes):
                    # record the old value first so history of the tag
                    # is preserved in the file
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.vfs('localtags', 'r+')
            except IOError:
                fp = self.vfs('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        self.invalidatecaches()

        if '.hgtags' not in self.dirstate:
            # first tag in this working copy: schedule .hgtags for add
            self[None].add(['.hgtags'])

        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m,
                              editor=editor)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
628
667
    def tag(self, names, node, message, local, user, date, editor=False):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            # refuse to overwrite uncommitted .hgtags edits; the tag
            # commit would silently fold them in otherwise
            m = matchmod.exact(self.root, '', ['.hgtags'])
            if any(self.status(match=m, unknown=True, ignored=True)):
                raise error.Abort(_('working copy of .hgtags is changed'),
                                  hint=_('please commit .hgtags manually'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date, editor=editor)
658
697
    @filteredpropertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                # lazily populated by nodetags() and tagslist()
                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache
681
720
    def tags(self):
        '''return a mapping of tag to node'''
        t = {}
        if self.changelog.filteredrevs:
            # the cache is shared with the unfiltered repo, so with a
            # filtered changelog recompute instead of trusting it
            tags, tt = self._findtags()
        else:
            tags = self._tagscache.tags
        for k, v in tags.iteritems():
            try:
                # ignore tags to unknown nodes
                self.changelog.rev(v)
                t[k] = v
            except (error.LookupError, ValueError):
                pass
        return t
697
736
    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?

        alltags = {} # map tag name to (node, hist)
        tagtypes = {}

        tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            if node != nullid:
                # nullid marks a deleted tag; drop it from the result
                tags[encoding.tolocal(name)] = node
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)
730
769
731 def tagtype(self, tagname):
770 def tagtype(self, tagname):
732 '''
771 '''
733 return the type of the given tag. result can be:
772 return the type of the given tag. result can be:
734
773
735 'local' : a local tag
774 'local' : a local tag
736 'global' : a global tag
775 'global' : a global tag
737 None : tag does not exist
776 None : tag does not exist
738 '''
777 '''
739
778
740 return self._tagscache.tagtypes.get(tagname)
779 return self._tagscache.tagtypes.get(tagname)
741
780
742 def tagslist(self):
781 def tagslist(self):
743 '''return a list of tags ordered by revision'''
782 '''return a list of tags ordered by revision'''
744 if not self._tagscache.tagslist:
783 if not self._tagscache.tagslist:
745 l = []
784 l = []
746 for t, n in self.tags().iteritems():
785 for t, n in self.tags().iteritems():
747 l.append((self.changelog.rev(n), t, n))
786 l.append((self.changelog.rev(n), t, n))
748 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
787 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
749
788
750 return self._tagscache.tagslist
789 return self._tagscache.tagslist
751
790
752 def nodetags(self, node):
791 def nodetags(self, node):
753 '''return the tags associated with a node'''
792 '''return the tags associated with a node'''
754 if not self._tagscache.nodetagscache:
793 if not self._tagscache.nodetagscache:
755 nodetagscache = {}
794 nodetagscache = {}
756 for t, n in self._tagscache.tags.iteritems():
795 for t, n in self._tagscache.tags.iteritems():
757 nodetagscache.setdefault(n, []).append(t)
796 nodetagscache.setdefault(n, []).append(t)
758 for tags in nodetagscache.itervalues():
797 for tags in nodetagscache.itervalues():
759 tags.sort()
798 tags.sort()
760 self._tagscache.nodetagscache = nodetagscache
799 self._tagscache.nodetagscache = nodetagscache
761 return self._tagscache.nodetagscache.get(node, [])
800 return self._tagscache.nodetagscache.get(node, [])
762
801
763 def nodebookmarks(self, node):
802 def nodebookmarks(self, node):
764 """return the list of bookmarks pointing to the specified node"""
803 """return the list of bookmarks pointing to the specified node"""
765 marks = []
804 marks = []
766 for bookmark, n in self._bookmarks.iteritems():
805 for bookmark, n in self._bookmarks.iteritems():
767 if n == node:
806 if n == node:
768 marks.append(bookmark)
807 marks.append(bookmark)
769 return sorted(marks)
808 return sorted(marks)
770
809
    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]} with branchheads
        ordered by increasing revision number'''
        # refresh the per-filter cache before handing it out
        branchmap.updatecache(self)
        return self._branchcaches[self.filtername]
776
815
    @unfilteredmethod
    def revbranchcache(self):
        """Return the rev -> branch name cache, creating it on first use.

        Always operates on the unfiltered repo so the cache is shared
        across repoview filters.
        """
        if not self._revbranchcache:
            self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
        return self._revbranchcache
782
821
783 def branchtip(self, branch, ignoremissing=False):
822 def branchtip(self, branch, ignoremissing=False):
784 '''return the tip node for a given branch
823 '''return the tip node for a given branch
785
824
786 If ignoremissing is True, then this method will not raise an error.
825 If ignoremissing is True, then this method will not raise an error.
787 This is helpful for callers that only expect None for a missing branch
826 This is helpful for callers that only expect None for a missing branch
788 (e.g. namespace).
827 (e.g. namespace).
789
828
790 '''
829 '''
791 try:
830 try:
792 return self.branchmap().branchtip(branch)
831 return self.branchmap().branchtip(branch)
793 except KeyError:
832 except KeyError:
794 if not ignoremissing:
833 if not ignoremissing:
795 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
834 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
796 else:
835 else:
797 pass
836 pass
798
837
799 def lookup(self, key):
838 def lookup(self, key):
800 return self[key].node()
839 return self[key].node()
801
840
802 def lookupbranch(self, key, remote=None):
841 def lookupbranch(self, key, remote=None):
803 repo = remote or self
842 repo = remote or self
804 if key in repo.branchmap():
843 if key in repo.branchmap():
805 return key
844 return key
806
845
807 repo = (remote and remote.local()) and remote or self
846 repo = (remote and remote.local()) and remote or self
808 return repo[key].branch()
847 return repo[key].branch()
809
848
810 def known(self, nodes):
849 def known(self, nodes):
811 cl = self.changelog
850 cl = self.changelog
812 nm = cl.nodemap
851 nm = cl.nodemap
813 filtered = cl.filteredrevs
852 filtered = cl.filteredrevs
814 result = []
853 result = []
815 for n in nodes:
854 for n in nodes:
816 r = nm.get(n)
855 r = nm.get(n)
817 resp = not (r is None or r in filtered)
856 resp = not (r is None or r in filtered)
818 result.append(resp)
857 result.append(resp)
819 return result
858 return result
820
859
    def local(self):
        # this repo is local (peer API); remote peers return None/False
        return self
823
862
    def publishing(self):
        """True if this repository publishes changesets (phases.publish)."""
        # it's safe (and desirable) to trust the publish flag unconditionally
        # so that we don't finalize changes shared between users via ssh or nfs
        return self.ui.configbool('phases', 'publish', True, untrusted=True)
828
867
829 def cancopy(self):
868 def cancopy(self):
830 # so statichttprepo's override of local() works
869 # so statichttprepo's override of local() works
831 if not self.local():
870 if not self.local():
832 return False
871 return False
833 if not self.publishing():
872 if not self.publishing():
834 return True
873 return True
835 # if publishing we can't copy if there is filtered content
874 # if publishing we can't copy if there is filtered content
836 return not self.filtered('visible').changelog.filteredrevs
875 return not self.filtered('visible').changelog.filteredrevs
837
876
838 def shared(self):
877 def shared(self):
839 '''the type of shared repository (None if not shared)'''
878 '''the type of shared repository (None if not shared)'''
840 if self.sharedpath != self.path:
879 if self.sharedpath != self.path:
841 return 'store'
880 return 'store'
842 return None
881 return None
843
882
844 def join(self, f, *insidef):
883 def join(self, f, *insidef):
845 return self.vfs.join(os.path.join(f, *insidef))
884 return self.vfs.join(os.path.join(f, *insidef))
846
885
847 def wjoin(self, f, *insidef):
886 def wjoin(self, f, *insidef):
848 return self.vfs.reljoin(self.root, f, *insidef)
887 return self.vfs.reljoin(self.root, f, *insidef)
849
888
    def file(self, f):
        """Return the filelog for tracked path *f*.

        A single leading '/' is stripped so '/foo' and 'foo' name the
        same filelog.
        """
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.svfs, f)
854
893
    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        # deprecated in 3.7; kept as a thin shim over repo[x].parents()
        msg = 'repo.parents() is deprecated, use repo[%r].parents()' % changeid
        self.ui.deprecwarn(msg, '3.7')
        return self[changeid].parents()
860
899
    def changectx(self, changeid):
        # equivalent to repo[changeid]; kept for API compatibility
        return self[changeid]
863
902
    def setparents(self, p1, p2=nullid):
        """Set the working directory's parents to *p1* (and *p2*),
        adjusting dirstate copy records accordingly."""
        self.dirstate.beginparentchange()
        copies = self.dirstate.setparents(p1, p2)
        pctx = self[p1]
        if copies:
            # Adjust copy records, the dirstate cannot do it, it
            # requires access to parents manifests. Preserve them
            # only for entries added to first parent.
            for f in copies:
                if f not in pctx and copies[f] in pctx:
                    self.dirstate.copy(copies[f], f)
        if p2 == nullid:
            # collapsing to one parent: drop copy records whose source
            # and destination are both absent from the remaining parent
            for f, s in sorted(self.dirstate.copies().items()):
                if f not in pctx and s not in pctx:
                    self.dirstate.copy(None, f)
        self.dirstate.endparentchange()
880
919
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
885
924
    def getcwd(self):
        # current working directory as tracked by the dirstate
        return self.dirstate.getcwd()
888
927
    def pathto(self, f, cwd=None):
        # repo-relative path *f* expressed relative to *cwd*
        return self.dirstate.pathto(f, cwd)
891
930
    def wfile(self, f, mode='r'):
        # open file *f* in the working directory with the given mode
        return self.wvfs(f, mode)
894
933
    def _link(self, f):
        # True if working-directory file *f* is a symlink
        return self.wvfs.islink(f)
897
936
898 def _loadfilter(self, filter):
937 def _loadfilter(self, filter):
899 if filter not in self.filterpats:
938 if filter not in self.filterpats:
900 l = []
939 l = []
901 for pat, cmd in self.ui.configitems(filter):
940 for pat, cmd in self.ui.configitems(filter):
902 if cmd == '!':
941 if cmd == '!':
903 continue
942 continue
904 mf = matchmod.match(self.root, '', [pat])
943 mf = matchmod.match(self.root, '', [pat])
905 fn = None
944 fn = None
906 params = cmd
945 params = cmd
907 for name, filterfn in self._datafilters.iteritems():
946 for name, filterfn in self._datafilters.iteritems():
908 if cmd.startswith(name):
947 if cmd.startswith(name):
909 fn = filterfn
948 fn = filterfn
910 params = cmd[len(name):].lstrip()
949 params = cmd[len(name):].lstrip()
911 break
950 break
912 if not fn:
951 if not fn:
913 fn = lambda s, c, **kwargs: util.filter(s, c)
952 fn = lambda s, c, **kwargs: util.filter(s, c)
914 # Wrap old filters not supporting keyword arguments
953 # Wrap old filters not supporting keyword arguments
915 if not inspect.getargspec(fn)[2]:
954 if not inspect.getargspec(fn)[2]:
916 oldfn = fn
955 oldfn = fn
917 fn = lambda s, c, **kwargs: oldfn(s, c)
956 fn = lambda s, c, **kwargs: oldfn(s, c)
918 l.append((mf, fn, params))
957 l.append((mf, fn, params))
919 self.filterpats[filter] = l
958 self.filterpats[filter] = l
920 return self.filterpats[filter]
959 return self.filterpats[filter]
921
960
922 def _filter(self, filterpats, filename, data):
961 def _filter(self, filterpats, filename, data):
923 for mf, fn, cmd in filterpats:
962 for mf, fn, cmd in filterpats:
924 if mf(filename):
963 if mf(filename):
925 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
964 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
926 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
965 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
927 break
966 break
928
967
929 return data
968 return data
930
969
    @unfilteredpropertycache
    def _encodefilterpats(self):
        # filters applied when reading working-dir data (see wread)
        return self._loadfilter('encode')
934
973
    @unfilteredpropertycache
    def _decodefilterpats(self):
        # filters applied when writing working-dir data (see wwrite)
        return self._loadfilter('decode')
938
977
    def adddatafilter(self, name, filter):
        # register *filter* under *name* for [encode]/[decode] rules
        self._datafilters[name] = filter
941
980
942 def wread(self, filename):
981 def wread(self, filename):
943 if self._link(filename):
982 if self._link(filename):
944 data = self.wvfs.readlink(filename)
983 data = self.wvfs.readlink(filename)
945 else:
984 else:
946 data = self.wvfs.read(filename)
985 data = self.wvfs.read(filename)
947 return self._filter(self._encodefilterpats, filename, data)
986 return self._filter(self._encodefilterpats, filename, data)
948
987
    def wwrite(self, filename, data, flags):
        """write ``data`` into ``filename`` in the working directory

        This returns length of written (maybe decoded) data.
        """
        data = self._filter(self._decodefilterpats, filename, data)
        if 'l' in flags:
            # symlink flag: the decoded data is the link target
            self.wvfs.symlink(data, filename)
        else:
            self.wvfs.write(filename, data)
            if 'x' in flags:
                # executable flag
                self.wvfs.setflags(filename, False, True)
        return len(data)
962
1001
    def wwritedata(self, filename, data):
        # apply decode filters only, without touching the filesystem
        return self._filter(self._decodefilterpats, filename, data)
965
1004
966 def currenttransaction(self):
1005 def currenttransaction(self):
967 """return the current transaction or None if non exists"""
1006 """return the current transaction or None if non exists"""
968 if self._transref:
1007 if self._transref:
969 tr = self._transref()
1008 tr = self._transref()
970 else:
1009 else:
971 tr = None
1010 tr = None
972
1011
973 if tr and tr.running():
1012 if tr and tr.running():
974 return tr
1013 return tr
975 return None
1014 return None
976
1015
977 def transaction(self, desc, report=None):
1016 def transaction(self, desc, report=None):
978 if (self.ui.configbool('devel', 'all-warnings')
1017 if (self.ui.configbool('devel', 'all-warnings')
979 or self.ui.configbool('devel', 'check-locks')):
1018 or self.ui.configbool('devel', 'check-locks')):
980 l = self._lockref and self._lockref()
1019 l = self._lockref and self._lockref()
981 if l is None or not l.held:
1020 if l is None or not l.held:
982 self.ui.develwarn('transaction with no lock')
1021 self.ui.develwarn('transaction with no lock')
983 tr = self.currenttransaction()
1022 tr = self.currenttransaction()
984 if tr is not None:
1023 if tr is not None:
985 return tr.nest()
1024 return tr.nest()
986
1025
987 # abort here if the journal already exists
1026 # abort here if the journal already exists
988 if self.svfs.exists("journal"):
1027 if self.svfs.exists("journal"):
989 raise error.RepoError(
1028 raise error.RepoError(
990 _("abandoned transaction found"),
1029 _("abandoned transaction found"),
991 hint=_("run 'hg recover' to clean up transaction"))
1030 hint=_("run 'hg recover' to clean up transaction"))
992
1031
993 # make journal.dirstate contain in-memory changes at this point
1032 # make journal.dirstate contain in-memory changes at this point
994 self.dirstate.write(None)
1033 self.dirstate.write(None)
995
1034
996 idbase = "%.40f#%f" % (random.random(), time.time())
1035 idbase = "%.40f#%f" % (random.random(), time.time())
997 txnid = 'TXN:' + util.sha1(idbase).hexdigest()
1036 txnid = 'TXN:' + util.sha1(idbase).hexdigest()
998 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1037 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
999
1038
1000 self._writejournal(desc)
1039 self._writejournal(desc)
1001 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1040 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1002 if report:
1041 if report:
1003 rp = report
1042 rp = report
1004 else:
1043 else:
1005 rp = self.ui.warn
1044 rp = self.ui.warn
1006 vfsmap = {'plain': self.vfs} # root of .hg/
1045 vfsmap = {'plain': self.vfs} # root of .hg/
1007 # we must avoid cyclic reference between repo and transaction.
1046 # we must avoid cyclic reference between repo and transaction.
1008 reporef = weakref.ref(self)
1047 reporef = weakref.ref(self)
1009 def validate(tr):
1048 def validate(tr):
1010 """will run pre-closing hooks"""
1049 """will run pre-closing hooks"""
1011 reporef().hook('pretxnclose', throw=True,
1050 reporef().hook('pretxnclose', throw=True,
1012 txnname=desc, **tr.hookargs)
1051 txnname=desc, **tr.hookargs)
1013 def releasefn(tr, success):
1052 def releasefn(tr, success):
1014 repo = reporef()
1053 repo = reporef()
1015 if success:
1054 if success:
1016 # this should be explicitly invoked here, because
1055 # this should be explicitly invoked here, because
1017 # in-memory changes aren't written out at closing
1056 # in-memory changes aren't written out at closing
1018 # transaction, if tr.addfilegenerator (via
1057 # transaction, if tr.addfilegenerator (via
1019 # dirstate.write or so) isn't invoked while
1058 # dirstate.write or so) isn't invoked while
1020 # transaction running
1059 # transaction running
1021 repo.dirstate.write(None)
1060 repo.dirstate.write(None)
1022 else:
1061 else:
1023 # prevent in-memory changes from being written out at
1062 # prevent in-memory changes from being written out at
1024 # the end of outer wlock scope or so
1063 # the end of outer wlock scope or so
1025 repo.dirstate.invalidate()
1064 repo.dirstate.invalidate()
1026
1065
1027 # discard all changes (including ones already written
1066 # discard all changes (including ones already written
1028 # out) in this transaction
1067 # out) in this transaction
1029 repo.vfs.rename('journal.dirstate', 'dirstate')
1068 repo.vfs.rename('journal.dirstate', 'dirstate')
1030
1069
1031 repo.invalidate(clearfilecache=True)
1070 repo.invalidate(clearfilecache=True)
1032
1071
1033 tr = transaction.transaction(rp, self.svfs, vfsmap,
1072 tr = transaction.transaction(rp, self.svfs, vfsmap,
1034 "journal",
1073 "journal",
1035 "undo",
1074 "undo",
1036 aftertrans(renames),
1075 aftertrans(renames),
1037 self.store.createmode,
1076 self.store.createmode,
1038 validator=validate,
1077 validator=validate,
1039 releasefn=releasefn)
1078 releasefn=releasefn)
1040
1079
1041 tr.hookargs['txnid'] = txnid
1080 tr.hookargs['txnid'] = txnid
1042 # note: writing the fncache only during finalize mean that the file is
1081 # note: writing the fncache only during finalize mean that the file is
1043 # outdated when running hooks. As fncache is used for streaming clone,
1082 # outdated when running hooks. As fncache is used for streaming clone,
1044 # this is not expected to break anything that happen during the hooks.
1083 # this is not expected to break anything that happen during the hooks.
1045 tr.addfinalize('flush-fncache', self.store.write)
1084 tr.addfinalize('flush-fncache', self.store.write)
1046 def txnclosehook(tr2):
1085 def txnclosehook(tr2):
1047 """To be run if transaction is successful, will schedule a hook run
1086 """To be run if transaction is successful, will schedule a hook run
1048 """
1087 """
1049 def hook():
1088 def hook():
1050 reporef().hook('txnclose', throw=False, txnname=desc,
1089 reporef().hook('txnclose', throw=False, txnname=desc,
1051 **tr2.hookargs)
1090 **tr2.hookargs)
1052 reporef()._afterlock(hook)
1091 reporef()._afterlock(hook)
1053 tr.addfinalize('txnclose-hook', txnclosehook)
1092 tr.addfinalize('txnclose-hook', txnclosehook)
1054 def txnaborthook(tr2):
1093 def txnaborthook(tr2):
1055 """To be run if transaction is aborted
1094 """To be run if transaction is aborted
1056 """
1095 """
1057 reporef().hook('txnabort', throw=False, txnname=desc,
1096 reporef().hook('txnabort', throw=False, txnname=desc,
1058 **tr2.hookargs)
1097 **tr2.hookargs)
1059 tr.addabort('txnabort-hook', txnaborthook)
1098 tr.addabort('txnabort-hook', txnaborthook)
1060 # avoid eager cache invalidation. in-memory data should be identical
1099 # avoid eager cache invalidation. in-memory data should be identical
1061 # to stored data if transaction has no error.
1100 # to stored data if transaction has no error.
1062 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1101 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1063 self._transref = weakref.ref(tr)
1102 self._transref = weakref.ref(tr)
1064 return tr
1103 return tr
1065
1104
1066 def _journalfiles(self):
1105 def _journalfiles(self):
1067 return ((self.svfs, 'journal'),
1106 return ((self.svfs, 'journal'),
1068 (self.vfs, 'journal.dirstate'),
1107 (self.vfs, 'journal.dirstate'),
1069 (self.vfs, 'journal.branch'),
1108 (self.vfs, 'journal.branch'),
1070 (self.vfs, 'journal.desc'),
1109 (self.vfs, 'journal.desc'),
1071 (self.vfs, 'journal.bookmarks'),
1110 (self.vfs, 'journal.bookmarks'),
1072 (self.svfs, 'journal.phaseroots'))
1111 (self.svfs, 'journal.phaseroots'))
1073
1112
1074 def undofiles(self):
1113 def undofiles(self):
1075 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1114 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1076
1115
1077 def _writejournal(self, desc):
1116 def _writejournal(self, desc):
1078 self.vfs.write("journal.dirstate",
1117 self.vfs.write("journal.dirstate",
1079 self.vfs.tryread("dirstate"))
1118 self.vfs.tryread("dirstate"))
1080 self.vfs.write("journal.branch",
1119 self.vfs.write("journal.branch",
1081 encoding.fromlocal(self.dirstate.branch()))
1120 encoding.fromlocal(self.dirstate.branch()))
1082 self.vfs.write("journal.desc",
1121 self.vfs.write("journal.desc",
1083 "%d\n%s\n" % (len(self), desc))
1122 "%d\n%s\n" % (len(self), desc))
1084 self.vfs.write("journal.bookmarks",
1123 self.vfs.write("journal.bookmarks",
1085 self.vfs.tryread("bookmarks"))
1124 self.vfs.tryread("bookmarks"))
1086 self.svfs.write("journal.phaseroots",
1125 self.svfs.write("journal.phaseroots",
1087 self.svfs.tryread("phaseroots"))
1126 self.svfs.tryread("phaseroots"))
1088
1127
1089 def recover(self):
1128 def recover(self):
1090 lock = self.lock()
1129 lock = self.lock()
1091 try:
1130 try:
1092 if self.svfs.exists("journal"):
1131 if self.svfs.exists("journal"):
1093 self.ui.status(_("rolling back interrupted transaction\n"))
1132 self.ui.status(_("rolling back interrupted transaction\n"))
1094 vfsmap = {'': self.svfs,
1133 vfsmap = {'': self.svfs,
1095 'plain': self.vfs,}
1134 'plain': self.vfs,}
1096 transaction.rollback(self.svfs, vfsmap, "journal",
1135 transaction.rollback(self.svfs, vfsmap, "journal",
1097 self.ui.warn)
1136 self.ui.warn)
1098 self.invalidate()
1137 self.invalidate()
1099 return True
1138 return True
1100 else:
1139 else:
1101 self.ui.warn(_("no interrupted transaction available\n"))
1140 self.ui.warn(_("no interrupted transaction available\n"))
1102 return False
1141 return False
1103 finally:
1142 finally:
1104 lock.release()
1143 lock.release()
1105
1144
1106 def rollback(self, dryrun=False, force=False):
1145 def rollback(self, dryrun=False, force=False):
1107 wlock = lock = dsguard = None
1146 wlock = lock = dsguard = None
1108 try:
1147 try:
1109 wlock = self.wlock()
1148 wlock = self.wlock()
1110 lock = self.lock()
1149 lock = self.lock()
1111 if self.svfs.exists("undo"):
1150 if self.svfs.exists("undo"):
1112 dsguard = cmdutil.dirstateguard(self, 'rollback')
1151 dsguard = cmdutil.dirstateguard(self, 'rollback')
1113
1152
1114 return self._rollback(dryrun, force, dsguard)
1153 return self._rollback(dryrun, force, dsguard)
1115 else:
1154 else:
1116 self.ui.warn(_("no rollback information available\n"))
1155 self.ui.warn(_("no rollback information available\n"))
1117 return 1
1156 return 1
1118 finally:
1157 finally:
1119 release(dsguard, lock, wlock)
1158 release(dsguard, lock, wlock)
1120
1159
1121 @unfilteredmethod # Until we get smarter cache management
1160 @unfilteredmethod # Until we get smarter cache management
1122 def _rollback(self, dryrun, force, dsguard):
1161 def _rollback(self, dryrun, force, dsguard):
1123 ui = self.ui
1162 ui = self.ui
1124 try:
1163 try:
1125 args = self.vfs.read('undo.desc').splitlines()
1164 args = self.vfs.read('undo.desc').splitlines()
1126 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1165 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1127 if len(args) >= 3:
1166 if len(args) >= 3:
1128 detail = args[2]
1167 detail = args[2]
1129 oldtip = oldlen - 1
1168 oldtip = oldlen - 1
1130
1169
1131 if detail and ui.verbose:
1170 if detail and ui.verbose:
1132 msg = (_('repository tip rolled back to revision %s'
1171 msg = (_('repository tip rolled back to revision %s'
1133 ' (undo %s: %s)\n')
1172 ' (undo %s: %s)\n')
1134 % (oldtip, desc, detail))
1173 % (oldtip, desc, detail))
1135 else:
1174 else:
1136 msg = (_('repository tip rolled back to revision %s'
1175 msg = (_('repository tip rolled back to revision %s'
1137 ' (undo %s)\n')
1176 ' (undo %s)\n')
1138 % (oldtip, desc))
1177 % (oldtip, desc))
1139 except IOError:
1178 except IOError:
1140 msg = _('rolling back unknown transaction\n')
1179 msg = _('rolling back unknown transaction\n')
1141 desc = None
1180 desc = None
1142
1181
1143 if not force and self['.'] != self['tip'] and desc == 'commit':
1182 if not force and self['.'] != self['tip'] and desc == 'commit':
1144 raise error.Abort(
1183 raise error.Abort(
1145 _('rollback of last commit while not checked out '
1184 _('rollback of last commit while not checked out '
1146 'may lose data'), hint=_('use -f to force'))
1185 'may lose data'), hint=_('use -f to force'))
1147
1186
1148 ui.status(msg)
1187 ui.status(msg)
1149 if dryrun:
1188 if dryrun:
1150 return 0
1189 return 0
1151
1190
1152 parents = self.dirstate.parents()
1191 parents = self.dirstate.parents()
1153 self.destroying()
1192 self.destroying()
1154 vfsmap = {'plain': self.vfs, '': self.svfs}
1193 vfsmap = {'plain': self.vfs, '': self.svfs}
1155 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
1194 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn)
1156 if self.vfs.exists('undo.bookmarks'):
1195 if self.vfs.exists('undo.bookmarks'):
1157 self.vfs.rename('undo.bookmarks', 'bookmarks')
1196 self.vfs.rename('undo.bookmarks', 'bookmarks')
1158 if self.svfs.exists('undo.phaseroots'):
1197 if self.svfs.exists('undo.phaseroots'):
1159 self.svfs.rename('undo.phaseroots', 'phaseroots')
1198 self.svfs.rename('undo.phaseroots', 'phaseroots')
1160 self.invalidate()
1199 self.invalidate()
1161
1200
1162 parentgone = (parents[0] not in self.changelog.nodemap or
1201 parentgone = (parents[0] not in self.changelog.nodemap or
1163 parents[1] not in self.changelog.nodemap)
1202 parents[1] not in self.changelog.nodemap)
1164 if parentgone:
1203 if parentgone:
1165 # prevent dirstateguard from overwriting already restored one
1204 # prevent dirstateguard from overwriting already restored one
1166 dsguard.close()
1205 dsguard.close()
1167
1206
1168 self.vfs.rename('undo.dirstate', 'dirstate')
1207 self.vfs.rename('undo.dirstate', 'dirstate')
1169 try:
1208 try:
1170 branch = self.vfs.read('undo.branch')
1209 branch = self.vfs.read('undo.branch')
1171 self.dirstate.setbranch(encoding.tolocal(branch))
1210 self.dirstate.setbranch(encoding.tolocal(branch))
1172 except IOError:
1211 except IOError:
1173 ui.warn(_('named branch could not be reset: '
1212 ui.warn(_('named branch could not be reset: '
1174 'current branch is still \'%s\'\n')
1213 'current branch is still \'%s\'\n')
1175 % self.dirstate.branch())
1214 % self.dirstate.branch())
1176
1215
1177 self.dirstate.invalidate()
1216 self.dirstate.invalidate()
1178 parents = tuple([p.rev() for p in self[None].parents()])
1217 parents = tuple([p.rev() for p in self[None].parents()])
1179 if len(parents) > 1:
1218 if len(parents) > 1:
1180 ui.status(_('working directory now based on '
1219 ui.status(_('working directory now based on '
1181 'revisions %d and %d\n') % parents)
1220 'revisions %d and %d\n') % parents)
1182 else:
1221 else:
1183 ui.status(_('working directory now based on '
1222 ui.status(_('working directory now based on '
1184 'revision %d\n') % parents)
1223 'revision %d\n') % parents)
1185 mergemod.mergestate.clean(self, self['.'].node())
1224 mergemod.mergestate.clean(self, self['.'].node())
1186
1225
1187 # TODO: if we know which new heads may result from this rollback, pass
1226 # TODO: if we know which new heads may result from this rollback, pass
1188 # them to destroy(), which will prevent the branchhead cache from being
1227 # them to destroy(), which will prevent the branchhead cache from being
1189 # invalidated.
1228 # invalidated.
1190 self.destroyed()
1229 self.destroyed()
1191 return 0
1230 return 0
1192
1231
1193 def invalidatecaches(self):
1232 def invalidatecaches(self):
1194
1233
1195 if '_tagscache' in vars(self):
1234 if '_tagscache' in vars(self):
1196 # can't use delattr on proxy
1235 # can't use delattr on proxy
1197 del self.__dict__['_tagscache']
1236 del self.__dict__['_tagscache']
1198
1237
1199 self.unfiltered()._branchcaches.clear()
1238 self.unfiltered()._branchcaches.clear()
1200 self.invalidatevolatilesets()
1239 self.invalidatevolatilesets()
1201
1240
1202 def invalidatevolatilesets(self):
1241 def invalidatevolatilesets(self):
1203 self.filteredrevcache.clear()
1242 self.filteredrevcache.clear()
1204 obsolete.clearobscaches(self)
1243 obsolete.clearobscaches(self)
1205
1244
1206 def invalidatedirstate(self):
1245 def invalidatedirstate(self):
1207 '''Invalidates the dirstate, causing the next call to dirstate
1246 '''Invalidates the dirstate, causing the next call to dirstate
1208 to check if it was modified since the last time it was read,
1247 to check if it was modified since the last time it was read,
1209 rereading it if it has.
1248 rereading it if it has.
1210
1249
1211 This is different to dirstate.invalidate() that it doesn't always
1250 This is different to dirstate.invalidate() that it doesn't always
1212 rereads the dirstate. Use dirstate.invalidate() if you want to
1251 rereads the dirstate. Use dirstate.invalidate() if you want to
1213 explicitly read the dirstate again (i.e. restoring it to a previous
1252 explicitly read the dirstate again (i.e. restoring it to a previous
1214 known good state).'''
1253 known good state).'''
1215 if hasunfilteredcache(self, 'dirstate'):
1254 if hasunfilteredcache(self, 'dirstate'):
1216 for k in self.dirstate._filecache:
1255 for k in self.dirstate._filecache:
1217 try:
1256 try:
1218 delattr(self.dirstate, k)
1257 delattr(self.dirstate, k)
1219 except AttributeError:
1258 except AttributeError:
1220 pass
1259 pass
1221 delattr(self.unfiltered(), 'dirstate')
1260 delattr(self.unfiltered(), 'dirstate')
1222
1261
1223 def invalidate(self, clearfilecache=False):
1262 def invalidate(self, clearfilecache=False):
1224 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1263 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1225 for k in self._filecache.keys():
1264 for k in self._filecache.keys():
1226 # dirstate is invalidated separately in invalidatedirstate()
1265 # dirstate is invalidated separately in invalidatedirstate()
1227 if k == 'dirstate':
1266 if k == 'dirstate':
1228 continue
1267 continue
1229
1268
1230 if clearfilecache:
1269 if clearfilecache:
1231 del self._filecache[k]
1270 del self._filecache[k]
1232 try:
1271 try:
1233 delattr(unfiltered, k)
1272 delattr(unfiltered, k)
1234 except AttributeError:
1273 except AttributeError:
1235 pass
1274 pass
1236 self.invalidatecaches()
1275 self.invalidatecaches()
1237 self.store.invalidatecaches()
1276 self.store.invalidatecaches()
1238
1277
1239 def invalidateall(self):
1278 def invalidateall(self):
1240 '''Fully invalidates both store and non-store parts, causing the
1279 '''Fully invalidates both store and non-store parts, causing the
1241 subsequent operation to reread any outside changes.'''
1280 subsequent operation to reread any outside changes.'''
1242 # extension should hook this to invalidate its caches
1281 # extension should hook this to invalidate its caches
1243 self.invalidate()
1282 self.invalidate()
1244 self.invalidatedirstate()
1283 self.invalidatedirstate()
1245
1284
1246 def _refreshfilecachestats(self, tr):
1285 def _refreshfilecachestats(self, tr):
1247 """Reload stats of cached files so that they are flagged as valid"""
1286 """Reload stats of cached files so that they are flagged as valid"""
1248 for k, ce in self._filecache.items():
1287 for k, ce in self._filecache.items():
1249 if k == 'dirstate' or k not in self.__dict__:
1288 if k == 'dirstate' or k not in self.__dict__:
1250 continue
1289 continue
1251 ce.refresh()
1290 ce.refresh()
1252
1291
1253 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
1292 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
1254 inheritchecker=None, parentenvvar=None):
1293 inheritchecker=None, parentenvvar=None):
1255 parentlock = None
1294 parentlock = None
1256 # the contents of parentenvvar are used by the underlying lock to
1295 # the contents of parentenvvar are used by the underlying lock to
1257 # determine whether it can be inherited
1296 # determine whether it can be inherited
1258 if parentenvvar is not None:
1297 if parentenvvar is not None:
1259 parentlock = os.environ.get(parentenvvar)
1298 parentlock = os.environ.get(parentenvvar)
1260 try:
1299 try:
1261 l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
1300 l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
1262 acquirefn=acquirefn, desc=desc,
1301 acquirefn=acquirefn, desc=desc,
1263 inheritchecker=inheritchecker,
1302 inheritchecker=inheritchecker,
1264 parentlock=parentlock)
1303 parentlock=parentlock)
1265 except error.LockHeld as inst:
1304 except error.LockHeld as inst:
1266 if not wait:
1305 if not wait:
1267 raise
1306 raise
1268 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1307 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1269 (desc, inst.locker))
1308 (desc, inst.locker))
1270 # default to 600 seconds timeout
1309 # default to 600 seconds timeout
1271 l = lockmod.lock(vfs, lockname,
1310 l = lockmod.lock(vfs, lockname,
1272 int(self.ui.config("ui", "timeout", "600")),
1311 int(self.ui.config("ui", "timeout", "600")),
1273 releasefn=releasefn, acquirefn=acquirefn,
1312 releasefn=releasefn, acquirefn=acquirefn,
1274 desc=desc)
1313 desc=desc)
1275 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1314 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1276 return l
1315 return l
1277
1316
1278 def _afterlock(self, callback):
1317 def _afterlock(self, callback):
1279 """add a callback to be run when the repository is fully unlocked
1318 """add a callback to be run when the repository is fully unlocked
1280
1319
1281 The callback will be executed when the outermost lock is released
1320 The callback will be executed when the outermost lock is released
1282 (with wlock being higher level than 'lock')."""
1321 (with wlock being higher level than 'lock')."""
1283 for ref in (self._wlockref, self._lockref):
1322 for ref in (self._wlockref, self._lockref):
1284 l = ref and ref()
1323 l = ref and ref()
1285 if l and l.held:
1324 if l and l.held:
1286 l.postrelease.append(callback)
1325 l.postrelease.append(callback)
1287 break
1326 break
1288 else: # no lock have been found.
1327 else: # no lock have been found.
1289 callback()
1328 callback()
1290
1329
1291 def lock(self, wait=True):
1330 def lock(self, wait=True):
1292 '''Lock the repository store (.hg/store) and return a weak reference
1331 '''Lock the repository store (.hg/store) and return a weak reference
1293 to the lock. Use this before modifying the store (e.g. committing or
1332 to the lock. Use this before modifying the store (e.g. committing or
1294 stripping). If you are opening a transaction, get a lock as well.)
1333 stripping). If you are opening a transaction, get a lock as well.)
1295
1334
1296 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1335 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1297 'wlock' first to avoid a dead-lock hazard.'''
1336 'wlock' first to avoid a dead-lock hazard.'''
1298 l = self._lockref and self._lockref()
1337 l = self._lockref and self._lockref()
1299 if l is not None and l.held:
1338 if l is not None and l.held:
1300 l.lock()
1339 l.lock()
1301 return l
1340 return l
1302
1341
1303 l = self._lock(self.svfs, "lock", wait, None,
1342 l = self._lock(self.svfs, "lock", wait, None,
1304 self.invalidate, _('repository %s') % self.origroot)
1343 self.invalidate, _('repository %s') % self.origroot)
1305 self._lockref = weakref.ref(l)
1344 self._lockref = weakref.ref(l)
1306 return l
1345 return l
1307
1346
1308 def _wlockchecktransaction(self):
1347 def _wlockchecktransaction(self):
1309 if self.currenttransaction() is not None:
1348 if self.currenttransaction() is not None:
1310 raise error.LockInheritanceContractViolation(
1349 raise error.LockInheritanceContractViolation(
1311 'wlock cannot be inherited in the middle of a transaction')
1350 'wlock cannot be inherited in the middle of a transaction')
1312
1351
1313 def wlock(self, wait=True):
1352 def wlock(self, wait=True):
1314 '''Lock the non-store parts of the repository (everything under
1353 '''Lock the non-store parts of the repository (everything under
1315 .hg except .hg/store) and return a weak reference to the lock.
1354 .hg except .hg/store) and return a weak reference to the lock.
1316
1355
1317 Use this before modifying files in .hg.
1356 Use this before modifying files in .hg.
1318
1357
1319 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1358 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1320 'wlock' first to avoid a dead-lock hazard.'''
1359 'wlock' first to avoid a dead-lock hazard.'''
1321 l = self._wlockref and self._wlockref()
1360 l = self._wlockref and self._wlockref()
1322 if l is not None and l.held:
1361 if l is not None and l.held:
1323 l.lock()
1362 l.lock()
1324 return l
1363 return l
1325
1364
1326 # We do not need to check for non-waiting lock acquisition. Such
1365 # We do not need to check for non-waiting lock acquisition. Such
1327 # acquisition would not cause dead-lock as they would just fail.
1366 # acquisition would not cause dead-lock as they would just fail.
1328 if wait and (self.ui.configbool('devel', 'all-warnings')
1367 if wait and (self.ui.configbool('devel', 'all-warnings')
1329 or self.ui.configbool('devel', 'check-locks')):
1368 or self.ui.configbool('devel', 'check-locks')):
1330 l = self._lockref and self._lockref()
1369 l = self._lockref and self._lockref()
1331 if l is not None and l.held:
1370 if l is not None and l.held:
1332 self.ui.develwarn('"wlock" acquired after "lock"')
1371 self.ui.develwarn('"wlock" acquired after "lock"')
1333
1372
1334 def unlock():
1373 def unlock():
1335 if self.dirstate.pendingparentchange():
1374 if self.dirstate.pendingparentchange():
1336 self.dirstate.invalidate()
1375 self.dirstate.invalidate()
1337 else:
1376 else:
1338 self.dirstate.write(None)
1377 self.dirstate.write(None)
1339
1378
1340 self._filecache['dirstate'].refresh()
1379 self._filecache['dirstate'].refresh()
1341
1380
1342 l = self._lock(self.vfs, "wlock", wait, unlock,
1381 l = self._lock(self.vfs, "wlock", wait, unlock,
1343 self.invalidatedirstate, _('working directory of %s') %
1382 self.invalidatedirstate, _('working directory of %s') %
1344 self.origroot,
1383 self.origroot,
1345 inheritchecker=self._wlockchecktransaction,
1384 inheritchecker=self._wlockchecktransaction,
1346 parentenvvar='HG_WLOCK_LOCKER')
1385 parentenvvar='HG_WLOCK_LOCKER')
1347 self._wlockref = weakref.ref(l)
1386 self._wlockref = weakref.ref(l)
1348 return l
1387 return l
1349
1388
1350 def _currentlock(self, lockref):
1389 def _currentlock(self, lockref):
1351 """Returns the lock if it's held, or None if it's not."""
1390 """Returns the lock if it's held, or None if it's not."""
1352 if lockref is None:
1391 if lockref is None:
1353 return None
1392 return None
1354 l = lockref()
1393 l = lockref()
1355 if l is None or not l.held:
1394 if l is None or not l.held:
1356 return None
1395 return None
1357 return l
1396 return l
1358
1397
1359 def currentwlock(self):
1398 def currentwlock(self):
1360 """Returns the wlock if it's held, or None if it's not."""
1399 """Returns the wlock if it's held, or None if it's not."""
1361 return self._currentlock(self._wlockref)
1400 return self._currentlock(self._wlockref)
1362
1401
1363 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1402 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1364 """
1403 """
1365 commit an individual file as part of a larger transaction
1404 commit an individual file as part of a larger transaction
1366 """
1405 """
1367
1406
1368 fname = fctx.path()
1407 fname = fctx.path()
1369 fparent1 = manifest1.get(fname, nullid)
1408 fparent1 = manifest1.get(fname, nullid)
1370 fparent2 = manifest2.get(fname, nullid)
1409 fparent2 = manifest2.get(fname, nullid)
1371 if isinstance(fctx, context.filectx):
1410 if isinstance(fctx, context.filectx):
1372 node = fctx.filenode()
1411 node = fctx.filenode()
1373 if node in [fparent1, fparent2]:
1412 if node in [fparent1, fparent2]:
1374 self.ui.debug('reusing %s filelog entry\n' % fname)
1413 self.ui.debug('reusing %s filelog entry\n' % fname)
1375 return node
1414 return node
1376
1415
1377 flog = self.file(fname)
1416 flog = self.file(fname)
1378 meta = {}
1417 meta = {}
1379 copy = fctx.renamed()
1418 copy = fctx.renamed()
1380 if copy and copy[0] != fname:
1419 if copy and copy[0] != fname:
1381 # Mark the new revision of this file as a copy of another
1420 # Mark the new revision of this file as a copy of another
1382 # file. This copy data will effectively act as a parent
1421 # file. This copy data will effectively act as a parent
1383 # of this new revision. If this is a merge, the first
1422 # of this new revision. If this is a merge, the first
1384 # parent will be the nullid (meaning "look up the copy data")
1423 # parent will be the nullid (meaning "look up the copy data")
1385 # and the second one will be the other parent. For example:
1424 # and the second one will be the other parent. For example:
1386 #
1425 #
1387 # 0 --- 1 --- 3 rev1 changes file foo
1426 # 0 --- 1 --- 3 rev1 changes file foo
1388 # \ / rev2 renames foo to bar and changes it
1427 # \ / rev2 renames foo to bar and changes it
1389 # \- 2 -/ rev3 should have bar with all changes and
1428 # \- 2 -/ rev3 should have bar with all changes and
1390 # should record that bar descends from
1429 # should record that bar descends from
1391 # bar in rev2 and foo in rev1
1430 # bar in rev2 and foo in rev1
1392 #
1431 #
1393 # this allows this merge to succeed:
1432 # this allows this merge to succeed:
1394 #
1433 #
1395 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1434 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1396 # \ / merging rev3 and rev4 should use bar@rev2
1435 # \ / merging rev3 and rev4 should use bar@rev2
1397 # \- 2 --- 4 as the merge base
1436 # \- 2 --- 4 as the merge base
1398 #
1437 #
1399
1438
1400 cfname = copy[0]
1439 cfname = copy[0]
1401 crev = manifest1.get(cfname)
1440 crev = manifest1.get(cfname)
1402 newfparent = fparent2
1441 newfparent = fparent2
1403
1442
1404 if manifest2: # branch merge
1443 if manifest2: # branch merge
1405 if fparent2 == nullid or crev is None: # copied on remote side
1444 if fparent2 == nullid or crev is None: # copied on remote side
1406 if cfname in manifest2:
1445 if cfname in manifest2:
1407 crev = manifest2[cfname]
1446 crev = manifest2[cfname]
1408 newfparent = fparent1
1447 newfparent = fparent1
1409
1448
1410 # Here, we used to search backwards through history to try to find
1449 # Here, we used to search backwards through history to try to find
1411 # where the file copy came from if the source of a copy was not in
1450 # where the file copy came from if the source of a copy was not in
1412 # the parent directory. However, this doesn't actually make sense to
1451 # the parent directory. However, this doesn't actually make sense to
1413 # do (what does a copy from something not in your working copy even
1452 # do (what does a copy from something not in your working copy even
1414 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
1453 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
1415 # the user that copy information was dropped, so if they didn't
1454 # the user that copy information was dropped, so if they didn't
1416 # expect this outcome it can be fixed, but this is the correct
1455 # expect this outcome it can be fixed, but this is the correct
1417 # behavior in this circumstance.
1456 # behavior in this circumstance.
1418
1457
1419 if crev:
1458 if crev:
1420 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1459 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1421 meta["copy"] = cfname
1460 meta["copy"] = cfname
1422 meta["copyrev"] = hex(crev)
1461 meta["copyrev"] = hex(crev)
1423 fparent1, fparent2 = nullid, newfparent
1462 fparent1, fparent2 = nullid, newfparent
1424 else:
1463 else:
1425 self.ui.warn(_("warning: can't find ancestor for '%s' "
1464 self.ui.warn(_("warning: can't find ancestor for '%s' "
1426 "copied from '%s'!\n") % (fname, cfname))
1465 "copied from '%s'!\n") % (fname, cfname))
1427
1466
1428 elif fparent1 == nullid:
1467 elif fparent1 == nullid:
1429 fparent1, fparent2 = fparent2, nullid
1468 fparent1, fparent2 = fparent2, nullid
1430 elif fparent2 != nullid:
1469 elif fparent2 != nullid:
1431 # is one parent an ancestor of the other?
1470 # is one parent an ancestor of the other?
1432 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1471 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1433 if fparent1 in fparentancestors:
1472 if fparent1 in fparentancestors:
1434 fparent1, fparent2 = fparent2, nullid
1473 fparent1, fparent2 = fparent2, nullid
1435 elif fparent2 in fparentancestors:
1474 elif fparent2 in fparentancestors:
1436 fparent2 = nullid
1475 fparent2 = nullid
1437
1476
1438 # is the file changed?
1477 # is the file changed?
1439 text = fctx.data()
1478 text = fctx.data()
1440 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1479 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1441 changelist.append(fname)
1480 changelist.append(fname)
1442 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1481 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1443 # are just the flags changed during merge?
1482 # are just the flags changed during merge?
1444 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1483 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1445 changelist.append(fname)
1484 changelist.append(fname)
1446
1485
1447 return fparent1
1486 return fparent1
1448
1487
1449 @unfilteredmethod
1488 @unfilteredmethod
1450 def commit(self, text="", user=None, date=None, match=None, force=False,
1489 def commit(self, text="", user=None, date=None, match=None, force=False,
1451 editor=False, extra=None):
1490 editor=False, extra=None):
1452 """Add a new revision to current repository.
1491 """Add a new revision to current repository.
1453
1492
1454 Revision information is gathered from the working directory,
1493 Revision information is gathered from the working directory,
1455 match can be used to filter the committed files. If editor is
1494 match can be used to filter the committed files. If editor is
1456 supplied, it is called to get a commit message.
1495 supplied, it is called to get a commit message.
1457 """
1496 """
1458 if extra is None:
1497 if extra is None:
1459 extra = {}
1498 extra = {}
1460
1499
1461 def fail(f, msg):
1500 def fail(f, msg):
1462 raise error.Abort('%s: %s' % (f, msg))
1501 raise error.Abort('%s: %s' % (f, msg))
1463
1502
1464 if not match:
1503 if not match:
1465 match = matchmod.always(self.root, '')
1504 match = matchmod.always(self.root, '')
1466
1505
1467 if not force:
1506 if not force:
1468 vdirs = []
1507 vdirs = []
1469 match.explicitdir = vdirs.append
1508 match.explicitdir = vdirs.append
1470 match.bad = fail
1509 match.bad = fail
1471
1510
1472 wlock = lock = tr = None
1511 wlock = lock = tr = None
1473 try:
1512 try:
1474 wlock = self.wlock()
1513 wlock = self.wlock()
1475 lock = self.lock() # for recent changelog (see issue4368)
1514 lock = self.lock() # for recent changelog (see issue4368)
1476
1515
1477 wctx = self[None]
1516 wctx = self[None]
1478 merge = len(wctx.parents()) > 1
1517 merge = len(wctx.parents()) > 1
1479
1518
1480 if not force and merge and match.ispartial():
1519 if not force and merge and match.ispartial():
1481 raise error.Abort(_('cannot partially commit a merge '
1520 raise error.Abort(_('cannot partially commit a merge '
1482 '(do not specify files or patterns)'))
1521 '(do not specify files or patterns)'))
1483
1522
1484 status = self.status(match=match, clean=force)
1523 status = self.status(match=match, clean=force)
1485 if force:
1524 if force:
1486 status.modified.extend(status.clean) # mq may commit clean files
1525 status.modified.extend(status.clean) # mq may commit clean files
1487
1526
1488 # check subrepos
1527 # check subrepos
1489 subs = []
1528 subs = []
1490 commitsubs = set()
1529 commitsubs = set()
1491 newstate = wctx.substate.copy()
1530 newstate = wctx.substate.copy()
1492 # only manage subrepos and .hgsubstate if .hgsub is present
1531 # only manage subrepos and .hgsubstate if .hgsub is present
1493 if '.hgsub' in wctx:
1532 if '.hgsub' in wctx:
1494 # we'll decide whether to track this ourselves, thanks
1533 # we'll decide whether to track this ourselves, thanks
1495 for c in status.modified, status.added, status.removed:
1534 for c in status.modified, status.added, status.removed:
1496 if '.hgsubstate' in c:
1535 if '.hgsubstate' in c:
1497 c.remove('.hgsubstate')
1536 c.remove('.hgsubstate')
1498
1537
1499 # compare current state to last committed state
1538 # compare current state to last committed state
1500 # build new substate based on last committed state
1539 # build new substate based on last committed state
1501 oldstate = wctx.p1().substate
1540 oldstate = wctx.p1().substate
1502 for s in sorted(newstate.keys()):
1541 for s in sorted(newstate.keys()):
1503 if not match(s):
1542 if not match(s):
1504 # ignore working copy, use old state if present
1543 # ignore working copy, use old state if present
1505 if s in oldstate:
1544 if s in oldstate:
1506 newstate[s] = oldstate[s]
1545 newstate[s] = oldstate[s]
1507 continue
1546 continue
1508 if not force:
1547 if not force:
1509 raise error.Abort(
1548 raise error.Abort(
1510 _("commit with new subrepo %s excluded") % s)
1549 _("commit with new subrepo %s excluded") % s)
1511 dirtyreason = wctx.sub(s).dirtyreason(True)
1550 dirtyreason = wctx.sub(s).dirtyreason(True)
1512 if dirtyreason:
1551 if dirtyreason:
1513 if not self.ui.configbool('ui', 'commitsubrepos'):
1552 if not self.ui.configbool('ui', 'commitsubrepos'):
1514 raise error.Abort(dirtyreason,
1553 raise error.Abort(dirtyreason,
1515 hint=_("use --subrepos for recursive commit"))
1554 hint=_("use --subrepos for recursive commit"))
1516 subs.append(s)
1555 subs.append(s)
1517 commitsubs.add(s)
1556 commitsubs.add(s)
1518 else:
1557 else:
1519 bs = wctx.sub(s).basestate()
1558 bs = wctx.sub(s).basestate()
1520 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1559 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1521 if oldstate.get(s, (None, None, None))[1] != bs:
1560 if oldstate.get(s, (None, None, None))[1] != bs:
1522 subs.append(s)
1561 subs.append(s)
1523
1562
1524 # check for removed subrepos
1563 # check for removed subrepos
1525 for p in wctx.parents():
1564 for p in wctx.parents():
1526 r = [s for s in p.substate if s not in newstate]
1565 r = [s for s in p.substate if s not in newstate]
1527 subs += [s for s in r if match(s)]
1566 subs += [s for s in r if match(s)]
1528 if subs:
1567 if subs:
1529 if (not match('.hgsub') and
1568 if (not match('.hgsub') and
1530 '.hgsub' in (wctx.modified() + wctx.added())):
1569 '.hgsub' in (wctx.modified() + wctx.added())):
1531 raise error.Abort(
1570 raise error.Abort(
1532 _("can't commit subrepos without .hgsub"))
1571 _("can't commit subrepos without .hgsub"))
1533 status.modified.insert(0, '.hgsubstate')
1572 status.modified.insert(0, '.hgsubstate')
1534
1573
1535 elif '.hgsub' in status.removed:
1574 elif '.hgsub' in status.removed:
1536 # clean up .hgsubstate when .hgsub is removed
1575 # clean up .hgsubstate when .hgsub is removed
1537 if ('.hgsubstate' in wctx and
1576 if ('.hgsubstate' in wctx and
1538 '.hgsubstate' not in (status.modified + status.added +
1577 '.hgsubstate' not in (status.modified + status.added +
1539 status.removed)):
1578 status.removed)):
1540 status.removed.insert(0, '.hgsubstate')
1579 status.removed.insert(0, '.hgsubstate')
1541
1580
1542 # make sure all explicit patterns are matched
1581 # make sure all explicit patterns are matched
1543 if not force and (match.isexact() or match.prefix()):
1582 if not force and (match.isexact() or match.prefix()):
1544 matched = set(status.modified + status.added + status.removed)
1583 matched = set(status.modified + status.added + status.removed)
1545
1584
1546 for f in match.files():
1585 for f in match.files():
1547 f = self.dirstate.normalize(f)
1586 f = self.dirstate.normalize(f)
1548 if f == '.' or f in matched or f in wctx.substate:
1587 if f == '.' or f in matched or f in wctx.substate:
1549 continue
1588 continue
1550 if f in status.deleted:
1589 if f in status.deleted:
1551 fail(f, _('file not found!'))
1590 fail(f, _('file not found!'))
1552 if f in vdirs: # visited directory
1591 if f in vdirs: # visited directory
1553 d = f + '/'
1592 d = f + '/'
1554 for mf in matched:
1593 for mf in matched:
1555 if mf.startswith(d):
1594 if mf.startswith(d):
1556 break
1595 break
1557 else:
1596 else:
1558 fail(f, _("no match under directory!"))
1597 fail(f, _("no match under directory!"))
1559 elif f not in self.dirstate:
1598 elif f not in self.dirstate:
1560 fail(f, _("file not tracked!"))
1599 fail(f, _("file not tracked!"))
1561
1600
1562 cctx = context.workingcommitctx(self, status,
1601 cctx = context.workingcommitctx(self, status,
1563 text, user, date, extra)
1602 text, user, date, extra)
1564
1603
1565 # internal config: ui.allowemptycommit
1604 # internal config: ui.allowemptycommit
1566 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1605 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1567 or extra.get('close') or merge or cctx.files()
1606 or extra.get('close') or merge or cctx.files()
1568 or self.ui.configbool('ui', 'allowemptycommit'))
1607 or self.ui.configbool('ui', 'allowemptycommit'))
1569 if not allowemptycommit:
1608 if not allowemptycommit:
1570 return None
1609 return None
1571
1610
1572 if merge and cctx.deleted():
1611 if merge and cctx.deleted():
1573 raise error.Abort(_("cannot commit merge with missing files"))
1612 raise error.Abort(_("cannot commit merge with missing files"))
1574
1613
1575 ms = mergemod.mergestate.read(self)
1614 ms = mergemod.mergestate.read(self)
1576
1615
1577 if list(ms.unresolved()):
1616 if list(ms.unresolved()):
1578 raise error.Abort(_('unresolved merge conflicts '
1617 raise error.Abort(_('unresolved merge conflicts '
1579 '(see "hg help resolve")'))
1618 '(see "hg help resolve")'))
1580 if ms.mdstate() != 's' or list(ms.driverresolved()):
1619 if ms.mdstate() != 's' or list(ms.driverresolved()):
1581 raise error.Abort(_('driver-resolved merge conflicts'),
1620 raise error.Abort(_('driver-resolved merge conflicts'),
1582 hint=_('run "hg resolve --all" to resolve'))
1621 hint=_('run "hg resolve --all" to resolve'))
1583
1622
1584 if editor:
1623 if editor:
1585 cctx._text = editor(self, cctx, subs)
1624 cctx._text = editor(self, cctx, subs)
1586 edited = (text != cctx._text)
1625 edited = (text != cctx._text)
1587
1626
1588 # Save commit message in case this transaction gets rolled back
1627 # Save commit message in case this transaction gets rolled back
1589 # (e.g. by a pretxncommit hook). Leave the content alone on
1628 # (e.g. by a pretxncommit hook). Leave the content alone on
1590 # the assumption that the user will use the same editor again.
1629 # the assumption that the user will use the same editor again.
1591 msgfn = self.savecommitmessage(cctx._text)
1630 msgfn = self.savecommitmessage(cctx._text)
1592
1631
1593 # commit subs and write new state
1632 # commit subs and write new state
1594 if subs:
1633 if subs:
1595 for s in sorted(commitsubs):
1634 for s in sorted(commitsubs):
1596 sub = wctx.sub(s)
1635 sub = wctx.sub(s)
1597 self.ui.status(_('committing subrepository %s\n') %
1636 self.ui.status(_('committing subrepository %s\n') %
1598 subrepo.subrelpath(sub))
1637 subrepo.subrelpath(sub))
1599 sr = sub.commit(cctx._text, user, date)
1638 sr = sub.commit(cctx._text, user, date)
1600 newstate[s] = (newstate[s][0], sr)
1639 newstate[s] = (newstate[s][0], sr)
1601 subrepo.writestate(self, newstate)
1640 subrepo.writestate(self, newstate)
1602
1641
1603 p1, p2 = self.dirstate.parents()
1642 p1, p2 = self.dirstate.parents()
1604 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1643 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1605 try:
1644 try:
1606 self.hook("precommit", throw=True, parent1=hookp1,
1645 self.hook("precommit", throw=True, parent1=hookp1,
1607 parent2=hookp2)
1646 parent2=hookp2)
1608 tr = self.transaction('commit')
1647 tr = self.transaction('commit')
1609 ret = self.commitctx(cctx, True)
1648 ret = self.commitctx(cctx, True)
1610 except: # re-raises
1649 except: # re-raises
1611 if edited:
1650 if edited:
1612 self.ui.write(
1651 self.ui.write(
1613 _('note: commit message saved in %s\n') % msgfn)
1652 _('note: commit message saved in %s\n') % msgfn)
1614 raise
1653 raise
1615 # update bookmarks, dirstate and mergestate
1654 # update bookmarks, dirstate and mergestate
1616 bookmarks.update(self, [p1, p2], ret)
1655 bookmarks.update(self, [p1, p2], ret)
1617 cctx.markcommitted(ret)
1656 cctx.markcommitted(ret)
1618 ms.reset()
1657 ms.reset()
1619 tr.close()
1658 tr.close()
1620
1659
1621 finally:
1660 finally:
1622 lockmod.release(tr, lock, wlock)
1661 lockmod.release(tr, lock, wlock)
1623
1662
1624 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1663 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1625 # hack for command that use a temporary commit (eg: histedit)
1664 # hack for command that use a temporary commit (eg: histedit)
1626 # temporary commit got stripped before hook release
1665 # temporary commit got stripped before hook release
1627 if self.changelog.hasnode(ret):
1666 if self.changelog.hasnode(ret):
1628 self.hook("commit", node=node, parent1=parent1,
1667 self.hook("commit", node=node, parent1=parent1,
1629 parent2=parent2)
1668 parent2=parent2)
1630 self._afterlock(commithook)
1669 self._afterlock(commithook)
1631 return ret
1670 return ret
1632
1671
1633 @unfilteredmethod
1672 @unfilteredmethod
1634 def commitctx(self, ctx, error=False):
1673 def commitctx(self, ctx, error=False):
1635 """Add a new revision to current repository.
1674 """Add a new revision to current repository.
1636 Revision information is passed via the context argument.
1675 Revision information is passed via the context argument.
1637 """
1676 """
1638
1677
1639 tr = None
1678 tr = None
1640 p1, p2 = ctx.p1(), ctx.p2()
1679 p1, p2 = ctx.p1(), ctx.p2()
1641 user = ctx.user()
1680 user = ctx.user()
1642
1681
1643 lock = self.lock()
1682 lock = self.lock()
1644 try:
1683 try:
1645 tr = self.transaction("commit")
1684 tr = self.transaction("commit")
1646 trp = weakref.proxy(tr)
1685 trp = weakref.proxy(tr)
1647
1686
1648 if ctx.files():
1687 if ctx.files():
1649 m1 = p1.manifest()
1688 m1 = p1.manifest()
1650 m2 = p2.manifest()
1689 m2 = p2.manifest()
1651 m = m1.copy()
1690 m = m1.copy()
1652
1691
1653 # check in files
1692 # check in files
1654 added = []
1693 added = []
1655 changed = []
1694 changed = []
1656 removed = list(ctx.removed())
1695 removed = list(ctx.removed())
1657 linkrev = len(self)
1696 linkrev = len(self)
1658 self.ui.note(_("committing files:\n"))
1697 self.ui.note(_("committing files:\n"))
1659 for f in sorted(ctx.modified() + ctx.added()):
1698 for f in sorted(ctx.modified() + ctx.added()):
1660 self.ui.note(f + "\n")
1699 self.ui.note(f + "\n")
1661 try:
1700 try:
1662 fctx = ctx[f]
1701 fctx = ctx[f]
1663 if fctx is None:
1702 if fctx is None:
1664 removed.append(f)
1703 removed.append(f)
1665 else:
1704 else:
1666 added.append(f)
1705 added.append(f)
1667 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1706 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1668 trp, changed)
1707 trp, changed)
1669 m.setflag(f, fctx.flags())
1708 m.setflag(f, fctx.flags())
1670 except OSError as inst:
1709 except OSError as inst:
1671 self.ui.warn(_("trouble committing %s!\n") % f)
1710 self.ui.warn(_("trouble committing %s!\n") % f)
1672 raise
1711 raise
1673 except IOError as inst:
1712 except IOError as inst:
1674 errcode = getattr(inst, 'errno', errno.ENOENT)
1713 errcode = getattr(inst, 'errno', errno.ENOENT)
1675 if error or errcode and errcode != errno.ENOENT:
1714 if error or errcode and errcode != errno.ENOENT:
1676 self.ui.warn(_("trouble committing %s!\n") % f)
1715 self.ui.warn(_("trouble committing %s!\n") % f)
1677 raise
1716 raise
1678
1717
1679 # update manifest
1718 # update manifest
1680 self.ui.note(_("committing manifest\n"))
1719 self.ui.note(_("committing manifest\n"))
1681 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1720 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1682 drop = [f for f in removed if f in m]
1721 drop = [f for f in removed if f in m]
1683 for f in drop:
1722 for f in drop:
1684 del m[f]
1723 del m[f]
1685 mn = self.manifest.add(m, trp, linkrev,
1724 mn = self.manifest.add(m, trp, linkrev,
1686 p1.manifestnode(), p2.manifestnode(),
1725 p1.manifestnode(), p2.manifestnode(),
1687 added, drop)
1726 added, drop)
1688 files = changed + removed
1727 files = changed + removed
1689 else:
1728 else:
1690 mn = p1.manifestnode()
1729 mn = p1.manifestnode()
1691 files = []
1730 files = []
1692
1731
1693 # update changelog
1732 # update changelog
1694 self.ui.note(_("committing changelog\n"))
1733 self.ui.note(_("committing changelog\n"))
1695 self.changelog.delayupdate(tr)
1734 self.changelog.delayupdate(tr)
1696 n = self.changelog.add(mn, files, ctx.description(),
1735 n = self.changelog.add(mn, files, ctx.description(),
1697 trp, p1.node(), p2.node(),
1736 trp, p1.node(), p2.node(),
1698 user, ctx.date(), ctx.extra().copy())
1737 user, ctx.date(), ctx.extra().copy())
1699 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1738 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1700 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1739 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1701 parent2=xp2)
1740 parent2=xp2)
1702 # set the new commit is proper phase
1741 # set the new commit is proper phase
1703 targetphase = subrepo.newcommitphase(self.ui, ctx)
1742 targetphase = subrepo.newcommitphase(self.ui, ctx)
1704 if targetphase:
1743 if targetphase:
1705 # retract boundary do not alter parent changeset.
1744 # retract boundary do not alter parent changeset.
1706 # if a parent have higher the resulting phase will
1745 # if a parent have higher the resulting phase will
1707 # be compliant anyway
1746 # be compliant anyway
1708 #
1747 #
1709 # if minimal phase was 0 we don't need to retract anything
1748 # if minimal phase was 0 we don't need to retract anything
1710 phases.retractboundary(self, tr, targetphase, [n])
1749 phases.retractboundary(self, tr, targetphase, [n])
1711 tr.close()
1750 tr.close()
1712 branchmap.updatecache(self.filtered('served'))
1751 branchmap.updatecache(self.filtered('served'))
1713 return n
1752 return n
1714 finally:
1753 finally:
1715 if tr:
1754 if tr:
1716 tr.release()
1755 tr.release()
1717 lock.release()
1756 lock.release()
1718
1757
1719 @unfilteredmethod
1758 @unfilteredmethod
1720 def destroying(self):
1759 def destroying(self):
1721 '''Inform the repository that nodes are about to be destroyed.
1760 '''Inform the repository that nodes are about to be destroyed.
1722 Intended for use by strip and rollback, so there's a common
1761 Intended for use by strip and rollback, so there's a common
1723 place for anything that has to be done before destroying history.
1762 place for anything that has to be done before destroying history.
1724
1763
1725 This is mostly useful for saving state that is in memory and waiting
1764 This is mostly useful for saving state that is in memory and waiting
1726 to be flushed when the current lock is released. Because a call to
1765 to be flushed when the current lock is released. Because a call to
1727 destroyed is imminent, the repo will be invalidated causing those
1766 destroyed is imminent, the repo will be invalidated causing those
1728 changes to stay in memory (waiting for the next unlock), or vanish
1767 changes to stay in memory (waiting for the next unlock), or vanish
1729 completely.
1768 completely.
1730 '''
1769 '''
1731 # When using the same lock to commit and strip, the phasecache is left
1770 # When using the same lock to commit and strip, the phasecache is left
1732 # dirty after committing. Then when we strip, the repo is invalidated,
1771 # dirty after committing. Then when we strip, the repo is invalidated,
1733 # causing those changes to disappear.
1772 # causing those changes to disappear.
1734 if '_phasecache' in vars(self):
1773 if '_phasecache' in vars(self):
1735 self._phasecache.write()
1774 self._phasecache.write()
1736
1775
1737 @unfilteredmethod
1776 @unfilteredmethod
1738 def destroyed(self):
1777 def destroyed(self):
1739 '''Inform the repository that nodes have been destroyed.
1778 '''Inform the repository that nodes have been destroyed.
1740 Intended for use by strip and rollback, so there's a common
1779 Intended for use by strip and rollback, so there's a common
1741 place for anything that has to be done after destroying history.
1780 place for anything that has to be done after destroying history.
1742 '''
1781 '''
1743 # When one tries to:
1782 # When one tries to:
1744 # 1) destroy nodes thus calling this method (e.g. strip)
1783 # 1) destroy nodes thus calling this method (e.g. strip)
1745 # 2) use phasecache somewhere (e.g. commit)
1784 # 2) use phasecache somewhere (e.g. commit)
1746 #
1785 #
1747 # then 2) will fail because the phasecache contains nodes that were
1786 # then 2) will fail because the phasecache contains nodes that were
1748 # removed. We can either remove phasecache from the filecache,
1787 # removed. We can either remove phasecache from the filecache,
1749 # causing it to reload next time it is accessed, or simply filter
1788 # causing it to reload next time it is accessed, or simply filter
1750 # the removed nodes now and write the updated cache.
1789 # the removed nodes now and write the updated cache.
1751 self._phasecache.filterunknown(self)
1790 self._phasecache.filterunknown(self)
1752 self._phasecache.write()
1791 self._phasecache.write()
1753
1792
1754 # update the 'served' branch cache to help read only server process
1793 # update the 'served' branch cache to help read only server process
1755 # Thanks to branchcache collaboration this is done from the nearest
1794 # Thanks to branchcache collaboration this is done from the nearest
1756 # filtered subset and it is expected to be fast.
1795 # filtered subset and it is expected to be fast.
1757 branchmap.updatecache(self.filtered('served'))
1796 branchmap.updatecache(self.filtered('served'))
1758
1797
1759 # Ensure the persistent tag cache is updated. Doing it now
1798 # Ensure the persistent tag cache is updated. Doing it now
1760 # means that the tag cache only has to worry about destroyed
1799 # means that the tag cache only has to worry about destroyed
1761 # heads immediately after a strip/rollback. That in turn
1800 # heads immediately after a strip/rollback. That in turn
1762 # guarantees that "cachetip == currenttip" (comparing both rev
1801 # guarantees that "cachetip == currenttip" (comparing both rev
1763 # and node) always means no nodes have been added or destroyed.
1802 # and node) always means no nodes have been added or destroyed.
1764
1803
1765 # XXX this is suboptimal when qrefresh'ing: we strip the current
1804 # XXX this is suboptimal when qrefresh'ing: we strip the current
1766 # head, refresh the tag cache, then immediately add a new head.
1805 # head, refresh the tag cache, then immediately add a new head.
1767 # But I think doing it this way is necessary for the "instant
1806 # But I think doing it this way is necessary for the "instant
1768 # tag cache retrieval" case to work.
1807 # tag cache retrieval" case to work.
1769 self.invalidate()
1808 self.invalidate()
1770
1809
1771 def walk(self, match, node=None):
1810 def walk(self, match, node=None):
1772 '''
1811 '''
1773 walk recursively through the directory tree or a given
1812 walk recursively through the directory tree or a given
1774 changeset, finding all files matched by the match
1813 changeset, finding all files matched by the match
1775 function
1814 function
1776 '''
1815 '''
1777 return self[node].walk(match)
1816 return self[node].walk(match)
1778
1817
1779 def status(self, node1='.', node2=None, match=None,
1818 def status(self, node1='.', node2=None, match=None,
1780 ignored=False, clean=False, unknown=False,
1819 ignored=False, clean=False, unknown=False,
1781 listsubrepos=False):
1820 listsubrepos=False):
1782 '''a convenience method that calls node1.status(node2)'''
1821 '''a convenience method that calls node1.status(node2)'''
1783 return self[node1].status(node2, match, ignored, clean, unknown,
1822 return self[node1].status(node2, match, ignored, clean, unknown,
1784 listsubrepos)
1823 listsubrepos)
1785
1824
1786 def heads(self, start=None):
1825 def heads(self, start=None):
1787 heads = self.changelog.heads(start)
1826 heads = self.changelog.heads(start)
1788 # sort the output in rev descending order
1827 # sort the output in rev descending order
1789 return sorted(heads, key=self.changelog.rev, reverse=True)
1828 return sorted(heads, key=self.changelog.rev, reverse=True)
1790
1829
1791 def branchheads(self, branch=None, start=None, closed=False):
1830 def branchheads(self, branch=None, start=None, closed=False):
1792 '''return a (possibly filtered) list of heads for the given branch
1831 '''return a (possibly filtered) list of heads for the given branch
1793
1832
1794 Heads are returned in topological order, from newest to oldest.
1833 Heads are returned in topological order, from newest to oldest.
1795 If branch is None, use the dirstate branch.
1834 If branch is None, use the dirstate branch.
1796 If start is not None, return only heads reachable from start.
1835 If start is not None, return only heads reachable from start.
1797 If closed is True, return heads that are marked as closed as well.
1836 If closed is True, return heads that are marked as closed as well.
1798 '''
1837 '''
1799 if branch is None:
1838 if branch is None:
1800 branch = self[None].branch()
1839 branch = self[None].branch()
1801 branches = self.branchmap()
1840 branches = self.branchmap()
1802 if branch not in branches:
1841 if branch not in branches:
1803 return []
1842 return []
1804 # the cache returns heads ordered lowest to highest
1843 # the cache returns heads ordered lowest to highest
1805 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1844 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1806 if start is not None:
1845 if start is not None:
1807 # filter out the heads that cannot be reached from startrev
1846 # filter out the heads that cannot be reached from startrev
1808 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1847 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1809 bheads = [h for h in bheads if h in fbheads]
1848 bheads = [h for h in bheads if h in fbheads]
1810 return bheads
1849 return bheads
1811
1850
1812 def branches(self, nodes):
1851 def branches(self, nodes):
1813 if not nodes:
1852 if not nodes:
1814 nodes = [self.changelog.tip()]
1853 nodes = [self.changelog.tip()]
1815 b = []
1854 b = []
1816 for n in nodes:
1855 for n in nodes:
1817 t = n
1856 t = n
1818 while True:
1857 while True:
1819 p = self.changelog.parents(n)
1858 p = self.changelog.parents(n)
1820 if p[1] != nullid or p[0] == nullid:
1859 if p[1] != nullid or p[0] == nullid:
1821 b.append((t, n, p[0], p[1]))
1860 b.append((t, n, p[0], p[1]))
1822 break
1861 break
1823 n = p[0]
1862 n = p[0]
1824 return b
1863 return b
1825
1864
1826 def between(self, pairs):
1865 def between(self, pairs):
1827 r = []
1866 r = []
1828
1867
1829 for top, bottom in pairs:
1868 for top, bottom in pairs:
1830 n, l, i = top, [], 0
1869 n, l, i = top, [], 0
1831 f = 1
1870 f = 1
1832
1871
1833 while n != bottom and n != nullid:
1872 while n != bottom and n != nullid:
1834 p = self.changelog.parents(n)[0]
1873 p = self.changelog.parents(n)[0]
1835 if i == f:
1874 if i == f:
1836 l.append(n)
1875 l.append(n)
1837 f = f * 2
1876 f = f * 2
1838 n = p
1877 n = p
1839 i += 1
1878 i += 1
1840
1879
1841 r.append(l)
1880 r.append(l)
1842
1881
1843 return r
1882 return r
1844
1883
1845 def checkpush(self, pushop):
1884 def checkpush(self, pushop):
1846 """Extensions can override this function if additional checks have
1885 """Extensions can override this function if additional checks have
1847 to be performed before pushing, or call it if they override push
1886 to be performed before pushing, or call it if they override push
1848 command.
1887 command.
1849 """
1888 """
1850 pass
1889 pass
1851
1890
1852 @unfilteredpropertycache
1891 @unfilteredpropertycache
1853 def prepushoutgoinghooks(self):
1892 def prepushoutgoinghooks(self):
1854 """Return util.hooks consists of "(repo, remote, outgoing)"
1893 """Return util.hooks consists of "(repo, remote, outgoing)"
1855 functions, which are called before pushing changesets.
1894 functions, which are called before pushing changesets.
1856 """
1895 """
1857 return util.hooks()
1896 return util.hooks()
1858
1897
1859 def pushkey(self, namespace, key, old, new):
1898 def pushkey(self, namespace, key, old, new):
1860 try:
1899 try:
1861 tr = self.currenttransaction()
1900 tr = self.currenttransaction()
1862 hookargs = {}
1901 hookargs = {}
1863 if tr is not None:
1902 if tr is not None:
1864 hookargs.update(tr.hookargs)
1903 hookargs.update(tr.hookargs)
1865 hookargs['namespace'] = namespace
1904 hookargs['namespace'] = namespace
1866 hookargs['key'] = key
1905 hookargs['key'] = key
1867 hookargs['old'] = old
1906 hookargs['old'] = old
1868 hookargs['new'] = new
1907 hookargs['new'] = new
1869 self.hook('prepushkey', throw=True, **hookargs)
1908 self.hook('prepushkey', throw=True, **hookargs)
1870 except error.HookAbort as exc:
1909 except error.HookAbort as exc:
1871 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1910 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
1872 if exc.hint:
1911 if exc.hint:
1873 self.ui.write_err(_("(%s)\n") % exc.hint)
1912 self.ui.write_err(_("(%s)\n") % exc.hint)
1874 return False
1913 return False
1875 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1914 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
1876 ret = pushkey.push(self, namespace, key, old, new)
1915 ret = pushkey.push(self, namespace, key, old, new)
1877 def runhook():
1916 def runhook():
1878 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1917 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
1879 ret=ret)
1918 ret=ret)
1880 self._afterlock(runhook)
1919 self._afterlock(runhook)
1881 return ret
1920 return ret
1882
1921
1883 def listkeys(self, namespace):
1922 def listkeys(self, namespace):
1884 self.hook('prelistkeys', throw=True, namespace=namespace)
1923 self.hook('prelistkeys', throw=True, namespace=namespace)
1885 self.ui.debug('listing keys for "%s"\n' % namespace)
1924 self.ui.debug('listing keys for "%s"\n' % namespace)
1886 values = pushkey.list(self, namespace)
1925 values = pushkey.list(self, namespace)
1887 self.hook('listkeys', namespace=namespace, values=values)
1926 self.hook('listkeys', namespace=namespace, values=values)
1888 return values
1927 return values
1889
1928
1890 def debugwireargs(self, one, two, three=None, four=None, five=None):
1929 def debugwireargs(self, one, two, three=None, four=None, five=None):
1891 '''used to test argument passing over the wire'''
1930 '''used to test argument passing over the wire'''
1892 return "%s %s %s %s %s" % (one, two, three, four, five)
1931 return "%s %s %s %s %s" % (one, two, three, four, five)
1893
1932
1894 def savecommitmessage(self, text):
1933 def savecommitmessage(self, text):
1895 fp = self.vfs('last-message.txt', 'wb')
1934 fp = self.vfs('last-message.txt', 'wb')
1896 try:
1935 try:
1897 fp.write(text)
1936 fp.write(text)
1898 finally:
1937 finally:
1899 fp.close()
1938 fp.close()
1900 return self.pathto(fp.name[len(self.root) + 1:])
1939 return self.pathto(fp.name[len(self.root) + 1:])
1901
1940
1902 # used to avoid circular references so destructors work
1941 # used to avoid circular references so destructors work
1903 def aftertrans(files):
1942 def aftertrans(files):
1904 renamefiles = [tuple(t) for t in files]
1943 renamefiles = [tuple(t) for t in files]
1905 def a():
1944 def a():
1906 for vfs, src, dest in renamefiles:
1945 for vfs, src, dest in renamefiles:
1907 try:
1946 try:
1908 vfs.rename(src, dest)
1947 vfs.rename(src, dest)
1909 except OSError: # journal file does not yet exist
1948 except OSError: # journal file does not yet exist
1910 pass
1949 pass
1911 return a
1950 return a
1912
1951
1913 def undoname(fn):
1952 def undoname(fn):
1914 base, name = os.path.split(fn)
1953 base, name = os.path.split(fn)
1915 assert name.startswith('journal')
1954 assert name.startswith('journal')
1916 return os.path.join(base, name.replace('journal', 'undo', 1))
1955 return os.path.join(base, name.replace('journal', 'undo', 1))
1917
1956
1918 def instance(ui, path, create):
1957 def instance(ui, path, create):
1919 return localrepository(ui, util.urllocalpath(path), create)
1958 return localrepository(ui, util.urllocalpath(path), create)
1920
1959
1921 def islocal(path):
1960 def islocal(path):
1922 return True
1961 return True
@@ -1,191 +1,190 b''
1 #require test-repo
1 #require test-repo
2
2
3 $ cd "$TESTDIR"/..
3 $ cd "$TESTDIR"/..
4
4
5 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
5 $ hg files 'set:(**.py)' | sed 's|\\|/|g' | xargs python contrib/check-py3-compat.py
6 contrib/casesmash.py not using absolute_import
6 contrib/casesmash.py not using absolute_import
7 contrib/check-code.py not using absolute_import
7 contrib/check-code.py not using absolute_import
8 contrib/check-code.py requires print_function
8 contrib/check-code.py requires print_function
9 contrib/check-config.py not using absolute_import
9 contrib/check-config.py not using absolute_import
10 contrib/check-config.py requires print_function
10 contrib/check-config.py requires print_function
11 contrib/debugcmdserver.py not using absolute_import
11 contrib/debugcmdserver.py not using absolute_import
12 contrib/debugcmdserver.py requires print_function
12 contrib/debugcmdserver.py requires print_function
13 contrib/debugshell.py not using absolute_import
13 contrib/debugshell.py not using absolute_import
14 contrib/fixpax.py not using absolute_import
14 contrib/fixpax.py not using absolute_import
15 contrib/fixpax.py requires print_function
15 contrib/fixpax.py requires print_function
16 contrib/hgclient.py not using absolute_import
16 contrib/hgclient.py not using absolute_import
17 contrib/hgclient.py requires print_function
17 contrib/hgclient.py requires print_function
18 contrib/hgfixes/fix_bytes.py not using absolute_import
18 contrib/hgfixes/fix_bytes.py not using absolute_import
19 contrib/hgfixes/fix_bytesmod.py not using absolute_import
19 contrib/hgfixes/fix_bytesmod.py not using absolute_import
20 contrib/hgfixes/fix_leftover_imports.py not using absolute_import
20 contrib/hgfixes/fix_leftover_imports.py not using absolute_import
21 contrib/import-checker.py not using absolute_import
21 contrib/import-checker.py not using absolute_import
22 contrib/import-checker.py requires print_function
22 contrib/import-checker.py requires print_function
23 contrib/memory.py not using absolute_import
23 contrib/memory.py not using absolute_import
24 contrib/perf.py not using absolute_import
24 contrib/perf.py not using absolute_import
25 contrib/python-hook-examples.py not using absolute_import
25 contrib/python-hook-examples.py not using absolute_import
26 contrib/revsetbenchmarks.py not using absolute_import
26 contrib/revsetbenchmarks.py not using absolute_import
27 contrib/revsetbenchmarks.py requires print_function
27 contrib/revsetbenchmarks.py requires print_function
28 contrib/showstack.py not using absolute_import
28 contrib/showstack.py not using absolute_import
29 contrib/synthrepo.py not using absolute_import
29 contrib/synthrepo.py not using absolute_import
30 contrib/win32/hgwebdir_wsgi.py not using absolute_import
30 contrib/win32/hgwebdir_wsgi.py not using absolute_import
31 doc/check-seclevel.py not using absolute_import
31 doc/check-seclevel.py not using absolute_import
32 doc/gendoc.py not using absolute_import
32 doc/gendoc.py not using absolute_import
33 doc/hgmanpage.py not using absolute_import
33 doc/hgmanpage.py not using absolute_import
34 hgext/__init__.py not using absolute_import
34 hgext/__init__.py not using absolute_import
35 hgext/acl.py not using absolute_import
35 hgext/acl.py not using absolute_import
36 hgext/blackbox.py not using absolute_import
36 hgext/blackbox.py not using absolute_import
37 hgext/bugzilla.py not using absolute_import
37 hgext/bugzilla.py not using absolute_import
38 hgext/censor.py not using absolute_import
38 hgext/censor.py not using absolute_import
39 hgext/children.py not using absolute_import
39 hgext/children.py not using absolute_import
40 hgext/churn.py not using absolute_import
40 hgext/churn.py not using absolute_import
41 hgext/clonebundles.py not using absolute_import
41 hgext/clonebundles.py not using absolute_import
42 hgext/color.py not using absolute_import
42 hgext/color.py not using absolute_import
43 hgext/convert/__init__.py not using absolute_import
43 hgext/convert/__init__.py not using absolute_import
44 hgext/convert/bzr.py not using absolute_import
44 hgext/convert/bzr.py not using absolute_import
45 hgext/convert/common.py not using absolute_import
45 hgext/convert/common.py not using absolute_import
46 hgext/convert/convcmd.py not using absolute_import
46 hgext/convert/convcmd.py not using absolute_import
47 hgext/convert/cvs.py not using absolute_import
47 hgext/convert/cvs.py not using absolute_import
48 hgext/convert/cvsps.py not using absolute_import
48 hgext/convert/cvsps.py not using absolute_import
49 hgext/convert/darcs.py not using absolute_import
49 hgext/convert/darcs.py not using absolute_import
50 hgext/convert/filemap.py not using absolute_import
50 hgext/convert/filemap.py not using absolute_import
51 hgext/convert/git.py not using absolute_import
51 hgext/convert/git.py not using absolute_import
52 hgext/convert/gnuarch.py not using absolute_import
52 hgext/convert/gnuarch.py not using absolute_import
53 hgext/convert/hg.py not using absolute_import
53 hgext/convert/hg.py not using absolute_import
54 hgext/convert/monotone.py not using absolute_import
54 hgext/convert/monotone.py not using absolute_import
55 hgext/convert/p4.py not using absolute_import
55 hgext/convert/p4.py not using absolute_import
56 hgext/convert/subversion.py not using absolute_import
56 hgext/convert/subversion.py not using absolute_import
57 hgext/convert/transport.py not using absolute_import
57 hgext/convert/transport.py not using absolute_import
58 hgext/eol.py not using absolute_import
58 hgext/eol.py not using absolute_import
59 hgext/extdiff.py not using absolute_import
59 hgext/extdiff.py not using absolute_import
60 hgext/factotum.py not using absolute_import
60 hgext/factotum.py not using absolute_import
61 hgext/fetch.py not using absolute_import
61 hgext/fetch.py not using absolute_import
62 hgext/gpg.py not using absolute_import
62 hgext/gpg.py not using absolute_import
63 hgext/graphlog.py not using absolute_import
63 hgext/graphlog.py not using absolute_import
64 hgext/hgcia.py not using absolute_import
64 hgext/hgcia.py not using absolute_import
65 hgext/hgk.py not using absolute_import
65 hgext/hgk.py not using absolute_import
66 hgext/highlight/__init__.py not using absolute_import
66 hgext/highlight/__init__.py not using absolute_import
67 hgext/highlight/highlight.py not using absolute_import
67 hgext/highlight/highlight.py not using absolute_import
68 hgext/histedit.py not using absolute_import
68 hgext/histedit.py not using absolute_import
69 hgext/keyword.py not using absolute_import
69 hgext/keyword.py not using absolute_import
70 hgext/largefiles/__init__.py not using absolute_import
70 hgext/largefiles/__init__.py not using absolute_import
71 hgext/largefiles/basestore.py not using absolute_import
71 hgext/largefiles/basestore.py not using absolute_import
72 hgext/largefiles/lfcommands.py not using absolute_import
72 hgext/largefiles/lfcommands.py not using absolute_import
73 hgext/largefiles/lfutil.py not using absolute_import
73 hgext/largefiles/lfutil.py not using absolute_import
74 hgext/largefiles/localstore.py not using absolute_import
74 hgext/largefiles/localstore.py not using absolute_import
75 hgext/largefiles/overrides.py not using absolute_import
75 hgext/largefiles/overrides.py not using absolute_import
76 hgext/largefiles/proto.py not using absolute_import
76 hgext/largefiles/proto.py not using absolute_import
77 hgext/largefiles/remotestore.py not using absolute_import
77 hgext/largefiles/remotestore.py not using absolute_import
78 hgext/largefiles/reposetup.py not using absolute_import
78 hgext/largefiles/reposetup.py not using absolute_import
79 hgext/largefiles/uisetup.py not using absolute_import
79 hgext/largefiles/uisetup.py not using absolute_import
80 hgext/largefiles/wirestore.py not using absolute_import
80 hgext/largefiles/wirestore.py not using absolute_import
81 hgext/mq.py not using absolute_import
81 hgext/mq.py not using absolute_import
82 hgext/notify.py not using absolute_import
82 hgext/notify.py not using absolute_import
83 hgext/pager.py not using absolute_import
83 hgext/pager.py not using absolute_import
84 hgext/patchbomb.py not using absolute_import
84 hgext/patchbomb.py not using absolute_import
85 hgext/purge.py not using absolute_import
85 hgext/purge.py not using absolute_import
86 hgext/rebase.py not using absolute_import
86 hgext/rebase.py not using absolute_import
87 hgext/record.py not using absolute_import
87 hgext/record.py not using absolute_import
88 hgext/relink.py not using absolute_import
88 hgext/relink.py not using absolute_import
89 hgext/schemes.py not using absolute_import
89 hgext/schemes.py not using absolute_import
90 hgext/share.py not using absolute_import
90 hgext/share.py not using absolute_import
91 hgext/shelve.py not using absolute_import
91 hgext/shelve.py not using absolute_import
92 hgext/strip.py not using absolute_import
92 hgext/strip.py not using absolute_import
93 hgext/transplant.py not using absolute_import
93 hgext/transplant.py not using absolute_import
94 hgext/win32mbcs.py not using absolute_import
94 hgext/win32mbcs.py not using absolute_import
95 hgext/win32text.py not using absolute_import
95 hgext/win32text.py not using absolute_import
96 hgext/zeroconf/Zeroconf.py not using absolute_import
96 hgext/zeroconf/Zeroconf.py not using absolute_import
97 hgext/zeroconf/Zeroconf.py requires print_function
97 hgext/zeroconf/Zeroconf.py requires print_function
98 hgext/zeroconf/__init__.py not using absolute_import
98 hgext/zeroconf/__init__.py not using absolute_import
99 i18n/check-translation.py not using absolute_import
99 i18n/check-translation.py not using absolute_import
100 i18n/polib.py not using absolute_import
100 i18n/polib.py not using absolute_import
101 mercurial/cmdutil.py not using absolute_import
101 mercurial/cmdutil.py not using absolute_import
102 mercurial/commands.py not using absolute_import
102 mercurial/commands.py not using absolute_import
103 mercurial/dispatch.py requires print_function
103 mercurial/dispatch.py requires print_function
104 mercurial/exchange.py not using absolute_import
104 mercurial/exchange.py not using absolute_import
105 mercurial/httpclient/__init__.py not using absolute_import
105 mercurial/httpclient/__init__.py not using absolute_import
106 mercurial/httpclient/_readers.py not using absolute_import
106 mercurial/httpclient/_readers.py not using absolute_import
107 mercurial/httpclient/socketutil.py not using absolute_import
107 mercurial/httpclient/socketutil.py not using absolute_import
108 mercurial/keepalive.py requires print_function
108 mercurial/keepalive.py requires print_function
109 mercurial/localrepo.py not using absolute_import
110 mercurial/lsprof.py requires print_function
109 mercurial/lsprof.py requires print_function
111 mercurial/lsprofcalltree.py requires print_function
110 mercurial/lsprofcalltree.py requires print_function
112 mercurial/mail.py requires print_function
111 mercurial/mail.py requires print_function
113 setup.py not using absolute_import
112 setup.py not using absolute_import
114 tests/filterpyflakes.py requires print_function
113 tests/filterpyflakes.py requires print_function
115 tests/generate-working-copy-states.py requires print_function
114 tests/generate-working-copy-states.py requires print_function
116 tests/get-with-headers.py requires print_function
115 tests/get-with-headers.py requires print_function
117 tests/heredoctest.py requires print_function
116 tests/heredoctest.py requires print_function
118 tests/hypothesishelpers.py not using absolute_import
117 tests/hypothesishelpers.py not using absolute_import
119 tests/hypothesishelpers.py requires print_function
118 tests/hypothesishelpers.py requires print_function
120 tests/killdaemons.py not using absolute_import
119 tests/killdaemons.py not using absolute_import
121 tests/md5sum.py not using absolute_import
120 tests/md5sum.py not using absolute_import
122 tests/mockblackbox.py not using absolute_import
121 tests/mockblackbox.py not using absolute_import
123 tests/printenv.py not using absolute_import
122 tests/printenv.py not using absolute_import
124 tests/readlink.py not using absolute_import
123 tests/readlink.py not using absolute_import
125 tests/readlink.py requires print_function
124 tests/readlink.py requires print_function
126 tests/revlog-formatv0.py not using absolute_import
125 tests/revlog-formatv0.py not using absolute_import
127 tests/run-tests.py not using absolute_import
126 tests/run-tests.py not using absolute_import
128 tests/seq.py not using absolute_import
127 tests/seq.py not using absolute_import
129 tests/seq.py requires print_function
128 tests/seq.py requires print_function
130 tests/silenttestrunner.py not using absolute_import
129 tests/silenttestrunner.py not using absolute_import
131 tests/silenttestrunner.py requires print_function
130 tests/silenttestrunner.py requires print_function
132 tests/sitecustomize.py not using absolute_import
131 tests/sitecustomize.py not using absolute_import
133 tests/svn-safe-append.py not using absolute_import
132 tests/svn-safe-append.py not using absolute_import
134 tests/svnxml.py not using absolute_import
133 tests/svnxml.py not using absolute_import
135 tests/test-ancestor.py requires print_function
134 tests/test-ancestor.py requires print_function
136 tests/test-atomictempfile.py not using absolute_import
135 tests/test-atomictempfile.py not using absolute_import
137 tests/test-batching.py not using absolute_import
136 tests/test-batching.py not using absolute_import
138 tests/test-batching.py requires print_function
137 tests/test-batching.py requires print_function
139 tests/test-bdiff.py not using absolute_import
138 tests/test-bdiff.py not using absolute_import
140 tests/test-bdiff.py requires print_function
139 tests/test-bdiff.py requires print_function
141 tests/test-context.py not using absolute_import
140 tests/test-context.py not using absolute_import
142 tests/test-context.py requires print_function
141 tests/test-context.py requires print_function
143 tests/test-demandimport.py not using absolute_import
142 tests/test-demandimport.py not using absolute_import
144 tests/test-demandimport.py requires print_function
143 tests/test-demandimport.py requires print_function
145 tests/test-dispatch.py not using absolute_import
144 tests/test-dispatch.py not using absolute_import
146 tests/test-dispatch.py requires print_function
145 tests/test-dispatch.py requires print_function
147 tests/test-doctest.py not using absolute_import
146 tests/test-doctest.py not using absolute_import
148 tests/test-duplicateoptions.py not using absolute_import
147 tests/test-duplicateoptions.py not using absolute_import
149 tests/test-duplicateoptions.py requires print_function
148 tests/test-duplicateoptions.py requires print_function
150 tests/test-filecache.py not using absolute_import
149 tests/test-filecache.py not using absolute_import
151 tests/test-filecache.py requires print_function
150 tests/test-filecache.py requires print_function
152 tests/test-filelog.py not using absolute_import
151 tests/test-filelog.py not using absolute_import
153 tests/test-filelog.py requires print_function
152 tests/test-filelog.py requires print_function
154 tests/test-hg-parseurl.py not using absolute_import
153 tests/test-hg-parseurl.py not using absolute_import
155 tests/test-hg-parseurl.py requires print_function
154 tests/test-hg-parseurl.py requires print_function
156 tests/test-hgweb-auth.py not using absolute_import
155 tests/test-hgweb-auth.py not using absolute_import
157 tests/test-hgweb-auth.py requires print_function
156 tests/test-hgweb-auth.py requires print_function
158 tests/test-hgwebdir-paths.py not using absolute_import
157 tests/test-hgwebdir-paths.py not using absolute_import
159 tests/test-hybridencode.py not using absolute_import
158 tests/test-hybridencode.py not using absolute_import
160 tests/test-hybridencode.py requires print_function
159 tests/test-hybridencode.py requires print_function
161 tests/test-lrucachedict.py not using absolute_import
160 tests/test-lrucachedict.py not using absolute_import
162 tests/test-lrucachedict.py requires print_function
161 tests/test-lrucachedict.py requires print_function
163 tests/test-manifest.py not using absolute_import
162 tests/test-manifest.py not using absolute_import
164 tests/test-minirst.py not using absolute_import
163 tests/test-minirst.py not using absolute_import
165 tests/test-minirst.py requires print_function
164 tests/test-minirst.py requires print_function
166 tests/test-parseindex2.py not using absolute_import
165 tests/test-parseindex2.py not using absolute_import
167 tests/test-parseindex2.py requires print_function
166 tests/test-parseindex2.py requires print_function
168 tests/test-pathencode.py not using absolute_import
167 tests/test-pathencode.py not using absolute_import
169 tests/test-pathencode.py requires print_function
168 tests/test-pathencode.py requires print_function
170 tests/test-propertycache.py not using absolute_import
169 tests/test-propertycache.py not using absolute_import
171 tests/test-propertycache.py requires print_function
170 tests/test-propertycache.py requires print_function
172 tests/test-revlog-ancestry.py not using absolute_import
171 tests/test-revlog-ancestry.py not using absolute_import
173 tests/test-revlog-ancestry.py requires print_function
172 tests/test-revlog-ancestry.py requires print_function
174 tests/test-run-tests.py not using absolute_import
173 tests/test-run-tests.py not using absolute_import
175 tests/test-simplemerge.py not using absolute_import
174 tests/test-simplemerge.py not using absolute_import
176 tests/test-status-inprocess.py not using absolute_import
175 tests/test-status-inprocess.py not using absolute_import
177 tests/test-status-inprocess.py requires print_function
176 tests/test-status-inprocess.py requires print_function
178 tests/test-symlink-os-yes-fs-no.py not using absolute_import
177 tests/test-symlink-os-yes-fs-no.py not using absolute_import
179 tests/test-trusted.py not using absolute_import
178 tests/test-trusted.py not using absolute_import
180 tests/test-trusted.py requires print_function
179 tests/test-trusted.py requires print_function
181 tests/test-ui-color.py not using absolute_import
180 tests/test-ui-color.py not using absolute_import
182 tests/test-ui-color.py requires print_function
181 tests/test-ui-color.py requires print_function
183 tests/test-ui-config.py not using absolute_import
182 tests/test-ui-config.py not using absolute_import
184 tests/test-ui-config.py requires print_function
183 tests/test-ui-config.py requires print_function
185 tests/test-ui-verbosity.py not using absolute_import
184 tests/test-ui-verbosity.py not using absolute_import
186 tests/test-ui-verbosity.py requires print_function
185 tests/test-ui-verbosity.py requires print_function
187 tests/test-url.py not using absolute_import
186 tests/test-url.py not using absolute_import
188 tests/test-url.py requires print_function
187 tests/test-url.py requires print_function
189 tests/test-walkrepo.py requires print_function
188 tests/test-walkrepo.py requires print_function
190 tests/test-wireproto.py requires print_function
189 tests/test-wireproto.py requires print_function
191 tests/tinyproxy.py requires print_function
190 tests/tinyproxy.py requires print_function
General Comments 0
You need to be logged in to leave comments. Login now