merge default into stable for 2.0 code freeze
Matt Mackall
r15273:38408275 merge 2.0-rc stable

The requested changes are too big and the diff shown below is truncated.

@@ -0,0 +1,47 b''
1 #!/usr/bin/env python
2 #
3 # Dumps output generated by Mercurial's command server in a formatted style to a
4 # given file or stderr if '-' is specified. Output is also written in its raw
5 # format to stdout.
6 #
7 # $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py -
8 # o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8'
9
10 import sys, struct
11
12 if len(sys.argv) != 2:
13 print 'usage: debugcmdserver.py FILE'
14 sys.exit(1)
15
16 outputfmt = '>cI'
17 outputfmtsize = struct.calcsize(outputfmt)
18
19 if sys.argv[1] == '-':
20 log = sys.stderr
21 else:
22 log = open(sys.argv[1], 'a')
23
24 def read(size):
25 data = sys.stdin.read(size)
26 if not data:
27 raise EOFError()
28 sys.stdout.write(data)
29 sys.stdout.flush()
30 return data
31
32 try:
33 while True:
34 header = read(outputfmtsize)
35 channel, length = struct.unpack(outputfmt, header)
36 log.write('%s, %-4d' % (channel, length))
37 if channel in 'IL':
38 log.write(' -> waiting for input\n')
39 else:
40 data = read(length)
41 log.write(' -> %r\n' % data)
42 log.flush()
43 except EOFError:
44 pass
45 finally:
46 if log != sys.stderr:
47 log.close()
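
For illustration (not part of this changeset): the framing that debugcmdserver.py decodes is a one-byte channel identifier followed by a big-endian unsigned 32-bit length (the '>cI' format), then that many bytes of payload. A minimal sketch of packing and unpacking one such frame, assuming Python 2 like the script above and a made-up payload::

    import struct

    outputfmt = '>cI'                       # channel byte + big-endian uint32 length
    headersize = struct.calcsize(outputfmt)

    # hypothetical payload resembling the server's hello message
    payload = 'capabilities: getencoding runcommand\nencoding: UTF-8'
    frame = struct.pack(outputfmt, 'o', len(payload)) + payload

    # decoding mirrors the read loop in debugcmdserver.py
    channel, length = struct.unpack(outputfmt, frame[:headersize])
    data = frame[headersize:headersize + length]
    print '%s, %-4d -> %r' % (channel, length, data)
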
@@ -0,0 +1,4 b''
1 Greg Ward, author of the original bfiles extension
2 Na'Tosha Bard of Unity Technologies
3 Fog Creek Software
4 Special thanks to the University of Toronto and the UCOSP program
@@ -0,0 +1,94 b''
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''track large binary files
10
11 Large binary files tend to be not very compressible, not very
12 diffable, and not at all mergeable. Such files are not handled
13 efficiently by Mercurial's storage format (revlog), which is based on
14 compressed binary deltas; storing large binary files as regular
15 Mercurial files wastes bandwidth and disk space and increases
16 Mercurial's memory usage. The largefiles extension addresses these
17 problems by adding a centralized client-server layer on top of
18 Mercurial: largefiles live in a *central store* out on the network
19 somewhere, and you only fetch the revisions that you need when you
20 need them.
21
22 largefiles works by maintaining a "standin file" in .hglf/ for each
23 largefile. The standins are small (41 bytes: an SHA-1 hash plus
24 newline) and are tracked by Mercurial. Largefile revisions are
25 identified by the SHA-1 hash of their contents, which is written to
26 the standin. largefiles uses that revision ID to get/put largefile
27 revisions from/to the central store. This saves both disk space and
28 bandwidth, since you don't need to retrieve all historical revisions
29 of large files when you clone or pull.
30
31 To start a new repository or add new large binary files, just add
32 --large to your ``hg add`` command. For example::
33
34 $ dd if=/dev/urandom of=randomdata count=2000
35 $ hg add --large randomdata
36 $ hg commit -m 'add randomdata as a largefile'
37
38 When you push a changeset that adds/modifies largefiles to a remote
39 repository, its largefile revisions will be uploaded along with it.
40 Note that the remote Mercurial must also have the largefiles extension
41 enabled for this to work.
42
43 When you pull a changeset that affects largefiles from a remote
44 repository, Mercurial behaves as normal. However, when you update to
45 such a revision, any largefiles needed by that revision are downloaded
46 and cached (if they have never been downloaded before). This means
47 that network access may be required to update to changesets you have
48 not previously updated to.
49
50 If you already have large files tracked by Mercurial without the
51 largefiles extension, you will need to convert your repository in
52 order to benefit from largefiles. This is done with the 'hg lfconvert'
53 command::
54
55 $ hg lfconvert --size 10 oldrepo newrepo
56
57 In repositories that already have largefiles in them, any new file
58 over 10MB will automatically be added as a largefile. To change this
59 threshold, set ``largefiles.size`` in your Mercurial config file to
60 the minimum size in megabytes to track as a largefile, or use the
61 --lfsize option to the add command (also in megabytes)::
62
63 [largefiles]
64 size = 2 XXX wouldn't minsize be a better name?
65
66 $ hg add --lfsize 2
67
68 The ``largefiles.patterns`` config option allows you to specify a list
69 of filename patterns (see ``hg help patterns``) that should always be
70 tracked as largefiles::
71
72 [largefiles]
73 patterns =
74 *.jpg
75 re:.*\.(png|bmp)$
76 library.zip
77 content/audio/*
78
79 Files that match one of these patterns will be added as largefiles
80 regardless of their size.
81 '''
82
83 from mercurial import commands
84
85 import lfcommands
86 import reposetup
87 import uisetup
88
89 reposetup = reposetup.reposetup
90 uisetup = uisetup.uisetup
91
92 commands.norepo += " lfconvert"
93
94 cmdtable = lfcommands.cmdtable
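
For illustration (not part of this changeset): the standins described in the docstring above hold nothing but the SHA-1 of the largefile's contents plus a newline (40 hex characters + '\n' = 41 bytes). A rough sketch of how that content could be computed; standincontent is a hypothetical helper, not the extension's API::

    import hashlib

    def standincontent(path):
        # SHA-1 of the file's contents, hex-encoded, plus a trailing
        # newline -- the 41 bytes a standin under .hglf/ contains
        h = hashlib.sha1()
        f = open(path, 'rb')
        try:
            for chunk in iter(lambda: f.read(128 * 1024), ''):
                h.update(chunk)
        finally:
            f.close()
        return h.hexdigest() + '\n'
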
@@ -0,0 +1,202 b''
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''base class for store implementations and store-related utility code'''
10
11 import os
12 import tempfile
13 import binascii
14 import re
15
16 from mercurial import util, node, hg
17 from mercurial.i18n import _
18
19 import lfutil
20
21 class StoreError(Exception):
22 '''Raised when there is a problem getting files from or putting
23 files to a central store.'''
24 def __init__(self, filename, hash, url, detail):
25 self.filename = filename
26 self.hash = hash
27 self.url = url
28 self.detail = detail
29
30 def longmessage(self):
31 if self.url:
32 return ('%s: %s\n'
33 '(failed URL: %s)\n'
34 % (self.filename, self.detail, self.url))
35 else:
36 return ('%s: %s\n'
37 '(no default or default-push path set in hgrc)\n'
38 % (self.filename, self.detail))
39
40 def __str__(self):
41 return "%s: %s" % (self.url, self.detail)
42
43 class basestore(object):
44 def __init__(self, ui, repo, url):
45 self.ui = ui
46 self.repo = repo
47 self.url = url
48
49 def put(self, source, hash):
50 '''Put source file into the store under <filename>/<hash>.'''
51 raise NotImplementedError('abstract method')
52
53 def exists(self, hash):
54 '''Check to see if the store contains the given hash.'''
55 raise NotImplementedError('abstract method')
56
57 def get(self, files):
58 '''Get the specified largefiles from the store and write to local
59 files under repo.root. files is a list of (filename, hash)
60 tuples. Return (success, missing), lists of files successfully
61 downloaded and those not found in the store. success is a list
62 of (filename, hash) tuples; missing is a list of filenames that
63 we could not get. (The detailed error message will already have
64 been presented to the user, so missing is just supplied as a
65 summary.)'''
66 success = []
67 missing = []
68 ui = self.ui
69
70 at = 0
71 for filename, hash in files:
72 ui.progress(_('getting largefiles'), at, unit='lfile',
73 total=len(files))
74 at += 1
75 ui.note(_('getting %s:%s\n') % (filename, hash))
76
77 cachefilename = lfutil.cachepath(self.repo, hash)
78 cachedir = os.path.dirname(cachefilename)
79
80 # No need to pass mode='wb' to fdopen(), since mkstemp() already
81 # opened the file in binary mode.
82 (tmpfd, tmpfilename) = tempfile.mkstemp(
83 dir=cachedir, prefix=os.path.basename(filename))
84 tmpfile = os.fdopen(tmpfd, 'w')
85
86 try:
87 hhash = binascii.hexlify(self._getfile(tmpfile, filename, hash))
88 except StoreError, err:
89 ui.warn(err.longmessage())
90 hhash = ""
91
92 if hhash != hash:
93 if hhash != "":
94 ui.warn(_('%s: data corruption (expected %s, got %s)\n')
95 % (filename, hash, hhash))
96 tmpfile.close() # no-op if it's already closed
97 os.remove(tmpfilename)
98 missing.append(filename)
99 continue
100
101 if os.path.exists(cachefilename): # Windows
102 os.remove(cachefilename)
103 os.rename(tmpfilename, cachefilename)
104 lfutil.linktosystemcache(self.repo, hash)
105 success.append((filename, hhash))
106
107 ui.progress(_('getting largefiles'), None)
108 return (success, missing)
109
110 def verify(self, revs, contents=False):
111 '''Verify the existence (and, optionally, contents) of every big
112 file revision referenced by every changeset in revs.
113 Return 0 if all is well, non-zero on any errors.'''
114 write = self.ui.write
115 failed = False
116
117 write(_('searching %d changesets for largefiles\n') % len(revs))
118 verified = set() # set of (filename, filenode) tuples
119
120 for rev in revs:
121 cctx = self.repo[rev]
122 cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))
123
124 failed = lfutil.any_(self._verifyfile(
125 cctx, cset, contents, standin, verified) for standin in cctx) or failed
126
127 num_revs = len(verified)
128 num_lfiles = len(set([fname for (fname, fnode) in verified]))
129 if contents:
130 write(_('verified contents of %d revisions of %d largefiles\n')
131 % (num_revs, num_lfiles))
132 else:
133 write(_('verified existence of %d revisions of %d largefiles\n')
134 % (num_revs, num_lfiles))
135
136 return int(failed)
137
138 def _getfile(self, tmpfile, filename, hash):
139 '''Fetch one revision of one file from the store and write it
140 to tmpfile. Compute the hash of the file on-the-fly as it
141 downloads and return the binary hash. Close tmpfile. Raise
142 StoreError if unable to download the file (e.g. it does not
143 exist in the store).'''
144 raise NotImplementedError('abstract method')
145
146 def _verifyfile(self, cctx, cset, contents, standin, verified):
147 '''Perform the actual verification of a file in the store.
148 '''
149 raise NotImplementedError('abstract method')
150
151 import localstore, wirestore
152
153 _storeprovider = {
154 'file': [localstore.localstore],
155 'http': [wirestore.wirestore],
156 'https': [wirestore.wirestore],
157 'ssh': [wirestore.wirestore],
158 }
159
160 _scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')
161
162 # During clone this function is passed the src's ui object
163 # but it needs the dest's ui object so it can read out of
164 # the config file. Use repo.ui instead.
165 def _openstore(repo, remote=None, put=False):
166 ui = repo.ui
167
168 if not remote:
169 path = (getattr(repo, 'lfpullsource', None) or
170 ui.expandpath('default-push', 'default'))
171
172 # ui.expandpath() leaves 'default-push' and 'default' alone if
173 # they cannot be expanded: fall back to the empty string,
174 # meaning the current directory.
175 if path == 'default-push' or path == 'default':
176 path = ''
177 remote = repo
178 else:
179 remote = hg.peer(repo, {}, path)
180
181 # The path could be a scheme so use Mercurial's normal functionality
182 # to resolve the scheme to a repository and use its path
183 path = util.safehasattr(remote, 'url') and remote.url() or remote.path
184
185 match = _scheme_re.match(path)
186 if not match: # regular filesystem path
187 scheme = 'file'
188 else:
189 scheme = match.group(1)
190
191 try:
192 storeproviders = _storeprovider[scheme]
193 except KeyError:
194 raise util.Abort(_('unsupported URL scheme %r') % scheme)
195
196 for class_obj in storeproviders:
197 try:
198 return class_obj(ui, repo, remote)
199 except lfutil.storeprotonotcapable:
200 pass
201
202 raise util.Abort(_('%s does not appear to be a largefile store') % path)
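
For illustration (not part of this changeset): _openstore() above chooses a store class by URL scheme, treating anything without a scheme prefix as a 'file' path. A simplified sketch of that dispatch; schemeof is a hypothetical helper, and the real code goes on to consult _storeprovider and may try several store classes::

    import re

    _scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')

    def schemeof(path):
        m = _scheme_re.match(path)
        if not m:                  # regular filesystem path
            return 'file'
        return m.group(1)

    assert schemeof('/srv/largefiles') == 'file'
    assert schemeof('https://hg.example.com/repo') == 'https'
    assert schemeof('ssh://hg@example.com//srv/repo') == 'ssh'
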
@@ -0,0 +1,49 b''
1 = largefiles - manage large binary files =
2 This extension is based on Greg Ward's bfiles extension, which can be found
3 at http://mercurial.selenic.com/wiki/BfilesExtension.
4
5 == The largefile store ==
6
7 largefile stores are, in the typical use case, centralized servers that have
8 every past revision of a given binary file. Each largefile is identified by
9 its sha1 hash, and all interactions with the store take one of the following
10 forms.
11
12 - Download a largefile with this hash
13 - Upload a largefile with this hash
14 - Check if the store has a largefile with this hash
15
16 largefiles stores can take one of two forms:
17
18 - Directories on a network file share
19 - Mercurial wireproto servers, either via ssh or http (hgweb)
20
21 == The Local Repository ==
22
23 The local repository has a largefile cache in .hg/largefiles which holds a
24 subset of the largefiles needed. On a clone only the largefiles at tip are
25 downloaded. When largefiles are downloaded from the central store, a copy is
26 saved in this cache.
27
28 == The Global Cache ==
29
30 largefiles in a local repository cache are hardlinked to files in the global
31 cache. Before a file is downloaded we check if it is in the global cache.
32
33 == Implementation Details ==
34
35 Each largefile has a standin which is in .hglf. The standin is tracked by
36 Mercurial. The standin contains the SHA1 hash of the largefile. When a
37 largefile is added/removed/copied/renamed/etc the same operation is applied to
38 the standin. Thus the history of the standin is the history of the largefile.
39
40 For performance reasons, the contents of a standin are only updated before a
41 commit. Standins are added/removed/copied/renamed by the add/remove/copy/rename
42 Mercurial commands, but their contents are not updated at that point. The
43 contents of a standin will always be the hash of the largefile as of the last
44 commit. To support some commands (such as revert), some standins are
45 temporarily updated and then changed back after the command is finished.
46
47 A Mercurial dirstate object tracks the state of the largefiles. The dirstate
48 uses the last modified time and current size to detect if a file has changed
49 (without reading the entire contents of the file).
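
For illustration (not part of this changeset): the dirstate check described in the last paragraph amounts to comparing a file's recorded size and modification time against what is on disk. A rough sketch; probablyunchanged is a hypothetical helper, and the real dirstate record is richer than two fields::

    import os

    def probablyunchanged(path, recordedsize, recordedmtime):
        # if both size and mtime match what was recorded, assume the file
        # is unchanged without reading its contents
        st = os.stat(path)
        return st.st_size == recordedsize and int(st.st_mtime) == recordedmtime
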
@@ -0,0 +1,481 b''
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
11 import os
12 import shutil
13
14 from mercurial import util, match as match_, hg, node, context, error
15 from mercurial.i18n import _
16
17 import lfutil
18 import basestore
19
20 # -- Commands ----------------------------------------------------------
21
22 def lfconvert(ui, src, dest, *pats, **opts):
23 '''convert a normal repository to a largefiles repository
24
25 Convert repository SOURCE to a new repository DEST, identical to
26 SOURCE except that certain files will be converted as largefiles:
27 specifically, any file that matches any PATTERN *or* whose size is
28 above the minimum size threshold is converted as a largefile. The
29 size used to determine whether or not to track a file as a
30 largefile is the size of the first version of the file. The
31 minimum size can be specified either with --size or in
32 configuration as ``largefiles.size``.
33
34 After running this command you will need to make sure that
35 largefiles is enabled anywhere you intend to push the new
36 repository.
37
38 Use --tonormal to convert largefiles back to normal files; after
39 this, the DEST repository can be used without largefiles at all.'''
40
41 if opts['tonormal']:
42 tolfile = False
43 else:
44 tolfile = True
45 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
46 try:
47 rsrc = hg.repository(ui, src)
48 if not rsrc.local():
49 raise util.Abort(_('%s is not a local Mercurial repo') % src)
50 except error.RepoError, err:
51 ui.traceback()
52 raise util.Abort(err.args[0])
53 if os.path.exists(dest):
54 if not os.path.isdir(dest):
55 raise util.Abort(_('destination %s already exists') % dest)
56 elif os.listdir(dest):
57 raise util.Abort(_('destination %s is not empty') % dest)
58 try:
59 ui.status(_('initializing destination %s\n') % dest)
60 rdst = hg.repository(ui, dest, create=True)
61 if not rdst.local():
62 raise util.Abort(_('%s is not a local Mercurial repo') % dest)
63 except error.RepoError:
64 ui.traceback()
65 raise util.Abort(_('%s is not a repo') % dest)
66
67 success = False
68 try:
69 # Lock destination to prevent modification while it is being converted.
70 # Don't need to lock src because we are just reading from its history
71 # which can't change.
72 dst_lock = rdst.lock()
73
74 # Get a list of all changesets in the source. The easy way to do this
75 # is to simply walk the changelog, using changelog.nodesbetween().
76 # Take a look at mercurial/revlog.py:639 for more details.
77 # Use a generator instead of a list to decrease memory usage
78 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
79 rsrc.heads())[0])
80 revmap = {node.nullid: node.nullid}
81 if tolfile:
82 lfiles = set()
83 normalfiles = set()
84 if not pats:
85 pats = ui.config(lfutil.longname, 'patterns', default=())
86 if pats:
87 pats = pats.split(' ')
88 if pats:
89 matcher = match_.match(rsrc.root, '', list(pats))
90 else:
91 matcher = None
92
93 lfiletohash = {}
94 for ctx in ctxs:
95 ui.progress(_('converting revisions'), ctx.rev(),
96 unit=_('revision'), total=rsrc['tip'].rev())
97 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
98 lfiles, normalfiles, matcher, size, lfiletohash)
99 ui.progress(_('converting revisions'), None)
100
101 if os.path.exists(rdst.wjoin(lfutil.shortname)):
102 shutil.rmtree(rdst.wjoin(lfutil.shortname))
103
104 for f in lfiletohash.keys():
105 if os.path.isfile(rdst.wjoin(f)):
106 os.unlink(rdst.wjoin(f))
107 try:
108 os.removedirs(os.path.dirname(rdst.wjoin(f)))
109 except OSError:
110 pass
111
112 else:
113 for ctx in ctxs:
114 ui.progress(_('converting revisions'), ctx.rev(),
115 unit=_('revision'), total=rsrc['tip'].rev())
116 _addchangeset(ui, rsrc, rdst, ctx, revmap)
117
118 ui.progress(_('converting revisions'), None)
119 success = True
120 finally:
121 if not success:
122 # we failed, remove the new directory
123 shutil.rmtree(rdst.root)
124 dst_lock.release()
125
126 def _addchangeset(ui, rsrc, rdst, ctx, revmap):
127 # Convert src parents to dst parents
128 parents = []
129 for p in ctx.parents():
130 parents.append(revmap[p.node()])
131 while len(parents) < 2:
132 parents.append(node.nullid)
133
134 # Generate list of changed files
135 files = set(ctx.files())
136 if node.nullid not in parents:
137 mc = ctx.manifest()
138 mp1 = ctx.parents()[0].manifest()
139 mp2 = ctx.parents()[1].manifest()
140 files |= (set(mp1) | set(mp2)) - set(mc)
141 for f in mc:
142 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
143 files.add(f)
144
145 def getfilectx(repo, memctx, f):
146 if lfutil.standin(f) in files:
147 # if the file isn't in the manifest then it was removed
148 # or renamed, raise IOError to indicate this
149 try:
150 fctx = ctx.filectx(lfutil.standin(f))
151 except error.LookupError:
152 raise IOError()
153 renamed = fctx.renamed()
154 if renamed:
155 renamed = lfutil.splitstandin(renamed[0])
156
157 hash = fctx.data().strip()
158 path = lfutil.findfile(rsrc, hash)
159 ### TODO: What if the file is not cached?
160 data = ''
161 fd = None
162 try:
163 fd = open(path, 'rb')
164 data = fd.read()
165 finally:
166 if fd:
167 fd.close()
168 return context.memfilectx(f, data, 'l' in fctx.flags(),
169 'x' in fctx.flags(), renamed)
170 else:
171 try:
172 fctx = ctx.filectx(f)
173 except error.LookupError:
174 raise IOError()
175 renamed = fctx.renamed()
176 if renamed:
177 renamed = renamed[0]
178 data = fctx.data()
179 if f == '.hgtags':
180 newdata = []
181 for line in data.splitlines():
182 id, name = line.split(' ', 1)
183 newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
184 name))
185 data = ''.join(newdata)
186 return context.memfilectx(f, data, 'l' in fctx.flags(),
187 'x' in fctx.flags(), renamed)
188
189 dstfiles = []
190 for file in files:
191 if lfutil.isstandin(file):
192 dstfiles.append(lfutil.splitstandin(file))
193 else:
194 dstfiles.append(file)
195 # Commit
196 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
197 getfilectx, ctx.user(), ctx.date(), ctx.extra())
198 ret = rdst.commitctx(mctx)
199 rdst.dirstate.setparents(ret)
200 revmap[ctx.node()] = rdst.changelog.tip()
201
202 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
203 matcher, size, lfiletohash):
204 # Convert src parents to dst parents
205 parents = []
206 for p in ctx.parents():
207 parents.append(revmap[p.node()])
208 while len(parents) < 2:
209 parents.append(node.nullid)
210
211 # Generate list of changed files
212 files = set(ctx.files())
213 if node.nullid not in parents:
214 mc = ctx.manifest()
215 mp1 = ctx.parents()[0].manifest()
216 mp2 = ctx.parents()[1].manifest()
217 files |= (set(mp1) | set(mp2)) - set(mc)
218 for f in mc:
219 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
220 files.add(f)
221
222 dstfiles = []
223 for f in files:
224 if f not in lfiles and f not in normalfiles:
225 islfile = _islfile(f, ctx, matcher, size)
226 # If this file was renamed or copied then copy
227 # the largefile-ness of its predecessor
228 if f in ctx.manifest():
229 fctx = ctx.filectx(f)
230 renamed = fctx.renamed()
231 renamedlfile = renamed and renamed[0] in lfiles
232 islfile |= renamedlfile
233 if 'l' in fctx.flags():
234 if renamedlfile:
235 raise util.Abort(
236 _('Renamed/copied largefile %s becomes symlink')
237 % f)
238 islfile = False
239 if islfile:
240 lfiles.add(f)
241 else:
242 normalfiles.add(f)
243
244 if f in lfiles:
245 dstfiles.append(lfutil.standin(f))
246 # largefile in manifest if it has not been removed/renamed
247 if f in ctx.manifest():
248 if 'l' in ctx.filectx(f).flags():
249 if renamed and renamed[0] in lfiles:
250 raise util.Abort(_('largefile %s becomes symlink') % f)
251
252 # largefile was modified, update standins
253 fullpath = rdst.wjoin(f)
254 lfutil.createdir(os.path.dirname(fullpath))
255 m = util.sha1('')
256 m.update(ctx[f].data())
257 hash = m.hexdigest()
258 if f not in lfiletohash or lfiletohash[f] != hash:
259 fd = open(fullpath, 'wb')
260 try:
261 fd.write(ctx[f].data())
262 finally:
263 fd.close()
265 executable = 'x' in ctx[f].flags()
266 os.chmod(fullpath, lfutil.getmode(executable))
267 lfutil.writestandin(rdst, lfutil.standin(f), hash,
268 executable)
269 lfiletohash[f] = hash
270 else:
271 # normal file
272 dstfiles.append(f)
273
274 def getfilectx(repo, memctx, f):
275 if lfutil.isstandin(f):
276 # if the file isn't in the manifest then it was removed
277 # or renamed, raise IOError to indicate this
278 srcfname = lfutil.splitstandin(f)
279 try:
280 fctx = ctx.filectx(srcfname)
281 except error.LookupError:
282 raise IOError()
283 renamed = fctx.renamed()
284 if renamed:
285 # standin is always a largefile because largefile-ness
286 # doesn't change after rename or copy
287 renamed = lfutil.standin(renamed[0])
288
289 return context.memfilectx(f, lfiletohash[srcfname], 'l' in
290 fctx.flags(), 'x' in fctx.flags(), renamed)
291 else:
292 try:
293 fctx = ctx.filectx(f)
294 except error.LookupError:
295 raise IOError()
296 renamed = fctx.renamed()
297 if renamed:
298 renamed = renamed[0]
299
300 data = fctx.data()
301 if f == '.hgtags':
302 newdata = []
303 for line in data.splitlines():
304 id, name = line.split(' ', 1)
305 newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
306 name))
307 data = ''.join(newdata)
308 return context.memfilectx(f, data, 'l' in fctx.flags(),
309 'x' in fctx.flags(), renamed)
310
311 # Commit
312 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
313 getfilectx, ctx.user(), ctx.date(), ctx.extra())
314 ret = rdst.commitctx(mctx)
315 rdst.dirstate.setparents(ret)
316 revmap[ctx.node()] = rdst.changelog.tip()
317
318 def _islfile(file, ctx, matcher, size):
319 '''Return true if file should be considered a largefile, i.e.
320 matcher matches it or it is larger than size.'''
321 # never store special .hg* files as largefiles
322 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
323 return False
324 if matcher and matcher(file):
325 return True
326 try:
327 return ctx.filectx(file).size() >= size * 1024 * 1024
328 except error.LookupError:
329 return False
330
331 def uploadlfiles(ui, rsrc, rdst, files):
332 '''upload largefiles to the central store'''
333
334 # Don't upload locally. All largefiles are in the system wide cache
335 # so the other repo can just get them from there.
336 if not files or rdst.local():
337 return
338
339 store = basestore._openstore(rsrc, rdst, put=True)
340
341 at = 0
342 files = filter(lambda h: not store.exists(h), files)
343 for hash in files:
344 ui.progress(_('uploading largefiles'), at, unit='largefile',
345 total=len(files))
346 source = lfutil.findfile(rsrc, hash)
347 if not source:
348 raise util.Abort(_('largefile %s missing from store'
349 ' (needs to be uploaded)') % hash)
350 # XXX check for errors here
351 store.put(source, hash)
352 at += 1
353 ui.progress(_('uploading largefiles'), None)
354
355 def verifylfiles(ui, repo, all=False, contents=False):
356 '''Verify that every big file revision in the current changeset
357 exists in the central store. With --contents, also verify that
358 the contents of each big file revision are correct (SHA-1 hash
359 matches the revision ID). With --all, check every changeset in
360 this repository.'''
361 if all:
362 # Pass a list to the function rather than an iterator because we know a
363 # list will work.
364 revs = range(len(repo))
365 else:
366 revs = ['.']
367
368 store = basestore._openstore(repo)
369 return store.verify(revs, contents=contents)
370
371 def cachelfiles(ui, repo, node):
372 '''cachelfiles ensures that all largefiles needed by the specified revision
373 are present in the repository's largefile cache.
374
375 returns a tuple (cached, missing). cached is the list of files downloaded
376 by this operation; missing is the list of files that were needed but could
377 not be found.'''
378 lfiles = lfutil.listlfiles(repo, node)
379 toget = []
380
381 for lfile in lfiles:
382 expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
383 # if it exists and its hash matches, it might have been locally
384 # modified before updating and the user chose 'local'. in this case,
385 # it will not be in any store, so don't look for it.
386 if ((not os.path.exists(repo.wjoin(lfile)) or
387 expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
388 not lfutil.findfile(repo, expectedhash)):
389 toget.append((lfile, expectedhash))
390
391 if toget:
392 store = basestore._openstore(repo)
393 ret = store.get(toget)
394 return ret
395
396 return ([], [])
397
398 def updatelfiles(ui, repo, filelist=None, printmessage=True):
399 wlock = repo.wlock()
400 try:
401 lfdirstate = lfutil.openlfdirstate(ui, repo)
402 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
403
404 if filelist is not None:
405 lfiles = [f for f in lfiles if f in filelist]
406
407 printed = False
408 if printmessage and lfiles:
409 ui.status(_('getting changed largefiles\n'))
410 printed = True
411 cachelfiles(ui, repo, '.')
412
413 updated, removed = 0, 0
414 for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
415 # increment the appropriate counter according to _updatelfile's
416 # return value
417 updated += i > 0 and i or 0
418 removed -= i < 0 and i or 0
419 if printmessage and (removed or updated) and not printed:
420 ui.status(_('getting changed largefiles\n'))
421 printed = True
422
423 lfdirstate.write()
424 if printed and printmessage:
425 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
426 removed))
427 finally:
428 wlock.release()
429
430 def _updatelfile(repo, lfdirstate, lfile):
431 '''updates a single largefile and copies the state of its standin from
432 the repository's dirstate to its state in the lfdirstate.
433
434 returns 1 if the file was modified, -1 if the file was removed, 0 if the
435 file was unchanged, and None if the needed largefile was missing from the
436 cache.'''
437 ret = 0
438 abslfile = repo.wjoin(lfile)
439 absstandin = repo.wjoin(lfutil.standin(lfile))
440 if os.path.exists(absstandin):
441 if os.path.exists(absstandin+'.orig'):
442 shutil.copyfile(abslfile, abslfile+'.orig')
443 expecthash = lfutil.readstandin(repo, lfile)
444 if (expecthash != '' and
445 (not os.path.exists(abslfile) or
446 expecthash != lfutil.hashfile(abslfile))):
447 if not lfutil.copyfromcache(repo, expecthash, lfile):
448 return None # don't try to set the mode or update the dirstate
449 ret = 1
450 mode = os.stat(absstandin).st_mode
451 if mode != os.stat(abslfile).st_mode:
452 os.chmod(abslfile, mode)
453 ret = 1
454 else:
455 if os.path.exists(abslfile):
456 os.unlink(abslfile)
457 ret = -1
458 state = repo.dirstate[lfutil.standin(lfile)]
459 if state == 'n':
460 lfdirstate.normal(lfile)
461 elif state == 'r':
462 lfdirstate.remove(lfile)
463 elif state == 'a':
464 lfdirstate.add(lfile)
465 elif state == '?':
466 lfdirstate.drop(lfile)
467 return ret
468
469 # -- hg commands declarations ------------------------------------------------
470
471 cmdtable = {
472 'lfconvert': (lfconvert,
473 [('s', 'size', '',
474 _('minimum size (MB) for files to be converted '
475 'as largefiles'),
476 'SIZE'),
477 ('', 'tonormal', False,
478 _('convert from a largefiles repo to a normal repo')),
479 ],
480 _('hg lfconvert SOURCE DEST [FILE ...]')),
481 }
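
For illustration (not part of this changeset): the size threshold used by _islfile() above is expressed in megabytes (--size or largefiles.size) and compared against the size of the first version of each file. A minimal restatement of that arithmetic; islargebysize is a hypothetical helper, not the extension's code::

    def islargebysize(filesizebytes, thresholdmb):
        # e.g. with the default threshold of 10, files of 10MB and up qualify
        return filesizebytes >= thresholdmb * 1024 * 1024

    assert islargebysize(12 * 1024 * 1024, 10)
    assert not islargebysize(2 * 1024 * 1024, 10)
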
@@ -0,0 +1,448 b''
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''largefiles utility code: must not import other modules in this package.'''
10
11 import os
12 import errno
13 import shutil
14 import stat
15 import hashlib
16
17 from mercurial import dirstate, httpconnection, match as match_, util, scmutil
18 from mercurial.i18n import _
19
20 shortname = '.hglf'
21 longname = 'largefiles'
22
23
24 # -- Portability wrappers ----------------------------------------------
25
26 def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
27 return dirstate.walk(matcher, [], unknown, ignored)
28
29 def repo_add(repo, list):
30 add = repo[None].add
31 return add(list)
32
33 def repo_remove(repo, list, unlink=False):
34 def remove(list, unlink):
35 wlock = repo.wlock()
36 try:
37 if unlink:
38 for f in list:
39 try:
40 util.unlinkpath(repo.wjoin(f))
41 except OSError, inst:
42 if inst.errno != errno.ENOENT:
43 raise
44 repo[None].forget(list)
45 finally:
46 wlock.release()
47 return remove(list, unlink=unlink)
48
49 def repo_forget(repo, list):
50 forget = repo[None].forget
51 return forget(list)
52
53 def findoutgoing(repo, remote, force):
54 from mercurial import discovery
55 common, _anyinc, _heads = discovery.findcommonincoming(repo,
56 remote, force=force)
57 return repo.changelog.findmissing(common)
58
59 # -- Private worker functions ------------------------------------------
60
61 def getminsize(ui, assumelfiles, opt, default=10):
62 lfsize = opt
63 if not lfsize and assumelfiles:
64 lfsize = ui.config(longname, 'size', default=default)
65 if lfsize:
66 try:
67 lfsize = float(lfsize)
68 except ValueError:
69 raise util.Abort(_('largefiles: size must be number (not %s)\n')
70 % lfsize)
71 if lfsize is None:
72 raise util.Abort(_('minimum size for largefiles must be specified'))
73 return lfsize
74
75 def link(src, dest):
76 try:
77 util.oslink(src, dest)
78 except OSError:
79 # if hardlinks fail, fallback on copy
80 shutil.copyfile(src, dest)
81 os.chmod(dest, os.stat(src).st_mode)
82
83 def systemcachepath(ui, hash):
84 path = ui.config(longname, 'systemcache', None)
85 if path:
86 path = os.path.join(path, hash)
87 else:
88 if os.name == 'nt':
89 appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
90 path = os.path.join(appdata, longname, hash)
91 elif os.name == 'posix':
92 path = os.path.join(os.getenv('HOME'), '.' + longname, hash)
93 else:
94 raise util.Abort(_('unknown operating system: %s\n') % os.name)
95 return path
96
97 def insystemcache(ui, hash):
98 return os.path.exists(systemcachepath(ui, hash))
99
100 def findfile(repo, hash):
101 if incache(repo, hash):
102 repo.ui.note(_('Found %s in cache\n') % hash)
103 return cachepath(repo, hash)
104 if insystemcache(repo.ui, hash):
105 repo.ui.note(_('Found %s in system cache\n') % hash)
106 return systemcachepath(repo.ui, hash)
107 return None
108
109 class largefiles_dirstate(dirstate.dirstate):
110 def __getitem__(self, key):
111 return super(largefiles_dirstate, self).__getitem__(unixpath(key))
112 def normal(self, f):
113 return super(largefiles_dirstate, self).normal(unixpath(f))
114 def remove(self, f):
115 return super(largefiles_dirstate, self).remove(unixpath(f))
116 def add(self, f):
117 return super(largefiles_dirstate, self).add(unixpath(f))
118 def drop(self, f):
119 return super(largefiles_dirstate, self).drop(unixpath(f))
120 def forget(self, f):
121 return super(largefiles_dirstate, self).forget(unixpath(f))
122
123 def openlfdirstate(ui, repo):
124 '''
125 Return a dirstate object that tracks largefiles: i.e. its root is
126 the repo root, but it is saved in .hg/largefiles/dirstate.
127 '''
128 admin = repo.join(longname)
129 opener = scmutil.opener(admin)
130 if util.safehasattr(repo.dirstate, '_validate'):
131 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
132 repo.dirstate._validate)
133 else:
134 lfdirstate = largefiles_dirstate(opener, ui, repo.root)
135
136 # If the largefiles dirstate does not exist, populate and create
137 # it. This ensures that we create it on the first meaningful
138 # largefiles operation in a new clone. It also gives us an easy
139 # way to forcibly rebuild largefiles state:
140 # rm .hg/largefiles/dirstate && hg status
141 # Or even, if things are really messed up:
142 # rm -rf .hg/largefiles && hg status
143 if not os.path.exists(os.path.join(admin, 'dirstate')):
144 util.makedirs(admin)
145 matcher = getstandinmatcher(repo)
146 for standin in dirstate_walk(repo.dirstate, matcher):
147 lfile = splitstandin(standin)
148 hash = readstandin(repo, lfile)
149 lfdirstate.normallookup(lfile)
150 try:
151 if hash == hashfile(lfile):
152 lfdirstate.normal(lfile)
153 except IOError, err:
154 if err.errno != errno.ENOENT:
155 raise
156
157 lfdirstate.write()
158
159 return lfdirstate
160
161 def lfdirstate_status(lfdirstate, repo, rev):
162 wlock = repo.wlock()
163 try:
164 match = match_.always(repo.root, repo.getcwd())
165 s = lfdirstate.status(match, [], False, False, False)
166 unsure, modified, added, removed, missing, unknown, ignored, clean = s
167 for lfile in unsure:
168 if repo[rev][standin(lfile)].data().strip() != \
169 hashfile(repo.wjoin(lfile)):
170 modified.append(lfile)
171 else:
172 clean.append(lfile)
173 lfdirstate.normal(lfile)
174 lfdirstate.write()
175 finally:
176 wlock.release()
177 return (modified, added, removed, missing, unknown, ignored, clean)
178
179 def listlfiles(repo, rev=None, matcher=None):
180 '''return a list of largefiles in the working copy or the
181 specified changeset'''
182
183 if matcher is None:
184 matcher = getstandinmatcher(repo)
185
186 # ignore unknown files in working directory
187 return [splitstandin(f)
188 for f in repo[rev].walk(matcher)
189 if rev is not None or repo.dirstate[f] != '?']
190
191 def incache(repo, hash):
192 return os.path.exists(cachepath(repo, hash))
193
194 def createdir(dir):
195 if not os.path.exists(dir):
196 os.makedirs(dir)
197
198 def cachepath(repo, hash):
199 return repo.join(os.path.join(longname, hash))
200
201 def copyfromcache(repo, hash, filename):
202 '''Copy the specified largefile from the repo or system cache to
203 filename in the repository. Return true on success or false if the
204 file was not found in either cache (which should not happen:
205 this is meant to be called only after ensuring that the needed
206 largefile exists in the cache).'''
207 path = findfile(repo, hash)
208 if path is None:
209 return False
210 util.makedirs(os.path.dirname(repo.wjoin(filename)))
211 shutil.copy(path, repo.wjoin(filename))
212 return True
213
214 def copytocache(repo, rev, file, uploaded=False):
215 hash = readstandin(repo, file)
216 if incache(repo, hash):
217 return
218 copytocacheabsolute(repo, repo.wjoin(file), hash)
219
220 def copytocacheabsolute(repo, file, hash):
221 createdir(os.path.dirname(cachepath(repo, hash)))
222 if insystemcache(repo.ui, hash):
223 link(systemcachepath(repo.ui, hash), cachepath(repo, hash))
224 else:
225 shutil.copyfile(file, cachepath(repo, hash))
226 os.chmod(cachepath(repo, hash), os.stat(file).st_mode)
227 linktosystemcache(repo, hash)
228
229 def linktosystemcache(repo, hash):
230 createdir(os.path.dirname(systemcachepath(repo.ui, hash)))
231 link(cachepath(repo, hash), systemcachepath(repo.ui, hash))
232
233 def getstandinmatcher(repo, pats=[], opts={}):
234 '''Return a match object that applies pats to the standin directory'''
235 standindir = repo.pathto(shortname)
236 if pats:
237 # patterns supplied: search standin directory relative to current dir
238 cwd = repo.getcwd()
239 if os.path.isabs(cwd):
240 # cwd is an absolute path for hg -R <reponame>
241 # work relative to the repository root in this case
242 cwd = ''
243 pats = [os.path.join(standindir, cwd, pat) for pat in pats]
244 elif os.path.isdir(standindir):
245 # no patterns: relative to repo root
246 pats = [standindir]
247 else:
248 # no patterns and no standin dir: return matcher that matches nothing
249 match = match_.match(repo.root, None, [], exact=True)
250 match.matchfn = lambda f: False
251 return match
252 return getmatcher(repo, pats, opts, showbad=False)
253
254 def getmatcher(repo, pats=[], opts={}, showbad=True):
255 '''Wrapper around scmutil.match() that adds showbad: if false,
256 neuter the match object's bad() method so it does not print any
257 warnings about missing files or directories.'''
258 match = scmutil.match(repo[None], pats, opts)
259
260 if not showbad:
261 match.bad = lambda f, msg: None
262 return match
263
264 def composestandinmatcher(repo, rmatcher):
265 '''Return a matcher that accepts standins corresponding to the
266 files accepted by rmatcher. Pass the list of files in the matcher
267 as the paths specified by the user.'''
268 smatcher = getstandinmatcher(repo, rmatcher.files())
269 isstandin = smatcher.matchfn
270 def composed_matchfn(f):
271 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
272 smatcher.matchfn = composed_matchfn
273
274 return smatcher
275
276 def standin(filename):
277 '''Return the repo-relative path to the standin for the specified big
278 file.'''
279 # Notes:
280 # 1) Most callers want an absolute path, but _create_standin() needs
281 # it repo-relative so lfadd() can pass it to repo_add(). So leave
282 # it up to the caller to use repo.wjoin() to get an absolute path.
283 # 2) Join with '/' because that's what dirstate always uses, even on
284 # Windows. Change existing separator to '/' first in case we are
285 # passed filenames from an external source (like the command line).
286 return shortname + '/' + filename.replace(os.sep, '/')
287
288 def isstandin(filename):
289 '''Return true if filename is a big file standin. filename must be
290 in Mercurial's internal form (slash-separated).'''
291 return filename.startswith(shortname + '/')
292
293 def splitstandin(filename):
294 # Split on / because that's what dirstate always uses, even on Windows.
295 # Change local separator to / first just in case we are passed filenames
296 # from an external source (like the command line).
297 bits = filename.replace(os.sep, '/').split('/', 1)
298 if len(bits) == 2 and bits[0] == shortname:
299 return bits[1]
300 else:
301 return None
302
303 def updatestandin(repo, standin):
304 file = repo.wjoin(splitstandin(standin))
305 if os.path.exists(file):
306 hash = hashfile(file)
307 executable = getexecutable(file)
308 writestandin(repo, standin, hash, executable)
309
310 def readstandin(repo, filename, node=None):
311 '''read hex hash from standin for filename at given node, or working
312 directory if no node is given'''
313 return repo[node][standin(filename)].data().strip()
314
315 def writestandin(repo, standin, hash, executable):
316 '''write hash to <repo.root>/<standin>'''
317 writehash(hash, repo.wjoin(standin), executable)
318
319 def copyandhash(instream, outfile):
320 '''Read bytes from instream (iterable) and write them to outfile,
321 computing the SHA-1 hash of the data along the way. Close outfile
322 when done and return the binary hash.'''
323 hasher = util.sha1('')
324 for data in instream:
325 hasher.update(data)
326 outfile.write(data)
327
328 # Blecch: closing a file that somebody else opened is rude and
329 # wrong. But it's so darn convenient and practical! After all,
330 # outfile was opened just to copy and hash.
331 outfile.close()
332
333 return hasher.digest()
334
335 def hashrepofile(repo, file):
336 return hashfile(repo.wjoin(file))
337
338 def hashfile(file):
339 if not os.path.exists(file):
340 return ''
341 hasher = util.sha1('')
342 fd = open(file, 'rb')
343 for data in blockstream(fd):
344 hasher.update(data)
345 fd.close()
346 return hasher.hexdigest()
347
348 class limitreader(object):
349 def __init__(self, f, limit):
350 self.f = f
351 self.limit = limit
352
353 def read(self, length):
354 if self.limit == 0:
355 return ''
356 length = length > self.limit and self.limit or length
357 self.limit -= length
358 return self.f.read(length)
359
360 def close(self):
361 pass
362
363 def blockstream(infile, blocksize=128 * 1024):
364 """Generator that yields blocks of data from infile and closes infile."""
365 while True:
366 data = infile.read(blocksize)
367 if not data:
368 break
369 yield data
370 # same blecch as copyandhash() above
371 infile.close()
372
373 def readhash(filename):
374 rfile = open(filename, 'rb')
375 hash = rfile.read(40)
376 rfile.close()
377 if len(hash) < 40:
378 raise util.Abort(_('bad hash in \'%s\' (only %d bytes long)')
379 % (filename, len(hash)))
380 return hash
381
382 def writehash(hash, filename, executable):
383 util.makedirs(os.path.dirname(filename))
384 if os.path.exists(filename):
385 os.unlink(filename)
386 wfile = open(filename, 'wb')
387
388 try:
389 wfile.write(hash)
390 wfile.write('\n')
391 finally:
392 wfile.close()
393 if os.path.exists(filename):
394 os.chmod(filename, getmode(executable))
395
396 def getexecutable(filename):
397 mode = os.stat(filename).st_mode
398 return ((mode & stat.S_IXUSR) and
399 (mode & stat.S_IXGRP) and
400 (mode & stat.S_IXOTH))
401
402 def getmode(executable):
403 if executable:
404 return 0755
405 else:
406 return 0644
407
408 def urljoin(first, second, *arg):
409 def join(left, right):
410 if not left.endswith('/'):
411 left += '/'
412 if right.startswith('/'):
413 right = right[1:]
414 return left + right
415
416 url = join(first, second)
417 for a in arg:
418 url = join(url, a)
419 return url
420
421 def hexsha1(data):
422 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
423 object data"""
424 h = hashlib.sha1()
425 for chunk in util.filechunkiter(data):
426 h.update(chunk)
427 return h.hexdigest()
428
429 def httpsendfile(ui, filename):
430 return httpconnection.httpsendfile(ui, filename, 'rb')
431
432 def unixpath(path):
433 '''Return a version of path normalized for use with the lfdirstate.'''
434 return os.path.normpath(path).replace(os.sep, '/')
435
436 def islfilesrepo(repo):
437 return ('largefiles' in repo.requirements and
438 any_(shortname + '/' in f[0] for f in repo.store.datafiles()))
439
440 def any_(gen):
441 for x in gen:
442 if x:
443 return True
444 return False
445
446 class storeprotonotcapable(BaseException):
447 def __init__(self, storetypes):
448 self.storetypes = storetypes
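
For illustration (not part of this changeset): standin() and splitstandin() above map between a largefile's path and its standin under .hglf/, always using '/' as the separator. A standalone restatement of that mapping, for clarity only::

    import os

    shortname = '.hglf'

    def standin(filename):
        # dirstate always uses '/', even on Windows
        return shortname + '/' + filename.replace(os.sep, '/')

    def splitstandin(filename):
        bits = filename.replace(os.sep, '/').split('/', 1)
        if len(bits) == 2 and bits[0] == shortname:
            return bits[1]
        return None

    assert standin('content/audio/theme.ogg') == '.hglf/content/audio/theme.ogg'
    assert splitstandin('.hglf/library.zip') == 'library.zip'
    assert splitstandin('library.zip') is None
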
@@ -0,0 +1,71 b''
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''store class for local filesystem'''
10
11 import os
12
13 from mercurial import util
14 from mercurial.i18n import _
15
16 import lfutil
17 import basestore
18
19 class localstore(basestore.basestore):
20 '''Because there is a system-wide cache, the local store always
21 uses that cache. Since the cache is updated elsewhere, we can
22 just read from it here as if it were the store.'''
23
24 def __init__(self, ui, repo, remote):
25 url = os.path.join(remote.path, '.hg', lfutil.longname)
26 super(localstore, self).__init__(ui, repo, util.expandpath(url))
27
28 def put(self, source, filename, hash):
29 '''Any file that is put must already be in the system-wide
30 cache so do nothing.'''
31 return
32
33 def exists(self, hash):
34 return lfutil.insystemcache(self.repo.ui, hash)
35
36 def _getfile(self, tmpfile, filename, hash):
37 if lfutil.insystemcache(self.ui, hash):
38 return lfutil.systemcachepath(self.ui, hash)
39 raise basestore.StoreError(filename, hash, '',
40 _("Can't get file locally"))
41
42 def _verifyfile(self, cctx, cset, contents, standin, verified):
43 filename = lfutil.splitstandin(standin)
44 if not filename:
45 return False
46 fctx = cctx[standin]
47 key = (filename, fctx.filenode())
48 if key in verified:
49 return False
50
51 expecthash = fctx.data()[0:40]
52 verified.add(key)
53 if not lfutil.insystemcache(self.ui, expecthash):
54 self.ui.warn(
55 _('changeset %s: %s missing\n'
56 ' (looked for hash %s)\n')
57 % (cset, filename, expecthash))
58 return True # failed
59
60 if contents:
61 storepath = lfutil.systemcachepath(self.ui, expecthash)
62 actualhash = lfutil.hashfile(storepath)
63 if actualhash != expecthash:
64 self.ui.warn(
65 _('changeset %s: %s: contents differ\n'
66 ' (%s:\n'
67 ' expected hash %s,\n'
68 ' but got %s)\n')
69 % (cset, filename, storepath, expecthash, actualhash))
70 return True # failed
71 return False
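
For illustration (not part of this changeset): _verifyfile() above reads the expected hash from the first 40 bytes of the standin and, when contents are being verified, re-hashes the cached copy and compares. A compact sketch of that content check; contentsmatch is a hypothetical helper, not the store API::

    import hashlib

    def contentsmatch(standindata, cachedpath):
        expecthash = standindata[0:40]      # hex SHA-1 recorded in the standin
        h = hashlib.sha1()
        f = open(cachedpath, 'rb')
        try:
            for chunk in iter(lambda: f.read(128 * 1024), ''):
                h.update(chunk)
        finally:
            f.close()
        return h.hexdigest() == expecthash
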
@@ -0,0 +1,830 b''
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10
11 import os
12 import copy
13
14 from mercurial import hg, commands, util, cmdutil, match as match_, node, \
15 archival, error, merge
16 from mercurial.i18n import _
17 from mercurial.node import hex
18 from hgext import rebase
19 import lfutil
20
21 try:
22 from mercurial import scmutil
23 except ImportError:
24 pass
25
26 import lfutil
27 import lfcommands
28
29 def installnormalfilesmatchfn(manifest):
30 '''overrides scmutil.match so that the matcher it returns will ignore all
31 largefiles'''
32 oldmatch = None # for the closure
33 def override_match(repo, pats=[], opts={}, globbed=False,
34 default='relpath'):
35 match = oldmatch(repo, pats, opts, globbed, default)
36 m = copy.copy(match)
37 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
38 manifest)
39 m._files = filter(notlfile, m._files)
40 m._fmap = set(m._files)
41 orig_matchfn = m.matchfn
42 m.matchfn = lambda f: notlfile(f) and orig_matchfn(f) or None
43 return m
44 oldmatch = installmatchfn(override_match)
45
46 def installmatchfn(f):
47 oldmatch = scmutil.match
48 setattr(f, 'oldmatch', oldmatch)
49 scmutil.match = f
50 return oldmatch
51
52 def restorematchfn():
53 '''restores scmutil.match to what it was before installnormalfilesmatchfn
54 was called. no-op if scmutil.match is its original function.
55
56 Note that n calls to installnormalfilesmatchfn will require n calls to
57 restorematchfn to reverse.'''
58 scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)
59
60 # -- Wrappers: modify existing commands --------------------------------
61
62 # Add works by going through the files that the user wanted to add and
63 # checking if they should be added as largefiles. Then it makes a new
64 # matcher which matches only the normal files and runs the original
65 # version of add.
66 def override_add(orig, ui, repo, *pats, **opts):
67 large = opts.pop('large', None)
68 lfsize = lfutil.getminsize(
69 ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
70
71 lfmatcher = None
72 if os.path.exists(repo.wjoin(lfutil.shortname)):
73 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
74 if lfpats:
75 lfmatcher = match_.match(repo.root, '', list(lfpats))
76
77 lfnames = []
78 m = scmutil.match(repo[None], pats, opts)
79 m.bad = lambda x, y: None
80 wctx = repo[None]
81 for f in repo.walk(m):
82 exact = m.exact(f)
83 lfile = lfutil.standin(f) in wctx
84 nfile = f in wctx
85 exists = lfile or nfile
86
87 # Don't warn the user when they attempt to add a normal tracked file.
88 # The normal add code will do that for us.
89 if exact and exists:
90 if lfile:
91 ui.warn(_('%s already a largefile\n') % f)
92 continue
93
94 if exact or not exists:
95 abovemin = (lfsize and
96 os.path.getsize(repo.wjoin(f)) >= lfsize * 1024 * 1024)
97 if large or abovemin or (lfmatcher and lfmatcher(f)):
98 lfnames.append(f)
99 if ui.verbose or not exact:
100 ui.status(_('adding %s as a largefile\n') % m.rel(f))
101
102 bad = []
103 standins = []
104
105 # Need to lock, otherwise there could be a race condition between
106 # when standins are created and added to the repo.
107 wlock = repo.wlock()
108 try:
109 if not opts.get('dry_run'):
110 lfdirstate = lfutil.openlfdirstate(ui, repo)
111 for f in lfnames:
112 standinname = lfutil.standin(f)
113 lfutil.writestandin(repo, standinname, hash='',
114 executable=lfutil.getexecutable(repo.wjoin(f)))
115 standins.append(standinname)
116 if lfdirstate[f] == 'r':
117 lfdirstate.normallookup(f)
118 else:
119 lfdirstate.add(f)
120 lfdirstate.write()
121 bad += [lfutil.splitstandin(f)
122 for f in lfutil.repo_add(repo, standins)
123 if f in m.files()]
124 finally:
125 wlock.release()
126
127 installnormalfilesmatchfn(repo[None].manifest())
128 result = orig(ui, repo, *pats, **opts)
129 restorematchfn()
130
131 return (result == 1 or bad) and 1 or 0
132
133 def override_remove(orig, ui, repo, *pats, **opts):
134 manifest = repo[None].manifest()
135 installnormalfilesmatchfn(manifest)
136 orig(ui, repo, *pats, **opts)
137 restorematchfn()
138
139 after, force = opts.get('after'), opts.get('force')
140 if not pats and not after:
141 raise util.Abort(_('no files specified'))
142 m = scmutil.match(repo[None], pats, opts)
143 try:
144 repo.lfstatus = True
145 s = repo.status(match=m, clean=True)
146 finally:
147 repo.lfstatus = False
148 modified, added, deleted, clean = [[f for f in list
149 if lfutil.standin(f) in manifest]
150 for list in [s[0], s[1], s[3], s[6]]]
151
152 def warn(files, reason):
153 for f in files:
154 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
155 % (m.rel(f), reason))
156
157 if force:
158 remove, forget = modified + deleted + clean, added
159 elif after:
160 remove, forget = deleted, []
161 warn(modified + added + clean, _('still exists'))
162 else:
163 remove, forget = deleted + clean, []
164 warn(modified, _('is modified'))
165 warn(added, _('has been marked for add'))
166
167 for f in sorted(remove + forget):
168 if ui.verbose or not m.exact(f):
169 ui.status(_('removing %s\n') % m.rel(f))
170
171 # Need to lock because standin files are deleted then removed from the
172 # repository and we could race in between.
173 wlock = repo.wlock()
174 try:
175 lfdirstate = lfutil.openlfdirstate(ui, repo)
176 for f in remove:
177 if not after:
178 os.unlink(repo.wjoin(f))
179 currentdir = os.path.split(f)[0]
180 while currentdir and not os.listdir(repo.wjoin(currentdir)):
181 os.rmdir(repo.wjoin(currentdir))
182 currentdir = os.path.split(currentdir)[0]
183 lfdirstate.remove(f)
184 lfdirstate.write()
185
186 forget = [lfutil.standin(f) for f in forget]
187 remove = [lfutil.standin(f) for f in remove]
188 lfutil.repo_forget(repo, forget)
189 lfutil.repo_remove(repo, remove, unlink=True)
190 finally:
191 wlock.release()
192
193 def override_status(orig, ui, repo, *pats, **opts):
194 try:
195 repo.lfstatus = True
196 return orig(ui, repo, *pats, **opts)
197 finally:
198 repo.lfstatus = False
199
200 def override_log(orig, ui, repo, *pats, **opts):
201 try:
202 repo.lfstatus = True
203 orig(ui, repo, *pats, **opts)
204 finally:
205 repo.lfstatus = False
206
207 def override_verify(orig, ui, repo, *pats, **opts):
208 large = opts.pop('large', False)
209 all = opts.pop('lfa', False)
210 contents = opts.pop('lfc', False)
211
212 result = orig(ui, repo, *pats, **opts)
213 if large:
214 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
215 return result
216
217 # Override needs to refresh standins so that update's normal merge
218 # will go through properly. Then the other update hook (overriding repo.update)
219 # will get the new files. Filemerge is also overridden so that the merge
220 # will merge standins correctly.
221 def override_update(orig, ui, repo, *pats, **opts):
222 lfdirstate = lfutil.openlfdirstate(ui, repo)
223 s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
224 False, False)
225 (unsure, modified, added, removed, missing, unknown, ignored, clean) = s
226
227 # Need to lock between the standins getting updated and their
228 # largefiles getting updated
229 wlock = repo.wlock()
230 try:
231 if opts['check']:
232 mod = len(modified) > 0
233 for lfile in unsure:
234 standin = lfutil.standin(lfile)
235 if repo['.'][standin].data().strip() != \
236 lfutil.hashfile(repo.wjoin(lfile)):
237 mod = True
238 else:
239 lfdirstate.normal(lfile)
240 lfdirstate.write()
241 if mod:
242 raise util.Abort(_('uncommitted local changes'))
243 # XXX handle removed differently
244 if not opts['clean']:
245 for lfile in unsure + modified + added:
246 lfutil.updatestandin(repo, lfutil.standin(lfile))
247 finally:
248 wlock.release()
249 return orig(ui, repo, *pats, **opts)
250
251 # Override filemerge to prompt the user about how they wish to merge
252 # largefiles. This will handle identical edits, and copy/rename +
253 # edit without prompting the user.
254 def override_filemerge(origfn, repo, mynode, orig, fcd, fco, fca):
255 # Use better variable names here. Because this is a wrapper we cannot
256 # change the variable names in the function declaration.
257 fcdest, fcother, fcancestor = fcd, fco, fca
258 if not lfutil.isstandin(orig):
259 return origfn(repo, mynode, orig, fcdest, fcother, fcancestor)
260 else:
261 if not fcother.cmp(fcdest): # files identical?
262 return None
263
264 # backwards, use working dir parent as ancestor
265 if fcancestor == fcother:
266 fcancestor = fcdest.parents()[0]
267
268 if orig != fcother.path():
269 repo.ui.status(_('merging %s and %s to %s\n')
270 % (lfutil.splitstandin(orig),
271 lfutil.splitstandin(fcother.path()),
272 lfutil.splitstandin(fcdest.path())))
273 else:
274 repo.ui.status(_('merging %s\n')
275 % lfutil.splitstandin(fcdest.path()))
276
277 if fcancestor.path() != fcother.path() and fcother.data() == \
278 fcancestor.data():
279 return 0
280 if fcancestor.path() != fcdest.path() and fcdest.data() == \
281 fcancestor.data():
282 repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
283 return 0
284
285 if repo.ui.promptchoice(_('largefile %s has a merge conflict\n'
286 'keep (l)ocal or take (o)ther?') %
287 lfutil.splitstandin(orig),
288 (_('&Local'), _('&Other')), 0) == 0:
289 return 0
290 else:
291 repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
292 return 0
293
294 # Copy first changes the matchers to match standins instead of
295 # largefiles. Then it overrides util.copyfile; in that function it
296 # checks if the destination largefile already exists. It also keeps a
297 # list of copied files so that the largefiles can be copied and the
298 # dirstate updated.
299 def override_copy(orig, ui, repo, pats, opts, rename=False):
300 # doesn't remove largefile on rename
301 if len(pats) < 2:
302 # this isn't legal, let the original function deal with it
303 return orig(ui, repo, pats, opts, rename)
304
305 def makestandin(relpath):
306 path = scmutil.canonpath(repo.root, repo.getcwd(), relpath)
307 return os.path.join(os.path.relpath('.', repo.getcwd()),
308 lfutil.standin(path))
309
310 fullpats = scmutil.expandpats(pats)
311 dest = fullpats[-1]
312
313 if os.path.isdir(dest):
314 if not os.path.isdir(makestandin(dest)):
315 os.makedirs(makestandin(dest))
316 # This could copy both lfiles and normal files in one command,
317 # but we don't want to do that. First replace their matcher to
318 # only match normal files and run it, then replace it to just
319 # match largefiles and run it again.
320 nonormalfiles = False
321 nolfiles = False
322 try:
323 installnormalfilesmatchfn(repo[None].manifest())
324 result = orig(ui, repo, pats, opts, rename)
325 except util.Abort, e:
326 if str(e) != 'no files to copy':
327 raise e
328 else:
329 nonormalfiles = True
330 result = 0
331 finally:
332 restorematchfn()
333
334 # The first rename can cause our current working directory to be removed.
335 # In that case there is nothing left to copy/rename so just quit.
336 try:
337 repo.getcwd()
338 except OSError:
339 return result
340
341 try:
342 # When we call orig below it creates the standins but we don't add them
343 # to the dirstate until later, so lock during that time.
344 wlock = repo.wlock()
345
346 manifest = repo[None].manifest()
347 oldmatch = None # for the closure
348 def override_match(repo, pats=[], opts={}, globbed=False,
349 default='relpath'):
350 newpats = []
351 # The patterns were previously mangled to add the standin
352 # directory; we need to remove that now
353 for pat in pats:
354 if match_.patkind(pat) is None and lfutil.shortname in pat:
355 newpats.append(pat.replace(lfutil.shortname, ''))
356 else:
357 newpats.append(pat)
358 match = oldmatch(repo, newpats, opts, globbed, default)
359 m = copy.copy(match)
360 lfile = lambda f: lfutil.standin(f) in manifest
361 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
362 m._fmap = set(m._files)
363 orig_matchfn = m.matchfn
364 m.matchfn = lambda f: (lfutil.isstandin(f) and
365 lfile(lfutil.splitstandin(f)) and
366 orig_matchfn(lfutil.splitstandin(f)) or
367 None)
368 return m
369 oldmatch = installmatchfn(override_match)
370 listpats = []
371 for pat in pats:
372 if match_.patkind(pat) is not None:
373 listpats.append(pat)
374 else:
375 listpats.append(makestandin(pat))
376
377 try:
378 origcopyfile = util.copyfile
379 copiedfiles = []
380 def override_copyfile(src, dest):
381 if lfutil.shortname in src and lfutil.shortname in dest:
382 destlfile = dest.replace(lfutil.shortname, '')
383 if not opts['force'] and os.path.exists(destlfile):
384 raise IOError('',
385 _('destination largefile already exists'))
386 copiedfiles.append((src, dest))
387 origcopyfile(src, dest)
388
389 util.copyfile = override_copyfile
390 result += orig(ui, repo, listpats, opts, rename)
391 finally:
392 util.copyfile = origcopyfile
393
394 lfdirstate = lfutil.openlfdirstate(ui, repo)
395 for (src, dest) in copiedfiles:
396 if lfutil.shortname in src and lfutil.shortname in dest:
397 srclfile = src.replace(lfutil.shortname, '')
398 destlfile = dest.replace(lfutil.shortname, '')
399 destlfiledir = os.path.dirname(destlfile) or '.'
400 if not os.path.isdir(destlfiledir):
401 os.makedirs(destlfiledir)
402 if rename:
403 os.rename(srclfile, destlfile)
404 lfdirstate.remove(os.path.relpath(srclfile,
405 repo.root))
406 else:
407 util.copyfile(srclfile, destlfile)
408 lfdirstate.add(os.path.relpath(destlfile,
409 repo.root))
410 lfdirstate.write()
411 except util.Abort, e:
412 if str(e) != 'no files to copy':
413 raise e
414 else:
415 nolfiles = True
416 finally:
417 restorematchfn()
418 wlock.release()
419
420 if nolfiles and nonormalfiles:
421 raise util.Abort(_('no files to copy'))
422
423 return result
424
425 # When the user calls revert, we have to be careful to not revert any
426 # changes to other largefiles accidentally. This means we have to keep
427 # track of the largefiles that are being reverted so we only pull down
428 # the necessary largefiles.
429 #
430 # Standins are only updated (to match the hash of largefiles) before
431 # commits. Update the standins then run the original revert, changing
432 # the matcher to hit standins instead of largefiles. Based on the
433 # resulting standins, update the largefiles. Then return the standins
434 # to their proper state.
435 def override_revert(orig, ui, repo, *pats, **opts):
436 # Because we put the standins in a bad state (by updating them)
437 # and then return them to a correct state we need to lock to
438 # prevent others from changing them in their incorrect state.
439 wlock = repo.wlock()
440 try:
441 lfdirstate = lfutil.openlfdirstate(ui, repo)
442 (modified, added, removed, missing, unknown, ignored, clean) = \
443 lfutil.lfdirstate_status(lfdirstate, repo, repo['.'].rev())
444 for lfile in modified:
445 lfutil.updatestandin(repo, lfutil.standin(lfile))
446
447 try:
448 ctx = repo[opts.get('rev')]
449 oldmatch = None # for the closure
450 def override_match(ctxorrepo, pats=[], opts={}, globbed=False,
451 default='relpath'):
452 if util.safehasattr(ctxorrepo, 'match'):
453 ctx0 = ctxorrepo
454 else:
455 ctx0 = ctxorrepo[None]
456 match = oldmatch(ctxorrepo, pats, opts, globbed, default)
457 m = copy.copy(match)
458 def tostandin(f):
459 if lfutil.standin(f) in ctx0 or lfutil.standin(f) in ctx:
460 return lfutil.standin(f)
461 elif lfutil.standin(f) in repo[None]:
462 return None
463 return f
464 m._files = [tostandin(f) for f in m._files]
465 m._files = [f for f in m._files if f is not None]
466 m._fmap = set(m._files)
467 orig_matchfn = m.matchfn
468 def matchfn(f):
469 if lfutil.isstandin(f):
470 # We need to keep track of what largefiles are being
471 # matched so we know which ones to update later --
472 # otherwise we accidentally revert changes to other
473 # largefiles. This is repo-specific, so duckpunch the
474 # repo object to keep the list of largefiles for us
475 # later.
476 if orig_matchfn(lfutil.splitstandin(f)) and \
477 (f in repo[None] or f in ctx):
478 lfileslist = getattr(repo, '_lfilestoupdate', [])
479 lfileslist.append(lfutil.splitstandin(f))
480 repo._lfilestoupdate = lfileslist
481 return True
482 else:
483 return False
484 return orig_matchfn(f)
485 m.matchfn = matchfn
486 return m
487 oldmatch = installmatchfn(override_match)
488 scmutil.match
489 matches = override_match(repo[None], pats, opts)
490 orig(ui, repo, *pats, **opts)
491 finally:
492 restorematchfn()
493 lfileslist = getattr(repo, '_lfilestoupdate', [])
494 lfcommands.updatelfiles(ui, repo, filelist=lfileslist,
495 printmessage=False)
496
497 # empty out the largefiles list so we start fresh next time
498 repo._lfilestoupdate = []
499 for lfile in modified:
500 if lfile in lfileslist:
501 if os.path.exists(repo.wjoin(lfutil.standin(lfile))) and lfile\
502 in repo['.']:
503 lfutil.writestandin(repo, lfutil.standin(lfile),
504 repo['.'][lfile].data().strip(),
505 'x' in repo['.'][lfile].flags())
506 lfdirstate = lfutil.openlfdirstate(ui, repo)
507 for lfile in added:
508 standin = lfutil.standin(lfile)
509 if standin not in ctx and (standin in matches or opts.get('all')):
510 if lfile in lfdirstate:
511 lfdirstate.drop(lfile)
512 util.unlinkpath(repo.wjoin(standin))
513 lfdirstate.write()
514 finally:
515 wlock.release()
516
517 def hg_update(orig, repo, node):
518 result = orig(repo, node)
519 # XXX check if it worked first
520 lfcommands.updatelfiles(repo.ui, repo)
521 return result
522
523 def hg_clean(orig, repo, node, show_stats=True):
524 result = orig(repo, node, show_stats)
525 lfcommands.updatelfiles(repo.ui, repo)
526 return result
527
528 def hg_merge(orig, repo, node, force=None, remind=True):
529 result = orig(repo, node, force, remind)
530 lfcommands.updatelfiles(repo.ui, repo)
531 return result
532
533 # When we rebase a repository with remotely changed largefiles, we need to
534 # take some extra care so that the largefiles are correctly updated in the
535 # working copy
536 def override_pull(orig, ui, repo, source=None, **opts):
537 if opts.get('rebase', False):
538 repo._isrebasing = True
539 try:
540 if opts.get('update'):
541 del opts['update']
542 ui.debug('--update and --rebase are not compatible, ignoring '
543 'the update flag\n')
544 del opts['rebase']
545 cmdutil.bailifchanged(repo)
546 revsprepull = len(repo)
547 origpostincoming = commands.postincoming
548 def _dummy(*args, **kwargs):
549 pass
550 commands.postincoming = _dummy
551 repo.lfpullsource = source
552 if not source:
553 source = 'default'
554 try:
555 result = commands.pull(ui, repo, source, **opts)
556 finally:
557 commands.postincoming = origpostincoming
558 revspostpull = len(repo)
559 if revspostpull > revsprepull:
560 result = result or rebase.rebase(ui, repo)
561 finally:
562 repo._isrebasing = False
563 else:
564 repo.lfpullsource = source
565 if not source:
566 source = 'default'
567 result = orig(ui, repo, source, **opts)
568 return result
569
570 def override_rebase(orig, ui, repo, **opts):
571 repo._isrebasing = True
572 try:
573 orig(ui, repo, **opts)
574 finally:
575 repo._isrebasing = False
576
577 def override_archive(orig, repo, dest, node, kind, decode=True, matchfn=None,
578 prefix=None, mtime=None, subrepos=None):
579 # No need to lock because we are only reading history and
580 # largefile caches, neither of which are modified.
581 lfcommands.cachelfiles(repo.ui, repo, node)
582
583 if kind not in archival.archivers:
584 raise util.Abort(_("unknown archive type '%s'") % kind)
585
586 ctx = repo[node]
587
588 if kind == 'files':
589 if prefix:
590 raise util.Abort(
591 _('cannot give prefix when archiving to files'))
592 else:
593 prefix = archival.tidyprefix(dest, kind, prefix)
594
595 def write(name, mode, islink, getdata):
596 if matchfn and not matchfn(name):
597 return
598 data = getdata()
599 if decode:
600 data = repo.wwritedata(name, data)
601 archiver.addfile(prefix + name, mode, islink, data)
602
603 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
604
605 if repo.ui.configbool("ui", "archivemeta", True):
606 def metadata():
607 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
608 hex(repo.changelog.node(0)), hex(node), ctx.branch())
609
610 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
611 if repo.tagtype(t) == 'global')
612 if not tags:
613 repo.ui.pushbuffer()
614 opts = {'template': '{latesttag}\n{latesttagdistance}',
615 'style': '', 'patch': None, 'git': None}
616 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
617 ltags, dist = repo.ui.popbuffer().split('\n')
618 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
619 tags += 'latesttagdistance: %s\n' % dist
620
621 return base + tags
622
623 write('.hg_archival.txt', 0644, False, metadata)
624
625 for f in ctx:
626 ff = ctx.flags(f)
627 getdata = ctx[f].data
628 if lfutil.isstandin(f):
629 path = lfutil.findfile(repo, getdata().strip())
630 f = lfutil.splitstandin(f)
631
632 def getdatafn():
633 try:
634 fd = open(path, 'rb')
635 return fd.read()
636 finally:
637 fd.close()
638
639 getdata = getdatafn
640 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
641
642 if subrepos:
643 for subpath in ctx.substate:
644 sub = ctx.sub(subpath)
645 try:
646 sub.archive(repo.ui, archiver, prefix)
647 except TypeError:
648 sub.archive(archiver, prefix)
649
650 archiver.done()
651
652 # If a largefile is modified, the change is not reflected in its
653 # standin until a commit. cmdutil.bailifchanged() raises an exception
654 # if the repo has uncommitted changes. Wrap it to also check if
655 # largefiles were changed. This is used by bisect and backout.
656 def override_bailifchanged(orig, repo):
657 orig(repo)
658 repo.lfstatus = True
659 modified, added, removed, deleted = repo.status()[:4]
660 repo.lfstatus = False
661 if modified or added or removed or deleted:
662 raise util.Abort(_('outstanding uncommitted changes'))
663
664 # Fetch doesn't use cmdutil.bail_if_changed so override it to add the check
665 def override_fetch(orig, ui, repo, *pats, **opts):
666 repo.lfstatus = True
667 modified, added, removed, deleted = repo.status()[:4]
668 repo.lfstatus = False
669 if modified or added or removed or deleted:
670 raise util.Abort(_('outstanding uncommitted changes'))
671 return orig(ui, repo, *pats, **opts)
672
673 def override_forget(orig, ui, repo, *pats, **opts):
674 installnormalfilesmatchfn(repo[None].manifest())
675 orig(ui, repo, *pats, **opts)
676 restorematchfn()
677 m = scmutil.match(repo[None], pats, opts)
678
679 try:
680 repo.lfstatus = True
681 s = repo.status(match=m, clean=True)
682 finally:
683 repo.lfstatus = False
684 forget = sorted(s[0] + s[1] + s[3] + s[6])
685 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
686
687 for f in forget:
688 if lfutil.standin(f) not in repo.dirstate and not \
689 os.path.isdir(m.rel(lfutil.standin(f))):
690 ui.warn(_('not removing %s: file is already untracked\n')
691 % m.rel(f))
692
693 for f in forget:
694 if ui.verbose or not m.exact(f):
695 ui.status(_('removing %s\n') % m.rel(f))
696
697 # Need to lock because standin files are deleted then removed from the
698 # repository and we could race in between.
699 wlock = repo.wlock()
700 try:
701 lfdirstate = lfutil.openlfdirstate(ui, repo)
702 for f in forget:
703 if lfdirstate[f] == 'a':
704 lfdirstate.drop(f)
705 else:
706 lfdirstate.remove(f)
707 lfdirstate.write()
708 lfutil.repo_remove(repo, [lfutil.standin(f) for f in forget],
709 unlink=True)
710 finally:
711 wlock.release()
712
713 def getoutgoinglfiles(ui, repo, dest=None, **opts):
714 dest = ui.expandpath(dest or 'default-push', dest or 'default')
715 dest, branches = hg.parseurl(dest, opts.get('branch'))
716 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
717 if revs:
718 revs = [repo.lookup(rev) for rev in revs]
719
720 remoteui = hg.remoteui
721
722 try:
723 remote = hg.repository(remoteui(repo, opts), dest)
724 except error.RepoError:
725 return None
726 o = lfutil.findoutgoing(repo, remote, False)
727 if not o:
728 return None
729 o = repo.changelog.nodesbetween(o, revs)[0]
730 if opts.get('newest_first'):
731 o.reverse()
732
733 toupload = set()
734 for n in o:
735 parents = [p for p in repo.changelog.parents(n) if p != node.nullid]
736 ctx = repo[n]
737 files = set(ctx.files())
738 if len(parents) == 2:
739 mc = ctx.manifest()
740 mp1 = ctx.parents()[0].manifest()
741 mp2 = ctx.parents()[1].manifest()
742 for f in mp1:
743 if f not in mc:
744 files.add(f)
745 for f in mp2:
746 if f not in mc:
747 files.add(f)
748 for f in mc:
749 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
750 files.add(f)
751 toupload = toupload.union(
752 set([f for f in files if lfutil.isstandin(f) and f in ctx]))
753 return toupload
754
755 def override_outgoing(orig, ui, repo, dest=None, **opts):
756 orig(ui, repo, dest, **opts)
757
758 if opts.pop('large', None):
759 toupload = getoutgoinglfiles(ui, repo, dest, **opts)
760 if toupload is None:
761 ui.status(_('largefiles: No remote repo\n'))
762 else:
763 ui.status(_('largefiles to upload:\n'))
764 for file in toupload:
765 ui.status(lfutil.splitstandin(file) + '\n')
766 ui.status('\n')
767
768 def override_summary(orig, ui, repo, *pats, **opts):
769 orig(ui, repo, *pats, **opts)
770
771 if opts.pop('large', None):
772 toupload = getoutgoinglfiles(ui, repo, None, **opts)
773 if toupload is None:
774 ui.status(_('largefiles: No remote repo\n'))
775 else:
776 ui.status(_('largefiles: %d to upload\n') % len(toupload))
777
778 def override_addremove(orig, ui, repo, *pats, **opts):
779 # Check if the parent or child has largefiles; if so, disallow
780 # addremove. If there is a symlink in the manifest then getting
781 # the manifest throws an exception: catch it and let addremove
782 # deal with it.
783 try:
784 manifesttip = set(repo['tip'].manifest())
785 except util.Abort:
786 manifesttip = set()
787 try:
788 manifestworking = set(repo[None].manifest())
789 except util.Abort:
790 manifestworking = set()
791
792 # Manifests are only iterable so turn them into sets then union
793 for file in manifesttip.union(manifestworking):
794 if file.startswith(lfutil.shortname):
795 raise util.Abort(
796 _('addremove cannot be run on a repo with largefiles'))
797
798 return orig(ui, repo, *pats, **opts)
799
800 # Calling purge with --all will cause the largefiles to be deleted.
801 # Override repo.status to prevent this from happening.
802 def override_purge(orig, ui, repo, *dirs, **opts):
803 oldstatus = repo.status
804 def override_status(node1='.', node2=None, match=None, ignored=False,
805 clean=False, unknown=False, listsubrepos=False):
806 r = oldstatus(node1, node2, match, ignored, clean, unknown,
807 listsubrepos)
808 lfdirstate = lfutil.openlfdirstate(ui, repo)
809 modified, added, removed, deleted, unknown, ignored, clean = r
810 unknown = [f for f in unknown if lfdirstate[f] == '?']
811 ignored = [f for f in ignored if lfdirstate[f] == '?']
812 return modified, added, removed, deleted, unknown, ignored, clean
813 repo.status = override_status
814 orig(ui, repo, *dirs, **opts)
815 repo.status = oldstatus
816
817 def override_rollback(orig, ui, repo, **opts):
818 result = orig(ui, repo, **opts)
819 merge.update(repo, node=None, branchmerge=False, force=True,
820 partial=lfutil.isstandin)
821 lfdirstate = lfutil.openlfdirstate(ui, repo)
822 lfiles = lfutil.listlfiles(repo)
823 oldlfiles = lfutil.listlfiles(repo, repo[None].parents()[0].rev())
824 for file in lfiles:
825 if file in oldlfiles:
826 lfdirstate.normallookup(file)
827 else:
828 lfdirstate.add(file)
829 lfdirstate.write()
830 return result
@@ -0,0 +1,160 b''
1 # Copyright 2011 Fog Creek Software
2 #
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
5
6 import os
7 import tempfile
8 import urllib2
9
10 from mercurial import error, httprepo, util, wireproto
11 from mercurial.i18n import _
12
13 import lfutil
14
15 LARGEFILES_REQUIRED_MSG = ('\nThis repository uses the largefiles extension.'
16 '\n\nPlease enable it in your Mercurial config '
17 'file.\n')
18
19 def putlfile(repo, proto, sha):
20 '''Put a largefile into a repository's local cache and into the
21 system cache.'''
22 f = None
23 proto.redirect()
24 try:
25 try:
26 f = tempfile.NamedTemporaryFile(mode='wb+', prefix='hg-putlfile-')
27 proto.getfile(f)
28 f.seek(0)
29 if sha != lfutil.hexsha1(f):
30 return wireproto.pushres(1)
31 lfutil.copytocacheabsolute(repo, f.name, sha)
32 except IOError:
33 repo.ui.warn(
34 _('error: could not put received data into largefile store'))
35 return wireproto.pushres(1)
36 finally:
37 if f:
38 f.close()
39
40 return wireproto.pushres(0)
41
42 def getlfile(repo, proto, sha):
43 '''Retrieve a largefile from the repository-local cache or system
44 cache.'''
45 filename = lfutil.findfile(repo, sha)
46 if not filename:
47 raise util.Abort(_('requested largefile %s not present in cache') % sha)
48 f = open(filename, 'rb')
49 length = os.fstat(f.fileno())[6]
50
51 # Since we can't set an HTTP content-length header here, and
52 # Mercurial core provides no way to give the length of a streamres
53 # (and reading the entire file into RAM would be ill-advised), we
54 # just send the length on the first line of the response, like the
55 # ssh proto does for string responses.
56 def generator():
57 yield '%d\n' % length
58 for chunk in f:
59 yield chunk
60 return wireproto.streamres(generator())
61
62 def statlfile(repo, proto, sha):
63 '''Return '2\n' if the largefile is missing, '1\n' if it has a
64 mismatched checksum, or '0\n' if it is in good condition'''
65 filename = lfutil.findfile(repo, sha)
66 if not filename:
67 return '2\n'
68 fd = None
69 try:
70 fd = open(filename, 'rb')
71 return lfutil.hexsha1(fd) == sha and '0\n' or '1\n'
72 finally:
73 if fd:
74 fd.close()
75
76 def wirereposetup(ui, repo):
77 class lfileswirerepository(repo.__class__):
78 def putlfile(self, sha, fd):
79 # unfortunately, httprepository._callpush tries to convert its
80 # input file-like into a bundle before sending it, so we can't use
81 # it ...
82 if issubclass(self.__class__, httprepo.httprepository):
83 try:
84 return int(self._call('putlfile', data=fd, sha=sha,
85 headers={'content-type':'application/mercurial-0.1'}))
86 except (ValueError, urllib2.HTTPError):
87 return 1
88 # ... but we can't use sshrepository._call because the data=
89 # argument won't get sent, and _callpush does exactly what we want
90 # in this case: send the data straight through
91 else:
92 try:
93 ret, output = self._callpush("putlfile", fd, sha=sha)
94 if ret == "":
95 raise error.ResponseError(_('putlfile failed:'),
96 output)
97 return int(ret)
98 except IOError:
99 return 1
100 except ValueError:
101 raise error.ResponseError(
102 _('putlfile failed (unexpected response):'), ret)
103
104 def getlfile(self, sha):
105 stream = self._callstream("getlfile", sha=sha)
106 length = stream.readline()
107 try:
108 length = int(length)
109 except ValueError:
110 self._abort(error.ResponseError(_("unexpected response:"),
111 length))
112 return (length, stream)
113
114 def statlfile(self, sha):
115 try:
116 return int(self._call("statlfile", sha=sha))
117 except (ValueError, urllib2.HTTPError):
118 # If the server returns anything but an integer followed by a
119 # newline, it's not speaking our language; if we get
120 # an HTTP error, we can't be sure the largefile is present;
121 # either way, consider it missing.
122 return 2
123
124 repo.__class__ = lfileswirerepository
125
126 # advertise the largefiles=serve capability
127 def capabilities(repo, proto):
128 return capabilities_orig(repo, proto) + ' largefiles=serve'
129
130 # duplicate what Mercurial's new out-of-band errors mechanism does, because
131 # old and new clients alike handle it well
132 def webproto_refuseclient(self, message):
133 self.req.header([('Content-Type', 'application/hg-error')])
134 return message
135
136 def sshproto_refuseclient(self, message):
137 self.ui.write_err('%s\n-\n' % message)
138 self.fout.write('\n')
139 self.fout.flush()
140
141 return ''
142
143 def heads(repo, proto):
144 if lfutil.islfilesrepo(repo):
145 return wireproto.ooberror(LARGEFILES_REQUIRED_MSG)
146 return wireproto.heads(repo, proto)
147
148 def sshrepo_callstream(self, cmd, **args):
149 if cmd == 'heads' and self.capable('largefiles'):
150 cmd = 'lheads'
151 if cmd == 'batch' and self.capable('largefiles'):
152 args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
153 return ssh_oldcallstream(self, cmd, **args)
154
155 def httprepo_callstream(self, cmd, **args):
156 if cmd == 'heads' and self.capable('largefiles'):
157 cmd = 'lheads'
158 if cmd == 'batch' and self.capable('largefiles'):
159 args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
160 return http_oldcallstream(self, cmd, **args)
@@ -0,0 +1,106 b''
1 # Copyright 2010-2011 Fog Creek Software
2 # Copyright 2010-2011 Unity Technologies
3 #
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
6
7 '''remote largefile store; the base class for servestore'''
8
9 import urllib2
10
11 from mercurial import util
12 from mercurial.i18n import _
13
14 import lfutil
15 import basestore
16
17 class remotestore(basestore.basestore):
18 '''a largefile store accessed over a network'''
19 def __init__(self, ui, repo, url):
20 super(remotestore, self).__init__(ui, repo, url)
21
22 def put(self, source, hash):
23 if self._verify(hash):
24 return
25 if self.sendfile(source, hash):
26 raise util.Abort(
27 _('remotestore: could not put %s to remote store %s')
28 % (source, self.url))
29 self.ui.debug(
30 _('remotestore: put %s to remote store %s') % (source, self.url))
31
32 def exists(self, hash):
33 return self._verify(hash)
34
35 def sendfile(self, filename, hash):
36 self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
37 fd = None
38 try:
39 try:
40 fd = lfutil.httpsendfile(self.ui, filename)
41 except IOError, e:
42 raise util.Abort(
43 _('remotestore: could not open file %s: %s')
44 % (filename, str(e)))
45 return self._put(hash, fd)
46 finally:
47 if fd:
48 fd.close()
49
50 def _getfile(self, tmpfile, filename, hash):
51 # quit if the largefile isn't there
52 stat = self._stat(hash)
53 if stat == 1:
54 raise util.Abort(_('remotestore: largefile %s is invalid') % hash)
55 elif stat == 2:
56 raise util.Abort(_('remotestore: largefile %s is missing') % hash)
57
58 try:
59 length, infile = self._get(hash)
60 except urllib2.HTTPError, e:
61 # 401s get converted to util.Aborts; everything else is fine being
62 # turned into a StoreError
63 raise basestore.StoreError(filename, hash, self.url, str(e))
64 except urllib2.URLError, e:
65 # This usually indicates a connection problem, so don't
66 # keep trying with the other files... they will probably
67 # all fail too.
68 raise util.Abort('%s: %s' % (self.url, e.reason))
69 except IOError, e:
70 raise basestore.StoreError(filename, hash, self.url, str(e))
71
72 # Mercurial does not close its SSH connections after writing a stream
73 if length is not None:
74 infile = lfutil.limitreader(infile, length)
75 return lfutil.copyandhash(lfutil.blockstream(infile), tmpfile)
76
77 def _verify(self, hash):
78 return not self._stat(hash)
79
80 def _verifyfile(self, cctx, cset, contents, standin, verified):
81 filename = lfutil.splitstandin(standin)
82 if not filename:
83 return False
84 fctx = cctx[standin]
85 key = (filename, fctx.filenode())
86 if key in verified:
87 return False
88
89 verified.add(key)
90
91 stat = self._stat(fctx.data()[0:40]) # the standin's contents are the expected hash
92 if not stat:
93 return False
94 elif stat == 1:
95 self.ui.warn(
96 _('changeset %s: %s: contents differ\n')
97 % (cset, filename))
98 return True # failed
99 elif stat == 2:
100 self.ui.warn(
101 _('changeset %s: %s missing\n')
102 % (cset, filename))
103 return True # failed
104 else:
105 raise RuntimeError('verify failed: unexpected response from '
106 'statlfile (%r)' % stat)
@@ -0,0 +1,416 b''
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''setup for largefiles repositories: reposetup'''
10 import copy
11 import types
12 import os
13 import re
14
15 from mercurial import context, error, manifest, match as match_, \
16 node, util
17 from mercurial.i18n import _
18
19 import lfcommands
20 import proto
21 import lfutil
22
23 def reposetup(ui, repo):
24 # wire repositories should be given new wireproto functions but not the
25 # other largefiles modifications
26 if not repo.local():
27 return proto.wirereposetup(ui, repo)
28
29 for name in ('status', 'commitctx', 'commit', 'push'):
30 method = getattr(repo, name)
31 #if not (isinstance(method, types.MethodType) and
32 # method.im_func is repo.__class__.commitctx.im_func):
33 if (isinstance(method, types.FunctionType) and
34 method.func_name == 'wrap'):
35 ui.warn(_('largefiles: repo method %r appears to have already been'
36 ' wrapped by another extension: '
37 'largefiles may behave incorrectly\n')
38 % name)
39
40 class lfiles_repo(repo.__class__):
41 lfstatus = False
42 def status_nolfiles(self, *args, **kwargs):
43 return super(lfiles_repo, self).status(*args, **kwargs)
44
45 # When lfstatus is set, return a context that gives the names
46 # of largefiles instead of their corresponding standins and
47 # identifies the largefiles as always binary, regardless of
48 # their actual contents.
49 def __getitem__(self, changeid):
50 ctx = super(lfiles_repo, self).__getitem__(changeid)
51 if self.lfstatus:
52 class lfiles_manifestdict(manifest.manifestdict):
53 def __contains__(self, filename):
54 if super(lfiles_manifestdict,
55 self).__contains__(filename):
56 return True
57 return super(lfiles_manifestdict,
58 self).__contains__(lfutil.shortname+'/' + filename)
59 class lfiles_ctx(ctx.__class__):
60 def files(self):
61 filenames = super(lfiles_ctx, self).files()
62 return [re.sub('^\\'+lfutil.shortname+'/', '',
63 filename) for filename in filenames]
64 def manifest(self):
65 man1 = super(lfiles_ctx, self).manifest()
66 man1.__class__ = lfiles_manifestdict
67 return man1
68 def filectx(self, path, fileid=None, filelog=None):
69 try:
70 result = super(lfiles_ctx, self).filectx(path,
71 fileid, filelog)
72 except error.LookupError:
73 # Adding a null character will cause Mercurial to
74 # identify this as a binary file.
75 result = super(lfiles_ctx, self).filectx(
76 lfutil.shortname + '/' + path, fileid,
77 filelog)
78 olddata = result.data
79 result.data = lambda: olddata() + '\0'
80 return result
81 ctx.__class__ = lfiles_ctx
82 return ctx
83
84 # Figure out the status of big files and insert them into the
85 # appropriate list in the result. Also removes standin files
86 # from the listing. Revert to the original status if
87 # self.lfstatus is False.
88 def status(self, node1='.', node2=None, match=None, ignored=False,
89 clean=False, unknown=False, listsubrepos=False):
90 listignored, listclean, listunknown = ignored, clean, unknown
91 if not self.lfstatus:
92 try:
93 return super(lfiles_repo, self).status(node1, node2, match,
94 listignored, listclean, listunknown, listsubrepos)
95 except TypeError:
96 return super(lfiles_repo, self).status(node1, node2, match,
97 listignored, listclean, listunknown)
98 else:
99 # some calls in this function rely on the old version of status
100 self.lfstatus = False
101 if isinstance(node1, context.changectx):
102 ctx1 = node1
103 else:
104 ctx1 = repo[node1]
105 if isinstance(node2, context.changectx):
106 ctx2 = node2
107 else:
108 ctx2 = repo[node2]
109 working = ctx2.rev() is None
110 parentworking = working and ctx1 == self['.']
111
112 def inctx(file, ctx):
113 try:
114 if ctx.rev() is None:
115 return file in ctx.manifest()
116 ctx[file]
117 return True
118 except KeyError:
119 return False
120
121 if match is None:
122 match = match_.always(self.root, self.getcwd())
123
124 # Create a copy of match that matches standins instead
125 # of largefiles.
126 def tostandin(file):
127 if inctx(lfutil.standin(file), ctx2):
128 return lfutil.standin(file)
129 return file
130
131 m = copy.copy(match)
132 m._files = [tostandin(f) for f in m._files]
133
134 # get ignored, clean, and unknown but remove them
135 # later if they were not asked for
136 try:
137 result = super(lfiles_repo, self).status(node1, node2, m,
138 True, True, True, listsubrepos)
139 except TypeError:
140 result = super(lfiles_repo, self).status(node1, node2, m,
141 True, True, True)
142 if working:
143 # hold the wlock while we read largefiles and
144 # update the lfdirstate
145 wlock = repo.wlock()
146 try:
147 # Any non-largefiles that were explicitly listed must be
148 # taken out or lfdirstate.status will report an error.
149 # The status of these files was already computed using
150 # super's status.
151 lfdirstate = lfutil.openlfdirstate(ui, self)
152 match._files = [f for f in match._files if f in
153 lfdirstate]
154 s = lfdirstate.status(match, [], listignored,
155 listclean, listunknown)
156 (unsure, modified, added, removed, missing, unknown,
157 ignored, clean) = s
158 if parentworking:
159 for lfile in unsure:
160 if ctx1[lfutil.standin(lfile)].data().strip() \
161 != lfutil.hashfile(self.wjoin(lfile)):
162 modified.append(lfile)
163 else:
164 clean.append(lfile)
165 lfdirstate.normal(lfile)
166 lfdirstate.write()
167 else:
168 tocheck = unsure + modified + added + clean
169 modified, added, clean = [], [], []
170
171 for lfile in tocheck:
172 standin = lfutil.standin(lfile)
173 if inctx(standin, ctx1):
174 if ctx1[standin].data().strip() != \
175 lfutil.hashfile(self.wjoin(lfile)):
176 modified.append(lfile)
177 else:
178 clean.append(lfile)
179 else:
180 added.append(lfile)
181 finally:
182 wlock.release()
183
184 for standin in ctx1.manifest():
185 if not lfutil.isstandin(standin):
186 continue
187 lfile = lfutil.splitstandin(standin)
188 if not match(lfile):
189 continue
190 if lfile not in lfdirstate:
191 removed.append(lfile)
192 # Handle unknown and ignored differently
193 lfiles = (modified, added, removed, missing, [], [], clean)
194 result = list(result)
195 # Unknown files
196 result[4] = [f for f in unknown
197 if (repo.dirstate[f] == '?' and
198 not lfutil.isstandin(f))]
199 # Ignored files must be ignored by both the dirstate and
200 # lfdirstate
201 result[5] = set(ignored).intersection(set(result[5]))
202 # combine normal files and largefiles
203 normals = [[fn for fn in filelist
204 if not lfutil.isstandin(fn)]
205 for filelist in result]
206 result = [sorted(list1 + list2)
207 for (list1, list2) in zip(normals, lfiles)]
208 else:
209 def toname(f):
210 if lfutil.isstandin(f):
211 return lfutil.splitstandin(f)
212 return f
213 result = [[toname(f) for f in items] for items in result]
214
215 if not listunknown:
216 result[4] = []
217 if not listignored:
218 result[5] = []
219 if not listclean:
220 result[6] = []
221 self.lfstatus = True
222 return result
223
224 # As part of committing, copy all of the largefiles into the
225 # cache.
226 def commitctx(self, *args, **kwargs):
227 node = super(lfiles_repo, self).commitctx(*args, **kwargs)
228 ctx = self[node]
229 for filename in ctx.files():
230 if lfutil.isstandin(filename) and filename in ctx.manifest():
231 realfile = lfutil.splitstandin(filename)
232 lfutil.copytocache(self, ctx.node(), realfile)
233
234 return node
235
236 # Before commit, largefile standins have not had their
237 # contents updated to reflect the hash of their largefile.
238 # Do that here.
239 def commit(self, text="", user=None, date=None, match=None,
240 force=False, editor=False, extra={}):
241 orig = super(lfiles_repo, self).commit
242
243 wlock = repo.wlock()
244 try:
245 if getattr(repo, "_isrebasing", False):
246 # We have to take the time to pull down the new
247 # largefiles now. Otherwise if we are rebasing,
248 # any largefiles that were modified in the
249 # destination changesets get overwritten, either
250 # by the rebase or in the first commit after the
251 # rebase.
252 lfcommands.updatelfiles(repo.ui, repo)
253 # Case 1: user calls commit with no specific files or
254 # include/exclude patterns: refresh and commit all files that
255 # are "dirty".
256 if ((match is None) or
257 (not match.anypats() and not match.files())):
258 # Spend a bit of time here to get a list of files we know
259 # are modified so we can compare only against those.
260 # It can cost a lot of time (several seconds)
261 # otherwise to update all standins if the largefiles are
262 # large.
263 lfdirstate = lfutil.openlfdirstate(ui, self)
264 dirtymatch = match_.always(repo.root, repo.getcwd())
265 s = lfdirstate.status(dirtymatch, [], False, False, False)
266 modifiedfiles = []
267 for i in s:
268 modifiedfiles.extend(i)
269 lfiles = lfutil.listlfiles(self)
270 # this only loops through largefiles that exist (not
271 # removed/renamed)
272 for lfile in lfiles:
273 if lfile in modifiedfiles:
274 if os.path.exists(self.wjoin(lfutil.standin(lfile))):
275 # this handles the case where a rebase is being
276 # performed and the working copy is not updated
277 # yet.
278 if os.path.exists(self.wjoin(lfile)):
279 lfutil.updatestandin(self,
280 lfutil.standin(lfile))
281 lfdirstate.normal(lfile)
282 for lfile in lfdirstate:
283 if lfile in modifiedfiles:
284 if not os.path.exists(
285 repo.wjoin(lfutil.standin(lfile))):
286 lfdirstate.drop(lfile)
287 lfdirstate.write()
288
289 return orig(text=text, user=user, date=date, match=match,
290 force=force, editor=editor, extra=extra)
291
292 for f in match.files():
293 if lfutil.isstandin(f):
294 raise util.Abort(
295 _('file "%s" is a largefile standin') % f,
296 hint=('commit the largefile itself instead'))
297
298 # Case 2: user calls commit with specified patterns: refresh
299 # any matching big files.
300 smatcher = lfutil.composestandinmatcher(self, match)
301 standins = lfutil.dirstate_walk(self.dirstate, smatcher)
302
303 # No matching big files: get out of the way and pass control to
304 # the usual commit() method.
305 if not standins:
306 return orig(text=text, user=user, date=date, match=match,
307 force=force, editor=editor, extra=extra)
308
309 # Refresh all matching big files. It's possible that the
310 # commit will end up failing, in which case the big files will
311 # stay refreshed. No harm done: the user modified them and
312 # asked to commit them, so sooner or later we're going to
313 # refresh the standins. Might as well leave them refreshed.
314 lfdirstate = lfutil.openlfdirstate(ui, self)
315 for standin in standins:
316 lfile = lfutil.splitstandin(standin)
317 if lfdirstate[lfile] != 'r':
318 lfutil.updatestandin(self, standin)
319 lfdirstate.normal(lfile)
320 else:
321 lfdirstate.drop(lfile)
322 lfdirstate.write()
323
324 # Cook up a new matcher that only matches regular files or
325 # standins corresponding to the big files requested by the
326 # user. Have to modify _files to prevent commit() from
327 # complaining "not tracked" for big files.
328 lfiles = lfutil.listlfiles(repo)
329 match = copy.copy(match)
330 orig_matchfn = match.matchfn
331
332 # Check both the list of largefiles and the list of
333 # standins because if a largefile was removed, it
334 # won't be in the list of largefiles at this point
335 match._files += sorted(standins)
336
337 actualfiles = []
338 for f in match._files:
339 fstandin = lfutil.standin(f)
340
341 # ignore known largefiles and standins
342 if f in lfiles or fstandin in standins:
343 continue
344
345 # append directory separator to avoid collisions
346 if not fstandin.endswith(os.sep):
347 fstandin += os.sep
348
349 # prevalidate matching standin directories
350 if lfutil.any_(st for st in match._files
351 if st.startswith(fstandin)):
352 continue
353 actualfiles.append(f)
354 match._files = actualfiles
355
356 def matchfn(f):
357 if orig_matchfn(f):
358 return f not in lfiles
359 else:
360 return f in standins
361
362 match.matchfn = matchfn
363 return orig(text=text, user=user, date=date, match=match,
364 force=force, editor=editor, extra=extra)
365 finally:
366 wlock.release()
367
368 def push(self, remote, force=False, revs=None, newbranch=False):
369 o = lfutil.findoutgoing(repo, remote, force)
370 if o:
371 toupload = set()
372 o = repo.changelog.nodesbetween(o, revs)[0]
373 for n in o:
374 parents = [p for p in repo.changelog.parents(n)
375 if p != node.nullid]
376 ctx = repo[n]
377 files = set(ctx.files())
378 if len(parents) == 2:
379 mc = ctx.manifest()
380 mp1 = ctx.parents()[0].manifest()
381 mp2 = ctx.parents()[1].manifest()
382 for f in mp1:
383 if f not in mc:
384 files.add(f)
385 for f in mp2:
386 if f not in mc:
387 files.add(f)
388 for f in mc:
389 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f,
390 None):
391 files.add(f)
392
393 toupload = toupload.union(
394 set([ctx[f].data().strip()
395 for f in files
396 if lfutil.isstandin(f) and f in ctx]))
397 lfcommands.uploadlfiles(ui, self, remote, toupload)
398 return super(lfiles_repo, self).push(remote, force, revs,
399 newbranch)
400
401 repo.__class__ = lfiles_repo
402
403 def checkrequireslfiles(ui, repo, **kwargs):
404 if 'largefiles' not in repo.requirements and lfutil.any_(
405 lfutil.shortname+'/' in f[0] for f in repo.store.datafiles()):
406 # workaround bug in Mercurial 1.9 whereby requirements is
407 # a list on newly-cloned repos
408 repo.requirements = set(repo.requirements)
409
410 repo.requirements |= set(['largefiles'])
411 repo._writerequirements()
412
413 checkrequireslfiles(ui, repo)
414
415 ui.setconfig('hooks', 'changegroup.lfiles', checkrequireslfiles)
416 ui.setconfig('hooks', 'commit.lfiles', checkrequireslfiles)
@@ -0,0 +1,138 b''
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''setup for largefiles extension: uisetup'''
10
11 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
12 httprepo, localrepo, sshrepo, sshserver, util, wireproto
13 from mercurial.i18n import _
14 from mercurial.hgweb import hgweb_mod, protocol
15
16 import overrides
17 import proto
18
19 def uisetup(ui):
20 # Disable auto-status for some commands which assume that all
21 # files in the result are under Mercurial's control
22
23 entry = extensions.wrapcommand(commands.table, 'add',
24 overrides.override_add)
25 addopt = [('', 'large', None, _('add as largefile')),
26 ('', 'lfsize', '', _('add all files above this size (in megabytes)'
27 ' as largefiles (default: 10)'))]
28 entry[1].extend(addopt)
29
30 entry = extensions.wrapcommand(commands.table, 'addremove',
31 overrides.override_addremove)
32 entry = extensions.wrapcommand(commands.table, 'remove',
33 overrides.override_remove)
34 entry = extensions.wrapcommand(commands.table, 'forget',
35 overrides.override_forget)
36 entry = extensions.wrapcommand(commands.table, 'status',
37 overrides.override_status)
38 entry = extensions.wrapcommand(commands.table, 'log',
39 overrides.override_log)
40 entry = extensions.wrapcommand(commands.table, 'rollback',
41 overrides.override_rollback)
42 entry = extensions.wrapcommand(commands.table, 'verify',
43 overrides.override_verify)
44
45 verifyopt = [('', 'large', None, _('verify largefiles')),
46 ('', 'lfa', None,
47 _('verify all revisions of largefiles not just current')),
48 ('', 'lfc', None,
49 _('verify largefile contents not just existence'))]
50 entry[1].extend(verifyopt)
51
52 entry = extensions.wrapcommand(commands.table, 'outgoing',
53 overrides.override_outgoing)
54 outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
55 entry[1].extend(outgoingopt)
56 entry = extensions.wrapcommand(commands.table, 'summary',
57 overrides.override_summary)
58 summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
59 entry[1].extend(summaryopt)
60
61 entry = extensions.wrapcommand(commands.table, 'update',
62 overrides.override_update)
63 entry = extensions.wrapcommand(commands.table, 'pull',
64 overrides.override_pull)
65 entry = extensions.wrapfunction(filemerge, 'filemerge',
66 overrides.override_filemerge)
67 entry = extensions.wrapfunction(cmdutil, 'copy',
68 overrides.override_copy)
69
70 # Backout calls revert so we need to override both the command and the
71 # function
72 entry = extensions.wrapcommand(commands.table, 'revert',
73 overrides.override_revert)
74 entry = extensions.wrapfunction(commands, 'revert',
75 overrides.override_revert)
76
77 # clone uses hg._update instead of hg.update even though they are the
78 # same function... so wrap both of them
79 extensions.wrapfunction(hg, 'update', overrides.hg_update)
80 extensions.wrapfunction(hg, '_update', overrides.hg_update)
81 extensions.wrapfunction(hg, 'clean', overrides.hg_clean)
82 extensions.wrapfunction(hg, 'merge', overrides.hg_merge)
83
84 extensions.wrapfunction(archival, 'archive', overrides.override_archive)
85 if util.safehasattr(cmdutil, 'bailifchanged'):
86 extensions.wrapfunction(cmdutil, 'bailifchanged',
87 overrides.override_bailifchanged)
88 else:
89 extensions.wrapfunction(cmdutil, 'bail_if_changed',
90 overrides.override_bailifchanged)
91
92 # create the new wireproto commands ...
93 wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
94 wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
95 wireproto.commands['statlfile'] = (proto.statlfile, 'sha')
96
97 # ... and wrap some existing ones
98 wireproto.commands['capabilities'] = (proto.capabilities, '')
99 wireproto.commands['heads'] = (proto.heads, '')
100 wireproto.commands['lheads'] = (wireproto.heads, '')
101
102 # make putlfile behave the same as push and {get,stat}lfile behave
103 # the same as pull w.r.t. permissions checks
104 hgweb_mod.perms['putlfile'] = 'push'
105 hgweb_mod.perms['getlfile'] = 'pull'
106 hgweb_mod.perms['statlfile'] = 'pull'
107
108 # the hello wireproto command uses wireproto.capabilities, so it won't see
109 # our largefiles capability unless we replace the actual function as well.
110 proto.capabilities_orig = wireproto.capabilities
111 wireproto.capabilities = proto.capabilities
112
113 # these let us reject non-largefiles clients and make them display
114 # our error messages
115 protocol.webproto.refuseclient = proto.webproto_refuseclient
116 sshserver.sshserver.refuseclient = proto.sshproto_refuseclient
117
118 # can't do this in reposetup because it needs to have happened before
119 # wirerepo.__init__ is called
120 proto.ssh_oldcallstream = sshrepo.sshrepository._callstream
121 proto.http_oldcallstream = httprepo.httprepository._callstream
122 sshrepo.sshrepository._callstream = proto.sshrepo_callstream
123 httprepo.httprepository._callstream = proto.httprepo_callstream
124
125 # don't die on seeing a repo with the largefiles requirement
126 localrepo.localrepository.supported |= set(['largefiles'])
127
128 # override some extensions' stuff as well
129 for name, module in extensions.extensions():
130 if name == 'fetch':
131 extensions.wrapcommand(getattr(module, 'cmdtable'), 'fetch',
132 overrides.override_fetch)
133 if name == 'purge':
134 extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
135 overrides.override_purge)
136 if name == 'rebase':
137 extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
138 overrides.override_rebase)
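None of the wrapping above takes effect until the extension is enabled; a minimal
configuration sketch (largefiles ships with Mercurial as of 2.0, so no path is needed):

  [extensions]
  largefiles =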
@@ -0,0 +1,51 b''
1 Largefiles allows for tracking large, incompressible binary files in Mercurial
2 without requiring excessive bandwidth for clones and pulls. Files added as
3 largefiles are not tracked directly by Mercurial; rather, their revisions are
4 identified by a checksum, and Mercurial tracks these checksums. This way, when
5 you clone a repository or pull in changesets, the large files in older
6 revisions of the repository are not needed, and only the ones needed to update
7 to the current version are downloaded. This saves both disk space and
8 bandwidth.
9
10 If you are starting a new repository or adding new large binary files, using
11 largefiles for them is as easy as adding '--large' to your hg add command. For
12 example:
13
14 $ dd if=/dev/urandom of=thisfileislarge count=2000
15 $ hg add --large thisfileislarge
16 $ hg commit -m 'add thisfileislarge, which is large, as a largefile'
17
18 When you push a changeset that affects largefiles to a remote repository, its
19 largefile revisions will be uploaded along with it. Note that the remote
20 Mercurial must also have the largefiles extension enabled for this to work.
21
22 When you pull a changeset that affects largefiles from a remote repository,
23 nothing different from Mercurial's normal behavior happens. However, when you
24 update to such a revision, any largefiles needed by that revision are
25 downloaded and cached if they have never been downloaded before. This means
26 that network access is required to update to a revision you have not yet updated
27 to.
28
29 If you already have large files tracked by Mercurial without the largefiles
30 extension, you will need to convert your repository in order to benefit from
31 largefiles. This is done with the 'hg lfconvert' command:
32
33 $ hg lfconvert --size 10 oldrepo newrepo
34
35 By default, in repositories that already have largefiles in them, any new file
36 over 10MB will automatically be added as a largefile. To change this
37 threshold, set [largefiles].size in your Mercurial config file to the minimum
38 size in megabytes to track as a largefile, or use the --lfsize option to the
39 add command (also in megabytes):
40
41 [largefiles]
42 size = 2
43
44 $ hg add --lfsize 2
45
46 The [largefiles].patterns config option allows you to specify
47 space-separated filename patterns (in shell glob syntax) that should always be
48 tracked as largefiles:
49
50 [largefiles]
51 patterns = *.jpg *.{png,bmp} library.zip content/audio/*
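Putting these together, a repository's hgrc might combine both settings; a minimal
sketch reusing the example values above:

  [largefiles]
  size = 2
  patterns = *.jpg *.{png,bmp} library.zip content/audio/*

Files matching one of the patterns are tracked as largefiles regardless of size,
while other newly added files fall back to the size threshold.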
@@ -0,0 +1,29 b''
1 # Copyright 2010-2011 Fog Creek Software
2 #
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
5
6 '''largefile store working over Mercurial's wire protocol'''
7
8 import lfutil
9 import remotestore
10
11 class wirestore(remotestore.remotestore):
12 def __init__(self, ui, repo, remote):
13 cap = remote.capable('largefiles')
14 if not cap:
15 raise lfutil.storeprotonotcapable([])
16 storetypes = cap.split(',')
17 if not 'serve' in storetypes:
18 raise lfutil.storeprotonotcapable(storetypes)
19 self.remote = remote
20 super(wirestore, self).__init__(ui, repo, remote.url())
21
22 def _put(self, hash, fd):
23 return self.remote.putlfile(hash, fd)
24
25 def _get(self, hash):
26 return self.remote.getlfile(hash)
27
28 def _stat(self, hash):
29 return self.remote.statlfile(hash)
1 NO CONTENT: new file 100644, binary diff hidden
@@ -0,0 +1,13 b''
1 import doctest, tempfile, os, sys
2
3 if __name__ == "__main__":
4 fd, name = tempfile.mkstemp(suffix='hg-tst')
5
6 try:
7 os.write(fd, sys.stdin.read())
8 os.close(fd)
9 failures, _ = doctest.testfile(name, module_relative=False)
10 if failures:
11 sys.exit(1)
12 finally:
13 os.remove(name)
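This runner just copies stdin into a temporary file and feeds it to doctest, exiting
non-zero on any failure. A quick smoke test might look like this (the script name is
an assumption, since the diff view omits filenames):

  $ printf '>>> 2 + 2\n4\n' | python rundoctest.py
  $ echo $?
  0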
@@ -0,0 +1,232 b''
1 # Here we create a simple DAG which has just enough of the required
2 # topology to test all the bisection status labels:
3 #
4 # 13--14
5 # /
6 # 0--1--2--3---------9--10--11--12
7 # \ /
8 # 4--5--6--7--8
9
10
11 $ hg init
12
13 $ echo '0' >a
14 $ hg add a
15 $ hg ci -u test -d '0 0' -m '0'
16 $ echo '1' >a
17 $ hg ci -u test -d '0 1' -m '1'
18
19 branch 2-3
20
21 $ echo '2' >b
22 $ hg add b
23 $ hg ci -u test -d '0 2' -m '2'
24 $ echo '3' >b
25 $ hg ci -u test -d '0 3' -m '3'
26
27 branch 4-8
28
29 $ hg up -r 1
30 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
31 $ echo '4' >c
32 $ hg add c
33 $ hg ci -u test -d '0 4' -m '4'
34 created new head
35 $ echo '5' >c
36 $ hg ci -u test -d '0 5' -m '5'
37 $ echo '6' >c
38 $ hg ci -u test -d '0 6' -m '6'
39 $ echo '7' >c
40 $ hg ci -u test -d '0 7' -m '7'
41 $ echo '8' >c
42 $ hg ci -u test -d '0 8' -m '8'
43
44 merge
45
46 $ hg merge -r 3
47 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
48 (branch merge, don't forget to commit)
49 $ hg ci -u test -d '0 9' -m '9=8+3'
50
51 $ echo '10' >a
52 $ hg ci -u test -d '0 10' -m '10'
53 $ echo '11' >a
54 $ hg ci -u test -d '0 11' -m '11'
55 $ echo '12' >a
56 $ hg ci -u test -d '0 12' -m '12'
57
58 unrelated branch
59
60 $ hg up -r 3
61 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
62 $ echo '13' >d
63 $ hg add d
64 $ hg ci -u test -d '0 13' -m '13'
65 created new head
66 $ echo '14' >d
67 $ hg ci -u test -d '0 14' -m '14'
68
69 mark changesets
70
71 $ hg bisect --reset
72 $ hg bisect --good 4
73 $ hg bisect --good 6
74 $ hg bisect --bad 12
75 Testing changeset 9:8bcbdb072033 (6 changesets remaining, ~2 tests)
76 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
77 $ hg bisect --bad 10
78 Testing changeset 8:3cd112f87d77 (4 changesets remaining, ~2 tests)
79 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
80 $ hg bisect --skip 7
81 Testing changeset 8:3cd112f87d77 (4 changesets remaining, ~2 tests)
82 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
83
84 test template
85
86 $ hg log --template '{rev}:{node|short} {bisect}\n'
87 14:cecd84203acc
88 13:86f7c8cdb6df
89 12:a76089b5f47c bad
90 11:5c3eb122d29c bad (implicit)
91 10:b097cef2be03 bad
92 9:8bcbdb072033 untested
93 8:3cd112f87d77 untested
94 7:577e237a73bd skipped
95 6:e597fa2707c5 good
96 5:b9cea37a76bc good (implicit)
97 4:da6b357259d7 good
98 3:e7f031aee8ca ignored
99 2:b1ad1b6bcc5c ignored
100 1:37f42ae8b45e good (implicit)
101 0:b4e73ffab476 good (implicit)
102 $ hg log --template '{bisect|shortbisect} {rev}:{node|short}\n'
103 14:cecd84203acc
104 13:86f7c8cdb6df
105 B 12:a76089b5f47c
106 B 11:5c3eb122d29c
107 B 10:b097cef2be03
108 U 9:8bcbdb072033
109 U 8:3cd112f87d77
110 S 7:577e237a73bd
111 G 6:e597fa2707c5
112 G 5:b9cea37a76bc
113 G 4:da6b357259d7
114 I 3:e7f031aee8ca
115 I 2:b1ad1b6bcc5c
116 G 1:37f42ae8b45e
117 G 0:b4e73ffab476
118
119 test style
120
121 $ hg log --style bisect
122 changeset: 14:cecd84203acc
123 bisect:
124 tag: tip
125 user: test
126 date: Wed Dec 31 23:59:46 1969 -0000
127 summary: 14
128
129 changeset: 13:86f7c8cdb6df
130 bisect:
131 parent: 3:e7f031aee8ca
132 user: test
133 date: Wed Dec 31 23:59:47 1969 -0000
134 summary: 13
135
136 changeset: 12:a76089b5f47c
137 bisect: bad
138 user: test
139 date: Wed Dec 31 23:59:48 1969 -0000
140 summary: 12
141
142 changeset: 11:5c3eb122d29c
143 bisect: bad (implicit)
144 user: test
145 date: Wed Dec 31 23:59:49 1969 -0000
146 summary: 11
147
148 changeset: 10:b097cef2be03
149 bisect: bad
150 user: test
151 date: Wed Dec 31 23:59:50 1969 -0000
152 summary: 10
153
154 changeset: 9:8bcbdb072033
155 bisect: untested
156 parent: 8:3cd112f87d77
157 parent: 3:e7f031aee8ca
158 user: test
159 date: Wed Dec 31 23:59:51 1969 -0000
160 summary: 9=8+3
161
162 changeset: 8:3cd112f87d77
163 bisect: untested
164 user: test
165 date: Wed Dec 31 23:59:52 1969 -0000
166 summary: 8
167
168 changeset: 7:577e237a73bd
169 bisect: skipped
170 user: test
171 date: Wed Dec 31 23:59:53 1969 -0000
172 summary: 7
173
174 changeset: 6:e597fa2707c5
175 bisect: good
176 user: test
177 date: Wed Dec 31 23:59:54 1969 -0000
178 summary: 6
179
180 changeset: 5:b9cea37a76bc
181 bisect: good (implicit)
182 user: test
183 date: Wed Dec 31 23:59:55 1969 -0000
184 summary: 5
185
186 changeset: 4:da6b357259d7
187 bisect: good
188 parent: 1:37f42ae8b45e
189 user: test
190 date: Wed Dec 31 23:59:56 1969 -0000
191 summary: 4
192
193 changeset: 3:e7f031aee8ca
194 bisect: ignored
195 user: test
196 date: Wed Dec 31 23:59:57 1969 -0000
197 summary: 3
198
199 changeset: 2:b1ad1b6bcc5c
200 bisect: ignored
201 user: test
202 date: Wed Dec 31 23:59:58 1969 -0000
203 summary: 2
204
205 changeset: 1:37f42ae8b45e
206 bisect: good (implicit)
207 user: test
208 date: Wed Dec 31 23:59:59 1969 -0000
209 summary: 1
210
211 changeset: 0:b4e73ffab476
212 bisect: good (implicit)
213 user: test
214 date: Thu Jan 01 00:00:00 1970 +0000
215 summary: 0
216
217 $ hg log --quiet --style bisect
218 14:cecd84203acc
219 13:86f7c8cdb6df
220 B 12:a76089b5f47c
221 B 11:5c3eb122d29c
222 B 10:b097cef2be03
223 U 9:8bcbdb072033
224 U 8:3cd112f87d77
225 S 7:577e237a73bd
226 G 6:e597fa2707c5
227 G 5:b9cea37a76bc
228 G 4:da6b357259d7
229 I 3:e7f031aee8ca
230 I 2:b1ad1b6bcc5c
231 G 1:37f42ae8b45e
232 G 0:b4e73ffab476
@@ -0,0 +1,94 b''
1 import sys, os, subprocess
2
3 if subprocess.call(['%s/hghave' % os.environ['TESTDIR'], 'cacheable']):
4 sys.exit(80)
5
6 from mercurial import util, scmutil, extensions
7
8 filecache = scmutil.filecache
9
10 class fakerepo(object):
11 def __init__(self):
12 self._filecache = {}
13
14 def join(self, p):
15 return p
16
17 def sjoin(self, p):
18 return p
19
20 @filecache('x')
21 def cached(self):
22 print 'creating'
23
24 def invalidate(self):
25 for k in self._filecache:
26 try:
27 delattr(self, k)
28 except AttributeError:
29 pass
30
31 def basic(repo):
32 # file doesn't exist, calls function
33 repo.cached
34
35 repo.invalidate()
36 # file still doesn't exist, uses cache
37 repo.cached
38
39 # create empty file
40 f = open('x', 'w')
41 f.close()
42 repo.invalidate()
43 # should recreate the object
44 repo.cached
45
46 f = open('x', 'w')
47 f.write('a')
48 f.close()
49 repo.invalidate()
50 # should recreate the object
51 repo.cached
52
53 repo.invalidate()
54 # stats file again, nothing changed, reuses object
55 repo.cached
56
57 # atomically replace the file; size doesn't change
58 # hopefully st_mtime doesn't change either, so the only thing that should
59 # invalidate the cache is the inode change
60 f = scmutil.opener('.')('x', 'w', atomictemp=True)
61 f.write('b')
62 f.close()
63
64 repo.invalidate()
65 repo.cached
66
67 def fakeuncacheable():
68 def wrapcacheable(orig, *args, **kwargs):
69 return False
70
71 def wrapinit(orig, *args, **kwargs):
72 pass
73
74 originit = extensions.wrapfunction(util.cachestat, '__init__', wrapinit)
75 origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable',
76 wrapcacheable)
77
78 try:
79 os.remove('x')
80 except OSError:
81 pass
82
83 basic(fakerepo())
84
85 util.cachestat.cacheable = origcacheable
86 util.cachestat.__init__ = originit
87
88 print 'basic:'
89 print
90 basic(fakerepo())
91 print
92 print 'fakeuncacheable:'
93 print
94 fakeuncacheable()
@@ -0,0 +1,15 b''
1 basic:
2
3 creating
4 creating
5 creating
6 creating
7
8 fakeuncacheable:
9
10 creating
11 creating
12 creating
13 creating
14 creating
15 creating
@@ -148,7 +148,7 b' pypats = ['
148 148 (r'(?<!def)\s+(any|all|format)\(',
149 149 "any/all/format not available in Python 2.4"),
150 150 (r'(?<!def)\s+(callable)\(',
151 "callable not available in Python 3, use hasattr(f, '__call__')"),
151 "callable not available in Python 3, use getattr(f, '__call__', None)"),
152 152 (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
153 153 (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
154 154 "gratuitous whitespace after Python keyword"),
@@ -168,6 +168,8 b' pypats = ['
168 168 "comparison with singleton, use 'is' or 'is not' instead"),
169 169 (r'^\s*(while|if) [01]:',
170 170 "use True/False for constant Boolean expression"),
171 (r'(?<!def)\s+hasattr',
172 'hasattr(foo, bar) is broken, use util.safehasattr(foo, bar) instead'),
171 173 (r'opener\([^)]*\).read\(',
172 174 "use opener.read() instead"),
173 175 (r'opener\([^)]*\).write\(',
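The new check above flags bare ``hasattr`` because, on Python 2, the builtin swallows any exception raised while computing the attribute and so can hide real errors; ``util.safehasattr`` (or a plain ``getattr`` with a default) treats only a genuinely missing attribute as absent. A minimal sketch of the preferred forms; the class and attribute names are made up for illustration:

    from mercurial import util

    class repo(object):
        sharedpath = '/tmp/store'        # illustrative attribute only

    r = repo()
    # explicit membership test without blanket exception swallowing
    if util.safehasattr(r, 'sharedpath'):
        path = r.sharedpath
    # equivalent idea with an explicit sentinel default
    path = getattr(r, 'sharedpath', None)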
@@ -8,7 +8,7 b' from distutils.command.build_py import b'
8 8 from lib2to3.refactor import get_fixers_from_package as getfixers
9 9
10 10 import sys
11 if not hasattr(sys, 'version_info') or sys.version_info < (2, 4, 0, 'final'):
11 if getattr(sys, 'version_info', (0, 0, 0)) < (2, 4, 0, 'final'):
12 12 raise SystemExit("Mercurial requires Python 2.4 or later.")
13 13
14 14 if sys.version_info[0] >= 3:
@@ -236,7 +236,7 b' class hgbuildext(build_ext):'
236 236 try:
237 237 build_ext.build_extension(self, ext)
238 238 except CCompilerError:
239 if not hasattr(ext, 'optional') or not ext.optional:
239 if not getattr(ext, 'optional', False):
240 240 raise
241 241 log.warn("Failed to build optional extension '%s' (skipping)",
242 242 ext.name)
@@ -50,7 +50,7 b' import sys'
50 50 #sys.path.insert(0, r'c:\path\to\python\lib')
51 51
52 52 # Enable tracing. Run 'python -m win32traceutil' to debug
53 if hasattr(sys, 'isapidllhandle'):
53 if getattr(sys, 'isapidllhandle', None) is not None:
54 54 import win32traceutil
55 55
56 56 # To serve pages in local charset instead of UTF-8, remove the two lines below
@@ -165,6 +165,7 b' typeset -A _hg_cmd_globals'
165 165 _hg_labels() {
166 166 _hg_tags "$@"
167 167 _hg_bookmarks "$@"
168 _hg_branches "$@"
168 169 }
169 170
170 171 _hg_tags() {
@@ -191,6 +192,17 b' typeset -A _hg_cmd_globals'
191 192 (( $#bookmarks )) && _describe -t bookmarks 'bookmarks' bookmarks
192 193 }
193 194
195 _hg_branches() {
196 typeset -a branches
197 local branch
198
199 _hg_cmd branches | while read branch
200 do
201 branches+=(${branch/ # [0-9]#:*})
202 done
203 (( $#branches )) && _describe -t branches 'branches' branches
204 }
205
194 206 # likely merge candidates
195 207 _hg_mergerevs() {
196 208 typeset -a heads
@@ -617,6 +629,7 b' typeset -A _hg_cmd_globals'
617 629 '(--only-merges -m)'{-m,--only-merges}'[show only merges]' \
618 630 '(--patch -p)'{-p,--patch}'[show patch]' \
619 631 '(--prune -P)'{-P+,--prune}'[do not display revision or any of its ancestors]:revision:_hg_labels' \
632 '(--branch -b)'{-b+,--branch}'[show changesets within the given named branch]:branch:_hg_branches' \
620 633 '*:files:_hg_files'
621 634 }
622 635
@@ -9,6 +9,7 b' from mercurial.commands import table, gl'
9 9 from mercurial.i18n import _
10 10 from mercurial.help import helptable
11 11 from mercurial import extensions
12 from mercurial import util
12 13
13 14 def get_desc(docstr):
14 15 if not docstr:
@@ -95,7 +96,7 b' def show_doc(ui):'
95 96 ui.write(".. _%s:\n" % name)
96 97 ui.write("\n")
97 98 section(ui, sec)
98 if hasattr(doc, '__call__'):
99 if util.safehasattr(doc, '__call__'):
99 100 doc = doc()
100 101 ui.write(doc)
101 102 ui.write("\n")
@@ -216,6 +216,8 b' def hook(ui, repo, hooktype, node=None, '
216 216 if user is None:
217 217 user = getpass.getuser()
218 218
219 ui.debug('acl: checking access for user "%s"\n' % user)
220
219 221 cfg = ui.config('acl', 'config')
220 222 if cfg:
221 223 ui.readconfig(cfg, sections = ['acl.groups', 'acl.allow.branches',
@@ -242,9 +244,9 b' def hook(ui, repo, hooktype, node=None, '
242 244
243 245 for f in ctx.files():
244 246 if deny and deny(f):
245 ui.debug('acl: user %s denied on %s\n' % (user, f))
246 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
247 raise util.Abort(_('acl: user "%s" denied on "%s"'
248 ' (changeset "%s")') % (user, f, ctx))
247 249 if allow and not allow(f):
248 ui.debug('acl: user %s not allowed on %s\n' % (user, f))
249 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
250 ui.debug('acl: allowing changeset %s\n' % ctx)
250 raise util.Abort(_('acl: user "%s" not allowed on "%s"'
251 ' (changeset "%s")') % (user, f, ctx))
252 ui.debug('acl: path access granted: "%s"\n' % ctx)
@@ -68,6 +68,9 b' Default effects may be overridden from y'
68 68 branches.current = green
69 69 branches.inactive = none
70 70
71 tags.normal = green
72 tags.local = black bold
73
71 74 The available effects in terminfo mode are 'blink', 'bold', 'dim',
72 75 'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
73 76 ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
@@ -257,7 +260,9 b' except ImportError:'
257 260 'status.ignored': 'black bold',
258 261 'status.modified': 'blue bold',
259 262 'status.removed': 'red bold',
260 'status.unknown': 'magenta bold underline'}
263 'status.unknown': 'magenta bold underline',
264 'tags.normal': 'green',
265 'tags.local': 'black bold'}
261 266
262 267
263 268 def _effect_str(effect):
@@ -11,6 +11,7 b' import cPickle as pickle'
11 11 from mercurial import util
12 12 from mercurial.i18n import _
13 13 from mercurial import hook
14 from mercurial import util
14 15
15 16 class logentry(object):
16 17 '''Class logentry has the following attributes:
@@ -362,8 +363,14 b' def createlog(ui, directory=None, root="'
362 363 elif state == 8:
363 364 # store commit log message
364 365 if re_31.match(line):
365 state = 5
366 store = True
366 cpeek = peek
367 if cpeek.endswith('\n'):
368 cpeek = cpeek[:-1]
369 if re_50.match(cpeek):
370 state = 5
371 store = True
372 else:
373 e.comment.append(line)
367 374 elif re_32.match(line):
368 375 state = 0
369 376 store = True
@@ -513,8 +520,8 b' def createchangeset(ui, log, fuzz=60, me'
513 520 e.comment == c.comment and
514 521 e.author == c.author and
515 522 e.branch == c.branch and
516 (not hasattr(e, 'branchpoints') or
517 not hasattr (c, 'branchpoints') or
523 (not util.safehasattr(e, 'branchpoints') or
524 not util.safehasattr (c, 'branchpoints') or
518 525 e.branchpoints == c.branchpoints) and
519 526 ((c.date[0] + c.date[1]) <=
520 527 (e.date[0] + e.date[1]) <=
@@ -375,3 +375,6 b' class filemap_source(converter_source):'
375 375
376 376 def lookuprev(self, rev):
377 377 return self.base.lookuprev(rev)
378
379 def getbookmarks(self):
380 return self.base.getbookmarks()
@@ -16,7 +16,7 b' class convert_git(converter_source):'
16 16 # Windows does not support GIT_DIR= construct while other systems
17 17 # cannot remove environment variable. Just assume none have
18 18 # both issues.
19 if hasattr(os, 'unsetenv'):
19 if util.safehasattr(os, 'unsetenv'):
20 20 def gitopen(self, s, noerr=False):
21 21 prevgitdir = os.environ.get('GIT_DIR')
22 22 os.environ['GIT_DIR'] = self.path
@@ -70,10 +70,10 b' class mercurial_sink(converter_sink):'
70 70 self.wlock.release()
71 71
72 72 def revmapfile(self):
73 return os.path.join(self.path, ".hg", "shamap")
73 return self.repo.join("shamap")
74 74
75 75 def authorfile(self):
76 return os.path.join(self.path, ".hg", "authormap")
76 return self.repo.join("authormap")
77 77
78 78 def getheads(self):
79 79 h = self.repo.changelog.heads()
@@ -178,7 +178,7 b' class mercurial_sink(converter_sink):'
178 178 closed = 'close' in commit.extra
179 179 if not closed and not man.cmp(m1node, man.revision(mnode)):
180 180 self.ui.status(_("filtering out empty revision\n"))
181 self.repo.rollback()
181 self.repo.rollback(force=True)
182 182 return parent
183 183 return p2
184 184
@@ -364,8 +364,7 b' class mercurial_source(converter_source)'
364 364
365 365 def converted(self, rev, destrev):
366 366 if self.convertfp is None:
367 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
368 'a')
367 self.convertfp = open(self.repo.join('shamap'), 'a')
369 368 self.convertfp.write('%s %s\n' % (destrev, rev))
370 369 self.convertfp.flush()
371 370
@@ -501,11 +501,11 b' class svn_source(converter_source):'
501 501 and not p[2].startswith(badroot + '/')]
502 502
503 503 # Tell tag renamings from tag creations
504 remainings = []
504 renamings = []
505 505 for source, sourcerev, dest in pendings:
506 506 tagname = dest.split('/')[-1]
507 507 if source.startswith(srctagspath):
508 remainings.append([source, sourcerev, tagname])
508 renamings.append([source, sourcerev, tagname])
509 509 continue
510 510 if tagname in tags:
511 511 # Keep the latest tag value
@@ -521,7 +521,7 b' class svn_source(converter_source):'
521 521 # but were really created in the tag
522 522 # directory.
523 523 pass
524 pendings = remainings
524 pendings = renamings
525 525 tagspath = srctagspath
526 526 finally:
527 527 stream.close()
@@ -54,7 +54,7 b' def _create_auth_baton(pool):'
54 54 if p:
55 55 providers.append(p)
56 56 else:
57 if hasattr(svn.client, 'get_windows_simple_provider'):
57 if util.safehasattr(svn.client, 'get_windows_simple_provider'):
58 58 providers.append(svn.client.get_windows_simple_provider(pool))
59 59
60 60 return svn.core.svn_auth_open(providers, pool)
@@ -73,7 +73,7 b' class SvnRaTransport(object):'
73 73 self.password = ''
74 74
75 75 # Only Subversion 1.4 has reparent()
76 if ra is None or not hasattr(svn.ra, 'reparent'):
76 if ra is None or not util.safehasattr(svn.ra, 'reparent'):
77 77 self.client = svn.client.create_context(self.pool)
78 78 ab = _create_auth_baton(self.pool)
79 79 if False:
@@ -52,9 +52,10 b' Example versioned ``.hgeol`` file::'
52 52 The rules will first apply when files are touched in the working
53 53 copy, e.g. by updating to null and back to tip to touch all files.
54 54
55 The extension uses an optional ``[eol]`` section in your hgrc file
56 (not the ``.hgeol`` file) for settings that control the overall
57 behavior. There are two settings:
55 The extension uses an optional ``[eol]`` section read from both the
56 normal Mercurial configuration files and the ``.hgeol`` file, with the
57 latter overriding the former. You can use that section to control the
58 overall behavior. There are three settings:
58 59
59 60 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
60 61 ``CRLF`` to override the default interpretation of ``native`` for
@@ -67,6 +68,10 b' behavior. There are two settings:'
67 68 Such files are normally not touched under the assumption that they
68 69 have mixed EOLs on purpose.
69 70
71 - ``eol.fix-trailing-newline`` (default False) can be set to True to
72 ensure that converted files end with an EOL character (either ``\\n``
73 or ``\\r\\n`` as per the configured patterns); a short example follows this list.
74
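As a hedged illustration of the setting described above, a versioned ``.hgeol`` (or an ``[eol]`` section in an hgrc) along the lines of the sketch below would append a final newline to converted files that lack one; the pattern shown is only an example:

    [patterns]
    **.txt = native

    [eol]
    native = LF
    only-consistent = True
    fix-trailing-newline = True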
70 75 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
71 76 like the deprecated win32text extension does. This means that you can
72 77 disable win32text and enable eol and your filters will still work. You
@@ -106,6 +111,8 b' def tolf(s, params, ui, **kwargs):'
106 111 return s
107 112 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
108 113 return s
114 if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
115 s = s + '\n'
109 116 return eolre.sub('\n', s)
110 117
111 118 def tocrlf(s, params, ui, **kwargs):
@@ -114,6 +121,8 b' def tocrlf(s, params, ui, **kwargs):'
114 121 return s
115 122 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
116 123 return s
124 if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
125 s = s + '\n'
117 126 return eolre.sub('\r\n', s)
118 127
119 128 def isbinary(s, params):
@@ -158,7 +167,7 b' class eolfile(object):'
158 167 # about inconsistent newlines.
159 168 self.match = match.match(root, '', [], include, exclude)
160 169
161 def setfilters(self, ui):
170 def copytoui(self, ui):
162 171 for pattern, style in self.cfg.items('patterns'):
163 172 key = style.upper()
164 173 try:
@@ -167,6 +176,9 b' class eolfile(object):'
167 176 except KeyError:
168 177 ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
169 178 % (style, self.cfg.source('patterns', pattern)))
179 # eol.only-consistent can be specified in ~/.hgrc or .hgeol
180 for k, v in self.cfg.items('eol'):
181 ui.setconfig('eol', k, v)
170 182
171 183 def checkrev(self, repo, ctx, files):
172 184 failed = []
@@ -273,7 +285,7 b' def reposetup(ui, repo):'
273 285 eol = parseeol(self.ui, self, nodes)
274 286 if eol is None:
275 287 return None
276 eol.setfilters(self.ui)
288 eol.copytoui(self.ui)
277 289 return eol.match
278 290
279 291 def _hgcleardirstate(self):
@@ -11,6 +11,7 b''
11 11 # todo: socket permissions
12 12
13 13 from mercurial.i18n import _
14 from mercurial import util
14 15 import server
15 16 from client import client, QueryFailed
16 17
@@ -31,7 +32,7 b' def debuginotify(ui, repo, **opts):'
31 32 ui.write((' %s/\n') % path)
32 33
33 34 def reposetup(ui, repo):
34 if not hasattr(repo, 'dirstate'):
35 if not util.safehasattr(repo, 'dirstate'):
35 36 return
36 37
37 38 class inotifydirstate(repo.dirstate.__class__):
@@ -249,10 +249,14 b' class kwtemplater(object):'
249 249 kwcmd = self.restrict and lookup # kwexpand/kwshrink
250 250 if self.restrict or expand and lookup:
251 251 mf = ctx.manifest()
252 lctx = ctx
253 re_kw = (self.restrict or rekw) and self.rekw or self.rekwexp
254 msg = (expand and _('overwriting %s expanding keywords\n')
255 or _('overwriting %s shrinking keywords\n'))
252 if self.restrict or rekw:
253 re_kw = self.rekw
254 else:
255 re_kw = self.rekwexp
256 if expand:
257 msg = _('overwriting %s expanding keywords\n')
258 else:
259 msg = _('overwriting %s shrinking keywords\n')
256 260 for f in candidates:
257 261 if self.restrict:
258 262 data = self.repo.file(f).read(mf[f])
@@ -262,18 +266,17 b' class kwtemplater(object):'
262 266 continue
263 267 if expand:
264 268 if lookup:
265 lctx = self.linkctx(f, mf[f])
266 data, found = self.substitute(data, f, lctx, re_kw.subn)
269 ctx = self.linkctx(f, mf[f])
270 data, found = self.substitute(data, f, ctx, re_kw.subn)
267 271 elif self.restrict:
268 272 found = re_kw.search(data)
269 273 else:
270 274 data, found = _shrinktext(data, re_kw.subn)
271 275 if found:
272 276 self.ui.note(msg % f)
273 fpath = self.repo.wjoin(f)
274 mode = os.lstat(fpath).st_mode
275 self.repo.wwrite(f, data, ctx.flags(f))
276 os.chmod(fpath, mode)
277 fp = self.repo.wopener(f, "wb", atomictemp=True)
278 fp.write(data)
279 fp.close()
277 280 if kwcmd:
278 281 self.repo.dirstate.normal(f)
279 282 elif self.record:
@@ -296,7 +299,9 b' class kwtemplater(object):'
296 299 def wread(self, fname, data):
297 300 '''If in restricted mode returns data read from wdir with
298 301 keyword substitutions removed.'''
299 return self.restrict and self.shrink(fname, data) or data
302 if self.restrict:
303 return self.shrink(fname, data)
304 return data
300 305
301 306 class kwfilelog(filelog.filelog):
302 307 '''
@@ -325,11 +330,11 b' class kwfilelog(filelog.filelog):'
325 330 text = self.kwt.shrink(self.path, text)
326 331 return super(kwfilelog, self).cmp(node, text)
327 332
328 def _status(ui, repo, kwt, *pats, **opts):
333 def _status(ui, repo, wctx, kwt, *pats, **opts):
329 334 '''Bails out if [keyword] configuration is not active.
330 335 Returns status of working directory.'''
331 336 if kwt:
332 return repo.status(match=scmutil.match(repo[None], pats, opts), clean=True,
337 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
333 338 unknown=opts.get('unknown') or opts.get('all'))
334 339 if ui.configitems('keyword'):
335 340 raise util.Abort(_('[keyword] patterns cannot match'))
@@ -343,7 +348,7 b' def _kwfwrite(ui, repo, expand, *pats, *'
343 348 kwt = kwtools['templater']
344 349 wlock = repo.wlock()
345 350 try:
346 status = _status(ui, repo, kwt, *pats, **opts)
351 status = _status(ui, repo, wctx, kwt, *pats, **opts)
347 352 modified, added, removed, deleted, unknown, ignored, clean = status
348 353 if modified or added or removed or deleted:
349 354 raise util.Abort(_('outstanding uncommitted changes'))
@@ -415,7 +420,10 b' def demo(ui, repo, *args, **opts):'
415 420 ui.setconfig('keywordmaps', k, v)
416 421 else:
417 422 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
418 kwmaps = dict(uikwmaps) or _defaultkwmaps(ui)
423 if uikwmaps:
424 kwmaps = dict(uikwmaps)
425 else:
426 kwmaps = _defaultkwmaps(ui)
419 427
420 428 uisetup(ui)
421 429 reposetup(ui, repo)
@@ -478,13 +486,13 b' def files(ui, repo, *pats, **opts):'
478 486 i = ignored (not tracked)
479 487 '''
480 488 kwt = kwtools['templater']
481 status = _status(ui, repo, kwt, *pats, **opts)
489 wctx = repo[None]
490 status = _status(ui, repo, wctx, kwt, *pats, **opts)
482 491 cwd = pats and repo.getcwd() or ''
483 492 modified, added, removed, deleted, unknown, ignored, clean = status
484 493 files = []
485 494 if not opts.get('unknown') or opts.get('all'):
486 495 files = sorted(modified + added + clean)
487 wctx = repo[None]
488 496 kwfiles = kwt.iskwfile(files, wctx)
489 497 kwdeleted = kwt.iskwfile(deleted, wctx)
490 498 kwunknown = kwt.iskwfile(unknown, wctx)
@@ -582,12 +590,12 b' def reposetup(ui, repo):'
582 590 kwt.restrict = restrict
583 591 return n
584 592
585 def rollback(self, dryrun=False):
593 def rollback(self, dryrun=False, force=False):
586 594 wlock = self.wlock()
587 595 try:
588 596 if not dryrun:
589 597 changed = self['.'].files()
590 ret = super(kwrepo, self).rollback(dryrun)
598 ret = super(kwrepo, self).rollback(dryrun, force)
591 599 if not dryrun:
592 600 ctx = self['.']
593 601 modified, added = _preselect(self[None].status(), changed)
@@ -287,25 +287,31 b' class queue(object):'
287 287
288 288 @util.propertycache
289 289 def applied(self):
290 if os.path.exists(self.join(self.statuspath)):
291 def parselines(lines):
292 for l in lines:
293 entry = l.split(':', 1)
294 if len(entry) > 1:
295 n, name = entry
296 yield statusentry(bin(n), name)
297 elif l.strip():
298 self.ui.warn(_('malformated mq status line: %s\n') % entry)
299 # else we ignore empty lines
290 def parselines(lines):
291 for l in lines:
292 entry = l.split(':', 1)
293 if len(entry) > 1:
294 n, name = entry
295 yield statusentry(bin(n), name)
296 elif l.strip():
297 self.ui.warn(_('malformated mq status line: %s\n') % entry)
298 # else we ignore empty lines
299 try:
300 300 lines = self.opener.read(self.statuspath).splitlines()
301 301 return list(parselines(lines))
302 return []
302 except IOError, e:
303 if e.errno == errno.ENOENT:
304 return []
305 raise
303 306
304 307 @util.propertycache
305 308 def fullseries(self):
306 if os.path.exists(self.join(self.seriespath)):
307 return self.opener.read(self.seriespath).splitlines()
308 return []
309 try:
310 return self.opener.read(self.seriespath).splitlines()
311 except IOError, e:
312 if e.errno == errno.ENOENT:
313 return []
314 raise
309 315
310 316 @util.propertycache
311 317 def series(self):
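The two properties above replace an ``os.path.exists`` guard with an EAFP read that treats only a missing file as "no entries" and re-raises every other I/O error. A minimal standalone sketch of the same pattern; ``readstatus`` and ``path`` are illustrative names, not part of mq:

    import errno

    def readstatus(opener, path):
        try:
            return opener.read(path).splitlines()
        except IOError, e:          # Python 2 syntax, matching the surrounding code
            if e.errno != errno.ENOENT:
                raise               # a real error: propagate it
            return []               # file simply absent: behave as if empty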
@@ -626,6 +632,7 b' class queue(object):'
626 632 self.ui.note(str(inst) + '\n')
627 633 if not self.ui.verbose:
628 634 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
635 self.ui.traceback()
629 636 return (False, list(files), False)
630 637
631 638 def apply(self, repo, series, list=False, update_status=True,
@@ -938,7 +945,7 b' class queue(object):'
938 945 p.write("# User " + user + "\n")
939 946 if date:
940 947 p.write("# Date %s %s\n\n" % date)
941 if hasattr(msg, '__call__'):
948 if util.safehasattr(msg, '__call__'):
942 949 msg = msg()
943 950 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
944 951 n = repo.commit(commitmsg, user, date, match=match, force=True)
@@ -1010,12 +1017,10 b' class queue(object):'
1010 1017 # if the exact patch name does not exist, we try a few
1011 1018 # variations. If strict is passed, we try only #1
1012 1019 #
1013 # 1) a number to indicate an offset in the series file
1020 # 1) a number (as string) to indicate an offset in the series file
1014 1021 # 2) a unique substring of the patch name was given
1015 1022 # 3) patchname[-+]num to indicate an offset in the series file
1016 1023 def lookup(self, patch, strict=False):
1017 patch = patch and str(patch)
1018
1019 1024 def partialname(s):
1020 1025 if s in self.series:
1021 1026 return s
@@ -1034,8 +1039,6 b' class queue(object):'
1034 1039 return self.series[0]
1035 1040 return None
1036 1041
1037 if patch is None:
1038 return None
1039 1042 if patch in self.series:
1040 1043 return patch
1041 1044
@@ -1095,12 +1098,12 b' class queue(object):'
1095 1098 self.ui.warn(_('no patches in series\n'))
1096 1099 return 0
1097 1100
1098 patch = self.lookup(patch)
1099 1101 # Suppose our series file is: A B C and the current 'top'
1100 1102 # patch is B. qpush C should be performed (moving forward)
1101 1103 # qpush B is a NOP (no change) qpush A is an error (can't
1102 1104 # go backwards with qpush)
1103 1105 if patch:
1106 patch = self.lookup(patch)
1104 1107 info = self.isapplied(patch)
1105 1108 if info and info[0] >= len(self.applied) - 1:
1106 1109 self.ui.warn(
@@ -1492,7 +1495,7 b' class queue(object):'
1492 1495 n = repo.commit(message, user, ph.date, match=match,
1493 1496 force=True)
1494 1497 # only write patch after a successful commit
1495 patchf.rename()
1498 patchf.close()
1496 1499 self.applied.append(statusentry(n, patchfn))
1497 1500 except:
1498 1501 ctx = repo[cparents[0]]
@@ -2675,7 +2678,11 b' def save(ui, repo, **opts):'
2675 2678 return 0
2676 2679
2677 2680 @command("strip",
2678 [('f', 'force', None, _('force removal of changesets, discard '
2681 [
2682 ('r', 'rev', [], _('strip specified revision (optional, '
2683 'can specify revisions without this '
2684 'option)'), _('REV')),
2685 ('f', 'force', None, _('force removal of changesets, discard '
2679 2686 'uncommitted changes (no backup)')),
2680 2687 ('b', 'backup', None, _('bundle only changesets with local revision'
2681 2688 ' number greater than REV which are not'
@@ -2716,6 +2723,7 b' def strip(ui, repo, *revs, **opts):'
2716 2723 backup = 'none'
2717 2724
2718 2725 cl = repo.changelog
2726 revs = list(revs) + opts.get('rev')
2719 2727 revs = set(scmutil.revrange(repo, revs))
2720 2728 if not revs:
2721 2729 raise util.Abort(_('empty revision set'))
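With the option added above, revisions to strip can be given either positionally or through ``-r/--rev``; both forms end up in the same ``scmutil.revrange`` call. A hedged usage sketch with made-up revision numbers:

    $ hg strip 10
    $ hg strip --rev "11:13"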
@@ -2867,7 +2875,7 b' def select(ui, repo, *args, **opts):'
2867 2875 if i == 0:
2868 2876 q.pop(repo, all=True)
2869 2877 else:
2870 q.pop(repo, i - 1)
2878 q.pop(repo, str(i - 1))
2871 2879 break
2872 2880 if popped:
2873 2881 try:
@@ -2915,6 +2923,7 b' def finish(ui, repo, *revrange, **opts):'
2915 2923
2916 2924 @command("qqueue",
2917 2925 [('l', 'list', False, _('list all available queues')),
2926 ('', 'active', False, _('print name of active queue')),
2918 2927 ('c', 'create', False, _('create new queue')),
2919 2928 ('', 'rename', False, _('rename active queue')),
2920 2929 ('', 'delete', False, _('delete reference to queue')),
@@ -2929,7 +2938,8 b' def qqueue(ui, repo, name=None, **opts):'
2929 2938
2930 2939 Omitting a queue name or specifying -l/--list will show you the registered
2931 2940 queues - by default the "normal" patches queue is registered. The currently
2932 active queue will be marked with "(active)".
2941 active queue will be marked with "(active)". Specifying --active will print
2942 only the name of the active queue.
2933 2943
2934 2944 To create a new queue, use -c/--create. The queue is automatically made
2935 2945 active, except in the case where there are applied patches from the
@@ -3022,8 +3032,11 b' def qqueue(ui, repo, name=None, **opts):'
3022 3032 fh.close()
3023 3033 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3024 3034
3025 if not name or opts.get('list'):
3035 if not name or opts.get('list') or opts.get('active'):
3026 3036 current = _getcurrent()
3037 if opts.get('active'):
3038 ui.write('%s\n' % (current,))
3039 return
3027 3040 for queue in _getqueues():
3028 3041 ui.write('%s' % (queue,))
3029 3042 if queue == current and not ui.quiet:
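A hedged usage sketch of the new flag; the queue name shown is only illustrative:

    $ hg qqueue --active
    patches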
@@ -5,71 +5,115 b''
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 '''hooks for sending email notifications at commit/push time
8 '''hooks for sending email push notifications
9 9
10 Subscriptions can be managed through a hgrc file. Default mode is to
11 print messages to stdout, for testing and configuring.
10 This extension lets you run hooks that send email notifications when
11 changesets are pushed, from either the sending or the receiving side.
12 12
13 To use, configure the notify extension and enable it in hgrc like
14 this::
15
16 [extensions]
17 notify =
13 First, enable the extension as explained in :hg:`help extensions`, and
14 register the hook you want to run. ``incoming`` and ``changegroup`` hooks
15 are run by the changeset receiver while the ``outgoing`` one is for
16 the sender::
18 17
19 18 [hooks]
20 19 # one email for each incoming changeset
21 20 incoming.notify = python:hgext.notify.hook
22 # batch emails when many changesets incoming at one time
21 # one email for all incoming changesets
23 22 changegroup.notify = python:hgext.notify.hook
24 # batch emails when many changesets outgoing at one time (client side)
23
24 # one email for all outgoing changesets
25 25 outgoing.notify = python:hgext.notify.hook
26 26
27 [notify]
28 # config items go here
29
30 Required configuration items::
31
32 config = /path/to/file # file containing subscriptions
33
34 Optional configuration items::
35
36 test = True # print messages to stdout for testing
37 strip = 3 # number of slashes to strip for url paths
38 domain = example.com # domain to use if committer missing domain
39 style = ... # style file to use when formatting email
40 template = ... # template to use when formatting email
41 incoming = ... # template to use when run as incoming hook
42 outgoing = ... # template to use when run as outgoing hook
43 changegroup = ... # template to use when run as changegroup hook
44 maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
45 maxsubject = 67 # truncate subject line longer than this
46 diffstat = True # add a diffstat before the diff content
47 sources = serve # notify if source of incoming changes in this list
48 # (serve == ssh or http, push, pull, bundle)
49 merge = False # send notification for merges (default True)
50 [email]
51 from = user@host.com # email address to send as if none given
52 [web]
53 baseurl = http://hgserver/... # root of hg web site for browsing commits
54
55 The notify config file has same format as a regular hgrc file. It has
56 two sections so you can express subscriptions in whatever way is
57 handier for you.
58
59 ::
27 Now the hooks are running, subscribers must be assigned to
28 repositories. Use the ``[usersubs]`` section to map repositories to a
29 given email or the ``[reposubs]`` section to map emails to a single
30 repository::
60 31
61 32 [usersubs]
62 # key is subscriber email, value is ","-separated list of glob patterns
33 # key is subscriber email, value is a comma-separated list of glob
34 # patterns
63 35 user@host = pattern
64 36
65 37 [reposubs]
66 # key is glob pattern, value is ","-separated list of subscriber emails
38 # key is glob pattern, value is a comma-separated list of subscriber
39 # emails
67 40 pattern = user@host
68 41
69 Glob patterns are matched against path to repository root.
42 Glob patterns are matched against the absolute path to the repository
43 root. The subscriptions can be defined in their own file and
44 referenced with::
45
46 [notify]
47 config = /path/to/subscriptionsfile
48
49 Alternatively, they can be added to Mercurial configuration files by
50 setting the previous entry to an empty value.
51
52 At this point, notifications should be generated but will not be sent until you
53 set the ``notify.test`` entry to ``False``.
54
55 Notifications content can be tweaked with the following configuration entries:
56
57 notify.test
58 If ``True``, print messages to stdout instead of sending them. Default: True.
59
60 notify.sources
61 Space-separated list of change sources. Notifications are sent only
62 if the list includes the source of the incoming or outgoing changes. Incoming
63 sources can be ``serve`` for changes coming from http or ssh,
64 ``pull`` for pulled changes, ``unbundle`` for changes added by
65 :hg:`unbundle` or ``push`` for changes being pushed
66 locally. Outgoing sources are the same except for ``unbundle`` which
67 is replaced by ``bundle``. Default: serve.
68
69 notify.strip
70 Number of leading slashes to strip from url paths. By default, notifications
71 reference repositories by their absolute path. ``notify.strip`` lets you
72 turn them into relative paths. For example, ``notify.strip=3`` will change
73 ``/long/path/repository`` into ``repository``. Default: 0.
74
75 notify.domain
76 If subscriber emails or the from email have no domain set, complete them
77 with this value.
70 78
71 If you like, you can put notify config file in repository that users
72 can push changes to, they can manage their own subscriptions.
79 notify.style
80 Style file to use when formatting emails.
81
82 notify.template
83 Template to use when formatting emails.
84
85 notify.incoming
86 Template to use when run as incoming hook, overriding ``notify.template``.
87
88 notify.outgoing
89 Template to use when run as outgoing hook, overriding ``notify.template``.
90
91 notify.changegroup
92 Template to use when run as a changegroup hook, overriding
93 ``notify.template``.
94
95 notify.maxdiff
96 Maximum number of diff lines to include in notification email. Set to 0
97 to disable the diff, -1 to include all of it. Default: 300.
98
99 notify.maxsubject
100 Maximum number of characters in the email subject line. Default: 67.
101
102 notify.diffstat
103 Set to True to include a diffstat before diff content. Default: True.
104
105 notify.merge
106 If True, send notifications for merge changesets. Default: True.
107
108 If set, the following entries will also be used to customize the notifications:
109
110 email.from
111 Email ``From`` address to use if none can be found in generated email content.
112
113 web.baseurl
114 Root repository browsing URL to combine with repository paths when making
115 references. See also ``notify.strip``.
116
73 117 '''
74 118
75 119 from mercurial.i18n import _
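Putting the rewritten help text together, a minimal working configuration might look like the sketch below. The repository pattern and addresses are placeholders, and ``test`` is left at its default of ``True`` so messages go to stdout until the output looks right:

    [extensions]
    notify =

    [hooks]
    incoming.notify = python:hgext.notify.hook

    [notify]
    sources = serve
    test = True
    maxdiff = 300

    [usersubs]
    alice@example.com = /repos/project/**

    [reposubs]
    /repos/project/** = bob@example.com

    [email]
    from = hg-server@example.com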
@@ -167,9 +211,6 b' class notifier(object):'
167 211 return [mail.addressencode(self.ui, s, self.charsets, self.test)
168 212 for s in sorted(subs)]
169 213
170 def url(self, path=None):
171 return self.ui.config('web', 'baseurl') + (path or self.root)
172
173 214 def node(self, ctx, **props):
174 215 '''format one changeset, unless it is a suppressed merge.'''
175 216 if not self.merge and len(ctx.parents()) > 1:
@@ -58,7 +58,7 b' from mercurial import commands, dispatch'
58 58 from mercurial.i18n import _
59 59
60 60 def _runpager(p):
61 if not hasattr(os, 'fork'):
61 if not util.safehasattr(os, 'fork'):
62 62 sys.stdout = util.popen(p, 'wb')
63 63 if util.isatty(sys.stderr):
64 64 sys.stderr = sys.stdout
@@ -57,24 +57,15 b' cmdtable = {}'
57 57 command = cmdutil.command(cmdtable)
58 58
59 59 def prompt(ui, prompt, default=None, rest=':'):
60 if not ui.interactive() and default is None:
61 raise util.Abort(_("%s Please enter a valid value" % (prompt + rest)))
62 60 if default:
63 61 prompt += ' [%s]' % default
64 prompt += rest
65 while True:
66 r = ui.prompt(prompt, default=default)
67 if r:
68 return r
69 if default is not None:
70 return default
71 ui.warn(_('Please enter a valid value.\n'))
62 return ui.prompt(prompt + rest, default)
72 63
73 def introneeded(opts, number):
74 '''is an introductory message required?'''
64 def introwanted(opts, number):
65 '''is an introductory message apparently wanted?'''
75 66 return number > 1 or opts.get('intro') or opts.get('desc')
76 67
77 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total,
68 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
78 69 patchname=None):
79 70
80 71 desc = []
@@ -141,7 +132,7 b' def makepatch(ui, repo, patchlines, opts'
141 132 flag = ' ' + flag
142 133
143 134 subj = desc[0].strip().rstrip('. ')
144 if not introneeded(opts, total):
135 if not numbered:
145 136 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
146 137 else:
147 138 tlen = len(str(total))
@@ -352,51 +343,66 b' def patchbomb(ui, repo, *revs, **opts):'
352 343 ui.write(_('\nWrite the introductory message for the '
353 344 'patch series.\n\n'))
354 345 body = ui.edit(body, sender)
355 # Save serie description in case sendmail fails
346 # Save series description in case sendmail fails
356 347 msgfile = repo.opener('last-email.txt', 'wb')
357 348 msgfile.write(body)
358 349 msgfile.close()
359 350 return body
360 351
361 352 def getpatchmsgs(patches, patchnames=None):
362 jumbo = []
363 353 msgs = []
364 354
365 355 ui.write(_('This patch series consists of %d patches.\n\n')
366 356 % len(patches))
367 357
358 # build the intro message, or skip it if the user declines
359 if introwanted(opts, len(patches)):
360 msg = makeintro(patches)
361 if msg:
362 msgs.append(msg)
363
364 # are we going to send more than one message?
365 numbered = len(msgs) + len(patches) > 1
366
367 # now generate the actual patch messages
368 368 name = None
369 369 for i, p in enumerate(patches):
370 jumbo.extend(p)
371 370 if patchnames:
372 371 name = patchnames[i]
373 372 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
374 len(patches), name)
373 len(patches), numbered, name)
375 374 msgs.append(msg)
376 375
377 if introneeded(opts, len(patches)):
378 tlen = len(str(len(patches)))
376 return msgs
377
378 def makeintro(patches):
379 tlen = len(str(len(patches)))
379 380
380 flag = ' '.join(opts.get('flag'))
381 if flag:
382 subj = '[PATCH %0*d of %d %s]' % (tlen, 0, len(patches), flag)
383 else:
384 subj = '[PATCH %0*d of %d]' % (tlen, 0, len(patches))
385 subj += ' ' + (opts.get('subject') or
386 prompt(ui, 'Subject: ', rest=subj))
381 flag = opts.get('flag') or ''
382 if flag:
383 flag = ' ' + ' '.join(flag)
384 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
385
386 subj = (opts.get('subject') or
387 prompt(ui, 'Subject: ', rest=prefix, default=''))
388 if not subj:
389 return None # skip intro if the user doesn't bother
387 390
388 body = ''
389 ds = patch.diffstat(jumbo)
390 if ds and opts.get('diffstat'):
391 body = '\n' + ds
391 subj = prefix + ' ' + subj
392 392
393 body = getdescription(body, sender)
394 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
395 msg['Subject'] = mail.headencode(ui, subj, _charsets,
396 opts.get('test'))
393 body = ''
394 if opts.get('diffstat'):
395 # generate a cumulative diffstat of the whole patch series
396 diffstat = patch.diffstat(sum(patches, []))
397 body = '\n' + diffstat
398 else:
399 diffstat = None
397 400
398 msgs.insert(0, (msg, subj, ds))
399 return msgs
401 body = getdescription(body, sender)
402 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
403 msg['Subject'] = mail.headencode(ui, subj, _charsets,
404 opts.get('test'))
405 return (msg, subj, diffstat)
400 406
401 407 def getbundlemsgs(bundle):
402 408 subj = (opts.get('subject')
@@ -429,29 +435,33 b' def patchbomb(ui, repo, *revs, **opts):'
429 435
430 436 showaddrs = []
431 437
432 def getaddrs(opt, prpt=None, default=None):
433 addrs = opts.get(opt.replace('-', '_'))
434 if opt != 'reply-to':
435 showaddr = '%s:' % opt.capitalize()
436 else:
437 showaddr = 'Reply-To:'
438
438 def getaddrs(header, ask=False, default=None):
439 configkey = header.lower()
440 opt = header.replace('-', '_').lower()
441 addrs = opts.get(opt)
439 442 if addrs:
440 showaddrs.append('%s %s' % (showaddr, ', '.join(addrs)))
443 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
441 444 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
442 445
443 addrs = ui.config('email', opt) or ui.config('patchbomb', opt) or ''
444 if not addrs and prpt:
445 addrs = prompt(ui, prpt, default)
446 # not on the command line: fallback to config and then maybe ask
447 addr = (ui.config('email', configkey) or
448 ui.config('patchbomb', configkey) or
449 '')
450 if not addr and ask:
451 addr = prompt(ui, header, default=default)
452 if addr:
453 showaddrs.append('%s: %s' % (header, addr))
454 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
455 else:
456 return default
446 457
447 if addrs:
448 showaddrs.append('%s %s' % (showaddr, addrs))
449 return mail.addrlistencode(ui, [addrs], _charsets, opts.get('test'))
450
451 to = getaddrs('to', 'To')
452 cc = getaddrs('cc', 'Cc', '')
453 bcc = getaddrs('bcc')
454 replyto = getaddrs('reply-to')
458 to = getaddrs('To', ask=True)
459 if not to:
460 # we can get here in non-interactive mode
461 raise util.Abort(_('no recipient addresses provided'))
462 cc = getaddrs('Cc', ask=True, default='') or []
463 bcc = getaddrs('Bcc') or []
464 replyto = getaddrs('Reply-To')
455 465
456 466 if opts.get('diffstat') or opts.get('confirm'):
457 467 ui.write(_('\nFinal summary:\n\n'))
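The reworked ``getaddrs`` resolves each header from the command line first, then from the ``[email]`` or ``[patchbomb]`` configuration, and finally (for To and Cc) from an interactive prompt, aborting when no recipient can be determined non-interactively. A hedged configuration sketch with placeholder addresses:

    [email]
    from = Alice <alice@example.com>
    to = project-devel@example.com
    cc = bob@example.com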
@@ -27,6 +27,9 b' The following settings are available::'
27 27
28 28 [progress]
29 29 delay = 3 # number of seconds (float) before showing the progress bar
30 changedelay = 1 # changedelay: minimum delay before showing a new topic.
31 # If set to less than 3 * refresh, that value will
32 # be used instead.
30 33 refresh = 0.1 # time in seconds between refreshes of the progress bar
31 34 format = topic bar number estimate # format of the progress bar
32 35 width = <none> # if set, the maximum width of the progress information
@@ -53,7 +56,7 b' def spacejoin(*args):'
53 56 return ' '.join(s for s in args if s)
54 57
55 58 def shouldprint(ui):
56 return (util.isatty(sys.stderr) or ui.configbool('progress', 'assume-tty'))
59 return util.isatty(sys.stderr) or ui.configbool('progress', 'assume-tty')
57 60
58 61 def fmtremaining(seconds):
59 62 if seconds < 60:
@@ -105,9 +108,13 b' class progbar(object):'
105 108 self.printed = False
106 109 self.lastprint = time.time() + float(self.ui.config(
107 110 'progress', 'delay', default=3))
111 self.lasttopic = None
108 112 self.indetcount = 0
109 113 self.refresh = float(self.ui.config(
110 114 'progress', 'refresh', default=0.1))
115 self.changedelay = max(3 * self.refresh,
116 float(self.ui.config(
117 'progress', 'changedelay', default=1)))
111 118 self.order = self.ui.configlist(
112 119 'progress', 'format',
113 120 default=['topic', 'bar', 'number', 'estimate'])
@@ -184,6 +191,7 b' class progbar(object):'
184 191 else:
185 192 out = spacejoin(head, tail)
186 193 sys.stderr.write('\r' + out[:termwidth])
194 self.lasttopic = topic
187 195 sys.stderr.flush()
188 196
189 197 def clear(self):
@@ -248,10 +256,18 b' class progbar(object):'
248 256 self.topics.append(topic)
249 257 self.topicstates[topic] = pos, item, unit, total
250 258 if now - self.lastprint >= self.refresh and self.topics:
251 self.lastprint = now
252 self.show(now, topic, *self.topicstates[topic])
259 if (self.lasttopic is None # first time we printed
260 # not a topic change
261 or topic == self.lasttopic
262 # it's been long enough we should print anyway
263 or now - self.lastprint >= self.changedelay):
264 self.lastprint = now
265 self.show(now, topic, *self.topicstates[topic])
266
267 _singleton = None
253 268
254 269 def uisetup(ui):
270 global _singleton
255 271 class progressui(ui.__class__):
256 272 _progbar = None
257 273
@@ -278,7 +294,9 b' def uisetup(ui):'
278 294 # we instantiate one globally shared progress bar to avoid
279 295 # competing progress bars when multiple UI objects get created
280 296 if not progressui._progbar:
281 progressui._progbar = progbar(ui)
297 if _singleton is None:
298 _singleton = progbar(ui)
299 progressui._progbar = _singleton
282 300
283 301 def reposetup(ui, repo):
284 302 uisetup(repo.ui)
@@ -15,7 +15,7 b' http://mercurial.selenic.com/wiki/Rebase'
15 15 '''
16 16
17 17 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
18 from mercurial import extensions, copies, patch
18 from mercurial import extensions, patch
19 19 from mercurial.commands import templateopts
20 20 from mercurial.node import nullrev
21 21 from mercurial.lock import release
@@ -34,11 +34,15 b' command = cmdutil.command(cmdtable)'
34 34 _('rebase from the base of the specified changeset '
35 35 '(up to greatest common ancestor of base and dest)'),
36 36 _('REV')),
37 ('r', 'rev', [],
38 _('rebase these revisions'),
39 _('REV')),
37 40 ('d', 'dest', '',
38 41 _('rebase onto the specified changeset'), _('REV')),
39 42 ('', 'collapse', False, _('collapse the rebased changesets')),
40 43 ('m', 'message', '',
41 44 _('use text as collapse commit message'), _('TEXT')),
45 ('e', 'edit', False, _('invoke editor on commit messages')),
42 46 ('l', 'logfile', '',
43 47 _('read collapse commit message from file'), _('FILE')),
44 48 ('', 'keep', False, _('keep original changesets')),
@@ -105,6 +109,10 b' def rebase(ui, repo, **opts):'
105 109 skipped = set()
106 110 targetancestors = set()
107 111
112 editor = None
113 if opts.get('edit'):
114 editor = cmdutil.commitforceeditor
115
108 116 lock = wlock = None
109 117 try:
110 118 lock = repo.lock()
@@ -114,6 +122,7 b' def rebase(ui, repo, **opts):'
114 122 destf = opts.get('dest', None)
115 123 srcf = opts.get('source', None)
116 124 basef = opts.get('base', None)
125 revf = opts.get('rev', [])
117 126 contf = opts.get('continue')
118 127 abortf = opts.get('abort')
119 128 collapsef = opts.get('collapse', False)
@@ -151,7 +160,13 b' def rebase(ui, repo, **opts):'
151 160 else:
152 161 if srcf and basef:
153 162 raise util.Abort(_('cannot specify both a '
163 'source and a base'))
164 if revf and basef:
165 raise util.Abort(_('cannot specify both a '
154 166 'revision and a base'))
167 if revf and srcf:
168 raise util.Abort(_('cannot specify both a '
169 'revision and a source'))
155 170 if detachf:
156 171 if not srcf:
157 172 raise util.Abort(
@@ -160,7 +175,38 b' def rebase(ui, repo, **opts):'
160 175 raise util.Abort(_('cannot specify a base with detach'))
161 176
162 177 cmdutil.bailifchanged(repo)
163 result = buildstate(repo, destf, srcf, basef, detachf)
178
179 if not destf:
180 # Destination defaults to the latest revision in the
181 # current branch
182 branch = repo[None].branch()
183 dest = repo[branch]
184 else:
185 dest = repo[destf]
186
187 if revf:
188 revgen = repo.set('%lr', revf)
189 elif srcf:
190 revgen = repo.set('(%r)::', srcf)
191 else:
192 base = basef or '.'
193 revgen = repo.set('(children(ancestor(%r, %d)) and ::(%r))::',
194 base, dest, base)
195
196 rebaseset = [c.rev() for c in revgen]
197
198 if not rebaseset:
199 repo.ui.debug('base is ancestor of destination')
200 result = None
201 elif not keepf and list(repo.set('first(children(%ld) - %ld)',
202 rebaseset, rebaseset)):
203 raise util.Abort(
204 _("can't remove original changesets with"
205 " unrebased descendants"),
206 hint=_('use --keep to keep original changesets'))
207 else:
208 result = buildstate(repo, dest, rebaseset, detachf)
209
164 210 if not result:
165 211 # Empty state built, nothing to rebase
166 212 ui.status(_('nothing to rebase\n'))
@@ -215,9 +261,10 b' def rebase(ui, repo, **opts):'
215 261 'resolve, then hg rebase --continue)'))
216 262 finally:
217 263 ui.setconfig('ui', 'forcemerge', '')
218 updatedirstate(repo, rev, target, p2)
264 cmdutil.duplicatecopies(repo, rev, target, p2)
219 265 if not collapsef:
220 newrev = concludenode(repo, rev, p1, p2, extrafn=extrafn)
266 newrev = concludenode(repo, rev, p1, p2, extrafn=extrafn,
267 editor=editor)
221 268 else:
222 269 # Skip commit if we are collapsing
223 270 repo.dirstate.setparents(repo[p1].node())
@@ -247,7 +294,7 b' def rebase(ui, repo, **opts):'
247 294 commitmsg += '\n* %s' % repo[rebased].description()
248 295 commitmsg = ui.edit(commitmsg, repo.ui.username())
249 296 newrev = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
250 extrafn=extrafn)
297 extrafn=extrafn, editor=editor)
251 298
252 299 if 'qtip' in repo.tags():
253 300 updatemq(repo, state, skipped, **opts)
@@ -301,21 +348,7 b' def checkexternal(repo, state, targetanc'
301 348 external = p.rev()
302 349 return external
303 350
304 def updatedirstate(repo, rev, p1, p2):
305 """Keep track of renamed files in the revision that is going to be rebased
306 """
307 # Here we simulate the copies and renames in the source changeset
308 cop, diver = copies.copies(repo, repo[rev], repo[p1], repo[p2], True)
309 m1 = repo[rev].manifest()
310 m2 = repo[p1].manifest()
311 for k, v in cop.iteritems():
312 if k in m1:
313 if v in m1 or v in m2:
314 repo.dirstate.copy(v, k)
315 if v in m2 and v not in m1 and k in m2:
316 repo.dirstate.remove(v)
317
318 def concludenode(repo, rev, p1, p2, commitmsg=None, extrafn=None):
351 def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None):
319 352 'Commit the changes and store useful information in extra'
320 353 try:
321 354 repo.dirstate.setparents(repo[p1].node(), repo[p2].node())
@@ -327,7 +360,7 b' def concludenode(repo, rev, p1, p2, comm'
327 360 extrafn(ctx, extra)
328 361 # Commit might fail if unresolved files exist
329 362 newrev = repo.commit(text=commitmsg, user=ctx.user(),
330 date=ctx.date(), extra=extra)
363 date=ctx.date(), extra=extra, editor=editor)
331 364 repo.dirstate.setbranch(repo[newrev].branch())
332 365 return newrev
333 366 except util.Abort:
@@ -515,68 +548,47 b' def abort(repo, originalwd, target, stat'
515 548 repo.ui.warn(_('rebase aborted\n'))
516 549 return 0
517 550
518 def buildstate(repo, dest, src, base, detach):
519 'Define which revisions are going to be rebased and where'
520 targetancestors = set()
521 detachset = set()
551 def buildstate(repo, dest, rebaseset, detach):
552 '''Define which revisions are going to be rebased and where
522 553
523 if not dest:
524 # Destination defaults to the latest revision in the current branch
525 branch = repo[None].branch()
526 dest = repo[branch].rev()
527 else:
528 dest = repo[dest].rev()
554 repo: repo
555 dest: context
556 rebaseset: set of rev
557 detach: boolean'''
529 558
530 559 # This check isn't strictly necessary, since mq detects commits over an
531 560 # applied patch. But it prevents messing up the working directory when
532 561 # a partially completed rebase is blocked by mq.
533 if 'qtip' in repo.tags() and (repo[dest].node() in
562 if 'qtip' in repo.tags() and (dest.node() in
534 563 [s.node for s in repo.mq.applied]):
535 564 raise util.Abort(_('cannot rebase onto an applied mq patch'))
536 565
537 if src:
538 commonbase = repo[src].ancestor(repo[dest])
539 samebranch = repo[src].branch() == repo[dest].branch()
540 if commonbase == repo[src]:
541 raise util.Abort(_('source is ancestor of destination'))
542 if samebranch and commonbase == repo[dest]:
543 raise util.Abort(_('source is descendant of destination'))
544 source = repo[src].rev()
545 if detach:
546 # We need to keep track of source's ancestors up to the common base
547 srcancestors = set(repo.changelog.ancestors(source))
548 baseancestors = set(repo.changelog.ancestors(commonbase.rev()))
549 detachset = srcancestors - baseancestors
550 detachset.discard(commonbase.rev())
551 else:
552 if base:
553 cwd = repo[base].rev()
554 else:
555 cwd = repo['.'].rev()
566 detachset = set()
567 roots = list(repo.set('roots(%ld)', rebaseset))
568 if not roots:
569 raise util.Abort(_('no matching revisions'))
570 if len(roots) > 1:
571 raise util.Abort(_("can't rebase multiple roots"))
572 root = roots[0]
556 573
557 if cwd == dest:
558 repo.ui.debug('source and destination are the same\n')
559 return None
560
561 targetancestors = set(repo.changelog.ancestors(dest))
562 if cwd in targetancestors:
563 repo.ui.debug('source is ancestor of destination\n')
564 return None
574 commonbase = root.ancestor(dest)
575 if commonbase == root:
576 raise util.Abort(_('source is ancestor of destination'))
577 if commonbase == dest:
578 samebranch = root.branch() == dest.branch()
579 if samebranch and root in dest.children():
580 repo.ui.debug('source is a child of destination')
581 return None
582 # rebase on ancestor, force detach
583 detach = True
584 if detach:
585 detachset = [c.rev() for c in repo.set('::%d - ::%d - %d',
586 root, commonbase, root)]
565 587
566 cwdancestors = set(repo.changelog.ancestors(cwd))
567 if dest in cwdancestors:
568 repo.ui.debug('source is descendant of destination\n')
569 return None
570
571 cwdancestors.add(cwd)
572 rebasingbranch = cwdancestors - targetancestors
573 source = min(rebasingbranch)
574
575 repo.ui.debug('rebase onto %d starting from %d\n' % (dest, source))
576 state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
588 repo.ui.debug('rebase onto %d starting from %d\n' % (dest, root))
589 state = dict.fromkeys(rebaseset, nullrev)
577 590 state.update(dict.fromkeys(detachset, nullmerge))
578 state[source] = nullrev
579 return repo['.'].rev(), repo[dest].rev(), state
591 return repo['.'].rev(), dest.rev(), state
580 592
581 593 def pullrebase(orig, ui, repo, *args, **opts):
582 594 'Call rebase after pull if the latter has been invoked with --rebase'
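Tying the new option to the revset-driven ``buildstate`` above, typical invocations would look like the hedged sketch below; the revisions and branch names are illustrative, and ``--rev`` remains mutually exclusive with ``--source`` and ``--base`` per the checks added earlier:

    $ hg rebase --rev "42::45" --dest default
    $ hg rebase --source 42 --dest stable --edit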
@@ -36,7 +36,8 b' def relink(ui, repo, origin=None, **opts'
36 36 command is running. (Both repositories will be locked against
37 37 writes.)
38 38 """
39 if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'):
39 if (not util.safehasattr(util, 'samefile') or
40 not util.safehasattr(util, 'samedevice')):
40 41 raise util.Abort(_('hardlinks are not supported on this system'))
41 42 src = hg.repository(ui, ui.expandpath(origin or 'default-relink',
42 43 origin or 'default'))
@@ -6,7 +6,7 b''
6 6 '''share a common history between several working directories'''
7 7
8 8 from mercurial.i18n import _
9 from mercurial import hg, commands
9 from mercurial import hg, commands, util
10 10
11 11 def share(ui, source, dest=None, noupdate=False):
12 12 """create a new shared repository
@@ -28,11 +28,46 b' def share(ui, source, dest=None, noupdat'
28 28
29 29 return hg.share(ui, source, dest, not noupdate)
30 30
31 def unshare(ui, repo):
32 """convert a shared repository to a normal one
33
34 Copy the store data to the repo and remove the sharedpath data.
35 """
36
37 if repo.sharedpath == repo.path:
38 raise util.Abort(_("this is not a shared repo"))
39
40 destlock = lock = None
41 lock = repo.lock()
42 try:
43 # we use locks here because if we race with commit, we
44 # can end up with extra data in the cloned revlogs that's
45 # not pointed to by changesets, thus causing verify to
46 # fail
47
48 destlock = hg.copystore(ui, repo, repo.path)
49
50 sharefile = repo.join('sharedpath')
51 util.rename(sharefile, sharefile + '.old')
52
53 repo.requirements.discard('sharedpath')
54 repo._writerequirements()
55 finally:
56 destlock and destlock.release()
57 lock and lock.release()
58
59 # update store, spath, sopener and sjoin of repo
60 repo.__init__(ui, repo.root)
61
31 62 cmdtable = {
32 63 "share":
33 64 (share,
34 65 [('U', 'noupdate', None, _('do not create a working copy'))],
35 66 _('[-U] SOURCE [DEST]')),
67 "unshare":
68 (unshare,
69 [],
70 ''),
36 71 }
37 72
38 73 commands.norepo += " share"
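A hedged usage sketch of the new command, run from a working copy that was previously created with ``hg share`` (the directory name is made up):

    $ cd shared-clone
    $ hg unshare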
@@ -81,6 +81,7 b' class transplanter(object):'
81 81 self.opener = scmutil.opener(self.path)
82 82 self.transplants = transplants(self.path, 'transplants',
83 83 opener=self.opener)
84 self.editor = None
84 85
85 86 def applied(self, repo, node, parent):
86 87 '''returns True if a node is already an ancestor of parent
@@ -105,10 +106,11 b' class transplanter(object):'
105 106 diffopts = patch.diffopts(self.ui, opts)
106 107 diffopts.git = True
107 108
108 lock = wlock = None
109 lock = wlock = tr = None
109 110 try:
110 111 wlock = repo.wlock()
111 112 lock = repo.lock()
113 tr = repo.transaction('transplant')
112 114 for rev in revs:
113 115 node = revmap[rev]
114 116 revstr = '%s:%s' % (rev, short(node))
@@ -172,12 +174,15 b' class transplanter(object):'
172 174 finally:
173 175 if patchfile:
174 176 os.unlink(patchfile)
177 tr.close()
175 178 if pulls:
176 179 repo.pull(source, heads=pulls)
177 180 merge.update(repo, pulls[-1], False, False, None)
178 181 finally:
179 182 self.saveseries(revmap, merges)
180 183 self.transplants.write()
184 if tr:
185 tr.release()
181 186 lock.release()
182 187 wlock.release()
183 188
@@ -253,7 +258,8 b' class transplanter(object):'
253 258 else:
254 259 m = match.exact(repo.root, '', files)
255 260
256 n = repo.commit(message, user, date, extra=extra, match=m)
261 n = repo.commit(message, user, date, extra=extra, match=m,
262 editor=self.editor)
257 263 if not n:
258 264 # Crash here to prevent an unclear crash later, in
259 265 # transplants.write(). This can happen if patch.patch()
@@ -304,7 +310,8 b' class transplanter(object):'
304 310 revlog.hex(parents[0]))
305 311 if merge:
306 312 repo.dirstate.setparents(p1, parents[1])
307 n = repo.commit(message, user, date, extra=extra)
313 n = repo.commit(message, user, date, extra=extra,
314 editor=self.editor)
308 315 if not n:
309 316 raise util.Abort(_('commit failed'))
310 317 if not merge:
@@ -461,6 +468,7 b' def browserevs(ui, repo, nodes, opts):'
461 468 ('a', 'all', None, _('pull all changesets up to BRANCH')),
462 469 ('p', 'prune', [], _('skip over REV'), _('REV')),
463 470 ('m', 'merge', [], _('merge at REV'), _('REV')),
471 ('e', 'edit', False, _('invoke editor on commit messages')),
464 472 ('', 'log', None, _('append transplant info to log message')),
465 473 ('c', 'continue', None, _('continue last transplant session '
466 474 'after repair')),
@@ -549,6 +557,8 b' def transplant(ui, repo, *revs, **opts):'
549 557 opts['filter'] = ui.config('transplant', 'filter')
550 558
551 559 tp = transplanter(ui, repo)
560 if opts.get('edit'):
561 tp.editor = cmdutil.commitforceeditor
552 562
553 563 p1, p2 = repo.dirstate.parents()
554 564 if len(repo) > 0 and p1 == revlog.nullid:
@@ -195,7 +195,7 b' class fileit(object):'
195 195 return
196 196 f = self.opener(name, "w", atomictemp=True)
197 197 f.write(data)
198 f.rename()
198 f.close()
199 199 destfile = os.path.join(self.basedir, name)
200 200 os.chmod(destfile, mode)
201 201
@@ -366,11 +366,11 b' nomem:'
366 366
367 367 static PyObject *bdiff(PyObject *self, PyObject *args)
368 368 {
369 char *sa, *sb;
369 char *sa, *sb, *rb;
370 370 PyObject *result = NULL;
371 371 struct line *al, *bl;
372 372 struct hunk l, *h;
373 char encode[12], *rb;
373 uint32_t encode[3];
374 374 int an, bn, len = 0, la, lb, count;
375 375
376 376 if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
@@ -407,9 +407,9 b' static PyObject *bdiff(PyObject *self, P'
407 407 for (h = l.next; h; h = h->next) {
408 408 if (h->a1 != la || h->b1 != lb) {
409 409 len = bl[h->b1].l - bl[lb].l;
410 *(uint32_t *)(encode) = htonl(al[la].l - al->l);
411 *(uint32_t *)(encode + 4) = htonl(al[h->a1].l - al->l);
412 *(uint32_t *)(encode + 8) = htonl(len);
410 encode[0] = htonl(al[la].l - al->l);
411 encode[1] = htonl(al[h->a1].l - al->l);
412 encode[2] = htonl(len);
413 413 memcpy(rb, encode, 12);
414 414 memcpy(rb + 12, bl[lb].l, len);
415 415 rb += 12 + len;
@@ -26,7 +26,13 b' def read(repo):'
26 26 bookmarks = {}
27 27 try:
28 28 for line in repo.opener('bookmarks'):
29 sha, refspec = line.strip().split(' ', 1)
29 line = line.strip()
30 if not line:
31 continue
32 if ' ' not in line:
33 repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n') % line)
34 continue
35 sha, refspec = line.split(' ', 1)
30 36 refspec = encoding.tolocal(refspec)
31 37 try:
32 38 bookmarks[refspec] = repo.changelog.lookup(sha)
@@ -84,7 +90,7 b' def write(repo):'
84 90 file = repo.opener('bookmarks', 'w', atomictemp=True)
85 91 for refspec, node in refs.iteritems():
86 92 file.write("%s %s\n" % (hex(node), encoding.fromlocal(refspec)))
87 file.rename()
93 file.close()
88 94
89 95 # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
90 96 try:
@@ -115,7 +121,7 b' def setcurrent(repo, mark):'
115 121 try:
116 122 file = repo.opener('bookmarks.current', 'w', atomictemp=True)
117 123 file.write(encoding.fromlocal(mark))
118 file.rename()
124 file.close()
119 125 finally:
120 126 wlock.release()
121 127 repo._bookmarkcurrent = mark
@@ -140,16 +146,15 b' def update(repo, parents, node):'
140 146 marks[mark] = new.node()
141 147 update = True
142 148 if update:
143 write(repo)
149 repo._writebookmarks(marks)
144 150
145 151 def listbookmarks(repo):
146 152 # We may try to list bookmarks on a repo type that does not
147 153 # support it (e.g., statichttprepository).
148 if not hasattr(repo, '_bookmarks'):
149 return {}
154 marks = getattr(repo, '_bookmarks', {})
150 155
151 156 d = {}
152 for k, v in repo._bookmarks.iteritems():
157 for k, v in marks.iteritems():
153 158 d[k] = hex(v)
154 159 return d
155 160
@@ -103,9 +103,7 b' class RangeableFileObject(object):'
103 103 """This effectively allows us to wrap at the instance level.
104 104 Any attribute not found in _this_ object will be searched for
105 105 in self.fo. This includes methods."""
106 if hasattr(self.fo, name):
107 return getattr(self.fo, name)
108 raise AttributeError(name)
106 return getattr(self.fo, name)
109 107
110 108 def tell(self):
111 109 """Return the position within the range.
@@ -170,10 +168,8 b' class RangeableFileObject(object):'
170 168 offset is relative to the current position (self.realpos).
171 169 """
172 170 assert offset >= 0
173 if not hasattr(self.fo, 'seek'):
174 self._poor_mans_seek(offset)
175 else:
176 self.fo.seek(self.realpos + offset)
171 seek = getattr(self.fo, 'seek', self._poor_mans_seek)
172 seek(self.realpos + offset)
177 173 self.realpos += offset
178 174
179 175 def _poor_mans_seek(self, offset):
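Throughout this changeset, hasattr() checks are replaced with getattr() plus a default (or util.safehasattr), because on Python 2 hasattr() swallows any exception raised while computing the attribute, not just AttributeError. The pattern in isolation, with a sentinel so that None remains a legal attribute value::

    nothing = object()          # unique sentinel; None may be a real value

    def safehasattr(obj, name):
        # only AttributeError makes getattr() fall back to the default,
        # so unexpected exceptions from properties still propagate
        return getattr(obj, name, nothing) is not nothing

    class Example(object):
        value = None

    safehasattr(Example(), 'value')    # True, even though the value is None
    getattr(Example(), 'missing', 42)  # no such attribute: returns 42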
@@ -8,7 +8,7 b''
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, tempfile
11 import util, scmutil, templater, patch, error, templatekw, revlog
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 12 import match as matchmod
13 13 import subrepo
14 14
@@ -75,6 +75,10 b' def bailifchanged(repo):'
75 75 modified, added, removed, deleted = repo.status()[:4]
76 76 if modified or added or removed or deleted:
77 77 raise util.Abort(_("outstanding uncommitted changes"))
78 ctx = repo[None]
79 for s in ctx.substate:
80 if ctx.sub(s).dirty():
81 raise util.Abort(_("uncommitted changes in subrepo %s") % s)
78 82
79 83 def logmessage(ui, opts):
80 84 """ get the log message according to -m and -l option """
@@ -109,12 +113,13 b' def loglimit(opts):'
109 113 limit = None
110 114 return limit
111 115
112 def makefilename(repo, pat, node,
116 def makefilename(repo, pat, node, desc=None,
113 117 total=None, seqno=None, revwidth=None, pathname=None):
114 118 node_expander = {
115 119 'H': lambda: hex(node),
116 120 'R': lambda: str(repo.changelog.rev(node)),
117 121 'h': lambda: short(node),
122 'm': lambda: re.sub('[^\w]', '_', str(desc))
118 123 }
119 124 expander = {
120 125 '%': lambda: '%',
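The new 'm' entry above backs the %m format specifier added to hg export: the first line of the commit description, with every non-word character flattened to an underscore so it is safe inside a file name. The substitution by itself::

    import re

    def descfilename(desc):
        # keep only word characters from the first line of the message,
        # as the new %m expander does
        firstline = desc.rstrip().split('\n')[0]
        return re.sub(r'[^\w]', '_', firstline)

    descfilename('fix issue #42: crash on empty input\n\ndetails...')
    # -> 'fix_issue__42__crash_on_empty_input'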
@@ -154,14 +159,14 b' def makefilename(repo, pat, node,'
154 159 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
155 160 inst.args[0])
156 161
157 def makefileobj(repo, pat, node=None, total=None,
162 def makefileobj(repo, pat, node=None, desc=None, total=None,
158 163 seqno=None, revwidth=None, mode='wb', pathname=None):
159 164
160 165 writable = mode not in ('r', 'rb')
161 166
162 167 if not pat or pat == '-':
163 168 fp = writable and repo.ui.fout or repo.ui.fin
164 if hasattr(fp, 'fileno'):
169 if util.safehasattr(fp, 'fileno'):
165 170 return os.fdopen(os.dup(fp.fileno()), mode)
166 171 else:
167 172 # if this fp can't be duped properly, return
@@ -177,11 +182,11 b' def makefileobj(repo, pat, node=None, to'
177 182 return getattr(self.f, attr)
178 183
179 184 return wrappedfileobj(fp)
180 if hasattr(pat, 'write') and writable:
185 if util.safehasattr(pat, 'write') and writable:
181 186 return pat
182 if hasattr(pat, 'read') and 'r' in mode:
187 if util.safehasattr(pat, 'read') and 'r' in mode:
183 188 return pat
184 return open(makefilename(repo, pat, node, total, seqno, revwidth,
189 return open(makefilename(repo, pat, node, desc, total, seqno, revwidth,
185 190 pathname),
186 191 mode)
187 192
@@ -516,11 +521,13 b" def export(repo, revs, template='hg-%h.p"
516 521
517 522 shouldclose = False
518 523 if not fp:
519 fp = makefileobj(repo, template, node, total=total, seqno=seqno,
520 revwidth=revwidth, mode='ab')
524 desc_lines = ctx.description().rstrip().split('\n')
525 desc = desc_lines[0] #Commit always has a first line.
526 fp = makefileobj(repo, template, node, desc=desc, total=total,
527 seqno=seqno, revwidth=revwidth, mode='ab')
521 528 if fp != template:
522 529 shouldclose = True
523 if fp != sys.stdout and hasattr(fp, 'name'):
530 if fp != sys.stdout and util.safehasattr(fp, 'name'):
524 531 repo.ui.note("%s\n" % fp.name)
525 532
526 533 fp.write("# HG changeset patch\n")
@@ -1173,6 +1180,19 b' def add(ui, repo, match, dryrun, listsub'
1173 1180 bad.extend(f for f in rejected if f in match.files())
1174 1181 return bad
1175 1182
1183 def duplicatecopies(repo, rev, p1, p2):
1184 "Reproduce copies found in the source revision in the dirstate for grafts"
1185 # Here we simulate the copies and renames in the source changeset
1186 cop, diver = copies.copies(repo, repo[rev], repo[p1], repo[p2], True)
1187 m1 = repo[rev].manifest()
1188 m2 = repo[p1].manifest()
1189 for k, v in cop.iteritems():
1190 if k in m1:
1191 if v in m1 or v in m2:
1192 repo.dirstate.copy(v, k)
1193 if v in m2 and v not in m1 and k in m2:
1194 repo.dirstate.remove(v)
1195
1176 1196 def commit(ui, repo, commitfunc, pats, opts):
1177 1197 '''commit the specified files or all outstanding changes'''
1178 1198 date = opts.get('date')
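duplicatecopies() above replays the copies and renames recorded in the grafted revision into the dirstate, so the grafted commit keeps the same copy metadata. The rule it applies, sketched over plain sets and dicts (manifests become sets of filenames; all names here are stand-ins)::

    def replaycopies(copies, m_src, m_dest):
        # copies maps destination -> source in the grafted revision;
        # m_src is its manifest, m_dest the manifest of the new parent
        copy, remove = {}, set()
        for dst, src in copies.items():
            if dst in m_src and (src in m_src or src in m_dest):
                copy[dst] = src              # record dst as copied from src
                if src in m_dest and src not in m_src and dst in m_dest:
                    remove.add(src)          # it was a rename: drop the old name
        return copy, remove

    replaycopies({'new.c': 'old.c'}, m_src={'new.c'}, m_dest={'old.c', 'new.c'})
    # -> ({'new.c': 'old.c'}, {'old.c'})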
@@ -119,6 +119,10 b' diffopts2 = ['
119 119 ('', 'stat', None, _('output diffstat-style summary of changes')),
120 120 ]
121 121
122 mergetoolopts = [
123 ('t', 'tool', '', _('specify merge tool')),
124 ]
125
122 126 similarityopts = [
123 127 ('s', 'similarity', '',
124 128 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
@@ -303,6 +307,18 b' def archive(ui, repo, dest, **opts):'
303 307 The archive type is automatically detected based on file
304 308 extension (or override using -t/--type).
305 309
310 .. container:: verbose
311
312 Examples:
313
314 - create a zip file containing the 1.0 release::
315
316 hg archive -r 1.0 project-1.0.zip
317
318 - create a tarball excluding .hg files::
319
320 hg archive project.tar.gz -X ".hg*"
321
306 322 Valid types are:
307 323
308 324 :``files``: a directory full of files (default)
@@ -348,10 +364,10 b' def archive(ui, repo, dest, **opts):'
348 364
349 365 @command('backout',
350 366 [('', 'merge', None, _('merge with old dirstate parent after backout')),
351 ('', 'parent', '', _('parent to choose when backing out merge'), _('REV')),
352 ('t', 'tool', '', _('specify merge tool')),
367 ('', 'parent', '',
368 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
353 369 ('r', 'rev', '', _('revision to backout'), _('REV')),
354 ] + walkopts + commitopts + commitopts2,
370 ] + mergetoolopts + walkopts + commitopts + commitopts2,
355 371 _('[OPTION]... [-r] REV'))
356 372 def backout(ui, repo, node=None, rev=None, **opts):
357 373 '''reverse effect of earlier changeset
@@ -363,15 +379,21 b' def backout(ui, repo, node=None, rev=Non'
363 379 is committed automatically. Otherwise, hg needs to merge the
364 380 changes and the merged result is left uncommitted.
365 381
366 By default, the pending changeset will have one parent,
367 maintaining a linear history. With --merge, the pending changeset
368 will instead have two parents: the old parent of the working
369 directory and a new child of REV that simply undoes REV.
370
371 Before version 1.7, the behavior without --merge was equivalent to
372 specifying --merge followed by :hg:`update --clean .` to cancel
373 the merge and leave the child of REV as a head to be merged
374 separately.
382 .. note::
383 backout cannot be used to fix either an unwanted or
384 incorrect merge.
385
386 .. container:: verbose
387
388 By default, the pending changeset will have one parent,
389 maintaining a linear history. With --merge, the pending
390 changeset will instead have two parents: the old parent of the
391 working directory and a new child of REV that simply undoes REV.
392
393 Before version 1.7, the behavior without --merge was equivalent
394 to specifying --merge followed by :hg:`update --clean .` to
395 cancel the merge and leave the child of REV as a head to be
396 merged separately.
375 397
376 398 See :hg:`help dates` for a list of formats valid for -d/--date.
377 399
@@ -403,8 +425,7 b' def backout(ui, repo, node=None, rev=Non'
403 425 raise util.Abort(_('cannot backout a change with no parents'))
404 426 if p2 != nullid:
405 427 if not opts.get('parent'):
406 raise util.Abort(_('cannot backout a merge changeset without '
407 '--parent'))
428 raise util.Abort(_('cannot backout a merge changeset'))
408 429 p = repo.lookup(opts['parent'])
409 430 if p not in (p1, p2):
410 431 raise util.Abort(_('%s is not a parent of %s') %
@@ -486,6 +507,54 b' def bisect(ui, repo, rev=None, extra=Non'
486 507 (command not found) will abort the bisection, and any other
487 508 non-zero exit status means the revision is bad.
488 509
510 .. container:: verbose
511
512 Some examples:
513
514 - start a bisection with known bad revision 34, and good revision 12::
515
516 hg bisect --bad 34
517 hg bisect --good 12
518
519 - advance the current bisection by marking current revision as good or
520 bad::
521
522 hg bisect --good
523 hg bisect --bad
524
525 - mark the current revision, or a known revision, to be skipped (e.g. if
526 that revision is not usable because of another issue)::
527
528 hg bisect --skip
529 hg bisect --skip 23
530
531 - forget the current bisection::
532
533 hg bisect --reset
534
535 - use 'make && make tests' to automatically find the first broken
536 revision::
537
538 hg bisect --reset
539 hg bisect --bad 34
540 hg bisect --good 12
541 hg bisect --command 'make && make tests'
542
543 - see all changesets whose states are already known in the current
544 bisection::
545
546 hg log -r "bisect(pruned)"
547
548 - see all changesets that took part in the current bisection::
549
550 hg log -r "bisect(range)"
551
552 - with the graphlog extension, you can even get a nice graph::
553
554 hg log --graph -r "bisect(range)"
555
556 See :hg:`help revsets` for more about the `bisect()` keyword.
557
489 558 Returns 0 on success.
490 559 """
491 560 def extendbisectrange(nodes, good):
@@ -767,7 +836,6 b' def branch(ui, repo, label=None, **opts)'
767 836 :hg:`commit --close-branch` to mark this branch as closed.
768 837
769 838 .. note::
770
771 839 Branch names are permanent. Use :hg:`bookmark` to create a
772 840 light-weight bookmark instead. See :hg:`help glossary` for more
773 841 information about named branches and bookmarks.
@@ -977,56 +1045,84 b' def clone(ui, source, dest=None, **opts)'
977 1045 The location of the source is added to the new repository's
978 1046 ``.hg/hgrc`` file, as the default to be used for future pulls.
979 1047
980 See :hg:`help urls` for valid source format details.
981
982 It is possible to specify an ``ssh://`` URL as the destination, but no
983 ``.hg/hgrc`` and working directory will be created on the remote side.
984 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
985
986 A set of changesets (tags, or branch names) to pull may be specified
987 by listing each changeset (tag, or branch name) with -r/--rev.
988 If -r/--rev is used, the cloned repository will contain only a subset
989 of the changesets of the source repository. Only the set of changesets
990 defined by all -r/--rev options (including all their ancestors)
991 will be pulled into the destination repository.
992 No subsequent changesets (including subsequent tags) will be present
993 in the destination.
994
995 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
996 local source repositories.
997
998 For efficiency, hardlinks are used for cloning whenever the source
999 and destination are on the same filesystem (note this applies only
1000 to the repository data, not to the working directory). Some
1001 filesystems, such as AFS, implement hardlinking incorrectly, but
1002 do not report errors. In these cases, use the --pull option to
1003 avoid hardlinking.
1004
1005 In some cases, you can clone repositories and the working directory
1006 using full hardlinks with ::
1007
1008 $ cp -al REPO REPOCLONE
1009
1010 This is the fastest way to clone, but it is not always safe. The
1011 operation is not atomic (making sure REPO is not modified during
1012 the operation is up to you) and you have to make sure your editor
1013 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
1014 this is not compatible with certain extensions that place their
1015 metadata under the .hg directory, such as mq.
1016
1017 Mercurial will update the working directory to the first applicable
1018 revision from this list:
1019
1020 a) null if -U or the source repository has no changesets
1021 b) if -u . and the source repository is local, the first parent of
1022 the source repository's working directory
1023 c) the changeset specified with -u (if a branch name, this means the
1024 latest head of that branch)
1025 d) the changeset specified with -r
1026 e) the tipmost head specified with -b
1027 f) the tipmost head specified with the url#branch source syntax
1028 g) the tipmost head of the default branch
1029 h) tip
1048 Only local paths and ``ssh://`` URLs are supported as
1049 destinations. For ``ssh://`` destinations, no working directory or
1050 ``.hg/hgrc`` will be created on the remote side.
1051
1052 To pull only a subset of changesets, specify one or more revisions
1053 identifiers with -r/--rev or branches with -b/--branch. The
1054 resulting clone will contain only the specified changesets and
1055 their ancestors. These options (or 'clone src#rev dest') imply
1056 --pull, even for local source repositories. Note that specifying a
1057 tag will include the tagged changeset but not the changeset
1058 containing the tag.
1059
1060 To check out a particular version, use -u/--update, or
1061 -U/--noupdate to create a clone with no working directory.
1062
1063 .. container:: verbose
1064
1065 For efficiency, hardlinks are used for cloning whenever the
1066 source and destination are on the same filesystem (note this
1067 applies only to the repository data, not to the working
1068 directory). Some filesystems, such as AFS, implement hardlinking
1069 incorrectly, but do not report errors. In these cases, use the
1070 --pull option to avoid hardlinking.
1071
1072 In some cases, you can clone repositories and the working
1073 directory using full hardlinks with ::
1074
1075 $ cp -al REPO REPOCLONE
1076
1077 This is the fastest way to clone, but it is not always safe. The
1078 operation is not atomic (making sure REPO is not modified during
1079 the operation is up to you) and you have to make sure your
1080 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1081 so). Also, this is not compatible with certain extensions that
1082 place their metadata under the .hg directory, such as mq.
1083
1084 Mercurial will update the working directory to the first applicable
1085 revision from this list:
1086
1087 a) null if -U or the source repository has no changesets
1088 b) if -u . and the source repository is local, the first parent of
1089 the source repository's working directory
1090 c) the changeset specified with -u (if a branch name, this means the
1091 latest head of that branch)
1092 d) the changeset specified with -r
1093 e) the tipmost head specified with -b
1094 f) the tipmost head specified with the url#branch source syntax
1095 g) the tipmost head of the default branch
1096 h) tip
1097
1098 Examples:
1099
1100 - clone a remote repository to a new directory named hg/::
1101
1102 hg clone http://selenic.com/hg
1103
1104 - create a lightweight local clone::
1105
1106 hg clone project/ project-feature/
1107
1108 - clone from an absolute path on an ssh server (note double-slash)::
1109
1110 hg clone ssh://user@server//home/projects/alpha/
1111
1112 - do a high-speed clone over a LAN while checking out a
1113 specified version::
1114
1115 hg clone --uncompressed http://server/repo -u 1.5
1116
1117 - create a repository without changesets after a particular revision::
1118
1119 hg clone -r 04e544 experimental/ good/
1120
1121 - clone (and track) a particular named branch::
1122
1123 hg clone http://selenic.com/hg#stable
1124
1125 See :hg:`help urls` for details on specifying URLs.
1030 1126
1031 1127 Returns 0 on success.
1032 1128 """
@@ -1102,8 +1198,8 b' def commit(ui, repo, *pats, **opts):'
1102 1198 ctx = repo[node]
1103 1199 parents = ctx.parents()
1104 1200
1105 if bheads and not [x for x in parents
1106 if x.node() in bheads and x.branch() == branch]:
1201 if (bheads and node not in bheads and not
1202 [x for x in parents if x.node() in bheads and x.branch() == branch]):
1107 1203 ui.status(_('created new head\n'))
1108 1204 # The message is not printed for initial roots. For the other
1109 1205 # changesets, it is printed in the following situations:
@@ -1656,8 +1752,9 b' def debuggetbundle(ui, repopath, bundlep'
1656 1752 def debugignore(ui, repo, *values, **opts):
1657 1753 """display the combined ignore pattern"""
1658 1754 ignore = repo.dirstate._ignore
1659 if hasattr(ignore, 'includepat'):
1660 ui.write("%s\n" % ignore.includepat)
1755 includepat = getattr(ignore, 'includepat', None)
1756 if includepat is not None:
1757 ui.write("%s\n" % includepat)
1661 1758 else:
1662 1759 raise util.Abort(_("no ignore patterns found"))
1663 1760
@@ -1755,6 +1852,7 b' def debuginstall(ui):'
1755 1852 % os.path.dirname(__file__))
1756 1853 try:
1757 1854 import bdiff, mpatch, base85, osutil
1855 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1758 1856 except Exception, inst:
1759 1857 ui.write(" %s\n" % inst)
1760 1858 ui.write(_(" One or more extensions could not be found"))
@@ -1762,9 +1860,10 b' def debuginstall(ui):'
1762 1860 problems += 1
1763 1861
1764 1862 # templates
1765 ui.status(_("Checking templates...\n"))
1863 import templater
1864 p = templater.templatepath()
1865 ui.status(_("Checking templates (%s)...\n") % ' '.join(p))
1766 1866 try:
1767 import templater
1768 1867 templater.templater(templater.templatepath("map-cmdline.default"))
1769 1868 except Exception, inst:
1770 1869 ui.write(" %s\n" % inst)
@@ -2170,6 +2269,32 b' def diff(ui, repo, *pats, **opts):'
2170 2269 Use the -g/--git option to generate diffs in the git extended diff
2171 2270 format. For more information, read :hg:`help diffs`.
2172 2271
2272 .. container:: verbose
2273
2274 Examples:
2275
2276 - compare a file in the current working directory to its parent::
2277
2278 hg diff foo.c
2279
2280 - compare two historical versions of a directory, with rename info::
2281
2282 hg diff --git -r 1.0:1.2 lib/
2283
2284 - get change stats relative to the last change on some date::
2285
2286 hg diff --stat -r "date('may 2')"
2287
2288 - diff all newly-added files that contain a keyword::
2289
2290 hg diff "set:added() and grep(GNU)"
2291
2292 - compare a revision and its parents::
2293
2294 hg diff -c 9353 # compare against first parent
2295 hg diff -r 9353^:9353 # same using revset syntax
2296 hg diff -r 9353^2:9353 # compare against the second parent
2297
2173 2298 Returns 0 on success.
2174 2299 """
2175 2300
@@ -2225,6 +2350,7 b' def export(ui, repo, *changesets, **opts'
2225 2350 :``%R``: changeset revision number
2226 2351 :``%b``: basename of the exporting repository
2227 2352 :``%h``: short-form changeset hash (12 hexadecimal digits)
2353 :``%m``: first line of the commit message (only alphanumeric characters)
2228 2354 :``%n``: zero-padded sequence number, starting at 1
2229 2355 :``%r``: zero-padded changeset revision number
2230 2356
@@ -2238,6 +2364,25 b' def export(ui, repo, *changesets, **opts'
2238 2364 With the --switch-parent option, the diff will be against the
2239 2365 second parent. It can be useful to review a merge.
2240 2366
2367 .. container:: verbose
2368
2369 Examples:
2370
2371 - use export and import to transplant a bugfix to the current
2372 branch::
2373
2374 hg export -r 9353 | hg import -
2375
2376 - export all the changesets between two revisions to a file with
2377 rename information::
2378
2379 hg export --git -r 123:150 > changes.txt
2380
2381 - split outgoing changes into a series of patches with
2382 descriptive names::
2383
2384 hg export -r "outgoing()" -o "%n-%m.patch"
2385
2241 2386 Returns 0 on success.
2242 2387 """
2243 2388 changesets += tuple(opts.get('rev', []))
@@ -2265,6 +2410,18 b' def forget(ui, repo, *pats, **opts):'
2265 2410
2266 2411 To undo a forget before the next commit, see :hg:`add`.
2267 2412
2413 .. container:: verbose
2414
2415 Examples:
2416
2417 - forget newly-added binary files::
2418
2419 hg forget "set:added() and binary()"
2420
2421 - forget files that would be excluded by .hgignore::
2422
2423 hg forget "set:hgignore()"
2424
2268 2425 Returns 0 on success.
2269 2426 """
2270 2427
@@ -2290,6 +2447,160 b' def forget(ui, repo, *pats, **opts):'
2290 2447 repo[None].forget(forget)
2291 2448 return errs
2292 2449
2450 @command(
2451 'graft',
2452 [('c', 'continue', False, _('resume interrupted graft')),
2453 ('e', 'edit', False, _('invoke editor on commit messages')),
2454 ('D', 'currentdate', False,
2455 _('record the current date as commit date')),
2456 ('U', 'currentuser', False,
2457 _('record the current user as committer'), _('DATE'))]
2458 + commitopts2 + mergetoolopts,
2459 _('[OPTION]... REVISION...'))
2460 def graft(ui, repo, *revs, **opts):
2461 '''copy changes from other branches onto the current branch
2462
2463 This command uses Mercurial's merge logic to copy individual
2464 changes from other branches without merging branches in the
2465 history graph. This is sometimes known as 'backporting' or
2466 'cherry-picking'. By default, graft will copy user, date, and
2467 description from the source changesets.
2468
2469 Changesets that are ancestors of the current revision, that have
2470 already been grafted, or that are merges will be skipped.
2471
2472 If a graft merge results in conflicts, the graft process is
2473 aborted so that the current merge can be manually resolved. Once
2474 all conflicts are addressed, the graft process can be continued
2475 with the -c/--continue option.
2476
2477 .. note::
2478 The -c/--continue option does not reapply earlier options.
2479
2480 .. container:: verbose
2481
2482 Examples:
2483
2484 - copy a single change to the stable branch and edit its description::
2485
2486 hg update stable
2487 hg graft --edit 9393
2488
2489 - graft a range of changesets with one exception, updating dates::
2490
2491 hg graft -D "2085::2093 and not 2091"
2492
2493 - continue a graft after resolving conflicts::
2494
2495 hg graft -c
2496
2497 - show the source of a grafted changeset::
2498
2499 hg log --debug -r tip
2500
2501 Returns 0 on successful completion.
2502 '''
2503
2504 if not opts.get('user') and opts.get('currentuser'):
2505 opts['user'] = ui.username()
2506 if not opts.get('date') and opts.get('currentdate'):
2507 opts['date'] = "%d %d" % util.makedate()
2508
2509 editor = None
2510 if opts.get('edit'):
2511 editor = cmdutil.commitforceeditor
2512
2513 cont = False
2514 if opts['continue']:
2515 cont = True
2516 if revs:
2517 raise util.Abort(_("can't specify --continue and revisions"))
2518 # read in unfinished revisions
2519 try:
2520 nodes = repo.opener.read('graftstate').splitlines()
2521 revs = [repo[node].rev() for node in nodes]
2522 except IOError, inst:
2523 if inst.errno != errno.ENOENT:
2524 raise
2525 raise util.Abort(_("no graft state found, can't continue"))
2526 else:
2527 cmdutil.bailifchanged(repo)
2528 if not revs:
2529 raise util.Abort(_('no revisions specified'))
2530 revs = scmutil.revrange(repo, revs)
2531
2532 # check for merges
2533 for ctx in repo.set('%ld and merge()', revs):
2534 ui.warn(_('skipping ungraftable merge revision %s\n') % ctx.rev())
2535 revs.remove(ctx.rev())
2536 if not revs:
2537 return -1
2538
2539 # check for ancestors of dest branch
2540 for ctx in repo.set('::. and %ld', revs):
2541 ui.warn(_('skipping ancestor revision %s\n') % ctx.rev())
2542 revs.remove(ctx.rev())
2543 if not revs:
2544 return -1
2545
2546 # check ancestors for earlier grafts
2547 ui.debug('scanning for existing transplants')
2548 for ctx in repo.set("::. - ::%ld", revs):
2549 n = ctx.extra().get('source')
2550 if n and n in repo:
2551 r = repo[n].rev()
2552 ui.warn(_('skipping already grafted revision %s\n') % r)
2553 revs.remove(r)
2554 if not revs:
2555 return -1
2556
2557 for pos, ctx in enumerate(repo.set("%ld", revs)):
2558 current = repo['.']
2559 ui.status('grafting revision %s', ctx.rev())
2560
2561 # we don't merge the first commit when continuing
2562 if not cont:
2563 # perform the graft merge with p1(rev) as 'ancestor'
2564 try:
2565 # ui.forcemerge is an internal variable, do not document
2566 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2567 stats = mergemod.update(repo, ctx.node(), True, True, False,
2568 ctx.p1().node())
2569 finally:
2570 ui.setconfig('ui', 'forcemerge', '')
2571 # drop the second merge parent
2572 repo.dirstate.setparents(current.node(), nullid)
2573 repo.dirstate.write()
2574 # fix up dirstate for copies and renames
2575 cmdutil.duplicatecopies(repo, ctx.rev(), current.node(), nullid)
2576 # report any conflicts
2577 if stats and stats[3] > 0:
2578 # write out state for --continue
2579 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2580 repo.opener.write('graftstate', ''.join(nodelines))
2581 raise util.Abort(
2582 _("unresolved conflicts, can't continue"),
2583 hint=_('use hg resolve and hg graft --continue'))
2584 else:
2585 cont = False
2586
2587 # commit
2588 extra = {'source': ctx.hex()}
2589 user = ctx.user()
2590 if opts.get('user'):
2591 user = opts['user']
2592 date = ctx.date()
2593 if opts.get('date'):
2594 date = opts['date']
2595 repo.commit(text=ctx.description(), user=user,
2596 date=date, extra=extra, editor=editor)
2597
2598 # remove state when we complete successfully
2599 if os.path.exists(repo.join('graftstate')):
2600 util.unlinkpath(repo.join('graftstate'))
2601
2602 return 0
2603
2293 2604 @command('grep',
2294 2605 [('0', 'print0', None, _('end fields with NUL')),
2295 2606 ('', 'all', None, _('print all revisions that match')),
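When a graft merge hits conflicts, the revisions that still need processing are written to .hg/graftstate, one hex node per line, and --continue reads them back; the file is removed once everything is grafted. The save/resume mechanics reduce to a small pattern (file name and helpers below are illustrative only)::

    import os

    STATEFILE = 'graftstate'

    def savestate(pending):
        # one identifier per line, written just before aborting on conflicts
        with open(STATEFILE, 'w') as f:
            f.write(''.join(item + '\n' for item in pending))

    def loadstate():
        # the --continue path: resume exactly where the last run stopped
        if not os.path.exists(STATEFILE):
            raise RuntimeError("no graft state found, can't continue")
        with open(STATEFILE) as f:
            return f.read().splitlines()

    def clearstate():
        # remove the state file once every pending item has been processed
        if os.path.exists(STATEFILE):
            os.unlink(STATEFILE)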
@@ -2576,7 +2887,7 b' def heads(ui, repo, *branchrevs, **opts)'
2576 2887 [('e', 'extension', None, _('show only help for extensions')),
2577 2888 ('c', 'command', None, _('show only help for commands'))],
2578 2889 _('[-ec] [TOPIC]'))
2579 def help_(ui, name=None, with_version=False, unknowncmd=False, full=True, **opts):
2890 def help_(ui, name=None, unknowncmd=False, full=True, **opts):
2580 2891 """show help for a given topic or a help overview
2581 2892
2582 2893 With no arguments, print a list of commands with short help messages.
@@ -2586,14 +2897,67 b' def help_(ui, name=None, with_version=Fa'
2586 2897
2587 2898 Returns 0 if successful.
2588 2899 """
2589 option_lists = []
2900
2590 2901 textwidth = min(ui.termwidth(), 80) - 2
2591 2902
2592 def addglobalopts(aliases):
2903 def optrst(options):
2904 data = []
2905 multioccur = False
2906 for option in options:
2907 if len(option) == 5:
2908 shortopt, longopt, default, desc, optlabel = option
2909 else:
2910 shortopt, longopt, default, desc = option
2911 optlabel = _("VALUE") # default label
2912
2913 if _("DEPRECATED") in desc and not ui.verbose:
2914 continue
2915
2916 so = ''
2917 if shortopt:
2918 so = '-' + shortopt
2919 lo = '--' + longopt
2920 if default:
2921 desc += _(" (default: %s)") % default
2922
2923 if isinstance(default, list):
2924 lo += " %s [+]" % optlabel
2925 multioccur = True
2926 elif (default is not None) and not isinstance(default, bool):
2927 lo += " %s" % optlabel
2928
2929 data.append((so, lo, desc))
2930
2931 rst = minirst.maketable(data, 1)
2932
2933 if multioccur:
2934 rst += _("\n[+] marked option can be specified multiple times\n")
2935
2936 return rst
2937
2938 # list all option lists
2939 def opttext(optlist, width):
2940 rst = ''
2941 if not optlist:
2942 return ''
2943
2944 for title, options in optlist:
2945 rst += '\n%s\n' % title
2946 if options:
2947 rst += "\n"
2948 rst += optrst(options)
2949 rst += '\n'
2950
2951 return '\n' + minirst.format(rst, width)
2952
2953 def addglobalopts(optlist, aliases):
2954 if ui.quiet:
2955 return []
2956
2593 2957 if ui.verbose:
2594 option_lists.append((_("global options:"), globalopts))
2958 optlist.append((_("global options:"), globalopts))
2595 2959 if name == 'shortlist':
2596 option_lists.append((_('use "hg help" for the full list '
2960 optlist.append((_('use "hg help" for the full list '
2597 2961 'of commands'), ()))
2598 2962 else:
2599 2963 if name == 'shortlist':
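optrst() above renders each command's option tuples into a reST table via minirst.maketable, replacing the hand-rolled column code that the second half of this diff deletes. A rough standalone equivalent using plain string padding (minirst and the DEPRECATED/label handling are omitted)::

    def optlines(options):
        # options are (shortopt, longopt, default, desc) tuples as used in
        # the command tables
        rows = []
        for shortopt, longopt, default, desc in options:
            so = '-' + shortopt if shortopt else ''
            lo = '--' + longopt
            if default and not isinstance(default, bool):
                desc += ' (default: %s)' % default
            rows.append(((so + ' ' + lo).strip(), desc))
        width = max(len(opt) for opt, desc in rows) + 1
        return ['%-*s %s' % (width, opt, desc) for opt, desc in rows]

    optlines([('f', 'force', None, 'force a merge'),
              ('r', 'rev', '', 'revision to merge')])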
@@ -2605,14 +2969,10 b' def help_(ui, name=None, with_version=Fa'
2605 2969 msg = _('use "hg -v help%s" to show builtin aliases and '
2606 2970 'global options') % (name and " " + name or "")
2607 2971 else:
2608 msg = _('use "hg -v help %s" to show global options') % name
2609 option_lists.append((msg, ()))
2972 msg = _('use "hg -v help %s" to show more info') % name
2973 optlist.append((msg, ()))
2610 2974
2611 2975 def helpcmd(name):
2612 if with_version:
2613 version_(ui)
2614 ui.write('\n')
2615
2616 2976 try:
2617 2977 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
2618 2978 except error.AmbiguousCommand, inst:
@@ -2620,7 +2980,7 b' def help_(ui, name=None, with_version=Fa'
2620 2980 # except block, nor can be used inside a lambda. python issue4617
2621 2981 prefix = inst.args[0]
2622 2982 select = lambda c: c.lstrip('^').startswith(prefix)
2623 helplist(_('list of commands:\n\n'), select)
2983 helplist(select)
2624 2984 return
2625 2985
2626 2986 # check if it's an invalid alias and display its error if it is
@@ -2629,42 +2989,33 b' def help_(ui, name=None, with_version=Fa'
2629 2989 entry[0](ui)
2630 2990 return
2631 2991
2992 rst = ""
2993
2632 2994 # synopsis
2633 2995 if len(entry) > 2:
2634 2996 if entry[2].startswith('hg'):
2635 ui.write("%s\n" % entry[2])
2997 rst += "%s\n" % entry[2]
2636 2998 else:
2637 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
2999 rst += 'hg %s %s\n' % (aliases[0], entry[2])
2638 3000 else:
2639 ui.write('hg %s\n' % aliases[0])
3001 rst += 'hg %s\n' % aliases[0]
2640 3002
2641 3003 # aliases
2642 3004 if full and not ui.quiet and len(aliases) > 1:
2643 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
3005 rst += _("\naliases: %s\n") % ', '.join(aliases[1:])
2644 3006
2645 3007 # description
2646 3008 doc = gettext(entry[0].__doc__)
2647 3009 if not doc:
2648 3010 doc = _("(no help text available)")
2649 if hasattr(entry[0], 'definition'): # aliased command
3011 if util.safehasattr(entry[0], 'definition'): # aliased command
2650 3012 if entry[0].definition.startswith('!'): # shell alias
2651 3013 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
2652 3014 else:
2653 3015 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
2654 3016 if ui.quiet or not full:
2655 3017 doc = doc.splitlines()[0]
2656 keep = ui.verbose and ['verbose'] or []
2657 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
2658 ui.write("\n%s\n" % formatted)
2659 if pruned:
2660 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
2661
2662 if not ui.quiet:
2663 # options
2664 if entry[1]:
2665 option_lists.append((_("options:\n"), entry[1]))
2666
2667 addglobalopts(False)
3018 rst += "\n" + doc + "\n"
2668 3019
2669 3020 # check if this command shadows a non-trivial (multi-line)
2670 3021 # extension help text
@@ -2674,11 +3025,38 b' def help_(ui, name=None, with_version=Fa'
2674 3025 if '\n' in doc.strip():
2675 3026 msg = _('use "hg help -e %s" to show help for '
2676 3027 'the %s extension') % (name, name)
2677 ui.write('\n%s\n' % msg)
3028 rst += '\n%s\n' % msg
2678 3029 except KeyError:
2679 3030 pass
2680 3031
2681 def helplist(header, select=None):
3032 # options
3033 if not ui.quiet and entry[1]:
3034 rst += '\noptions:\n\n'
3035 rst += optrst(entry[1])
3036
3037 if ui.verbose:
3038 rst += '\nglobal options:\n\n'
3039 rst += optrst(globalopts)
3040
3041 keep = ui.verbose and ['verbose'] or []
3042 formatted, pruned = minirst.format(rst, textwidth, keep=keep)
3043 ui.write(formatted)
3044
3045 if not ui.verbose:
3046 if not full:
3047 ui.write(_('\nuse "hg help %s" to show the full help text\n')
3048 % name)
3049 elif not ui.quiet:
3050 ui.write(_('\nuse "hg -v help %s" to show more info\n') % name)
3051
3052
3053 def helplist(select=None):
3054 # list of commands
3055 if name == "shortlist":
3056 header = _('basic commands:\n\n')
3057 else:
3058 header = _('list of commands:\n\n')
3059
2682 3060 h = {}
2683 3061 cmds = {}
2684 3062 for c, e in table.iteritems():
@@ -2718,8 +3096,22 b' def help_(ui, name=None, with_version=Fa'
2718 3096 initindent=' %-*s ' % (m, f),
2719 3097 hangindent=' ' * (m + 4))))
2720 3098
2721 if not ui.quiet:
2722 addglobalopts(True)
3099 if not name:
3100 text = help.listexts(_('enabled extensions:'), extensions.enabled())
3101 if text:
3102 ui.write("\n%s" % minirst.format(text, textwidth))
3103
3104 ui.write(_("\nadditional help topics:\n\n"))
3105 topics = []
3106 for names, header, doc in help.helptable:
3107 topics.append((sorted(names, key=len, reverse=True)[0], header))
3108 topics_len = max([len(s[0]) for s in topics])
3109 for t, desc in topics:
3110 ui.write(" %-*s %s\n" % (topics_len, t, desc))
3111
3112 optlist = []
3113 addglobalopts(optlist, True)
3114 ui.write(opttext(optlist, textwidth))
2723 3115
2724 3116 def helptopic(name):
2725 3117 for names, header, doc in help.helptable:
@@ -2731,11 +3123,11 b' def help_(ui, name=None, with_version=Fa'
2731 3123 # description
2732 3124 if not doc:
2733 3125 doc = _("(no help text available)")
2734 if hasattr(doc, '__call__'):
3126 if util.safehasattr(doc, '__call__'):
2735 3127 doc = doc()
2736 3128
2737 3129 ui.write("%s\n\n" % header)
2738 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
3130 ui.write("%s" % minirst.format(doc, textwidth, indent=4))
2739 3131 try:
2740 3132 cmdutil.findcmd(name, table)
2741 3133 ui.write(_('\nuse "hg help -c %s" to see help for '
@@ -2760,7 +3152,7 b' def help_(ui, name=None, with_version=Fa'
2760 3152 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
2761 3153 if tail:
2762 3154 ui.write(minirst.format(tail, textwidth))
2763 ui.status('\n\n')
3155 ui.status('\n')
2764 3156
2765 3157 if mod:
2766 3158 try:
@@ -2768,7 +3160,7 b' def help_(ui, name=None, with_version=Fa'
2768 3160 except AttributeError:
2769 3161 ct = {}
2770 3162 modcmds = set([c.split('|', 1)[0] for c in ct])
2771 helplist(_('list of commands:\n\n'), modcmds.__contains__)
3163 helplist(modcmds.__contains__)
2772 3164 else:
2773 3165 ui.write(_('use "hg help extensions" for information on enabling '
2774 3166 'extensions\n'))
@@ -2780,7 +3172,7 b' def help_(ui, name=None, with_version=Fa'
2780 3172 msg = help.listexts(_("'%s' is provided by the following "
2781 3173 "extension:") % cmd, {ext: doc}, indent=4)
2782 3174 ui.write(minirst.format(msg, textwidth))
2783 ui.write('\n\n')
3175 ui.write('\n')
2784 3176 ui.write(_('use "hg help extensions" for information on enabling '
2785 3177 'extensions\n'))
2786 3178
@@ -2803,87 +3195,12 b' def help_(ui, name=None, with_version=Fa'
2803 3195 i = inst
2804 3196 if i:
2805 3197 raise i
2806
2807 3198 else:
2808 3199 # program name
2809 if ui.verbose or with_version:
2810 version_(ui)
2811 else:
2812 ui.status(_("Mercurial Distributed SCM\n"))
3200 ui.status(_("Mercurial Distributed SCM\n"))
2813 3201 ui.status('\n')
2814
2815 # list of commands
2816 if name == "shortlist":
2817 header = _('basic commands:\n\n')
2818 else:
2819 header = _('list of commands:\n\n')
2820
2821 helplist(header)
2822 if name != 'shortlist':
2823 text = help.listexts(_('enabled extensions:'), extensions.enabled())
2824 if text:
2825 ui.write("\n%s\n" % minirst.format(text, textwidth))
2826
2827 # list all option lists
2828 opt_output = []
2829 multioccur = False
2830 for title, options in option_lists:
2831 opt_output.append(("\n%s" % title, None))
2832 for option in options:
2833 if len(option) == 5:
2834 shortopt, longopt, default, desc, optlabel = option
2835 else:
2836 shortopt, longopt, default, desc = option
2837 optlabel = _("VALUE") # default label
2838
2839 if _("DEPRECATED") in desc and not ui.verbose:
2840 continue
2841 if isinstance(default, list):
2842 numqualifier = " %s [+]" % optlabel
2843 multioccur = True
2844 elif (default is not None) and not isinstance(default, bool):
2845 numqualifier = " %s" % optlabel
2846 else:
2847 numqualifier = ""
2848 opt_output.append(("%2s%s" %
2849 (shortopt and "-%s" % shortopt,
2850 longopt and " --%s%s" %
2851 (longopt, numqualifier)),
2852 "%s%s" % (desc,
2853 default
2854 and _(" (default: %s)") % default
2855 or "")))
2856 if multioccur:
2857 msg = _("\n[+] marked option can be specified multiple times")
2858 if ui.verbose and name != 'shortlist':
2859 opt_output.append((msg, None))
2860 else:
2861 opt_output.insert(-1, (msg, None))
2862
2863 if not name:
2864 ui.write(_("\nadditional help topics:\n\n"))
2865 topics = []
2866 for names, header, doc in help.helptable:
2867 topics.append((sorted(names, key=len, reverse=True)[0], header))
2868 topics_len = max([len(s[0]) for s in topics])
2869 for t, desc in topics:
2870 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2871
2872 if opt_output:
2873 colwidth = encoding.colwidth
2874 # normalize: (opt or message, desc or None, width of opt)
2875 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2876 for opt, desc in opt_output]
2877 hanging = max([e[2] for e in entries])
2878 for opt, desc, width in entries:
2879 if desc:
2880 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2881 hangindent = ' ' * (hanging + 3)
2882 ui.write('%s\n' % (util.wrap(desc, textwidth,
2883 initindent=initindent,
2884 hangindent=hangindent)))
2885 else:
2886 ui.write("%s\n" % opt)
3202 helplist()
3203
2887 3204
2888 3205 @command('identify|id',
2889 3206 [('r', 'rev', '',
@@ -2909,6 +3226,22 b' def identify(ui, repo, source=None, rev='
2909 3226 Specifying a path to a repository root or Mercurial bundle will
2910 3227 cause lookup to operate on that repository/bundle.
2911 3228
3229 .. container:: verbose
3230
3231 Examples:
3232
3233 - generate a build identifier for the working directory::
3234
3235 hg id --id > build-id.dat
3236
3237 - find the revision corresponding to a tag::
3238
3239 hg id -n -r 1.3
3240
3241 - check the most recent revision of a remote repository::
3242
3243 hg id -r tip http://selenic.com/hg/
3244
2912 3245 Returns 0 if successful.
2913 3246 """
2914 3247
@@ -3007,6 +3340,7 b' def identify(ui, repo, source=None, rev='
3007 3340 _('directory strip option for patch. This has the same '
3008 3341 'meaning as the corresponding patch option'), _('NUM')),
3009 3342 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3343 ('e', 'edit', False, _('invoke editor on commit messages')),
3010 3344 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
3011 3345 ('', 'no-commit', None,
3012 3346 _("don't commit, just update the working directory")),
@@ -3057,6 +3391,27 b' def import_(ui, repo, patch1, *patches, '
3057 3391 a URL is specified, the patch will be downloaded from it.
3058 3392 See :hg:`help dates` for a list of formats valid for -d/--date.
3059 3393
3394 .. container:: verbose
3395
3396 Examples:
3397
3398 - import a traditional patch from a website and detect renames::
3399
3400 hg import -s 80 http://example.com/bugfix.patch
3401
3402 - import a changeset from an hgweb server::
3403
3404 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3405
3406 - import all the patches in a Unix-style mbox::
3407
3408 hg import incoming-patches.mbox
3409
3410 - attempt to exactly restore an exported changeset (not always
3411 possible)::
3412
3413 hg import --exact proposed-fix.patch
3414
3060 3415 Returns 0 on success.
3061 3416 """
3062 3417 patches = (patch1,) + patches
@@ -3065,6 +3420,10 b' def import_(ui, repo, patch1, *patches, '
3065 3420 if date:
3066 3421 opts['date'] = util.parsedate(date)
3067 3422
3423 editor = cmdutil.commiteditor
3424 if opts.get('edit'):
3425 editor = cmdutil.commitforceeditor
3426
3068 3427 update = not opts.get('bypass')
3069 3428 if not update and opts.get('no_commit'):
3070 3429 raise util.Abort(_('cannot use --no-commit with --bypass'))
@@ -3080,9 +3439,9 b' def import_(ui, repo, patch1, *patches, '
3080 3439 if (opts.get('exact') or not opts.get('force')) and update:
3081 3440 cmdutil.bailifchanged(repo)
3082 3441
3083 d = opts["base"]
3442 base = opts["base"]
3084 3443 strip = opts["strip"]
3085 wlock = lock = None
3444 wlock = lock = tr = None
3086 3445 msgs = []
3087 3446
3088 3447 def checkexact(repo, n, nodeid):
@@ -3095,8 +3454,8 b' def import_(ui, repo, patch1, *patches, '
3095 3454 patch.extract(ui, hunk)
3096 3455
3097 3456 if not tmpname:
3098 return None
3099 commitid = _('to working directory')
3457 return (None, None)
3458 msg = _('applied to working directory')
3100 3459
3101 3460 try:
3102 3461 cmdline_message = cmdutil.logmessage(ui, opts)
@@ -3151,11 +3510,8 b' def import_(ui, repo, patch1, *patches, '
3151 3510 m = scmutil.matchfiles(repo, files or [])
3152 3511 n = repo.commit(message, opts.get('user') or user,
3153 3512 opts.get('date') or date, match=m,
3154 editor=cmdutil.commiteditor)
3513 editor=editor)
3155 3514 checkexact(repo, n, nodeid)
3156 # Force a dirstate write so that the next transaction
3157 # backups an up-to-date file.
3158 repo.dirstate.write()
3159 3515 else:
3160 3516 if opts.get('exact') or opts.get('import_branch'):
3161 3517 branch = branch or 'default'
@@ -3181,45 +3537,52 b' def import_(ui, repo, patch1, *patches, '
3181 3537 finally:
3182 3538 store.close()
3183 3539 if n:
3184 commitid = short(n)
3185 return commitid
3540 msg = _('created %s') % short(n)
3541 return (msg, n)
3186 3542 finally:
3187 3543 os.unlink(tmpname)
3188 3544
3189 3545 try:
3190 3546 wlock = repo.wlock()
3191 3547 lock = repo.lock()
3548 tr = repo.transaction('import')
3192 3549 parents = repo.parents()
3193 lastcommit = None
3194 for p in patches:
3195 pf = os.path.join(d, p)
3196
3197 if pf == '-':
3198 ui.status(_("applying patch from stdin\n"))
3199 pf = ui.fin
3550 for patchurl in patches:
3551 if patchurl == '-':
3552 ui.status(_('applying patch from stdin\n'))
3553 patchfile = ui.fin
3554 patchurl = 'stdin' # for error message
3200 3555 else:
3201 ui.status(_("applying %s\n") % p)
3202 pf = url.open(ui, pf)
3556 patchurl = os.path.join(base, patchurl)
3557 ui.status(_('applying %s\n') % patchurl)
3558 patchfile = url.open(ui, patchurl)
3203 3559
3204 3560 haspatch = False
3205 for hunk in patch.split(pf):
3206 commitid = tryone(ui, hunk, parents)
3207 if commitid:
3561 for hunk in patch.split(patchfile):
3562 (msg, node) = tryone(ui, hunk, parents)
3563 if msg:
3208 3564 haspatch = True
3209 if lastcommit:
3210 ui.status(_('applied %s\n') % lastcommit)
3211 lastcommit = commitid
3565 ui.note(msg + '\n')
3212 3566 if update or opts.get('exact'):
3213 3567 parents = repo.parents()
3214 3568 else:
3215 parents = [repo[commitid]]
3569 parents = [repo[node]]
3216 3570
3217 3571 if not haspatch:
3218 raise util.Abort(_('no diffs found'))
3219
3572 raise util.Abort(_('%s: no diffs found') % patchurl)
3573
3574 tr.close()
3220 3575 if msgs:
3221 3576 repo.savecommitmessage('\n* * *\n'.join(msgs))
3577 except:
3578 # wlock.release() indirectly calls dirstate.write(): since
3579 # we're crashing, we do not want to change the working dir
3580 # parent after all, so make sure it writes nothing
3581 repo.dirstate.invalidate()
3582 raise
3222 3583 finally:
3584 if tr:
3585 tr.release()
3223 3586 release(lock, wlock)
3224 3587
3225 3588 @command('incoming|in',
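import_() now wraps patch application in a single repository transaction: tr.close() is reached only after every patch has applied, and the finally clause releases the transaction, rolling back partial work, while dirstate.invalidate() in the except block keeps a failed run from moving the working directory parent. The close-on-success / release-in-finally shape, reduced to a generic sketch (the Transaction class is a toy stand-in, not Mercurial's)::

    class Transaction(object):
        # toy stand-in: collect undo actions and run them unless closed
        def __init__(self):
            self.undo = []
            self.closed = False
        def record(self, undoaction):
            self.undo.append(undoaction)
        def close(self):
            self.closed = True          # success: nothing to roll back
        def release(self):
            if not self.closed:
                for action in reversed(self.undo):
                    action()            # failure: undo in reverse order

    def applyall(items, applyone):
        tr = Transaction()
        try:
            for item in items:
                applyone(item, tr)      # each step registers its own undo
            tr.close()                  # only reached if every item applied
        finally:
            tr.release()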
@@ -3356,18 +3719,14 b' def log(ui, repo, *pats, **opts):'
3356 3719 Print the revision history of the specified files or the entire
3357 3720 project.
3358 3721
3722 If no revision range is specified, the default is ``tip:0`` unless
3723 --follow is set, in which case the working directory parent is
3724 used as the starting revision.
3725
3359 3726 File history is shown without following rename or copy history of
3360 3727 files. Use -f/--follow with a filename to follow history across
3361 3728 renames and copies. --follow without a filename will only show
3362 ancestors or descendants of the starting revision. --follow-first
3363 only follows the first parent of merge revisions.
3364
3365 If no revision range is specified, the default is ``tip:0`` unless
3366 --follow is set, in which case the working directory parent is
3367 used as the starting revision. You can specify a revision set for
3368 log, see :hg:`help revsets` for more information.
3369
3370 See :hg:`help dates` for a list of formats valid for -d/--date.
3729 ancestors or descendants of the starting revision.
3371 3730
3372 3731 By default this command prints revision number and changeset id,
3373 3732 tags, non-trivial parents, user, date and time, and a summary for
@@ -3380,6 +3739,57 b' def log(ui, repo, *pats, **opts):'
3380 3739 its first parent. Also, only files different from BOTH parents
3381 3740 will appear in files:.
3382 3741
3742 .. note::
3743 for performance reasons, log FILE may omit duplicate changes
3744 made on branches and will not show deletions. To see all
3745 changes including duplicates and deletions, use the --removed
3746 switch.
3747
3748 .. container:: verbose
3749
3750 Some examples:
3751
3752 - changesets with full descriptions and file lists::
3753
3754 hg log -v
3755
3756 - changesets ancestral to the working directory::
3757
3758 hg log -f
3759
3760 - last 10 commits on the current branch::
3761
3762 hg log -l 10 -b .
3763
3764 - changesets showing all modifications of a file, including removals::
3765
3766 hg log --removed file.c
3767
3768 - all changesets that touch a directory, with diffs, excluding merges::
3769
3770 hg log -Mp lib/
3771
3772 - all revision numbers that match a keyword::
3773
3774 hg log -k bug --template "{rev}\\n"
3775
3776 - check if a given changeset is included in a tagged release::
3777
3778 hg log -r "a21ccf and ancestor(1.9)"
3779
3780 - find all changesets by some user in a date range::
3781
3782 hg log -k alice -d "may 2008 to jul 2008"
3783
3784 - summary of all changesets after the last tag::
3785
3786 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3787
3788 See :hg:`help dates` for a list of formats valid for -d/--date.
3789
3790 See :hg:`help revisions` and :hg:`help revsets` for more about
3791 specifying revisions.
3792
3383 3793 Returns 0 on success.
3384 3794 """
3385 3795
@@ -3507,10 +3917,10 b' def manifest(ui, repo, node=None, rev=No'
3507 3917
3508 3918 @command('^merge',
3509 3919 [('f', 'force', None, _('force a merge with outstanding changes')),
3510 ('t', 'tool', '', _('specify merge tool')),
3511 3920 ('r', 'rev', '', _('revision to merge'), _('REV')),
3512 3921 ('P', 'preview', None,
3513 _('review revisions to merge (no merge is performed)'))],
3922 _('review revisions to merge (no merge is performed)'))
3923 ] + mergetoolopts,
3514 3924 _('[-P] [-f] [[-r] REV]'))
3515 3925 def merge(ui, repo, node=None, **opts):
3516 3926 """merge working directory with another revision
@@ -3589,7 +3999,7 b' def merge(ui, repo, node=None, **opts):'
3589 3999
3590 4000 try:
3591 4001 # ui.forcemerge is an internal variable, do not document
3592 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
4002 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3593 4003 return hg.merge(repo, node, force=opts.get('force'))
3594 4004 finally:
3595 4005 ui.setconfig('ui', 'forcemerge', '')
@@ -3935,31 +4345,36 b' def recover(ui, repo):'
3935 4345 def remove(ui, repo, *pats, **opts):
3936 4346 """remove the specified files on the next commit
3937 4347
3938 Schedule the indicated files for removal from the repository.
3939
3940 This only removes files from the current branch, not from the
3941 entire project history. -A/--after can be used to remove only
3942 files that have already been deleted, -f/--force can be used to
3943 force deletion, and -Af can be used to remove files from the next
3944 revision without deleting them from the working directory.
3945
3946 The following table details the behavior of remove for different
3947 file states (columns) and option combinations (rows). The file
3948 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3949 reported by :hg:`status`). The actions are Warn, Remove (from
3950 branch) and Delete (from disk)::
3951
3952 A C M !
3953 none W RD W R
3954 -f R RD RD R
3955 -A W W W R
3956 -Af R R R R
3957
3958 Note that remove never deletes files in Added [A] state from the
3959 working directory, not even if option --force is specified.
4348 Schedule the indicated files for removal from the current branch.
3960 4349
3961 4350 This command schedules the files to be removed at the next commit.
3962 To undo a remove before that, see :hg:`revert`.
4351 To undo a remove before that, see :hg:`revert`. To undo added
4352 files, see :hg:`forget`.
4353
4354 .. container:: verbose
4355
4356 -A/--after can be used to remove only files that have already
4357 been deleted, -f/--force can be used to force deletion, and -Af
4358 can be used to remove files from the next revision without
4359 deleting them from the working directory.
4360
4361 The following table details the behavior of remove for different
4362 file states (columns) and option combinations (rows). The file
4363 states are Added [A], Clean [C], Modified [M] and Missing [!]
4364 (as reported by :hg:`status`). The actions are Warn, Remove
4365 (from branch) and Delete (from disk):
4366
4367 ======= == == == ==
4368 A C M !
4369 ======= == == == ==
4370 none W RD W R
4371 -f R RD RD R
4372 -A W W W R
4373 -Af R R R R
4374 ======= == == == ==
4375
4376 Note that remove never deletes files in Added [A] state from the
4377 working directory, not even if option --force is specified.
3963 4378
3964 4379 Returns 0 on success, 1 if any warnings encountered.
3965 4380 """
@@ -3994,8 +4409,8 b' def remove(ui, repo, *pats, **opts):'
3994 4409 ' to force removal)\n') % m.rel(f))
3995 4410 ret = 1
3996 4411 for f in added:
3997 ui.warn(_('not removing %s: file has been marked for add (use -f'
3998 ' to force removal)\n') % m.rel(f))
4412 ui.warn(_('not removing %s: file has been marked for add'
4413 ' (use forget to undo)\n') % m.rel(f))
3999 4414 ret = 1
4000 4415
4001 4416 for f in sorted(list):
@@ -4051,9 +4466,8 b' def rename(ui, repo, *pats, **opts):'
4051 4466 ('l', 'list', None, _('list state of files needing merge')),
4052 4467 ('m', 'mark', None, _('mark files as resolved')),
4053 4468 ('u', 'unmark', None, _('mark files as unresolved')),
4054 ('t', 'tool', '', _('specify merge tool')),
4055 4469 ('n', 'no-status', None, _('hide status prefix'))]
4056 + walkopts,
4470 + mergetoolopts + walkopts,
4057 4471 _('[OPTION]... [FILE]...'))
4058 4472 def resolve(ui, repo, *pats, **opts):
4059 4473 """redo merges or set/view the merge status of files
@@ -4072,7 +4486,8 b' def resolve(ui, repo, *pats, **opts):'
4072 4486 performed for files already marked as resolved. Use ``--all/-a``
4073 4487 to select all unresolved files. ``--tool`` can be used to specify
4074 4488 the merge tool used for the given files. It overrides the HGMERGE
4075 environment variable and your configuration files.
4489 environment variable and your configuration files. Previous file
4490 contents are saved with a ``.orig`` suffix.
4076 4491
4077 4492 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4078 4493 (e.g. after having manually fixed-up the files). The default is
@@ -4145,7 +4560,7 b' def resolve(ui, repo, *pats, **opts):'
4145 4560 [('a', 'all', None, _('revert all changes when no arguments given')),
4146 4561 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4147 4562 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4148 ('', 'no-backup', None, _('do not save backup copies of files')),
4563 ('C', 'no-backup', None, _('do not save backup copies of files')),
4149 4564 ] + walkopts + dryrunopts,
4150 4565 _('[OPTION]... [-r REV] [NAME]...'))
4151 4566 def revert(ui, repo, *pats, **opts):
@@ -4237,6 +4652,10 b' def revert(ui, repo, *pats, **opts):'
4237 4652 def badfn(path, msg):
4238 4653 if path in names:
4239 4654 return
4655 if path in repo[node].substate:
4656 ui.warn("%s: %s\n" % (m.rel(path),
4657 'reverting subrepos is unsupported'))
4658 return
4240 4659 path_ = path + '/'
4241 4660 for f in names:
4242 4661 if f.startswith(path_):
@@ -4381,7 +4800,8 b' def revert(ui, repo, *pats, **opts):'
4381 4800 finally:
4382 4801 wlock.release()
4383 4802
4384 @command('rollback', dryrunopts)
4803 @command('rollback', dryrunopts +
4804 [('f', 'force', False, _('ignore safety measures'))])
4385 4805 def rollback(ui, repo, **opts):
4386 4806 """roll back the last transaction (dangerous)
4387 4807
@@ -4402,6 +4822,12 b' def rollback(ui, repo, **opts):'
4402 4822 - push (with this repository as the destination)
4403 4823 - unbundle
4404 4824
4825 It's possible to lose data with rollback: commit, update back to
4826 an older changeset, and then rollback. The update removes the
4827 changes you committed from the working directory, and rollback
4828 removes them from history. To avoid data loss, you must pass
4829 --force in this case.
4830
4405 4831 This command is not intended for use on public repositories. Once
4406 4832 changes are visible for pull by other users, rolling a transaction
4407 4833 back locally is ineffective (someone else may already have pulled
@@ -4411,7 +4837,8 b' def rollback(ui, repo, **opts):'
4411 4837
4412 4838 Returns 0 on success, 1 if no rollback data is available.
4413 4839 """
4414 return repo.rollback(opts.get('dry_run'))
4840 return repo.rollback(dryrun=opts.get('dry_run'),
4841 force=opts.get('force'))
4415 4842
4416 4843 @command('root', [])
4417 4844 def root(ui, repo):
@@ -4653,6 +5080,22 b' def status(ui, repo, *pats, **opts):'
4653 5080 I = ignored
4654 5081 = origin of the previous file listed as A (added)
4655 5082
5083 .. container:: verbose
5084
5085 Examples:
5086
5087 - show changes in the working directory relative to a changeset::
5088
5089 hg status --rev 9353
5090
5091 - show all changes including copies in an existing changeset::
5092
5093 hg status --copies --change 9353
5094
5095 - get a NUL separated list of added files, suitable for xargs::
5096
5097 hg status -an0
5098
4656 5099 Returns 0 on success.
4657 5100 """
4658 5101
@@ -4727,6 +5170,7 b' def summary(ui, repo, **opts):'
4727 5170 ctx = repo[None]
4728 5171 parents = ctx.parents()
4729 5172 pnode = parents[0].node()
5173 marks = []
4730 5174
4731 5175 for p in parents:
4732 5176 # label with log.changeset (instead of log.parent) since this
@@ -4735,7 +5179,7 b' def summary(ui, repo, **opts):'
4735 5179 label='log.changeset')
4736 5180 ui.write(' '.join(p.tags()), label='log.tag')
4737 5181 if p.bookmarks():
4738 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
5182 marks.extend(p.bookmarks())
4739 5183 if p.rev() == -1:
4740 5184 if not len(repo):
4741 5185 ui.write(_(' (empty repository)'))
@@ -4754,6 +5198,20 b' def summary(ui, repo, **opts):'
4754 5198 else:
4755 5199 ui.status(m, label='log.branch')
4756 5200
5201 if marks:
5202 current = repo._bookmarkcurrent
5203 ui.write(_('bookmarks:'), label='log.bookmark')
5204 if current is not None:
5205 try:
5206 marks.remove(current)
5207 ui.write(' *' + current, label='bookmarks.current')
5208 except ValueError:
5209 # current bookmark not in parent ctx marks
5210 pass
5211 for m in marks:
5212 ui.write(' ' + m, label='log.bookmark')
5213 ui.write('\n', label='log.bookmark')
5214
4757 5215 st = list(repo.status(unknown=True))[:6]
4758 5216
4759 5217 c = repo.dirstate.copies()
@@ -4988,19 +5446,22 b' def tags(ui, repo):'
4988 5446
4989 5447 for t, n in reversed(repo.tagslist()):
4990 5448 if ui.quiet:
4991 ui.write("%s\n" % t)
5449 ui.write("%s\n" % t, label='tags.normal')
4992 5450 continue
4993 5451
4994 5452 hn = hexfunc(n)
4995 5453 r = "%5d:%s" % (repo.changelog.rev(n), hn)
5454 rev = ui.label(r, 'log.changeset')
4996 5455 spaces = " " * (30 - encoding.colwidth(t))
4997 5456
5457 tag = ui.label(t, 'tags.normal')
4998 5458 if ui.verbose:
4999 5459 if repo.tagtype(t) == 'local':
5000 5460 tagtype = " local"
5461 tag = ui.label(t, 'tags.local')
5001 5462 else:
5002 5463 tagtype = ""
5003 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
5464 ui.write("%s%s %s%s\n" % (tag, spaces, rev, tagtype))
5004 5465
5005 5466 @command('tip',
5006 5467 [('p', 'patch', None, _('show patch')),
@@ -185,6 +185,7 b' class server(object):'
185 185 copiedui = self.ui.copy()
186 186 self.repo.baseui = copiedui
187 187 self.repo.ui = self.repo.dirstate._ui = self.repoui.copy()
188 self.repo.invalidate()
188 189
189 190 req = dispatch.request(args[:], copiedui, self.repo, self.cin,
190 191 self.cout, self.cerr)
@@ -27,6 +27,17 b' These imports will not be delayed:'
27 27 import __builtin__
28 28 _origimport = __import__
29 29
30 nothing = object()
31
32 try:
33 _origimport(__builtin__.__name__, {}, {}, None, -1)
34 except TypeError: # no level argument
35 def _import(name, globals, locals, fromlist, level):
36 "call _origimport with no level argument"
37 return _origimport(name, globals, locals, fromlist)
38 else:
39 _import = _origimport
40
30 41 class _demandmod(object):
31 42 """module demand-loader and proxy"""
32 43 def __init__(self, name, globals, locals):
@@ -50,7 +61,7 b' class _demandmod(object):'
50 61 h, t = p, None
51 62 if '.' in p:
52 63 h, t = p.split('.', 1)
53 if not hasattr(mod, h):
64 if getattr(mod, h, nothing) is nothing:
54 65 setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__))
55 66 elif t:
56 67 subload(getattr(mod, h), t)
@@ -81,20 +92,14 b' class _demandmod(object):'
81 92 def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
82 93 if not locals or name in ignore or fromlist == ('*',):
83 94 # these cases we can't really delay
84 if level == -1:
85 return _origimport(name, globals, locals, fromlist)
86 else:
87 return _origimport(name, globals, locals, fromlist, level)
95 return _import(name, globals, locals, fromlist, level)
88 96 elif not fromlist:
89 97 # import a [as b]
90 98 if '.' in name: # a.b
91 99 base, rest = name.split('.', 1)
92 100 # email.__init__ loading email.mime
93 101 if globals and globals.get('__name__', None) == base:
94 if level != -1:
95 return _origimport(name, globals, locals, fromlist, level)
96 else:
97 return _origimport(name, globals, locals, fromlist)
102 return _import(name, globals, locals, fromlist, level)
98 103 # if a is already demand-loaded, add b to its submodule list
99 104 if base in locals:
100 105 if isinstance(locals[base], _demandmod):
@@ -109,12 +114,12 b' def _demandimport(name, globals=None, lo'
109 114 mod = _origimport(name, globals, locals)
110 115 # recurse down the module chain
111 116 for comp in name.split('.')[1:]:
112 if not hasattr(mod, comp):
117 if getattr(mod, comp, nothing) is nothing:
113 118 setattr(mod, comp, _demandmod(comp, mod.__dict__, mod.__dict__))
114 119 mod = getattr(mod, comp)
115 120 for x in fromlist:
116 121 # set requested submodules for demand load
117 if not hasattr(mod, x):
122 if getattr(mod, x, nothing) is nothing:
118 123 setattr(mod, x, _demandmod(x, mod.__dict__, locals))
119 124 return mod
120 125
@@ -137,6 +142,8 b' ignore = ['
137 142 # raise ImportError if x not defined
138 143 '__main__',
139 144 '_ssl', # conditional imports in the stdlib, issue1964
145 'rfc822',
146 'mimetools',
140 147 ]
141 148
142 149 def enable():
@@ -146,4 +153,3 b' def enable():'
146 153 def disable():
147 154 "disable global demand-loading of modules"
148 155 __builtin__.__import__ = _origimport
149
@@ -453,7 +453,7 b' class dirstate(object):'
453 453 write(e)
454 454 write(f)
455 455 st.write(cs.getvalue())
456 st.rename()
456 st.close()
457 457 self._lastnormaltime = None
458 458 self._dirty = self._dirtypl = False
459 459
@@ -123,6 +123,9 b' def _runcatch(req):'
123 123 else:
124 124 ui.warn(_("hg: %s\n") % inst.args[1])
125 125 commands.help_(ui, 'shortlist')
126 except error.OutOfBandError, inst:
127 ui.warn("abort: remote error:\n")
128 ui.warn(''.join(inst.args))
126 129 except error.RepoError, inst:
127 130 ui.warn(_("abort: %s!\n") % inst)
128 131 if inst.hint:
@@ -159,16 +162,16 b' def _runcatch(req):'
159 162 elif m in "zlib".split():
160 163 ui.warn(_("(is your Python install correct?)\n"))
161 164 except IOError, inst:
162 if hasattr(inst, "code"):
165 if util.safehasattr(inst, "code"):
163 166 ui.warn(_("abort: %s\n") % inst)
164 elif hasattr(inst, "reason"):
167 elif util.safehasattr(inst, "reason"):
165 168 try: # usually it is in the form (errno, strerror)
166 169 reason = inst.reason.args[1]
167 170 except (AttributeError, IndexError):
168 171 # it might be anything, for example a string
169 172 reason = inst.reason
170 173 ui.warn(_("abort: error: %s\n") % reason)
171 elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE:
174 elif util.safehasattr(inst, "args") and inst.args[0] == errno.EPIPE:
172 175 if ui.debugflag:
173 176 ui.warn(_("broken pipe\n"))
174 177 elif getattr(inst, "strerror", None):
@@ -338,7 +341,7 b' class cmdalias(object):'
338 341 ui.debug("alias '%s' shadows command '%s'\n" %
339 342 (self.name, self.cmdname))
340 343
341 if hasattr(self, 'shell'):
344 if util.safehasattr(self, 'shell'):
342 345 return self.fn(ui, *args, **opts)
343 346 else:
344 347 try:
@@ -363,7 +366,7 b' def addaliases(ui, cmdtable):'
363 366 # definition might not exist or it might not be a cmdalias
364 367 pass
365 368
366 cmdtable[aliasdef.cmd] = (aliasdef, aliasdef.opts, aliasdef.help)
369 cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
367 370 if aliasdef.norepo:
368 371 commands.norepo += ' %s' % alias
369 372
@@ -483,15 +486,14 b' def _getlocal(ui, rpath):'
483 486 lui = ui.copy()
484 487 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
485 488
486 if rpath:
489 if rpath and rpath[-1]:
487 490 path = lui.expandpath(rpath[-1])
488 491 lui = ui.copy()
489 492 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
490 493
491 494 return path, lui
492 495
493 def _checkshellalias(ui, args):
494 cwd = os.getcwd()
496 def _checkshellalias(lui, ui, args):
495 497 norepo = commands.norepo
496 498 options = {}
497 499
@@ -503,12 +505,6 b' def _checkshellalias(ui, args):'
503 505 if not args:
504 506 return
505 507
506 _parseconfig(ui, options['config'])
507 if options['cwd']:
508 os.chdir(options['cwd'])
509
510 path, lui = _getlocal(ui, [options['repository']])
511
512 508 cmdtable = commands.table.copy()
513 509 addaliases(lui, cmdtable)
514 510
@@ -517,28 +513,22 b' def _checkshellalias(ui, args):'
517 513 aliases, entry = cmdutil.findcmd(cmd, cmdtable, lui.config("ui", "strict"))
518 514 except (error.AmbiguousCommand, error.UnknownCommand):
519 515 commands.norepo = norepo
520 os.chdir(cwd)
521 516 return
522 517
523 518 cmd = aliases[0]
524 519 fn = entry[0]
525 520
526 if cmd and hasattr(fn, 'shell'):
521 if cmd and util.safehasattr(fn, 'shell'):
527 522 d = lambda: fn(ui, *args[1:])
528 523 return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})
529 524
530 525 commands.norepo = norepo
531 os.chdir(cwd)
532 526
533 527 _loaded = set()
534 528 def _dispatch(req):
535 529 args = req.args
536 530 ui = req.ui
537 531
538 shellaliasfn = _checkshellalias(ui, args)
539 if shellaliasfn:
540 return shellaliasfn()
541
542 532 # read --config before doing anything else
543 533 # (e.g. to change trust settings for reading .hg/hgrc)
544 534 cfgs = _parseconfig(ui, _earlygetopt(['--config'], args))
@@ -551,6 +541,12 b' def _dispatch(req):'
551 541 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
552 542 path, lui = _getlocal(ui, rpath)
553 543
544 # Now that we're operating in the right directory/repository with
545 # the right config settings, check for shell aliases
546 shellaliasfn = _checkshellalias(lui, ui, args)
547 if shellaliasfn:
548 return shellaliasfn()
549
554 550 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
555 551 # reposetup. Programs like TortoiseHg will call _dispatch several
556 552 # times so we keep track of configured extensions in _loaded.
@@ -635,10 +631,10 b' def _dispatch(req):'
635 631 for ui_ in uis:
636 632 ui_.setconfig('web', 'cacerts', '')
637 633
634 if options['version']:
635 return commands.version_(ui)
638 636 if options['help']:
639 return commands.help_(ui, cmd, options['version'])
640 elif options['version']:
641 return commands.version_(ui)
637 return commands.help_(ui, cmd)
642 638 elif not cmd:
643 639 return commands.help_(ui, 'shortlist')
644 640
@@ -139,7 +139,7 b' wide = (os.environ.get("HGENCODINGAMBIGU'
139 139 and "WFA" or "WF")
140 140
141 141 def colwidth(s):
142 "Find the column width of a UTF-8 string for display"
142 "Find the column width of a string for display in the local encoding"
143 143 return ucolwidth(s.decode(encoding, 'replace'))
144 144
145 145 def ucolwidth(d):
@@ -149,6 +149,14 b' def ucolwidth(d):'
149 149 return sum([eaw(c) in wide and 2 or 1 for c in d])
150 150 return len(d)
151 151
152 def getcols(s, start, c):
153 '''Use colwidth to find a c-column substring of s starting at byte
154 index start'''
155 for x in xrange(start + c, len(s)):
156 t = s[start:x]
157 if colwidth(t) == c:
158 return t
159
152 160 def lower(s):
153 161 "best-effort encoding-aware case-folding of local string s"
154 162 try:
@@ -39,6 +39,9 b' class Abort(Exception):'
39 39 class ConfigError(Abort):
40 40 'Exception raised when parsing config files'
41 41
42 class OutOfBandError(Exception):
43 'Exception raised when a remote repo reports failure'
44
42 45 class ParseError(Exception):
43 46 'Exception raised when parsing config files (msg[, pos])'
44 47
@@ -69,7 +69,9 b' def load(ui, name, path):'
69 69 return mod
70 70 try:
71 71 mod = importh("hgext.%s" % name)
72 except ImportError:
72 except ImportError, err:
73 ui.debug('could not import hgext.%s (%s): trying %s\n'
74 % (name, err, name))
73 75 mod = importh(name)
74 76 _extensions[shortname] = mod
75 77 _order.append(shortname)
@@ -124,7 +126,7 b' def wrapcommand(table, command, wrapper)'
124 126 where orig is the original (wrapped) function, and *args, **kwargs
125 127 are the arguments passed to it.
126 128 '''
127 assert hasattr(wrapper, '__call__')
129 assert util.safehasattr(wrapper, '__call__')
128 130 aliases, entry = cmdutil.findcmd(command, table)
129 131 for alias, e in table.iteritems():
130 132 if e is entry:
@@ -177,12 +179,12 b' def wrapfunction(container, funcname, wr'
177 179 your end users, you should play nicely with others by using the
178 180 subclass trick.
179 181 '''
180 assert hasattr(wrapper, '__call__')
182 assert util.safehasattr(wrapper, '__call__')
181 183 def wrap(*args, **kwargs):
182 184 return wrapper(origfn, *args, **kwargs)
183 185
184 186 origfn = getattr(container, funcname)
185 assert hasattr(origfn, '__call__')
187 assert util.safehasattr(origfn, '__call__')
186 188 setattr(container, funcname, wrap)
187 189 return origfn
188 190
@@ -75,7 +75,7 b' def fancyopts(args, options, state, gnu='
75 75 # copy defaults to state
76 76 if isinstance(default, list):
77 77 state[name] = default[:]
78 elif hasattr(default, '__call__'):
78 elif getattr(default, '__call__', False):
79 79 state[name] = None
80 80 else:
81 81 state[name] = default
@@ -34,7 +34,8 b' def _findtool(ui, tool):'
34 34 p = util.findexe(p + _toolstr(ui, tool, "regappend"))
35 35 if p:
36 36 return p
37 return util.findexe(_toolstr(ui, tool, "executable", tool))
37 exe = _toolstr(ui, tool, "executable", tool)
38 return util.findexe(util.expandpath(exe))
38 39
39 40 def _picktool(repo, ui, path, binary, symlink):
40 41 def check(tool, pat, symlink, binary):
@@ -8,7 +8,7 b''
8 8 # This software may be used and distributed according to the terms of the
9 9 # GNU General Public License version 2 or any later version.
10 10
11 import os
11 import os, error
12 12 from i18n import _
13 13 from node import short, hex
14 14 import util
@@ -35,17 +35,18 b' def bisect(changelog, state):'
35 35 # build visit array
36 36 ancestors = [None] * (len(changelog) + 1) # an extra for [-1]
37 37
38 # set nodes descended from goodrev
39 ancestors[goodrev] = []
38 # set nodes descended from goodrevs
39 for rev in goodrevs:
40 ancestors[rev] = []
40 41 for rev in xrange(goodrev + 1, len(changelog)):
41 42 for prev in clparents(rev):
42 43 if ancestors[prev] == []:
43 44 ancestors[rev] = []
44 45
45 46 # clear good revs from array
46 for node in goodrevs:
47 ancestors[node] = None
48 for rev in xrange(len(changelog), -1, -1):
47 for rev in goodrevs:
48 ancestors[rev] = None
49 for rev in xrange(len(changelog), goodrev, -1):
49 50 if ancestors[rev] is None:
50 51 for prev in clparents(rev):
51 52 ancestors[prev] = None
@@ -149,7 +150,102 b' def save_state(repo, state):'
149 150 for kind in state:
150 151 for node in state[kind]:
151 152 f.write("%s %s\n" % (kind, hex(node)))
152 f.rename()
153 f.close()
153 154 finally:
154 155 wlock.release()
155 156
157 def get(repo, status):
158 """
159 Return a list of revision(s) that match the given status:
160
161 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
162 - ``goods``, ``bads`` : csets topologically good/bad
163 - ``range`` : csets taking part in the bisection
164 - ``pruned`` : csets that are goods, bads or skipped
165 - ``untested`` : csets whose fate is yet unknown
166 - ``ignored`` : csets ignored due to DAG topology
167 """
168 state = load_state(repo)
169 if status in ('good', 'bad', 'skip'):
170 return [repo.changelog.rev(n) for n in state[status]]
171 else:
172 # In the following sets, we do *not* call 'bisect()' with more
173 # than one level of recursion, because that can be very, very
174 # time consuming. Instead, we always develop the expression as
175 # much as possible.
176
177 # 'range' is all csets that make the bisection:
178 # - have a good ancestor and a bad descendant, or conversely
179 # that's because the bisection can go either way
180 range = '( bisect(bad)::bisect(good) | bisect(good)::bisect(bad) )'
181
182 _t = [c.rev() for c in repo.set('bisect(good)::bisect(bad)')]
183 # The sets of topologically good or bad csets
184 if len(_t) == 0:
185 # Goods are topologically after bads
186 goods = 'bisect(good)::' # Pruned good csets
187 bads = '::bisect(bad)' # Pruned bad csets
188 else:
189 # Goods are topologically before bads
190 goods = '::bisect(good)' # Pruned good csets
191 bads = 'bisect(bad)::' # Pruned bad csets
192
193 # 'pruned' is all csets whose fate is already known: good, bad, skip
194 skips = 'bisect(skip)' # Pruned skipped csets
195 pruned = '( (%s) | (%s) | (%s) )' % (goods, bads, skips)
196
197 # 'untested' is all csets that are in 'range', but not in 'pruned'
198 untested = '( (%s) - (%s) )' % (range, pruned)
199
200 # 'ignored' is all csets that were not used during the bisection
201 # due to DAG topology, but may nevertheless have had an impact.
202 # E.g., a branch merged between bads and goods, but whose branch-
203 # point is outside of the range.
204 iba = '::bisect(bad) - ::bisect(good)' # Ignored bads' ancestors
205 iga = '::bisect(good) - ::bisect(bad)' # Ignored goods' ancestors
206 ignored = '( ( (%s) | (%s) ) - (%s) )' % (iba, iga, range)
207
208 if status == 'range':
209 return [c.rev() for c in repo.set(range)]
210 elif status == 'pruned':
211 return [c.rev() for c in repo.set(pruned)]
212 elif status == 'untested':
213 return [c.rev() for c in repo.set(untested)]
214 elif status == 'ignored':
215 return [c.rev() for c in repo.set(ignored)]
216 elif status == "goods":
217 return [c.rev() for c in repo.set(goods)]
218 elif status == "bads":
219 return [c.rev() for c in repo.set(bads)]
220
221 else:
222 raise error.ParseError(_('invalid bisect state'))
223
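A hedged sketch of how this helper might be called (the status strings are
those listed in the docstring above)::

    # split the bisection into what is already decided and what is left
    done = get(repo, 'pruned')      # good, bad or skipped, explicit or implied
    todo = get(repo, 'untested')    # candidates bisect may still visit
    repo.ui.write('%d decided, %d untested\n' % (len(done), len(todo)))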
224 def label(repo, node, short=False):
225 rev = repo.changelog.rev(node)
226
227 # Try explicit sets
228 if rev in get(repo, 'good'):
229 return _('good')
230 if rev in get(repo, 'bad'):
231 return _('bad')
232 if rev in get(repo, 'skip'):
233 return _('skipped')
234 if rev in get(repo, 'untested'):
235 return _('untested')
236 if rev in get(repo, 'ignored'):
237 return _('ignored')
238
239 # Try implicit sets
240 if rev in get(repo, 'goods'):
241 return _('good (implicit)')
242 if rev in get(repo, 'bads'):
243 return _('bad (implicit)')
244
245 return None
246
247 def shortlabel(label):
248 if label:
249 return label[0].upper()
250
251 return None
@@ -31,7 +31,7 b' def loaddoc(topic):'
31 31 """Return a delayed loader for help/topic.txt."""
32 32
33 33 def loader():
34 if hasattr(sys, 'frozen'):
34 if util.mainfrozen():
35 35 module = sys.executable
36 36 else:
37 37 module = __file__
@@ -223,6 +223,10 b' alias, as was done above for the purge a'
223 223 ``$HG_ARGS`` expand to the arguments given to Mercurial. In the ``hg
224 224 echo foo`` call above, ``$HG_ARGS`` would expand to ``echo foo``.
225 225
226 .. note:: Some global configuration options such as ``-R`` are
227 processed before shell aliases and will thus not be passed to
228 aliases.
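For example, given a (hypothetical) shell alias such as::

     [alias]
     greet = !echo "hello from a shell alias"

global options like ``-R``/``--repository`` and ``--cwd`` are handled by
Mercurial while it locates the repository and configuration; they are not
seen by the alias body itself.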
229
226 230 ``auth``
227 231 """"""""
228 232
@@ -1261,6 +1265,12 b' The full set of options is:'
1261 1265 ``ipv6``
1262 1266 Whether to use IPv6. Default is False.
1263 1267
1268 ``logoimg``
1269 File name of the logo image that some templates display on each page.
1270 The file name is relative to ``staticurl``. That is, the full path to
1271 the logo image is "staticurl/logoimg".
1272 If unset, ``hglogo.png`` will be used.
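For example (the file name is illustrative)::

     [web]
     logoimg = mylogo.png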
1273
1264 1274 ``logourl``
1265 1275 Base URL to use for logos. If unset, ``http://mercurial.selenic.com/``
1266 1276 will be used.
@@ -1,13 +1,14 b''
1 1 Subrepositories let you nest external repositories or projects into a
2 2 parent Mercurial repository, and make commands operate on them as a
3 group. External Mercurial and Subversion projects are currently
4 supported.
3 group.
4
5 Mercurial currently supports Mercurial, Git, and Subversion
6 subrepositories.
5 7
6 8 Subrepositories are made of three components:
7 9
8 10 1. Nested repository checkouts. They can appear anywhere in the
9 parent working directory, and are Mercurial clones or Subversion
10 checkouts.
11 parent working directory.
11 12
12 13 2. Nested repository references. They are defined in ``.hgsub`` and
13 14 tell where the subrepository checkouts come from. Mercurial
@@ -15,12 +16,15 b' 2. Nested repository references. They ar'
15 16
16 17 path/to/nested = https://example.com/nested/repo/path
17 18
19 Git and Subversion subrepos are also supported:
20
21 path/to/nested = [git]git://example.com/nested/repo/path
22 path/to/nested = [svn]https://example.com/nested/trunk/path
23
18 24 where ``path/to/nested`` is the checkout location relative to the
19 25 parent Mercurial root, and ``https://example.com/nested/repo/path``
20 26 is the source repository path. The source can also reference a
21 filesystem path. Subversion repositories are defined with:
22
23 path/to/nested = [svn]https://example.com/nested/trunk/path
27 filesystem path.
24 28
25 29 Note that ``.hgsub`` does not exist by default in Mercurial
26 30 repositories; you have to create and add it to the parent
@@ -98,9 +98,9 b" def repository(ui, path='', create=False"
98 98 hook(ui, repo)
99 99 return repo
100 100
101 def peer(ui, opts, path, create=False):
101 def peer(uiorrepo, opts, path, create=False):
102 102 '''return a repository peer for the specified path'''
103 rui = remoteui(ui, opts)
103 rui = remoteui(uiorrepo, opts)
104 104 return repository(rui, path, create)
105 105
106 106 def defaultdest(source):
@@ -174,6 +174,36 b' def share(ui, source, dest=None, update='
174 174 continue
175 175 _update(r, uprev)
176 176
177 def copystore(ui, srcrepo, destpath):
178 '''copy files from the store of srcrepo into destpath
179
180 returns destlock
181 '''
182 destlock = None
183 try:
184 hardlink = None
185 num = 0
186 for f in srcrepo.store.copylist():
187 src = os.path.join(srcrepo.sharedpath, f)
188 dst = os.path.join(destpath, f)
189 dstbase = os.path.dirname(dst)
190 if dstbase and not os.path.exists(dstbase):
191 os.mkdir(dstbase)
192 if os.path.exists(src):
193 if dst.endswith('data'):
194 # lock to avoid premature writing to the target
195 destlock = lock.lock(os.path.join(dstbase, "lock"))
196 hardlink, n = util.copyfiles(src, dst, hardlink)
197 num += n
198 if hardlink:
199 ui.debug("linked %d files\n" % num)
200 else:
201 ui.debug("copied %d files\n" % num)
202 return destlock
203 except:
204 release(destlock)
205 raise
206
177 207 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
178 208 update=True, stream=False, branch=None):
179 209 """Make a copy of an existing repository.
@@ -287,24 +317,7 b' def clone(ui, peeropts, source, dest=Non'
287 317 % dest)
288 318 raise
289 319
290 hardlink = None
291 num = 0
292 for f in srcrepo.store.copylist():
293 src = os.path.join(srcrepo.sharedpath, f)
294 dst = os.path.join(destpath, f)
295 dstbase = os.path.dirname(dst)
296 if dstbase and not os.path.exists(dstbase):
297 os.mkdir(dstbase)
298 if os.path.exists(src):
299 if dst.endswith('data'):
300 # lock to avoid premature writing to the target
301 destlock = lock.lock(os.path.join(dstbase, "lock"))
302 hardlink, n = util.copyfiles(src, dst, hardlink)
303 num += n
304 if hardlink:
305 ui.debug("linked %d files\n" % num)
306 else:
307 ui.debug("copied %d files\n" % num)
320 destlock = copystore(ui, srcrepo, destpath)
308 321
309 322 # we need to re-init the repo after manually copying the data
310 323 # into it
@@ -537,7 +550,7 b' def verify(repo):'
537 550
538 551 def remoteui(src, opts):
539 552 'build a remote ui from ui or repo and opts'
540 if hasattr(src, 'baseui'): # looks like a repository
553 if util.safehasattr(src, 'baseui'): # looks like a repository
541 554 dst = src.baseui.copy() # drop repo-specific config
542 555 src = src.ui # copy target options from repo
543 556 else: # assume it's a global ui object
@@ -7,7 +7,7 b''
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import os
10 from mercurial import ui, hg, hook, error, encoding, templater
10 from mercurial import ui, hg, hook, error, encoding, templater, util
11 11 from common import get_stat, ErrorResponse, permhooks, caching
12 12 from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST
13 13 from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR
@@ -148,7 +148,7 b' class hgweb(object):'
148 148 cmd = cmd[style + 1:]
149 149
150 150 # avoid accepting e.g. style parameter as command
151 if hasattr(webcommands, cmd):
151 if util.safehasattr(webcommands, cmd):
152 152 req.form['cmd'] = [cmd]
153 153 else:
154 154 cmd = ''
@@ -236,6 +236,7 b' class hgweb(object):'
236 236 port = port != default_port and (":" + port) or ""
237 237 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
238 238 logourl = self.config("web", "logourl", "http://mercurial.selenic.com/")
239 logoimg = self.config("web", "logoimg", "hglogo.png")
239 240 staticurl = self.config("web", "staticurl") or req.url + 'static/'
240 241 if not staticurl.endswith('/'):
241 242 staticurl += '/'
@@ -276,6 +277,7 b' class hgweb(object):'
276 277 tmpl = templater.templater(mapfile,
277 278 defaults={"url": req.url,
278 279 "logourl": logourl,
280 "logoimg": logoimg,
279 281 "staticurl": staticurl,
280 282 "urlbase": urlbase,
281 283 "repo": self.reponame,
@@ -51,6 +51,33 b' def urlrepos(prefix, roothead, paths):'
51 51 yield (prefix + '/' +
52 52 util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path
53 53
54 def geturlcgivars(baseurl, port):
55 """
56 Extract CGI variables from baseurl
57
58 >>> geturlcgivars("http://host.org/base", "80")
59 ('host.org', '80', '/base')
60 >>> geturlcgivars("http://host.org:8000/base", "80")
61 ('host.org', '8000', '/base')
62 >>> geturlcgivars('/base', 8000)
63 ('', '8000', '/base')
64 >>> geturlcgivars("base", '8000')
65 ('', '8000', '/base')
66 >>> geturlcgivars("http://host", '8000')
67 ('host', '8000', '/')
68 >>> geturlcgivars("http://host/", '8000')
69 ('host', '8000', '/')
70 """
71 u = util.url(baseurl)
72 name = u.host or ''
73 if u.port:
74 port = u.port
75 path = u.path or ""
76 if not path.startswith('/'):
77 path = '/' + path
78
79 return name, str(port), path
80
54 81 class hgwebdir(object):
55 82 refreshinterval = 20
56 83
@@ -348,6 +375,7 b' class hgwebdir(object):'
348 375 start = url[-1] == '?' and '&' or '?'
349 376 sessionvars = webutil.sessionvars(vars, start)
350 377 logourl = config('web', 'logourl', 'http://mercurial.selenic.com/')
378 logoimg = config('web', 'logoimg', 'hglogo.png')
351 379 staticurl = config('web', 'staticurl') or url + 'static/'
352 380 if not staticurl.endswith('/'):
353 381 staticurl += '/'
@@ -358,17 +386,14 b' class hgwebdir(object):'
358 386 "motd": motd,
359 387 "url": url,
360 388 "logourl": logourl,
389 "logoimg": logoimg,
361 390 "staticurl": staticurl,
362 391 "sessionvars": sessionvars})
363 392 return tmpl
364 393
365 394 def updatereqenv(self, env):
366 395 if self._baseurl is not None:
367 u = util.url(self._baseurl)
368 env['SERVER_NAME'] = u.host
369 if u.port:
370 env['SERVER_PORT'] = u.port
371 path = u.path or ""
372 if not path.startswith('/'):
373 path = '/' + path
396 name, port, path = geturlcgivars(self._baseurl, env['SERVER_PORT'])
397 env['SERVER_NAME'] = name
398 env['SERVER_PORT'] = port
374 399 env['SCRIPT_NAME'] = path
@@ -10,6 +10,7 b' from mercurial import util, wireproto'
10 10 from common import HTTP_OK
11 11
12 12 HGTYPE = 'application/mercurial-0.1'
13 HGERRTYPE = 'application/hg-error'
13 14
14 15 class webproto(object):
15 16 def __init__(self, req, ui):
@@ -90,3 +91,7 b' def call(repo, req, cmd):'
90 91 rsp = '0\n%s\n' % rsp.res
91 92 req.respond(HTTP_OK, HGTYPE, length=len(rsp))
92 93 return [rsp]
94 elif isinstance(rsp, wireproto.ooberror):
95 rsp = rsp.message
96 req.respond(HTTP_OK, HGERRTYPE, length=len(rsp))
97 return [rsp]
@@ -101,7 +101,7 b' class wsgirequest(object):'
101 101 self.headers = []
102 102
103 103 def write(self, thing):
104 if hasattr(thing, "__iter__"):
104 if util.safehasattr(thing, "__iter__"):
105 105 for part in thing:
106 106 self.write(part)
107 107 else:
@@ -246,9 +246,10 b' class _httprequesthandlerssl(_httpreques'
246 246
247 247 try:
248 248 from threading import activeCount
249 activeCount() # silence pyflakes
249 250 _mixin = SocketServer.ThreadingMixIn
250 251 except ImportError:
251 if hasattr(os, "fork"):
252 if util.safehasattr(os, "fork"):
252 253 _mixin = SocketServer.ForkingMixIn
253 254 else:
254 255 class _mixin(object):
@@ -72,7 +72,7 b' def _siblings(siblings=[], hiderev=None)'
72 72 d['date'] = s.date()
73 73 d['description'] = s.description()
74 74 d['branch'] = s.branch()
75 if hasattr(s, 'path'):
75 if util.safehasattr(s, 'path'):
76 76 d['file'] = s.path()
77 77 yield d
78 78
@@ -78,5 +78,4 b' def launch(application):'
78 78 for chunk in content:
79 79 write(chunk)
80 80 finally:
81 if hasattr(content, 'close'):
82 content.close()
81 getattr(content, 'close', lambda : None)()
@@ -21,14 +21,14 b' def _pythonhook(ui, repo, name, hname, f'
21 21
22 22 ui.note(_("calling hook %s: %s\n") % (hname, funcname))
23 23 obj = funcname
24 if not hasattr(obj, '__call__'):
24 if not util.safehasattr(obj, '__call__'):
25 25 d = funcname.rfind('.')
26 26 if d == -1:
27 27 raise util.Abort(_('%s hook is invalid ("%s" not in '
28 28 'a module)') % (hname, funcname))
29 29 modname = funcname[:d]
30 30 oldpaths = sys.path
31 if hasattr(sys, "frozen"):
31 if util.mainfrozen():
32 32 # binary installs require sys.path manipulation
33 33 modpath, modfile = os.path.split(modname)
34 34 if modpath and modfile:
@@ -60,7 +60,7 b' def _pythonhook(ui, repo, name, hname, f'
60 60 raise util.Abort(_('%s hook is invalid '
61 61 '("%s" is not defined)') %
62 62 (hname, funcname))
63 if not hasattr(obj, '__call__'):
63 if not util.safehasattr(obj, '__call__'):
64 64 raise util.Abort(_('%s hook is invalid '
65 65 '("%s" is not callable)') %
66 66 (hname, funcname))
@@ -99,7 +99,7 b' def _exthook(ui, repo, name, cmd, args, '
99 99
100 100 env = {}
101 101 for k, v in args.iteritems():
102 if hasattr(v, '__call__'):
102 if util.safehasattr(v, '__call__'):
103 103 v = v()
104 104 if isinstance(v, dict):
105 105 # make the dictionary element order stable across Python
@@ -149,7 +149,7 b' def hook(ui, repo, name, throw=False, **'
149 149 for hname, cmd in ui.configitems('hooks'):
150 150 if hname.split('.')[0] != name or not cmd:
151 151 continue
152 if hasattr(cmd, '__call__'):
152 if util.safehasattr(cmd, '__call__'):
153 153 r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
154 154 elif cmd.startswith('python:'):
155 155 if cmd.count(':') >= 2:
@@ -171,6 +171,14 b' class HTTPResponse(object):'
171 171 logger.info('cl: %r body: %r', self._content_len, self._body)
172 172 try:
173 173 data = self.sock.recv(INCOMING_BUFFER_SIZE)
174 # If the socket was readable and no data was read, that
175 # means the socket was closed. If this isn't a
176 # _CLOSE_IS_END socket, then something is wrong if we're
177 # here (we shouldn't enter _select() if the response is
178 # complete), so abort.
179 if not data and self._content_len != _LEN_CLOSE_IS_END:
180 raise HTTPRemoteClosedError(
181 'server appears to have closed the socket mid-response')
174 182 except socket.sslerror, e:
175 183 if e.args[0] != socket.SSL_ERROR_WANT_READ:
176 184 raise
@@ -693,6 +701,11 b' class BadRequestData(httplib.HTTPExcepti'
693 701 class HTTPProxyConnectFailedException(httplib.HTTPException):
694 702 """Connecting to the HTTP proxy failed."""
695 703
704
696 705 class HTTPStateError(httplib.HTTPException):
697 706 """Invalid internal state encountered."""
707
708
709 class HTTPRemoteClosedError(httplib.HTTPException):
710 """The server closed the remote socket in the middle of a response."""
698 711 # no-check-code
@@ -380,6 +380,21 b' dotencode'
380 380 con.request('GET', '/')
381 381 self.assertEqual(2, len(sockets))
382 382
383 def test_server_closes_before_end_of_body(self):
384 con = http.HTTPConnection('1.2.3.4:80')
385 con._connect()
386 s = con.sock
387 s.data = ['HTTP/1.1 200 OK\r\n',
388 'Server: BogusServer 1.0\r\n',
389 'Connection: Keep-Alive\r\n',
390 'Content-Length: 16',
391 '\r\n\r\n',
392 'You can '] # Note: this is shorter than content-length
393 s.close_on_empty = True
394 con.request('GET', '/')
395 r1 = con.getresponse()
396 self.assertRaises(http.HTTPRemoteClosedError, r1.read)
397
383 398 def test_no_response_raises_response_not_ready(self):
384 399 con = http.HTTPConnection('foo')
385 400 self.assertRaises(http.httplib.ResponseNotReady, con.getresponse)
@@ -134,4 +134,20 b' class ChunkedTransferTest(util.HttpTestB'
134 134 con.request('GET', '/')
135 135 self.assertStringEqual('hi there\nthere\nthere\nthere\nthere\n',
136 136 con.getresponse().read())
137
138 def testChunkedDownloadEarlyHangup(self):
139 con = http.HTTPConnection('1.2.3.4:80')
140 con._connect()
141 sock = con.sock
142 broken = chunkedblock('hi'*20)[:-1]
143 sock.data = ['HTTP/1.1 200 OK\r\n',
144 'Server: BogusServer 1.0\r\n',
145 'transfer-encoding: chunked',
146 '\r\n\r\n',
147 broken,
148 ]
149 sock.close_on_empty = True
150 con.request('GET', '/')
151 resp = con.getresponse()
152 self.assertRaises(http.HTTPRemoteClosedError, resp.read)
137 153 # no-check-code
@@ -28,6 +28,7 b' class httprepository(wireproto.wirerepos'
28 28 self.path = path
29 29 self.caps = None
30 30 self.handler = None
31 self.urlopener = None
31 32 u = util.url(path)
32 33 if u.query or u.fragment:
33 34 raise util.Abort(_('unsupported URL component: "%s"') %
@@ -42,10 +43,10 b' class httprepository(wireproto.wirerepos'
42 43 self.urlopener = url.opener(ui, authinfo)
43 44
44 45 def __del__(self):
45 for h in self.urlopener.handlers:
46 h.close()
47 if hasattr(h, "close_all"):
48 h.close_all()
46 if self.urlopener:
47 for h in self.urlopener.handlers:
48 h.close()
49 getattr(h, "close_all", lambda : None)()
49 50
50 51 def url(self):
51 52 return self.path
@@ -139,6 +140,8 b' class httprepository(wireproto.wirerepos'
139 140 proto = resp.headers.get('content-type', '')
140 141
141 142 safeurl = util.hidepassword(self._url)
143 if proto.startswith('application/hg-error'):
144 raise error.OutOfBandError(resp.read())
142 145 # accept old "text/plain" and "application/hg-changegroup" for now
143 146 if not (proto.startswith('application/mercurial-') or
144 147 proto.startswith('text/plain') or
@@ -9,7 +9,7 b' import encoding'
9 9 import gettext, sys, os
10 10
11 11 # modelled after templater.templatepath:
12 if hasattr(sys, 'frozen'):
12 if getattr(sys, 'frozen', None) is not None:
13 13 module = sys.executable
14 14 else:
15 15 module = __file__
@@ -61,4 +61,3 b' if _plain():'
61 61 _ = lambda message: message
62 62 else:
63 63 _ = gettext
64
@@ -547,13 +547,14 b' def safesend(self, str):'
547 547 print "send:", repr(str)
548 548 try:
549 549 blocksize = 8192
550 if hasattr(str,'read') :
550 read = getattr(str, 'read', None)
551 if read is not None:
551 552 if self.debuglevel > 0:
552 553 print "sendIng a read()able"
553 data = str.read(blocksize)
554 data = read(blocksize)
554 555 while data:
555 556 self.sock.sendall(data)
556 data = str.read(blocksize)
557 data = read(blocksize)
557 558 else:
558 559 self.sock.sendall(str)
559 560 except socket.error, v:
@@ -10,13 +10,14 b' from i18n import _'
10 10 import repo, changegroup, subrepo, discovery, pushkey
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 12 import lock, transaction, store, encoding
13 import scmutil, util, extensions, hook, error
13 import scmutil, util, extensions, hook, error, revset
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 from lock import release
18 18 import weakref, errno, os, time, inspect
19 19 propertycache = util.propertycache
20 filecache = scmutil.filecache
20 21
21 22 class localrepository(repo.repository):
22 23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
@@ -63,6 +64,7 b' class localrepository(repo.repository):'
63 64 )
64 65 if self.ui.configbool('format', 'generaldelta', False):
65 66 requirements.append("generaldelta")
67 requirements = set(requirements)
66 68 else:
67 69 raise error.RepoError(_("repository %s not found") % path)
68 70 elif create:
@@ -77,7 +79,7 b' class localrepository(repo.repository):'
77 79
78 80 self.sharedpath = self.path
79 81 try:
80 s = os.path.realpath(self.opener.read("sharedpath"))
82 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
81 83 if not os.path.exists(s):
82 84 raise error.RepoError(
83 85 _('.hg/sharedpath points to nonexistent directory %s') % s)
@@ -95,21 +97,19 b' class localrepository(repo.repository):'
95 97 if create:
96 98 self._writerequirements()
97 99
98 # These two define the set of tags for this repository. _tags
99 # maps tag name to node; _tagtypes maps tag name to 'global' or
100 # 'local'. (Global tags are defined by .hgtags across all
101 # heads, and local tags are defined in .hg/localtags.) They
102 # constitute the in-memory cache of tags.
103 self._tags = None
104 self._tagtypes = None
105 100
106 101 self._branchcache = None
107 102 self._branchcachetip = None
108 self.nodetagscache = None
109 103 self.filterpats = {}
110 104 self._datafilters = {}
111 105 self._transref = self._lockref = self._wlockref = None
112 106
107 # A cache for various files under .hg/ that tracks file changes,
108 # (used by the filecache decorator)
109 #
110 # Maps a property name to its util.filecacheentry
111 self._filecache = {}
112
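For orientation, a minimal sketch of the idea behind such a decorator. This
is not scmutil.filecache itself; the entry class, the ``repo.path`` layout
and the attribute names are illustrative only::

    import os

    class filecacheentry(object):
        '''remember a file's stat signature so we can tell when it changes'''
        def __init__(self, path):
            self.path = path
            self.sig = None        # (mtime, size) from the last check, or None
            self.value = None
            self.loaded = False

        def changed(self):
            try:
                st = os.stat(self.path)
                sig = (st.st_mtime, st.st_size)
            except OSError:
                sig = None
            if sig != self.sig:
                self.sig = sig
                return True
            return False

    def filecache(filename):
        '''cache a repo property, recomputing it only when the backing
        file under .hg/ changes on disk (illustrative sketch)'''
        def decorator(func):
            name = func.__name__
            def getter(repo):
                # repo.path is assumed here to point at the .hg directory
                path = os.path.join(repo.path, filename)
                entry = repo._filecache.get(name)
                if entry is None:
                    entry = repo._filecache[name] = filecacheentry(path)
                if entry.changed() or not entry.loaded:
                    entry.value = func(repo)
                    entry.loaded = True
                return entry.value
            return property(getter)
        return decorator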
113 113 def _applyrequirements(self, requirements):
114 114 self.requirements = requirements
115 115 openerreqs = set(('revlogv1', 'generaldelta'))
@@ -159,15 +159,18 b' class localrepository(repo.repository):'
159 159 parts.pop()
160 160 return False
161 161
162 @util.propertycache
162 @filecache('bookmarks')
163 163 def _bookmarks(self):
164 164 return bookmarks.read(self)
165 165
166 @util.propertycache
166 @filecache('bookmarks.current')
167 167 def _bookmarkcurrent(self):
168 168 return bookmarks.readcurrent(self)
169 169
170 @propertycache
170 def _writebookmarks(self, marks):
171 bookmarks.write(self)
172
173 @filecache('00changelog.i', True)
171 174 def changelog(self):
172 175 c = changelog.changelog(self.sopener)
173 176 if 'HG_PENDING' in os.environ:
@@ -176,11 +179,11 b' class localrepository(repo.repository):'
176 179 c.readpending('00changelog.i.a')
177 180 return c
178 181
179 @propertycache
182 @filecache('00manifest.i', True)
180 183 def manifest(self):
181 184 return manifest.manifest(self.sopener)
182 185
183 @propertycache
186 @filecache('dirstate')
184 187 def dirstate(self):
185 188 warned = [0]
186 189 def validate(node):
@@ -217,6 +220,17 b' class localrepository(repo.repository):'
217 220 for i in xrange(len(self)):
218 221 yield i
219 222
223 def set(self, expr, *args):
224 '''
225 Yield a context for each matching revision, after doing arg
226 replacement via revset.formatspec
227 '''
228
229 expr = revset.formatspec(expr, *args)
230 m = revset.match(None, expr)
231 for r in m(self, range(len(self))):
232 yield self[r]
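A hedged usage sketch of the new ``set()`` helper (the revision range and
the revset are illustrative)::

    # revisions 10..20 that are merges; formatspec handles the %d substitution
    for ctx in repo.set('%d:%d and merge()', 10, 20):
        repo.ui.write('%d:%s\n' % (ctx.rev(), ctx))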
233
220 234 def url(self):
221 235 return 'file:' + self.root
222 236
@@ -249,8 +263,8 b' class localrepository(repo.repository):'
249 263 fp.write('\n')
250 264 for name in names:
251 265 m = munge and munge(name) or name
252 if self._tagtypes and name in self._tagtypes:
253 old = self._tags.get(name, nullid)
266 if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
267 old = self.tags().get(name, nullid)
254 268 fp.write('%s %s\n' % (hex(old), m))
255 269 fp.write('%s %s\n' % (hex(node), m))
256 270 fp.close()
@@ -325,12 +339,31 b' class localrepository(repo.repository):'
325 339 self.tags() # instantiate the cache
326 340 self._tag(names, node, message, local, user, date)
327 341
342 @propertycache
343 def _tagscache(self):
344 '''Returns a tagscache object that contains various tags related caches.'''
345
346 # This simplifies its cache management by having one decorated
347 # function (this one) and the rest simply fetch things from it.
348 class tagscache(object):
349 def __init__(self):
350 # These two define the set of tags for this repository. tags
351 # maps tag name to node; tagtypes maps tag name to 'global' or
352 # 'local'. (Global tags are defined by .hgtags across all
353 # heads, and local tags are defined in .hg/localtags.)
354 # They constitute the in-memory cache of tags.
355 self.tags = self.tagtypes = None
356
357 self.nodetagscache = self.tagslist = None
358
359 cache = tagscache()
360 cache.tags, cache.tagtypes = self._findtags()
361
362 return cache
363
328 364 def tags(self):
329 365 '''return a mapping of tag to node'''
330 if self._tags is None:
331 (self._tags, self._tagtypes) = self._findtags()
332
333 return self._tags
366 return self._tagscache.tags
334 367
335 368 def _findtags(self):
336 369 '''Do the hard work of finding tags. Return a pair of dicts
@@ -379,27 +412,29 b' class localrepository(repo.repository):'
379 412 None : tag does not exist
380 413 '''
381 414
382 self.tags()
383
384 return self._tagtypes.get(tagname)
415 return self._tagscache.tagtypes.get(tagname)
385 416
386 417 def tagslist(self):
387 418 '''return a list of tags ordered by revision'''
388 l = []
389 for t, n in self.tags().iteritems():
390 r = self.changelog.rev(n)
391 l.append((r, t, n))
392 return [(t, n) for r, t, n in sorted(l)]
419 if not self._tagscache.tagslist:
420 l = []
421 for t, n in self.tags().iteritems():
422 r = self.changelog.rev(n)
423 l.append((r, t, n))
424 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
425
426 return self._tagscache.tagslist
393 427
394 428 def nodetags(self, node):
395 429 '''return the tags associated with a node'''
396 if not self.nodetagscache:
397 self.nodetagscache = {}
430 if not self._tagscache.nodetagscache:
431 nodetagscache = {}
398 432 for t, n in self.tags().iteritems():
399 self.nodetagscache.setdefault(n, []).append(t)
400 for tags in self.nodetagscache.itervalues():
433 nodetagscache.setdefault(n, []).append(t)
434 for tags in nodetagscache.itervalues():
401 435 tags.sort()
402 return self.nodetagscache.get(node, [])
436 self._tagscache.nodetagscache = nodetagscache
437 return self._tagscache.nodetagscache.get(node, [])
403 438
404 439 def nodebookmarks(self, node):
405 440 marks = []
@@ -489,7 +524,7 b' class localrepository(repo.repository):'
489 524 for label, nodes in branches.iteritems():
490 525 for node in nodes:
491 526 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
492 f.rename()
527 f.close()
493 528 except (IOError, OSError):
494 529 pass
495 530
@@ -722,67 +757,112 b' class localrepository(repo.repository):'
722 757 finally:
723 758 lock.release()
724 759
725 def rollback(self, dryrun=False):
760 def rollback(self, dryrun=False, force=False):
726 761 wlock = lock = None
727 762 try:
728 763 wlock = self.wlock()
729 764 lock = self.lock()
730 765 if os.path.exists(self.sjoin("undo")):
731 try:
732 args = self.opener.read("undo.desc").splitlines()
733 if len(args) >= 3 and self.ui.verbose:
734 desc = _("repository tip rolled back to revision %s"
735 " (undo %s: %s)\n") % (
736 int(args[0]) - 1, args[1], args[2])
737 elif len(args) >= 2:
738 desc = _("repository tip rolled back to revision %s"
739 " (undo %s)\n") % (
740 int(args[0]) - 1, args[1])
741 except IOError:
742 desc = _("rolling back unknown transaction\n")
743 self.ui.status(desc)
744 if dryrun:
745 return
746 transaction.rollback(self.sopener, self.sjoin("undo"),
747 self.ui.warn)
748 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
749 if os.path.exists(self.join('undo.bookmarks')):
750 util.rename(self.join('undo.bookmarks'),
751 self.join('bookmarks'))
752 try:
753 branch = self.opener.read("undo.branch")
754 self.dirstate.setbranch(branch)
755 except IOError:
756 self.ui.warn(_("named branch could not be reset, "
757 "current branch is still: %s\n")
758 % self.dirstate.branch())
759 self.invalidate()
760 self.dirstate.invalidate()
761 self.destroyed()
762 parents = tuple([p.rev() for p in self.parents()])
763 if len(parents) > 1:
764 self.ui.status(_("working directory now based on "
765 "revisions %d and %d\n") % parents)
766 else:
767 self.ui.status(_("working directory now based on "
768 "revision %d\n") % parents)
766 return self._rollback(dryrun, force)
769 767 else:
770 768 self.ui.warn(_("no rollback information available\n"))
771 769 return 1
772 770 finally:
773 771 release(lock, wlock)
774 772
773 def _rollback(self, dryrun, force):
774 ui = self.ui
775 try:
776 args = self.opener.read('undo.desc').splitlines()
777 (oldlen, desc, detail) = (int(args[0]), args[1], None)
778 if len(args) >= 3:
779 detail = args[2]
780 oldtip = oldlen - 1
781
782 if detail and ui.verbose:
783 msg = (_('repository tip rolled back to revision %s'
784 ' (undo %s: %s)\n')
785 % (oldtip, desc, detail))
786 else:
787 msg = (_('repository tip rolled back to revision %s'
788 ' (undo %s)\n')
789 % (oldtip, desc))
790 except IOError:
791 msg = _('rolling back unknown transaction\n')
792 desc = None
793
794 if not force and self['.'] != self['tip'] and desc == 'commit':
795 raise util.Abort(
796 _('rollback of last commit while not checked out '
797 'may lose data'), hint=_('use -f to force'))
798
799 ui.status(msg)
800 if dryrun:
801 return 0
802
803 parents = self.dirstate.parents()
804 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
805 if os.path.exists(self.join('undo.bookmarks')):
806 util.rename(self.join('undo.bookmarks'),
807 self.join('bookmarks'))
808 self.invalidate()
809
810 parentgone = (parents[0] not in self.changelog.nodemap or
811 parents[1] not in self.changelog.nodemap)
812 if parentgone:
813 util.rename(self.join('undo.dirstate'), self.join('dirstate'))
814 try:
815 branch = self.opener.read('undo.branch')
816 self.dirstate.setbranch(branch)
817 except IOError:
818 ui.warn(_('named branch could not be reset: '
819 'current branch is still \'%s\'\n')
820 % self.dirstate.branch())
821
822 self.dirstate.invalidate()
823 self.destroyed()
824 parents = tuple([p.rev() for p in self.parents()])
825 if len(parents) > 1:
826 ui.status(_('working directory now based on '
827 'revisions %d and %d\n') % parents)
828 else:
829 ui.status(_('working directory now based on '
830 'revision %d\n') % parents)
831 return 0
832
775 833 def invalidatecaches(self):
776 self._tags = None
777 self._tagtypes = None
778 self.nodetagscache = None
834 try:
835 delattr(self, '_tagscache')
836 except AttributeError:
837 pass
838
779 839 self._branchcache = None # in UTF-8
780 840 self._branchcachetip = None
781 841
842 def invalidatedirstate(self):
843 '''Invalidates the dirstate, causing the next call to dirstate
844 to check if it was modified since the last time it was read,
845 rereading it if it has.
846
847 This differs from dirstate.invalidate() in that it doesn't always
848 reread the dirstate. Use dirstate.invalidate() if you want to
849 explicitly read the dirstate again (i.e. restoring it to a previous
850 known good state).'''
851 try:
852 delattr(self, 'dirstate')
853 except AttributeError:
854 pass
855
782 856 def invalidate(self):
783 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
784 if a in self.__dict__:
785 delattr(self, a)
857 for k in self._filecache:
858 # dirstate is invalidated separately in invalidatedirstate()
859 if k == 'dirstate':
860 continue
861
862 try:
863 delattr(self, k)
864 except AttributeError:
865 pass
786 866 self.invalidatecaches()
787 867
788 868 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
@@ -809,7 +889,14 b' class localrepository(repo.repository):'
809 889 l.lock()
810 890 return l
811 891
812 l = self._lock(self.sjoin("lock"), wait, self.store.write,
892 def unlock():
893 self.store.write()
894 for k, ce in self._filecache.items():
895 if k == 'dirstate':
896 continue
897 ce.refresh()
898
899 l = self._lock(self.sjoin("lock"), wait, unlock,
813 900 self.invalidate, _('repository %s') % self.origroot)
814 901 self._lockref = weakref.ref(l)
815 902 return l
@@ -823,8 +910,14 b' class localrepository(repo.repository):'
823 910 l.lock()
824 911 return l
825 912
826 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
827 self.dirstate.invalidate, _('working directory of %s') %
913 def unlock():
914 self.dirstate.write()
915 ce = self._filecache.get('dirstate')
916 if ce:
917 ce.refresh()
918
919 l = self._lock(self.join("wlock"), wait, unlock,
920 self.invalidatedirstate, _('working directory of %s') %
828 921 self.origroot)
829 922 self._wlockref = weakref.ref(l)
830 923 return l
@@ -86,9 +86,7 b' def label(code):'
86 86 for k, v in list(sys.modules.iteritems()):
87 87 if v is None:
88 88 continue
89 if not hasattr(v, '__file__'):
90 continue
91 if not isinstance(v.__file__, str):
89 if not isinstance(getattr(v, '__file__', None), str):
92 90 continue
93 91 if v.__file__.startswith(code.co_filename):
94 92 mname = _fn2mod[code.co_filename] = k
@@ -37,7 +37,7 b' def _smtp(ui):'
37 37 # backward compatible: when tls = true, we use starttls.
38 38 starttls = tls == 'starttls' or util.parsebool(tls)
39 39 smtps = tls == 'smtps'
40 if (starttls or smtps) and not hasattr(socket, 'ssl'):
40 if (starttls or smtps) and not util.safehasattr(socket, 'ssl'):
41 41 raise util.Abort(_("can't use TLS: Python SSL support not installed"))
42 42 if smtps:
43 43 ui.note(_('(using smtps)\n'))
@@ -49,7 +49,6 b' class match(object):'
49 49 '<something>' - a pattern of the specified default type
50 50 """
51 51
52 self._ctx = None
53 52 self._root = root
54 53 self._cwd = cwd
55 54 self._files = []
@@ -157,6 +157,7 b' def _unidiff(t1, t2, l1, l2, opts=defaul'
157 157 return 0
158 158 return ret
159 159
160 lastfunc = [0, '']
160 161 def yieldhunk(hunk):
161 162 (astart, a2, bstart, b2, delta) = hunk
162 163 aend = contextend(a2, len(l1))
@@ -165,13 +166,19 b' def _unidiff(t1, t2, l1, l2, opts=defaul'
165 166
166 167 func = ""
167 168 if opts.showfunc:
168 # walk backwards from the start of the context
169 # to find a line starting with an alphanumeric char.
170 for x in xrange(astart - 1, -1, -1):
171 t = l1[x].rstrip()
172 if funcre.match(t):
173 func = ' ' + t[:40]
169 lastpos, func = lastfunc
170 # walk backwards from the start of the context up to the start of
171 # the previous hunk context until we find a line starting with an
172 # alphanumeric char.
173 for i in xrange(astart - 1, lastpos - 1, -1):
174 if l1[i][0].isalnum():
175 func = ' ' + l1[i].rstrip()[:40]
176 lastfunc[1] = func
174 177 break
178 # by recording this hunk's starting point as the next place to
179 # start looking for function lines, we avoid reading any line in
180 # the file more than once.
181 lastfunc[0] = astart
175 182
176 183 yield "@@ -%d,%d +%d,%d @@%s\n" % (astart + 1, alen,
177 184 bstart + 1, blen, func)
@@ -180,9 +187,6 b' def _unidiff(t1, t2, l1, l2, opts=defaul'
180 187 for x in xrange(a2, aend):
181 188 yield ' ' + l1[x]
182 189
183 if opts.showfunc:
184 funcre = re.compile('\w')
185
186 190 # bdiff.blocks gives us the matching sequences in the files. The loop
187 191 # below finds the spaces between those matching sequences and translates
188 192 # them into diff output.
@@ -273,7 +273,6 b' def applyupdates(repo, action, wctx, mct'
273 273 action.sort(key=actionkey)
274 274
275 275 # prescan for merges
276 u = repo.ui
277 276 for a in action:
278 277 f, m = a[:2]
279 278 if m == 'm': # merge
@@ -308,8 +307,8 b' def applyupdates(repo, action, wctx, mct'
308 307 numupdates = len(action)
309 308 for i, a in enumerate(action):
310 309 f, m = a[:2]
311 u.progress(_('updating'), i + 1, item=f, total=numupdates,
312 unit=_('files'))
310 repo.ui.progress(_('updating'), i + 1, item=f, total=numupdates,
311 unit=_('files'))
313 312 if f and f[0] == "/":
314 313 continue
315 314 if m == "r": # remove
@@ -377,7 +376,7 b' def applyupdates(repo, action, wctx, mct'
377 376 repo.wopener.audit(f)
378 377 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
379 378 ms.commit()
380 u.progress(_('updating'), None, total=numupdates, unit=_('files'))
379 repo.ui.progress(_('updating'), None, total=numupdates, unit=_('files'))
381 380
382 381 return updated, merged, removed, unresolved
383 382
@@ -18,17 +18,14 b' Remember to update http://mercurial.sele'
18 18 when adding support for new constructs.
19 19 """
20 20
21 import re, sys
21 import re
22 22 import util, encoding
23 23 from i18n import _
24 24
25
26 25 def replace(text, substs):
27 utext = text.decode(encoding.encoding)
28 26 for f, t in substs:
29 utext = utext.replace(f, t)
30 return utext.encode(encoding.encoding)
31
27 text = text.replace(f, t)
28 return text
32 29
33 30 _blockre = re.compile(r"\n(?:\s*\n)+")
34 31
@@ -39,14 +36,14 b' def findblocks(text):'
39 36 has an 'indent' field and a 'lines' field.
40 37 """
41 38 blocks = []
42 for b in _blockre.split(text.strip()):
39 for b in _blockre.split(text.lstrip('\n').rstrip()):
43 40 lines = b.splitlines()
44 indent = min((len(l) - len(l.lstrip())) for l in lines)
45 lines = [l[indent:] for l in lines]
46 blocks.append(dict(indent=indent, lines=lines))
41 if lines:
42 indent = min((len(l) - len(l.lstrip())) for l in lines)
43 lines = [l[indent:] for l in lines]
44 blocks.append(dict(indent=indent, lines=lines))
47 45 return blocks
48 46
49
50 47 def findliteralblocks(blocks):
51 48 """Finds literal blocks and adds a 'type' field to the blocks.
52 49
@@ -103,6 +100,7 b' def findliteralblocks(blocks):'
103 100 r'((.*) +)(.*)$')
104 101 _fieldre = re.compile(r':(?![: ])([^:]*)(?<! ):[ ]+(.*)')
105 102 _definitionre = re.compile(r'[^ ]')
103 _tablere = re.compile(r'(=+\s+)*=+')
106 104
107 105 def splitparagraphs(blocks):
108 106 """Split paragraphs into lists."""
@@ -146,7 +144,6 b' def splitparagraphs(blocks):'
146 144 i += 1
147 145 return blocks
148 146
149
150 147 _fieldwidth = 12
151 148
152 149 def updatefieldlists(blocks):
@@ -173,7 +170,6 b' def updatefieldlists(blocks):'
173 170
174 171 return blocks
175 172
176
177 173 def updateoptionlists(blocks):
178 174 i = 0
179 175 while i < len(blocks):
@@ -238,18 +234,67 b' def prunecontainers(blocks, keep):'
238 234 # Always delete "..container:: type" block
239 235 del blocks[i]
240 236 j = i
237 i -= 1
241 238 while j < len(blocks) and blocks[j]['indent'] > indent:
242 239 if prune:
243 240 del blocks[j]
244 i -= 1 # adjust outer index
245 241 else:
246 242 blocks[j]['indent'] -= adjustment
247 243 j += 1
248 244 i += 1
249 245 return blocks, pruned
250 246
247 _sectionre = re.compile(r"""^([-=`:.'"~^_*+#])\1+$""")
251 248
252 _sectionre = re.compile(r"""^([-=`:.'"~^_*+#])\1+$""")
249 def findtables(blocks):
250 '''Find simple tables
251
252 Only simple one-line table elements are supported
253 '''
254
255 for block in blocks:
256 # Searching for a block that looks like this:
257 #
258 # === ==== ===
259 # A B C
260 # === ==== === <- optional
261 # 1 2 3
262 # x y z
263 # === ==== ===
264 if (block['type'] == 'paragraph' and
265 len(block['lines']) > 2 and
266 _tablere.match(block['lines'][0]) and
267 block['lines'][0] == block['lines'][-1]):
268 block['type'] = 'table'
269 block['header'] = False
270 div = block['lines'][0]
271
272 # column markers are ASCII so we can calculate column
273 # position in bytes
274 columns = [x for x in xrange(len(div))
275 if div[x] == '=' and (x == 0 or div[x - 1] == ' ')]
276 rows = []
277 for l in block['lines'][1:-1]:
278 if l == div:
279 block['header'] = True
280 continue
281 row = []
282 # we measure columns not in bytes or characters but in
283 # colwidth which makes things tricky
284 pos = columns[0] # leading whitespace is bytes
285 for n, start in enumerate(columns):
286 if n + 1 < len(columns):
287 width = columns[n + 1] - start
288 v = encoding.getcols(l, pos, width) # gather columns
289 pos += len(v) # calculate byte position of end
290 row.append(v.strip())
291 else:
292 row.append(l[pos:].strip())
293 rows.append(row)
294
295 block['table'] = rows
296
297 return blocks
253 298
254 299 def findsections(blocks):
255 300 """Finds sections.
@@ -273,7 +318,6 b' def findsections(blocks):'
273 318 del block['lines'][1]
274 319 return blocks
275 320
276
277 321 def inlineliterals(blocks):
278 322 substs = [('``', '"')]
279 323 for b in blocks:
@@ -281,7 +325,6 b' def inlineliterals(blocks):'
281 325 b['lines'] = [replace(l, substs) for l in b['lines']]
282 326 return blocks
283 327
284
285 328 def hgrole(blocks):
286 329 substs = [(':hg:`', '"hg '), ('`', '"')]
287 330 for b in blocks:
@@ -293,7 +336,6 b' def hgrole(blocks):'
293 336 b['lines'] = [replace(l, substs) for l in b['lines']]
294 337 return blocks
295 338
296
297 339 def addmargins(blocks):
298 340 """Adds empty blocks for vertical spacing.
299 341
@@ -366,7 +408,7 b' def formatoption(block, width):'
366 408 hanging = block['optstrwidth']
367 409 initindent = '%s%s ' % (block['optstr'], ' ' * ((hanging - colwidth)))
368 410 hangindent = ' ' * (encoding.colwidth(initindent) + 1)
369 return ' %s' % (util.wrap(desc, usablewidth,
411 return ' %s\n' % (util.wrap(desc, usablewidth,
370 412 initindent=initindent,
371 413 hangindent=hangindent))
372 414
@@ -381,25 +423,47 b' def formatblock(block, width):'
381 423
382 424 defindent = indent + hang * ' '
383 425 text = ' '.join(map(str.strip, block['lines']))
384 return '%s\n%s' % (indent + admonition, util.wrap(text, width=width,
385 initindent=defindent,
386 hangindent=defindent))
426 return '%s\n%s\n' % (indent + admonition,
427 util.wrap(text, width=width,
428 initindent=defindent,
429 hangindent=defindent))
387 430 if block['type'] == 'margin':
388 return ''
431 return '\n'
389 432 if block['type'] == 'literal':
390 433 indent += ' '
391 return indent + ('\n' + indent).join(block['lines'])
434 return indent + ('\n' + indent).join(block['lines']) + '\n'
392 435 if block['type'] == 'section':
393 436 underline = encoding.colwidth(block['lines'][0]) * block['underline']
394 return "%s%s\n%s%s" % (indent, block['lines'][0],indent, underline)
437 return "%s%s\n%s%s\n" % (indent, block['lines'][0],indent, underline)
438 if block['type'] == 'table':
439 table = block['table']
440 # compute column widths
441 widths = [max([encoding.colwidth(e) for e in c]) for c in zip(*table)]
442 text = ''
443 span = sum(widths) + len(widths) - 1
444 indent = ' ' * block['indent']
445 hang = ' ' * (len(indent) + span - widths[-1])
446
447 for row in table:
448 l = []
449 for w, v in zip(widths, row):
450 pad = ' ' * (w - encoding.colwidth(v))
451 l.append(v + pad)
452 l = ' '.join(l)
453 l = util.wrap(l, width=width, initindent=indent, hangindent=hang)
454 if not text and block['header']:
455 text = l + '\n' + indent + '-' * (min(width, span)) + '\n'
456 else:
457 text += l + "\n"
458 return text
395 459 if block['type'] == 'definition':
396 460 term = indent + block['lines'][0]
397 461 hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
398 462 defindent = indent + hang * ' '
399 463 text = ' '.join(map(str.strip, block['lines'][1:]))
400 return '%s\n%s' % (term, util.wrap(text, width=width,
401 initindent=defindent,
402 hangindent=defindent))
464 return '%s\n%s\n' % (term, util.wrap(text, width=width,
465 initindent=defindent,
466 hangindent=defindent))
403 467 subindent = indent
404 468 if block['type'] == 'bullet':
405 469 if block['lines'][0].startswith('| '):
@@ -431,15 +495,103 b' def formatblock(block, width):'
431 495 text = ' '.join(map(str.strip, block['lines']))
432 496 return util.wrap(text, width=width,
433 497 initindent=indent,
434 hangindent=subindent)
498 hangindent=subindent) + '\n'
499
500 def formathtml(blocks):
501 """Format RST blocks as HTML"""
502
503 out = []
504 headernest = ''
505 listnest = []
435 506
507 def openlist(start, level):
508 if not listnest or listnest[-1][0] != start:
509 listnest.append((start, level))
510 out.append('<%s>\n' % start)
511
512 blocks = [b for b in blocks if b['type'] != 'margin']
513
514 for pos, b in enumerate(blocks):
515 btype = b['type']
516 level = b['indent']
517 lines = b['lines']
436 518
437 def format(text, width, indent=0, keep=None):
438 """Parse and format the text according to width."""
519 if btype == 'admonition':
520 admonition = _admonitiontitles[b['admonitiontitle']]
521 text = ' '.join(map(str.strip, lines))
522 out.append('<p>\n<b>%s</b> %s\n</p>\n' % (admonition, text))
523 elif btype == 'paragraph':
524 out.append('<p>\n%s\n</p>\n' % '\n'.join(lines))
525 elif btype == 'margin':
526 pass
527 elif btype == 'literal':
528 out.append('<pre>\n%s\n</pre>\n' % '\n'.join(lines))
529 elif btype == 'section':
530 i = b['underline']
531 if i not in headernest:
532 headernest += i
533 level = headernest.index(i) + 1
534 out.append('<h%d>%s</h%d>\n' % (level, lines[0], level))
535 elif btype == 'table':
536 table = b['table']
537 t = []
538 for row in table:
539 l = []
540 for v in zip(row):
541 if not t:
542 l.append('<th>%s</th>' % v)
543 else:
544 l.append('<td>%s</td>' % v)
545 t.append(' <tr>%s</tr>\n' % ''.join(l))
546 out.append('<table>\n%s</table>\n' % ''.join(t))
547 elif btype == 'definition':
548 openlist('dl', level)
549 term = lines[0]
550 text = ' '.join(map(str.strip, lines[1:]))
551 out.append(' <dt>%s\n <dd>%s\n' % (term, text))
552 elif btype == 'bullet':
553 bullet, head = lines[0].split(' ', 1)
554 if bullet == '-':
555 openlist('ul', level)
556 else:
557 openlist('ol', level)
558 out.append(' <li> %s\n' % ' '.join([head] + lines[1:]))
559 elif btype == 'field':
560 openlist('dl', level)
561 key = b['key']
562 text = ' '.join(map(str.strip, lines))
563 out.append(' <dt>%s\n <dd>%s\n' % (key, text))
564 elif btype == 'option':
565 openlist('dl', level)
566 opt = b['optstr']
567 desc = ' '.join(map(str.strip, lines))
568 out.append(' <dt>%s\n <dd>%s\n' % (opt, desc))
569
570 # close lists if indent level of next block is lower
571 if listnest:
572 start, level = listnest[-1]
573 if pos == len(blocks) - 1:
574 out.append('</%s>\n' % start)
575 listnest.pop()
576 else:
577 nb = blocks[pos + 1]
578 ni = nb['indent']
579 if (ni < level or
580 (ni == level and
581 nb['type'] not in 'definition bullet field option')):
582 out.append('</%s>\n' % start)
583 listnest.pop()
584
585 return ''.join(out)
586
587 def parse(text, indent=0, keep=None):
588 """Parse text into a list of blocks"""
589 pruned = []
439 590 blocks = findblocks(text)
440 591 for b in blocks:
441 592 b['indent'] += indent
442 593 blocks = findliteralblocks(blocks)
594 blocks = findtables(blocks)
443 595 blocks, pruned = prunecontainers(blocks, keep or [])
444 596 blocks = findsections(blocks)
445 597 blocks = inlineliterals(blocks)
@@ -450,33 +602,68 b' def format(text, width, indent=0, keep=N'
450 602 blocks = addmargins(blocks)
451 603 blocks = prunecomments(blocks)
452 604 blocks = findadmonitions(blocks)
453 text = '\n'.join(formatblock(b, width) for b in blocks)
605 return blocks, pruned
606
607 def formatblocks(blocks, width):
608 text = ''.join(formatblock(b, width) for b in blocks)
609 return text
610
611 def format(text, width=80, indent=0, keep=None, style='plain'):
612 """Parse and format the text according to width."""
613 blocks, pruned = parse(text, indent, keep or [])
614 if style == 'html':
615 text = formathtml(blocks)
616 else:
617 text = ''.join(formatblock(b, width) for b in blocks)
454 618 if keep is None:
455 619 return text
456 620 else:
457 621 return text, pruned
458 622
459
460 if __name__ == "__main__":
461 from pprint import pprint
462
463 def debug(func, *args):
464 blocks = func(*args)
465 print "*** after %s:" % func.__name__
466 pprint(blocks)
467 print
468 return blocks
623 def getsections(blocks):
624 '''return a list of (section name, nesting level, blocks) tuples'''
625 nest = ""
626 level = 0
627 secs = []
628 for b in blocks:
629 if b['type'] == 'section':
630 i = b['underline']
631 if i not in nest:
632 nest += i
633 level = nest.index(i) + 1
634 nest = nest[:level]
635 secs.append((b['lines'][0], level, [b]))
636 else:
637 if not secs:
638 # add an initial empty section
639 secs = [('', 0, [])]
640 secs[-1][2].append(b)
641 return secs
469 642
470 text = sys.stdin.read()
471 blocks = debug(findblocks, text)
472 blocks = debug(findliteralblocks, blocks)
473 blocks, pruned = debug(prunecontainers, blocks, sys.argv[1:])
474 blocks = debug(inlineliterals, blocks)
475 blocks = debug(splitparagraphs, blocks)
476 blocks = debug(updatefieldlists, blocks)
477 blocks = debug(updateoptionlists, blocks)
478 blocks = debug(findsections, blocks)
479 blocks = debug(addmargins, blocks)
480 blocks = debug(prunecomments, blocks)
481 blocks = debug(findadmonitions, blocks)
482 print '\n'.join(formatblock(b, 30) for b in blocks)
643 def decorateblocks(blocks, width):
644 '''generate a list of (section name, line text) pairs for search'''
645 lines = []
646 for s in getsections(blocks):
647 section = s[0]
648 text = formatblocks(s[2], width)
649 lines.append([(section, l) for l in text.splitlines(True)])
650 return lines
651
652 def maketable(data, indent=0, header=False):
653 '''Generate an RST table for the given table data'''
654
655 widths = [max(encoding.colwidth(e) for e in c) for c in zip(*data)]
656 indent = ' ' * indent
657 div = indent + ' '.join('=' * w for w in widths) + '\n'
658
659 out = [div]
660 for row in data:
661 l = []
662 for w, v in zip(widths, row):
663 pad = ' ' * (w - encoding.colwidth(v))
664 l.append(v + pad)
665 out.append(indent + ' '.join(l) + "\n")
666 if header and len(data) > 1:
667 out.insert(2, div)
668 out.append(div)
669 return ''.join(out)
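
As an aside, a minimal sketch of how the new maketable helper could be driven (the data values below are illustrative and not part of this changeset; it assumes the minirst module from this series is importable):

    from mercurial import minirst

    data = [['id', 'name'], ['1', 'alpha'], ['2', 'beta']]
    # header=True inserts a divider row after the first row so that
    # findtables() later treats it as a table header
    print minirst.maketable(data, indent=2, header=True)
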
@@ -12,6 +12,7 b''
12 12 #include <fcntl.h>
13 13 #include <stdio.h>
14 14 #include <string.h>
15 #include <errno.h>
15 16
16 17 #ifdef _WIN32
17 18 #include <windows.h>
@@ -288,7 +289,8 b' static PyObject *_listdir(char *path, in'
288 289 #endif
289 290
290 291 if (pathlen >= PATH_MAX) {
291 PyErr_SetString(PyExc_ValueError, "path too long");
292 errno = ENAMETOOLONG;
293 PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
292 294 goto error_value;
293 295 }
294 296 strncpy(fullpath, path, PATH_MAX);
@@ -126,7 +126,7 b' def split(stream):'
126 126
127 127 mimeheaders = ['content-type']
128 128
129 if not hasattr(stream, 'next'):
129 if not util.safehasattr(stream, 'next'):
130 130 # http responses, for example, have readline but not next
131 131 stream = fiter(stream)
132 132
@@ -1619,27 +1619,36 b' def diff(repo, node1=None, node2=None, m'
1619 1619
1620 1620 def difflabel(func, *args, **kw):
1621 1621 '''yields 2-tuples of (output, label) based on the output of func()'''
1622 prefixes = [('diff', 'diff.diffline'),
1623 ('copy', 'diff.extended'),
1624 ('rename', 'diff.extended'),
1625 ('old', 'diff.extended'),
1626 ('new', 'diff.extended'),
1627 ('deleted', 'diff.extended'),
1628 ('---', 'diff.file_a'),
1629 ('+++', 'diff.file_b'),
1630 ('@@', 'diff.hunk'),
1631 ('-', 'diff.deleted'),
1632 ('+', 'diff.inserted')]
1633
1622 headprefixes = [('diff', 'diff.diffline'),
1623 ('copy', 'diff.extended'),
1624 ('rename', 'diff.extended'),
1625 ('old', 'diff.extended'),
1626 ('new', 'diff.extended'),
1627 ('deleted', 'diff.extended'),
1628 ('---', 'diff.file_a'),
1629 ('+++', 'diff.file_b')]
1630 textprefixes = [('@', 'diff.hunk'),
1631 ('-', 'diff.deleted'),
1632 ('+', 'diff.inserted')]
1633 head = False
1634 1634 for chunk in func(*args, **kw):
1635 1635 lines = chunk.split('\n')
1636 1636 for i, line in enumerate(lines):
1637 1637 if i != 0:
1638 1638 yield ('\n', '')
1639 if head:
1640 if line.startswith('@'):
1641 head = False
1642 else:
1643 if line and not line[0] in ' +-@':
1644 head = True
1639 1645 stripline = line
1640 if line and line[0] in '+-':
1646 if not head and line and line[0] in '+-':
1641 1647 # highlight trailing whitespace, but only in changed lines
1642 1648 stripline = line.rstrip()
1649 prefixes = textprefixes
1650 if head:
1651 prefixes = headprefixes
1643 1652 for prefix, label in prefixes:
1644 1653 if stripline.startswith(prefix):
1645 1654 yield (stripline, label)
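
A hedged sketch of how difflabel is typically wrapped around a diff generator (the helper below is illustrative, not part of this changeset; it assumes patch.diff's usual (repo, node1, node2) signature): with the head/body split above, file headers now take the header labels and only hunk bodies are matched against the '+'/'-' prefixes.

    from mercurial import patch

    def writecoloureddiff(ui, repo, node1, node2):
        # difflabel yields (text, label) pairs; header lines get
        # diff.extended/diff.file_* labels, hunk bodies get
        # diff.inserted/diff.deleted
        for output, label in patch.difflabel(patch.diff, repo, node1, node2):
            ui.write(output, label=label)
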
@@ -84,6 +84,21 b' def setflags(f, l, x):'
84 84 # Turn off all +x bits
85 85 os.chmod(f, s & 0666)
86 86
87 def copymode(src, dst, mode=None):
88 '''Copy the file mode from the file at path src to dst.
89 If src doesn't exist, mode is used instead. If mode is None, the
90 umask is used.'''
91 try:
92 st_mode = os.lstat(src).st_mode & 0777
93 except OSError, inst:
94 if inst.errno != errno.ENOENT:
95 raise
96 st_mode = mode
97 if st_mode is None:
98 st_mode = ~umask
99 st_mode &= 0666
100 os.chmod(dst, st_mode)
101
87 102 def checkexec(path):
88 103 """
89 104 Check whether the given path is on a filesystem with UNIX-like exec flags
@@ -241,7 +256,9 b' def findexe(command):'
241 256 for path in os.environ.get('PATH', '').split(os.pathsep):
242 257 executable = findexisting(os.path.join(path, command))
243 258 if executable is not None:
244 return executable
259 st = os.stat(executable)
260 if (st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)):
261 return executable
245 262 return None
246 263
247 264 def setsignalhandler():
@@ -325,3 +342,45 b' def termwidth():'
325 342 except ImportError:
326 343 pass
327 344 return 80
345
346 def makedir(path, notindexed):
347 os.mkdir(path)
348
349 def unlinkpath(f):
350 """unlink and remove the directory if it is empty"""
351 os.unlink(f)
352 # try removing directories that might now be empty
353 try:
354 os.removedirs(os.path.dirname(f))
355 except OSError:
356 pass
357
358 def lookupreg(key, name=None, scope=None):
359 return None
360
361 def hidewindow():
362 """Hide current shell window.
363
364 Used to hide the window opened when starting asynchronous
365 child process under Windows, unneeded on other systems.
366 """
367 pass
368
369 class cachestat(object):
370 def __init__(self, path):
371 self.stat = os.stat(path)
372
373 def cacheable(self):
374 return bool(self.stat.st_ino)
375
376 def __eq__(self, other):
377 try:
378 return self.stat == other.stat
379 except AttributeError:
380 return False
381
382 def __ne__(self, other):
383 return not self == other
384
385 def executablepath():
386 return None # available on Windows only
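
A small sketch of how the new cachestat objects are meant to be compared (illustrative only; it mirrors what filecacheentry does in the scmutil hunk further down):

    from mercurial import util

    def haschanged(path, oldstat):
        # cachestat objects compare equal while the underlying os.stat
        # data is unchanged; on filesystems without usable inode numbers
        # cacheable() is False and callers must assume the file changed
        newstat = util.cachestat(path)
        if not newstat.cacheable():
            return True
        return newstat != oldstat
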
@@ -36,7 +36,7 b' def parse_index2(data, inline):'
36 36 s = struct.calcsize(indexformatng)
37 37 index = []
38 38 cache = None
39 n = off = 0
39 off = 0
40 40
41 41 l = len(data) - s
42 42 append = index.append
@@ -45,7 +45,6 b' def parse_index2(data, inline):'
45 45 while off <= l:
46 46 e = _unpack(indexformatng, data[off:off + s])
47 47 append(e)
48 n += 1
49 48 if e[1] < 0:
50 49 break
51 50 off += e[1] + s
@@ -53,7 +52,6 b' def parse_index2(data, inline):'
53 52 while off <= l:
54 53 e = _unpack(indexformatng, data[off:off + s])
55 54 append(e)
56 n += 1
57 55 off += s
58 56
59 57 if off != len(data):
@@ -11,9 +11,8 b' from mercurial.node import short'
11 11 from mercurial.i18n import _
12 12 import os
13 13
14 def _bundle(repo, bases, heads, node, suffix, compress=True):
14 def _bundle(repo, cg, node, suffix, compress=True):
15 15 """create a bundle with the specified revisions as a backup"""
16 cg = repo.changegroupsubset(bases, heads, 'strip')
17 16 backupdir = repo.join("strip-backup")
18 17 if not os.path.isdir(backupdir):
19 18 os.mkdir(backupdir)
@@ -83,11 +82,9 b' def strip(ui, repo, node, backup="all"):'
83 82 saveheads.add(r)
84 83 saveheads = [cl.node(r) for r in saveheads]
85 84
86 # compute base nodes
87 if saverevs:
88 descendants = set(cl.descendants(*saverevs))
89 saverevs.difference_update(descendants)
90 savebases = [cl.node(r) for r in saverevs]
85 # compute common nodes
86 savecommon = set(cl.node(p) for r in saverevs for p in cl.parentrevs(r)
87 if p not in saverevs and p not in tostrip)
91 88
92 89 bm = repo._bookmarks
93 90 updatebm = []
@@ -99,12 +96,14 b' def strip(ui, repo, node, backup="all"):'
99 96 # create a changegroup for all the branches we need to keep
100 97 backupfile = None
101 98 if backup == "all":
102 backupfile = _bundle(repo, [node], cl.heads(), node, 'backup')
99 allnodes=[cl.node(r) for r in xrange(striprev, len(cl))]
100 cg = repo._changegroup(allnodes, 'strip')
101 backupfile = _bundle(repo, cg, node, 'backup')
103 102 repo.ui.status(_("saved backup bundle to %s\n") % backupfile)
104 if saveheads or savebases:
103 if saveheads or savecommon:
105 104 # do not compress partial bundle if we remove it from disk later
106 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
107 compress=keeppartialbundle)
105 cg = repo.getbundle('strip', common=savecommon, heads=saveheads)
106 chgrpfile = _bundle(repo, cg, node, 'temp', compress=keeppartialbundle)
108 107
109 108 mfst = repo.manifest
110 109
@@ -128,7 +127,7 b' def strip(ui, repo, node, backup="all"):'
128 127 tr.abort()
129 128 raise
130 129
131 if saveheads or savebases:
130 if saveheads or savecommon:
132 131 ui.note(_("adding branch\n"))
133 132 f = open(chgrpfile, "rb")
134 133 gen = changegroup.readbundle(f, chgrpfile)
@@ -226,9 +226,10 b' class revlog(object):'
226 226 self._nodepos = None
227 227
228 228 v = REVLOG_DEFAULT_VERSION
229 if hasattr(opener, 'options'):
230 if 'revlogv1' in opener.options:
231 if 'generaldelta' in opener.options:
229 opts = getattr(opener, 'options', None)
230 if opts is not None:
231 if 'revlogv1' in opts:
232 if 'generaldelta' in opts:
232 233 v |= REVLOGGENERALDELTA
233 234 else:
234 235 v = 0
@@ -945,9 +946,9 b' class revlog(object):'
945 946 e = self._io.packentry(self.index[i], self.node, self.version, i)
946 947 fp.write(e)
947 948
948 # if we don't call rename, the temp file will never replace the
949 # if we don't call close, the temp file will never replace the
949 950 # real index
950 fp.rename()
951 fp.close()
951 952
952 953 tr.replace(self.indexfile, trindex * self._io.size)
953 954 self._chunkclear()
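
The rename()-to-close() switch here follows the atomictempfile API change made later in this series (see the util.py hunk below): close() now publishes the temporary file and discard() throws the writes away. A minimal usage sketch under that assumption:

    from mercurial import util

    def writeatomically(path, data):
        fp = util.atomictempfile(path)
        try:
            fp.write(data)
            fp.close()      # renames the temp file over 'path'
        except:
            fp.discard()    # on error, drop the partial write
            raise
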
@@ -6,7 +6,7 b''
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import re
9 import parser, util, error, discovery, hbisect
9 import parser, util, error, discovery, hbisect, node
10 10 import bookmarks as bookmarksmod
11 11 import match as matchmod
12 12 from i18n import _
@@ -235,15 +235,24 b' def author(repo, subset, x):'
235 235 n = getstring(x, _("author requires a string")).lower()
236 236 return [r for r in subset if n in repo[r].user().lower()]
237 237
238 def bisected(repo, subset, x):
239 """``bisected(string)``
240 Changesets marked in the specified bisect state (good, bad, skip).
238 def bisect(repo, subset, x):
239 """``bisect(string)``
240 Changesets marked in the specified bisect status:
241
242 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
243 - ``goods``, ``bads`` : csets topologically good/bad
244 - ``range`` : csets taking part in the bisection
245 - ``pruned`` : csets that are goods, bads or skipped
246 - ``untested`` : csets whose fate is yet unknown
247 - ``ignored`` : csets ignored due to DAG topology
241 248 """
242 state = getstring(x, _("bisect requires a string")).lower()
243 if state not in ('good', 'bad', 'skip', 'unknown'):
244 raise error.ParseError(_('invalid bisect state'))
245 marked = set(repo.changelog.rev(n) for n in hbisect.load_state(repo)[state])
246 return [r for r in subset if r in marked]
249 status = getstring(x, _("bisect requires a string")).lower()
250 return [r for r in subset if r in hbisect.get(repo, status)]
251
252 # Backward-compatibility
253 # - no help entry so that we do not advertise it any more
254 def bisected(repo, subset, x):
255 return bisect(repo, subset, x)
247 256
248 257 def bookmark(repo, subset, x):
249 258 """``bookmark([name])``
@@ -407,6 +416,12 b' def filelog(repo, subset, x):'
407 416
408 417 return [r for r in subset if r in s]
409 418
419 def first(repo, subset, x):
420 """``first(set, [n])``
421 An alias for limit().
422 """
423 return limit(repo, subset, x)
424
410 425 def follow(repo, subset, x):
411 426 """``follow([file])``
412 427 An alias for ``::.`` (ancestors of the working copy's first parent).
@@ -513,14 +528,16 b' def keyword(repo, subset, x):'
513 528 return l
514 529
515 530 def limit(repo, subset, x):
516 """``limit(set, n)``
517 First n members of set.
531 """``limit(set, [n])``
532 First n members of set, defaulting to 1.
518 533 """
519 534 # i18n: "limit" is a keyword
520 l = getargs(x, 2, 2, _("limit requires two arguments"))
535 l = getargs(x, 1, 2, _("limit requires one or two arguments"))
521 536 try:
522 # i18n: "limit" is a keyword
523 lim = int(getstring(l[1], _("limit requires a number")))
537 lim = 1
538 if len(l) == 2:
539 # i18n: "limit" is a keyword
540 lim = int(getstring(l[1], _("limit requires a number")))
524 541 except (TypeError, ValueError):
525 542 # i18n: "limit" is a keyword
526 543 raise error.ParseError(_("limit expects a number"))
@@ -529,14 +546,16 b' def limit(repo, subset, x):'
529 546 return [r for r in os if r in ss]
530 547
531 548 def last(repo, subset, x):
532 """``last(set, n)``
533 Last n members of set.
549 """``last(set, [n])``
550 Last n members of set, defaulting to 1.
534 551 """
535 552 # i18n: "last" is a keyword
536 l = getargs(x, 2, 2, _("last requires two arguments"))
553 l = getargs(x, 1, 2, _("last requires one or two arguments"))
537 554 try:
538 # i18n: "last" is a keyword
539 lim = int(getstring(l[1], _("last requires a number")))
555 lim = 1
556 if len(l) == 2:
557 # i18n: "last" is a keyword
558 lim = int(getstring(l[1], _("last requires a number")))
540 559 except (TypeError, ValueError):
541 560 # i18n: "last" is a keyword
542 561 raise error.ParseError(_("last expects a number"))
@@ -827,6 +846,7 b' symbols = {'
827 846 "ancestor": ancestor,
828 847 "ancestors": ancestors,
829 848 "author": author,
849 "bisect": bisect,
830 850 "bisected": bisected,
831 851 "bookmark": bookmark,
832 852 "branch": branch,
@@ -838,6 +858,7 b' symbols = {'
838 858 "descendants": descendants,
839 859 "file": hasfile,
840 860 "filelog": filelog,
861 "first": first,
841 862 "follow": follow,
842 863 "grep": grep,
843 864 "head": head,
@@ -951,7 +972,7 b' def optimize(x, small):'
951 972 w = 100 # very slow
952 973 elif f == "ancestor":
953 974 w = 1 * smallbonus
954 elif f in "reverse limit":
975 elif f in "reverse limit first":
955 976 w = 0
956 977 elif f in "sort":
957 978 w = 10 # assume most sorts look at changelog
@@ -1019,11 +1040,87 b' def match(ui, spec):'
1019 1040 tree, pos = parse(spec)
1020 1041 if (pos != len(spec)):
1021 1042 raise error.ParseError(_("invalid token"), pos)
1022 tree = findaliases(ui, tree)
1043 if ui:
1044 tree = findaliases(ui, tree)
1023 1045 weight, tree = optimize(tree, True)
1024 1046 def mfunc(repo, subset):
1025 1047 return getset(repo, subset, tree)
1026 1048 return mfunc
1027 1049
1050 def formatspec(expr, *args):
1051 '''
1052 This is a convenience function for using revsets internally, and
1053 escapes arguments appropriately. Aliases are intentionally ignored
1054 so that intended expression behavior isn't accidentally subverted.
1055
1056 Supported arguments:
1057
1058 %r = revset expression, parenthesized
1059 %d = int(arg), no quoting
1060 %s = string(arg), escaped and single-quoted
1061 %b = arg.branch(), escaped and single-quoted
1062 %n = hex(arg), single-quoted
1063 %% = a literal '%'
1064
1065 Prefixing the type with 'l' specifies a parenthesized list of that type.
1066
1067 >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
1068 '(10 or 11):: and ((this()) or (that()))'
1069 >>> formatspec('%d:: and not %d::', 10, 20)
1070 '10:: and not 20::'
1071 >>> formatspec('keyword(%s)', 'foo\\xe9')
1072 "keyword('foo\\\\xe9')"
1073 >>> b = lambda: 'default'
1074 >>> b.branch = b
1075 >>> formatspec('branch(%b)', b)
1076 "branch('default')"
1077 >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
1078 "root(('a' or 'b' or 'c' or 'd'))"
1079 '''
1080
1081 def quote(s):
1082 return repr(str(s))
1083
1084 def argtype(c, arg):
1085 if c == 'd':
1086 return str(int(arg))
1087 elif c == 's':
1088 return quote(arg)
1089 elif c == 'r':
1090 parse(arg) # make sure syntax errors are confined
1091 return '(%s)' % arg
1092 elif c == 'n':
1093 return quote(node.hex(arg))
1094 elif c == 'b':
1095 return quote(arg.branch())
1096
1097 ret = ''
1098 pos = 0
1099 arg = 0
1100 while pos < len(expr):
1101 c = expr[pos]
1102 if c == '%':
1103 pos += 1
1104 d = expr[pos]
1105 if d == '%':
1106 ret += d
1107 elif d in 'dsnbr':
1108 ret += argtype(d, args[arg])
1109 arg += 1
1110 elif d == 'l':
1111 # a list of some type
1112 pos += 1
1113 d = expr[pos]
1114 lv = ' or '.join(argtype(d, e) for e in args[arg])
1115 ret += '(%s)' % lv
1116 arg += 1
1117 else:
1118 raise util.Abort('unexpected revspec format character %s' % d)
1119 else:
1120 ret += c
1121 pos += 1
1122
1123 return ret
1124
1028 1125 # tell hggettext to extract docstrings from these functions:
1029 1126 i18nfunctions = symbols.values()
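
A short sketch of the new predicates together with formatspec (the expression is illustrative): formatspec quotes user-supplied values so they cannot change the meaning of the expression, and aliases are deliberately not expanded.

    from mercurial import revset

    spec = revset.formatspec('bisect(%s) and first(%r, %d)',
                             'pruned', 'all()', 5)
    # spec == "bisect('pruned') and first((all()), 5)"
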
@@ -324,10 +324,10 b' def walkrepos(path, followsym=False, see'
324 324 def errhandler(err):
325 325 if err.filename == path:
326 326 raise err
327 if followsym and hasattr(os.path, 'samestat'):
327 samestat = getattr(os.path, 'samestat', None)
328 if followsym and samestat is not None:
328 329 def adddir(dirlst, dirname):
329 330 match = False
330 samestat = os.path.samestat
331 331 dirstat = os.stat(dirname)
332 332 for lstdirstat in dirlst:
333 333 if samestat(dirstat, lstdirstat):
@@ -709,3 +709,95 b' def readrequires(opener, supported):'
709 709 raise error.RequirementError(_("unknown repository format: "
710 710 "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
711 711 return requirements
712
713 class filecacheentry(object):
714 def __init__(self, path):
715 self.path = path
716 self.cachestat = filecacheentry.stat(self.path)
717
718 if self.cachestat:
719 self._cacheable = self.cachestat.cacheable()
720 else:
721 # None means we don't know yet
722 self._cacheable = None
723
724 def refresh(self):
725 if self.cacheable():
726 self.cachestat = filecacheentry.stat(self.path)
727
728 def cacheable(self):
729 if self._cacheable is not None:
730 return self._cacheable
731
732 # we don't know yet, assume it is for now
733 return True
734
735 def changed(self):
736 # no point in going further if we can't cache it
737 if not self.cacheable():
738 return True
739
740 newstat = filecacheentry.stat(self.path)
741
742 # we may not know if it's cacheable yet, check again now
743 if newstat and self._cacheable is None:
744 self._cacheable = newstat.cacheable()
745
746 # check again
747 if not self._cacheable:
748 return True
749
750 if self.cachestat != newstat:
751 self.cachestat = newstat
752 return True
753 else:
754 return False
755
756 @staticmethod
757 def stat(path):
758 try:
759 return util.cachestat(path)
760 except OSError, e:
761 if e.errno != errno.ENOENT:
762 raise
763
764 class filecache(object):
765 '''A property like decorator that tracks a file under .hg/ for updates.
766
767 Records stat info when called in _filecache.
768
769 On subsequent calls, compares old stat info with new info, and recreates
770 the object when needed, updating the new stat info in _filecache.
771
772 Mercurial either atomically renames or appends to files under .hg,
773 so to ensure the cache is reliable we need the filesystem to be able
774 to tell us if a file has been replaced. If it can't, we fall back to
775 recreating the object on every call (essentially the same behaviour as
776 propertycache).'''
777 def __init__(self, path, instore=False):
778 self.path = path
779 self.instore = instore
780
781 def __call__(self, func):
782 self.func = func
783 self.name = func.__name__
784 return self
785
786 def __get__(self, obj, type=None):
787 entry = obj._filecache.get(self.name)
788
789 if entry:
790 if entry.changed():
791 entry.obj = self.func(obj)
792 else:
793 path = self.instore and obj.sjoin(self.path) or obj.join(self.path)
794
795 # We stat -before- creating the object so our cache doesn't lie if
796 # a writer modified it between the time we read and stat
797 entry = filecacheentry(path)
798 entry.obj = self.func(obj)
799
800 obj._filecache[self.name] = entry
801
802 setattr(obj, self.name, entry.obj)
803 return entry.obj
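
A hedged sketch of how a repository-like object would use the new decorator (the class and file name are illustrative; the real users are localrepo properties backed by files in .hg/):

    from mercurial import scmutil

    class fakerepo(object):
        def __init__(self, root):
            self._filecache = {}      # filecache records its entries here
            self.root = root

        def join(self, name):         # filecache('x') stats self.join('x')
            return self.root + '/.hg/' + name

        @scmutil.filecache('bookmarks')
        def bookmarks(self):
            # recomputed only when .hg/bookmarks is replaced or appended to
            return open(self.join('bookmarks')).read()
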
@@ -445,7 +445,7 b' def simplemerge(ui, local, base, other, '
445 445 out.write(line)
446 446
447 447 if not opts.get('print'):
448 out.rename()
448 out.close()
449 449
450 450 if m3.conflicts:
451 451 if not opts.get('quiet'):
@@ -164,6 +164,17 b' class sshrepository(wireproto.wirereposi'
164 164
165 165 def _recv(self):
166 166 l = self.pipei.readline()
167 if l == '\n':
168 err = []
169 while True:
170 line = self.pipee.readline()
171 if line == '-\n':
172 break
173 err.extend([line])
174 if len(err) > 0:
175 # strip the trailing newline added to the last line server-side
176 err[-1] = err[-1][:-1]
177 self._abort(error.OutOfBandError(*err))
167 178 self.readerr()
168 179 try:
169 180 l = int(l)
@@ -82,6 +82,12 b' class sshserver(object):'
82 82 def sendpusherror(self, rsp):
83 83 self.sendresponse(rsp.res)
84 84
85 def sendooberror(self, rsp):
86 self.ui.ferr.write('%s\n-\n' % rsp.message)
87 self.ui.ferr.flush()
88 self.fout.write('\n')
89 self.fout.flush()
90
85 91 def serve_forever(self):
86 92 try:
87 93 while self.serve_one():
@@ -96,6 +102,7 b' class sshserver(object):'
96 102 wireproto.streamres: sendstream,
97 103 wireproto.pushres: sendpushresponse,
98 104 wireproto.pusherr: sendpusherror,
105 wireproto.ooberror: sendooberror,
99 106 }
100 107
101 108 def serve_one(self):
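
A speculative sketch of how a wire command could use the new out-of-band channel (the command and config names are invented for illustration): returning an ooberror makes sendooberror() write the text to the client's stderr pipe followed by a lone '-' line, which _recv() on the ssh client turns back into an OutOfBandError abort.

    from mercurial import wireproto

    def mycommand(repo, proto):
        # refuse the request with a message shown verbatim to the client
        if not repo.ui.configbool('server', 'allow-mycommand'):
            return wireproto.ooberror('mycommand is disabled on this server')
        return 'ok\n'

    wireproto.commands['mycommand'] = (mycommand, '')
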
@@ -22,6 +22,8 b' except ImportError:'
22 22
23 23 def ssl_wrap_socket(sock, key_file, cert_file,
24 24 cert_reqs=CERT_REQUIRED, ca_certs=None):
25 if not util.safehasattr(socket, 'ssl'):
26 raise util.Abort(_('Python SSL support not found'))
25 27 if ca_certs:
26 28 raise util.Abort(_(
27 29 'certificate checking requires Python 2.6'))
@@ -31,15 +31,11 b' class httprangereader(object):'
31 31 try:
32 32 f = self.opener.open(req)
33 33 data = f.read()
34 if hasattr(f, 'getcode'):
35 # python 2.6+
36 code = f.getcode()
37 elif hasattr(f, 'code'):
38 # undocumented attribute, seems to be set in 2.4 and 2.5
39 code = f.code
40 else:
41 # Don't know how to check, hope for the best.
42 code = 206
34 # Python 2.6+ defines a getcode() function, and 2.4 and
35 # 2.5 appear to always have an undocumented code attribute
36 # set. If we can't read either of those, fall back to 206
37 # and hope for the best.
38 code = getattr(f, 'getcode', lambda : getattr(f, 'code', 206))()
43 39 except urllib2.HTTPError, inst:
44 40 num = inst.code == 404 and errno.ENOENT or None
45 41 raise IOError(num, inst)
@@ -125,6 +121,7 b' class statichttprepository(localrepo.loc'
125 121 self.encodepats = None
126 122 self.decodepats = None
127 123 self.capabilities.difference_update(["pushkey"])
124 self._filecache = {}
128 125
129 126 def url(self):
130 127 return self._url
@@ -345,7 +345,7 b' class fncache(object):'
345 345 fp = self.opener('fncache', mode='wb', atomictemp=True)
346 346 for p in self.entries:
347 347 fp.write(encodedir(p) + '\n')
348 fp.rename()
348 fp.close()
349 349 self._dirty = False
350 350
351 351 def add(self, fn):
@@ -50,15 +50,7 b' def state(ctx, ui):'
50 50 if err.errno != errno.ENOENT:
51 51 raise
52 52
53 state = {}
54 for path, src in p[''].items():
55 kind = 'hg'
56 if src.startswith('['):
57 if ']' not in src:
58 raise util.Abort(_('missing ] in subrepo source'))
59 kind, src = src.split(']', 1)
60 kind = kind[1:]
61
53 def remap(src):
62 54 for pattern, repl in p.items('subpaths'):
63 55 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
64 56 # does a string decode.
@@ -72,7 +64,34 b' def state(ctx, ui):'
72 64 except re.error, e:
73 65 raise util.Abort(_("bad subrepository pattern in %s: %s")
74 66 % (p.source('subpaths', pattern), e))
67 return src
75 68
69 state = {}
70 for path, src in p[''].items():
71 kind = 'hg'
72 if src.startswith('['):
73 if ']' not in src:
74 raise util.Abort(_('missing ] in subrepo source'))
75 kind, src = src.split(']', 1)
76 kind = kind[1:]
77 src = src.lstrip() # strip any extra whitespace after ']'
78
79 if not util.url(src).isabs():
80 parent = _abssource(ctx._repo, abort=False)
81 if parent:
82 parent = util.url(parent)
83 parent.path = posixpath.join(parent.path or '', src)
84 parent.path = posixpath.normpath(parent.path)
85 joined = str(parent)
86 # Remap the full joined path and use it if it changes,
87 # else remap the original source.
88 remapped = remap(joined)
89 if remapped == joined:
90 src = remap(src)
91 else:
92 src = remapped
93
94 src = remap(src)
76 95 state[path] = (src.strip(), rev.get(path, ''), kind)
77 96
78 97 return state
@@ -181,23 +200,23 b' def _updateprompt(ui, sub, dirty, local,'
181 200 def reporelpath(repo):
182 201 """return path to this (sub)repo as seen from outermost repo"""
183 202 parent = repo
184 while hasattr(parent, '_subparent'):
203 while util.safehasattr(parent, '_subparent'):
185 204 parent = parent._subparent
186 205 p = parent.root.rstrip(os.sep)
187 206 return repo.root[len(p) + 1:]
188 207
189 208 def subrelpath(sub):
190 209 """return path to this subrepo as seen from outermost repo"""
191 if hasattr(sub, '_relpath'):
210 if util.safehasattr(sub, '_relpath'):
192 211 return sub._relpath
193 if not hasattr(sub, '_repo'):
212 if not util.safehasattr(sub, '_repo'):
194 213 return sub._path
195 214 return reporelpath(sub._repo)
196 215
197 216 def _abssource(repo, push=False, abort=True):
198 217 """return pull/push path of repo - either based on parent repo .hgsub info
199 218 or on the top repo config. Abort or return None if no source found."""
200 if hasattr(repo, '_subparent'):
219 if util.safehasattr(repo, '_subparent'):
201 220 source = util.url(repo._subsource)
202 221 if source.isabs():
203 222 return str(source)
@@ -209,7 +228,7 b' def _abssource(repo, push=False, abort=T'
209 228 parent.path = posixpath.normpath(parent.path)
210 229 return str(parent)
211 230 else: # recursion reached top repo
212 if hasattr(repo, '_subtoppath'):
231 if util.safehasattr(repo, '_subtoppath'):
213 232 return repo._subtoppath
214 233 if push and repo.ui.config('paths', 'default-push'):
215 234 return repo.ui.config('paths', 'default-push')
@@ -530,9 +549,13 b' class svnsubrepo(abstractsubrepo):'
530 549 self._state = state
531 550 self._ctx = ctx
532 551 self._ui = ctx._repo.ui
552 self._exe = util.findexe('svn')
553 if not self._exe:
554 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
555 % self._path)
533 556
534 557 def _svncommand(self, commands, filename='', failok=False):
535 cmd = ['svn']
558 cmd = [self._exe]
536 559 extrakw = {}
537 560 if not self._ui.interactive():
538 561 # Making stdin be a pipe should prevent svn from behaving
@@ -810,9 +833,10 b' class gitsubrepo(abstractsubrepo):'
810 833 for b in branches:
811 834 if b.startswith('refs/remotes/'):
812 835 continue
813 remote = self._gitcommand(['config', 'branch.%s.remote' % b])
836 bname = b.split('/', 2)[2]
837 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
814 838 if remote:
815 ref = self._gitcommand(['config', 'branch.%s.merge' % b])
839 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
816 840 tracking['refs/remotes/%s/%s' %
817 841 (remote, ref.split('/', 2)[2])] = b
818 842 return tracking
@@ -287,6 +287,6 b' def _writetagcache(ui, repo, heads, tagf'
287 287 cachefile.write("%s %s\n" % (hex(node), name))
288 288
289 289 try:
290 cachefile.rename()
290 cachefile.close()
291 291 except (OSError, IOError):
292 292 pass
@@ -7,6 +7,7 b''
7 7
8 8 import cgi, re, os, time, urllib
9 9 import encoding, node, util
10 import hbisect
10 11
11 12 def addbreaks(text):
12 13 """:addbreaks: Any text. Add an XHTML "<br />" tag before the end of
@@ -188,13 +189,13 b' def json(obj):'
188 189 return '"%s"' % jsonescape(u)
189 190 elif isinstance(obj, unicode):
190 191 return '"%s"' % jsonescape(obj)
191 elif hasattr(obj, 'keys'):
192 elif util.safehasattr(obj, 'keys'):
192 193 out = []
193 194 for k, v in obj.iteritems():
194 195 s = '%s: %s' % (json(k), json(v))
195 196 out.append(s)
196 197 return '{' + ', '.join(out) + '}'
197 elif hasattr(obj, '__iter__'):
198 elif util.safehasattr(obj, '__iter__'):
198 199 out = []
199 200 for i in obj:
200 201 out.append(json(i))
@@ -268,6 +269,14 b' def short(text):'
268 269 """
269 270 return text[:12]
270 271
272 def shortbisect(text):
273 """:shortbisect: Any text. Treats `text` as a bisection status, and
274 returns a single-character representing the status (G: good, B: bad,
275 S: skipped, U: untested, I: ignored). Returns single space if `text`
276 is not a valid bisection status.
277 """
278 return hbisect.shortlabel(text) or ' '
279
271 280 def shortdate(text):
272 281 """:shortdate: Date. Returns a date like "2006-09-18"."""
273 282 return util.shortdate(text)
@@ -279,7 +288,7 b' def stringify(thing):'
279 288 """:stringify: Any type. Turns the value into text by converting values into
280 289 text and concatenating them.
281 290 """
282 if hasattr(thing, '__iter__') and not isinstance(thing, str):
291 if util.safehasattr(thing, '__iter__') and not isinstance(thing, str):
283 292 return "".join([stringify(t) for t in thing if t is not None])
284 293 return str(thing)
285 294
@@ -347,6 +356,7 b' filters = {'
347 356 "rfc3339date": rfc3339date,
348 357 "rfc822date": rfc822date,
349 358 "short": short,
359 "shortbisect": shortbisect,
350 360 "shortdate": shortdate,
351 361 "stringescape": stringescape,
352 362 "stringify": stringify,
@@ -7,6 +7,7 b''
7 7
8 8 from node import hex
9 9 import patch, util, error
10 import hbisect
10 11
11 12 def showlist(name, values, plural=None, **args):
12 13 '''expand set of values.
@@ -145,6 +146,10 b' def showauthor(repo, ctx, templ, **args)'
145 146 """:author: String. The unmodified author of the changeset."""
146 147 return ctx.user()
147 148
149 def showbisect(repo, ctx, templ, **args):
150 """:bisect: String. The changeset bisection status."""
151 return hbisect.label(repo, ctx.node())
152
148 153 def showbranch(**args):
149 154 """:branch: String. The name of the branch on which the changeset was
150 155 committed.
@@ -288,6 +293,7 b' def showtags(**args):'
288 293 # revcache - a cache dictionary for the current revision
289 294 keywords = {
290 295 'author': showauthor,
296 'bisect': showbisect,
291 297 'branch': showbranch,
292 298 'branches': showbranches,
293 299 'bookmarks': showbookmarks,
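
Taken together, the new {bisect} keyword and the shortbisect filter let log templates show bisection state, as the map-cmdline changes further down illustrate. A small sketch of the filter's behaviour, assuming the hbisect.shortlabel helper this series introduces:

    from mercurial import templatefilters

    # the filter reduces a bisection label to one character and falls
    # back to a space for anything it does not recognise
    print templatefilters.shortbisect('good')      # G
    print templatefilters.shortbisect('untested')  # U
    print templatefilters.shortbisect('')          # (single space)
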
@@ -135,7 +135,7 b' def runsymbol(context, mapping, key):'
135 135 v = mapping.get(key)
136 136 if v is None:
137 137 v = context._defaults.get(key, '')
138 if hasattr(v, '__call__'):
138 if util.safehasattr(v, '__call__'):
139 139 return v(**mapping)
140 140 return v
141 141
@@ -172,14 +172,14 b' def runmap(context, mapping, data):'
172 172 def buildfunc(exp, context):
173 173 n = getsymbol(exp[1])
174 174 args = [compileexp(x, context) for x in getlist(exp[2])]
175 if n in funcs:
176 f = funcs[n]
177 return (f, args)
175 178 if n in context._filters:
176 179 if len(args) != 1:
177 180 raise error.ParseError(_("filter %s expects one argument") % n)
178 181 f = context._filters[n]
179 182 return (runfilter, (args[0][0], args[0][1], f))
180 elif n in context._funcs:
181 f = context._funcs[n]
182 return (f, args)
183 183
184 184 methods = {
185 185 "string": lambda e, c: (runstring, e[1]),
@@ -191,6 +191,9 b' methods = {'
191 191 "func": buildfunc,
192 192 }
193 193
194 funcs = {
195 }
196
194 197 # template engine
195 198
196 199 path = ['templates', '../templates']
@@ -200,14 +203,14 b' def _flatten(thing):'
200 203 '''yield a single stream from a possibly nested set of iterators'''
201 204 if isinstance(thing, str):
202 205 yield thing
203 elif not hasattr(thing, '__iter__'):
206 elif not util.safehasattr(thing, '__iter__'):
204 207 if thing is not None:
205 208 yield str(thing)
206 209 else:
207 210 for i in thing:
208 211 if isinstance(i, str):
209 212 yield i
210 elif not hasattr(i, '__iter__'):
213 elif not util.safehasattr(i, '__iter__'):
211 214 if i is not None:
212 215 yield str(i)
213 216 elif i is not None:
@@ -338,7 +341,7 b' def templatepath(name=None):'
338 341 normpaths = []
339 342
340 343 # executable version (py2exe) doesn't support __file__
341 if hasattr(sys, 'frozen'):
344 if util.mainfrozen():
342 345 module = sys.executable
343 346 else:
344 347 module = __file__
@@ -1,7 +1,7 b''
1 changeset = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n'
2 changeset_quiet = '{rev}:{node|short}\n'
3 changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n'
4 changeset_debug = 'changeset: {rev}:{node}\n{branches}{bookmarks}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n'
1 changeset = 'changeset: {rev}:{node|short}\nbisect: {bisect}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n'
2 changeset_quiet = '{bisect|shortbisect} {rev}:{node|short}\n'
3 changeset_verbose = 'changeset: {rev}:{node|short}\nbisect: {bisect}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n'
4 changeset_debug = 'changeset: {rev}:{node}\nbisect: {bisect}\n{branches}{bookmarks}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n'
5 5 start_files = 'files: '
6 6 file = ' {file}'
7 7 end_files = '\n'
@@ -9,7 +9,7 b''
9 9 </div>
10 10
11 11 <div id="powered-by">
12 <p><a href="{logourl}" title="Mercurial"><img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a></p>
12 <p><a href="{logourl}" title="Mercurial"><img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a></p>
13 13 </div>
14 14
15 15 <div id="corner-top-left"></div>
@@ -26,7 +26,7 b''
26 26 </div>
27 27
28 28 <div id="powered-by">
29 <p><a href="{logourl}" title="Mercurial"><img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a></p>
29 <p><a href="{logourl}" title="Mercurial"><img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a></p>
30 30 </div>
31 31
32 32 <div id="corner-top-left"></div>
@@ -11,7 +11,7 b''
11 11 <div class="menu">
12 12 <div class="logo">
13 13 <a href="{logourl}">
14 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
14 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
15 15 </div>
16 16 <ul>
17 17 <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -11,7 +11,7 b''
11 11 <div class="menu">
12 12 <div class="logo">
13 13 <a href="{logourl}">
14 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
14 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
15 15 </div>
16 16 <ul>
17 17 <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -6,7 +6,7 b''
6 6 <div class="menu">
7 7 <div class="logo">
8 8 <a href="{logourl}">
9 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
9 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
10 10 </div>
11 11 <ul>
12 12 <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -7,7 +7,7 b''
7 7 <div class="menu">
8 8 <div class="logo">
9 9 <a href="{logourl}">
10 <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
10 <img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial" /></a>
11 11 </div>
12 12 <ul>
13 13 <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -7,7 +7,7 b''
7 7 <div class="menu">
8 8 <div class="logo">
9 9 <a href="{logourl}">
10 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
10 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
11 11 </div>
12 12 <ul>
13 13 <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -7,7 +7,7 b''
7 7 <div class="menu">
8 8 <div class="logo">
9 9 <a href="{logourl}">
10 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
10 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
11 11 </div>
12 12 <ul>
13 13 <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -11,7 +11,7 b''
11 11 <div class="menu">
12 12 <div class="logo">
13 13 <a href="{logourl}">
14 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
14 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
15 15 </div>
16 16 <ul>
17 17 <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -7,7 +7,7 b''
7 7 <div class="menu">
8 8 <div class="logo">
9 9 <a href="{logourl}">
10 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
10 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
11 11 </div>
12 12 <ul>
13 13 <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -12,7 +12,7 b''
12 12 <div class="menu">
13 13 <div class="logo">
14 14 <a href="{logourl}">
15 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
15 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
16 16 </div>
17 17 <ul>
18 18 <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -11,7 +11,7 b''
11 11 <div class="menu">
12 12 <div class="logo">
13 13 <a href="{logourl}">
14 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
14 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
15 15 </div>
16 16 <ul>
17 17 <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -11,7 +11,7 b''
11 11 <div class="menu">
12 12 <div class="logo">
13 13 <a href="{logourl}">
14 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
14 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
15 15 </div>
16 16 <ul>
17 17 <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -6,7 +6,7 b''
6 6 <div class="container">
7 7 <div class="menu">
8 8 <a href="{logourl}">
9 <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
9 <img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial" /></a>
10 10 </div>
11 11 <div class="main">
12 12 <h2>Mercurial Repositories</h2>
@@ -7,7 +7,7 b''
7 7 <div class="menu">
8 8 <div class="logo">
9 9 <a href="{logourl}">
10 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
10 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
11 11 </div>
12 12 <ul>
13 13 <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -7,7 +7,7 b''
7 7 <div class="menu">
8 8 <div class="logo">
9 9 <a href="{logourl}">
10 <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
10 <img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a>
11 11 </div>
12 12 <ul>
13 13 <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -11,7 +11,7 b''
11 11 <div class="menu">
12 12 <div class="logo">
13 13 <a href="{logourl}">
14 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
14 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
15 15 </div>
16 16 <ul>
17 17 <li class="active">log</li>
@@ -11,7 +11,7 b''
11 11 <div class="menu">
12 12 <div class="logo">
13 13 <a href="{logourl}">
14 <img src="{staticurl}hglogo.png" alt="mercurial" /></a>
14 <img src="{staticurl}{logoimg}" alt="mercurial" /></a>
15 15 </div>
16 16 <ul>
17 17 <li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -2,7 +2,7 b''
2 2 {motd}
3 3 <div class="logo">
4 4 <a href="{logourl}">
5 <img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
5 <img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a>
6 6 </div>
7 7
8 8 </body>
@@ -46,7 +46,7 b' class ui(object):'
46 46 def copy(self):
47 47 return self.__class__(self)
48 48
49 def _is_trusted(self, fp, f):
49 def _trusted(self, fp, f):
50 50 st = util.fstat(fp)
51 51 if util.isowner(st):
52 52 return True
@@ -75,7 +75,7 b' class ui(object):'
75 75 raise
76 76
77 77 cfg = config.config()
78 trusted = sections or trust or self._is_trusted(fp, filename)
78 trusted = sections or trust or self._trusted(fp, filename)
79 79
80 80 try:
81 81 cfg.read(filename, fp, sections=sections, remap=remap)
@@ -155,7 +155,19 b' class ui(object):'
155 155 return self._data(untrusted).source(section, name) or 'none'
156 156
157 157 def config(self, section, name, default=None, untrusted=False):
158 value = self._data(untrusted).get(section, name, default)
158 if isinstance(name, list):
159 alternates = name
160 else:
161 alternates = [name]
162
163 for n in alternates:
164 value = self._data(untrusted).get(section, n, None)
165 if value is not None:
166 name = n
167 break
168 else:
169 value = default
170
159 171 if self.debugflag and not untrusted and self._reportuntrusted:
160 172 uvalue = self._ucfg.get(section, name)
161 173 if uvalue is not None and uvalue != value:
@@ -164,12 +176,14 b' class ui(object):'
164 176 return value
165 177
166 178 def configpath(self, section, name, default=None, untrusted=False):
167 'get a path config item, expanded relative to config file'
179 'get a path config item, expanded relative to repo root or config file'
168 180 v = self.config(section, name, default, untrusted)
181 if v is None:
182 return None
169 183 if not os.path.isabs(v) or "://" not in v:
170 184 src = self.configsource(section, name, untrusted)
171 185 if ':' in src:
172 base = os.path.dirname(src.rsplit(':'))
186 base = os.path.dirname(src.rsplit(':')[0])
173 187 v = os.path.join(base, os.path.expanduser(v))
174 188 return v
175 189
@@ -135,7 +135,7 b' def _gen_sendfile(orgsend):'
135 135 orgsend(self, data)
136 136 return _sendfile
137 137
138 has_https = hasattr(urllib2, 'HTTPSHandler')
138 has_https = util.safehasattr(urllib2, 'HTTPSHandler')
139 139 if has_https:
140 140 try:
141 141 _create_connection = socket.create_connection
@@ -192,8 +192,8 b' class httpconnection(keepalive.HTTPConne'
192 192 # general transaction handler to support different ways to handle
193 193 # HTTPS proxying before and after Python 2.6.3.
194 194 def _generic_start_transaction(handler, h, req):
195 if hasattr(req, '_tunnel_host') and req._tunnel_host:
196 tunnel_host = req._tunnel_host
195 tunnel_host = getattr(req, '_tunnel_host', None)
196 if tunnel_host:
197 197 if tunnel_host[:7] not in ['http://', 'https:/']:
198 198 tunnel_host = 'https://' + tunnel_host
199 199 new_tunnel = True
@@ -19,6 +19,58 b' import errno, re, shutil, sys, tempfile,'
19 19 import os, time, calendar, textwrap, signal
20 20 import imp, socket, urllib
21 21
22 if os.name == 'nt':
23 import windows as platform
24 else:
25 import posix as platform
26
27 cachestat = platform.cachestat
28 checkexec = platform.checkexec
29 checklink = platform.checklink
30 copymode = platform.copymode
31 executablepath = platform.executablepath
32 expandglobs = platform.expandglobs
33 explainexit = platform.explainexit
34 findexe = platform.findexe
35 gethgcmd = platform.gethgcmd
36 getuser = platform.getuser
37 groupmembers = platform.groupmembers
38 groupname = platform.groupname
39 hidewindow = platform.hidewindow
40 isexec = platform.isexec
41 isowner = platform.isowner
42 localpath = platform.localpath
43 lookupreg = platform.lookupreg
44 makedir = platform.makedir
45 nlinks = platform.nlinks
46 normpath = platform.normpath
47 nulldev = platform.nulldev
48 openhardlinks = platform.openhardlinks
49 oslink = platform.oslink
50 parsepatchoutput = platform.parsepatchoutput
51 pconvert = platform.pconvert
52 popen = platform.popen
53 posixfile = platform.posixfile
54 quotecommand = platform.quotecommand
55 realpath = platform.realpath
56 rename = platform.rename
57 samedevice = platform.samedevice
58 samefile = platform.samefile
59 samestat = platform.samestat
60 setbinary = platform.setbinary
61 setflags = platform.setflags
62 setsignalhandler = platform.setsignalhandler
63 shellquote = platform.shellquote
64 spawndetached = platform.spawndetached
65 sshargs = platform.sshargs
66 statfiles = platform.statfiles
67 termwidth = platform.termwidth
68 testpid = platform.testpid
69 umask = platform.umask
70 unlink = platform.unlink
71 unlinkpath = platform.unlinkpath
72 username = platform.username
73
22 74 # Python compatibility
23 75
24 76 def sha1(s):
@@ -307,8 +359,8 b' def mainfrozen():'
307 359 The code supports py2exe (most common, Windows only) and tools/freeze
308 360 (portable, not much used).
309 361 """
310 return (hasattr(sys, "frozen") or # new py2exe
311 hasattr(sys, "importers") or # old py2exe
362 return (safehasattr(sys, "frozen") or # new py2exe
363 safehasattr(sys, "importers") or # old py2exe
312 364 imp.is_frozen("__main__")) # tools/freeze
313 365
314 366 def hgexecutable():
@@ -318,10 +370,13 b' def hgexecutable():'
318 370 """
319 371 if _hgexecutable is None:
320 372 hg = os.environ.get('HG')
373 mainmod = sys.modules['__main__']
321 374 if hg:
322 375 _sethgexecutable(hg)
323 376 elif mainfrozen():
324 377 _sethgexecutable(sys.executable)
378 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
379 _sethgexecutable(mainmod.__file__)
325 380 else:
326 381 exe = findexe('hg') or os.path.basename(sys.argv[0])
327 382 _sethgexecutable(exe)
@@ -394,18 +449,6 b' def checksignature(func):'
394 449
395 450 return check
396 451
397 def makedir(path, notindexed):
398 os.mkdir(path)
399
400 def unlinkpath(f):
401 """unlink and remove the directory if it is empty"""
402 os.unlink(f)
403 # try removing directories that might now be empty
404 try:
405 os.removedirs(os.path.dirname(f))
406 except OSError:
407 pass
408
409 452 def copyfile(src, dest):
410 453 "copy a file, preserving mode and atime/mtime"
411 454 if os.path.islink(src):
@@ -491,22 +534,10 b' def checkwinfilename(path):'
491 534 return _("filename ends with '%s', which is not allowed "
492 535 "on Windows") % t
493 536
494 def lookupreg(key, name=None, scope=None):
495 return None
496
497 def hidewindow():
498 """Hide current shell window.
499
500 Used to hide the window opened when starting asynchronous
501 child process under Windows, unneeded on other systems.
502 """
503 pass
504
505 537 if os.name == 'nt':
506 538 checkosfilename = checkwinfilename
507 from windows import *
508 539 else:
509 from posix import *
540 checkosfilename = platform.checkosfilename
510 541
511 542 def makelock(info, pathname):
512 543 try:
@@ -690,16 +721,7 b' def mktempcopy(name, emptyok=False, crea'
690 721 # Temporary files are created with mode 0600, which is usually not
691 722 # what we want. If the original file already exists, just copy
692 723 # its mode. Otherwise, manually obey umask.
693 try:
694 st_mode = os.lstat(name).st_mode & 0777
695 except OSError, inst:
696 if inst.errno != errno.ENOENT:
697 raise
698 st_mode = createmode
699 if st_mode is None:
700 st_mode = ~umask
701 st_mode &= 0666
702 os.chmod(temp, st_mode)
724 copymode(name, temp, createmode)
703 725 if emptyok:
704 726 return temp
705 727 try:
@@ -726,11 +748,10 b' class atomictempfile(object):'
726 748 '''writeable file object that atomically updates a file
727 749
728 750 All writes will go to a temporary copy of the original file. Call
729 rename() when you are done writing, and atomictempfile will rename
730 the temporary copy to the original name, making the changes visible.
731
732 Unlike other file-like objects, close() discards your writes by
733 simply deleting the temporary file.
751 close() when you are done writing, and atomictempfile will rename
752 the temporary copy to the original name, making the changes
753 visible. If the object is destroyed without being closed, all your
754 writes are discarded.
734 755 '''
735 756 def __init__(self, name, mode='w+b', createmode=None):
736 757 self.__name = name # permanent name
@@ -742,12 +763,12 b' class atomictempfile(object):'
742 763 self.write = self._fp.write
743 764 self.fileno = self._fp.fileno
744 765
745 def rename(self):
766 def close(self):
746 767 if not self._fp.closed:
747 768 self._fp.close()
748 769 rename(self._tempname, localpath(self.__name))
749 770
750 def close(self):
771 def discard(self):
751 772 if not self._fp.closed:
752 773 try:
753 774 os.unlink(self._tempname)
@@ -756,24 +777,25 b' class atomictempfile(object):'
756 777 self._fp.close()
757 778
758 779 def __del__(self):
759 if hasattr(self, '_fp'): # constructor actually did something
760 self.close()
780 if safehasattr(self, '_fp'): # constructor actually did something
781 self.discard()
761 782
762 783 def makedirs(name, mode=None):
763 784 """recursive directory creation with parent mode inheritance"""
764 parent = os.path.abspath(os.path.dirname(name))
765 785 try:
766 786 os.mkdir(name)
767 if mode is not None:
768 os.chmod(name, mode)
769 return
770 787 except OSError, err:
771 788 if err.errno == errno.EEXIST:
772 789 return
773 if not name or parent == name or err.errno != errno.ENOENT:
790 if err.errno != errno.ENOENT or not name:
791 raise
792 parent = os.path.dirname(os.path.abspath(name))
793 if parent == name:
774 794 raise
775 makedirs(parent, mode)
776 makedirs(name, mode)
795 makedirs(parent, mode)
796 os.mkdir(name)
797 if mode is not None:
798 os.chmod(name, mode)
777 799
778 800 def readfile(path):
779 801 fp = open(path, 'rb')
@@ -1303,8 +1325,9 b' def rundetached(args, condfn):'
1303 1325 def handler(signum, frame):
1304 1326 terminated.add(os.wait())
1305 1327 prevhandler = None
1306 if hasattr(signal, 'SIGCHLD'):
1307 prevhandler = signal.signal(signal.SIGCHLD, handler)
1328 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1329 if SIGCHLD is not None:
1330 prevhandler = signal.signal(SIGCHLD, handler)
1308 1331 try:
1309 1332 pid = spawndetached(args)
1310 1333 while not condfn():
@@ -1648,8 +1671,10 b' class url(object):'
1648 1671 self.user, self.passwd = user, passwd
1649 1672 if not self.user:
1650 1673 return (s, None)
1651 # authinfo[1] is passed to urllib2 password manager, and its URIs
1652 # must not contain credentials.
1674 # authinfo[1] is passed to urllib2 password manager, and its
1675 # URIs must not contain credentials. The host is passed in the
1676 # URIs list because Python < 2.4.3 uses only that to search for
1677 # a password.
1653 1678 return (s, (None, (s, self.host),
1654 1679 self.user, self.passwd or ''))
1655 1680
@@ -9,6 +9,22 b' from i18n import _'
9 9 import osutil
10 10 import errno, msvcrt, os, re, sys
11 11
12 import win32
13 executablepath = win32.executablepath
14 getuser = win32.getuser
15 hidewindow = win32.hidewindow
16 lookupreg = win32.lookupreg
17 makedir = win32.makedir
18 nlinks = win32.nlinks
19 oslink = win32.oslink
20 samedevice = win32.samedevice
21 samefile = win32.samefile
22 setsignalhandler = win32.setsignalhandler
23 spawndetached = win32.spawndetached
24 termwidth = win32.termwidth
25 testpid = win32.testpid
26 unlink = win32.unlink
27
12 28 nulldev = 'NUL:'
13 29 umask = 002
14 30
@@ -90,6 +106,9 b' def sshargs(sshcmd, host, user, port):'
90 106 def setflags(f, l, x):
91 107 pass
92 108
109 def copymode(src, dst, mode=None):
110 pass
111
93 112 def checkexec(path):
94 113 return False
95 114
@@ -99,8 +118,9 b' def checklink(path):'
99 118 def setbinary(fd):
100 119 # When run without console, pipes may expose invalid
101 120 # fileno(), usually set to -1.
102 if hasattr(fd, 'fileno') and fd.fileno() >= 0:
103 msvcrt.setmode(fd.fileno(), os.O_BINARY)
121 fno = getattr(fd, 'fileno', None)
122 if fno is not None and fno() >= 0:
123 msvcrt.setmode(fno(), os.O_BINARY)
104 124
105 125 def pconvert(path):
106 126 return '/'.join(path.split(os.sep))
@@ -281,6 +301,14 b' def groupmembers(name):'
281 301 # Don't support groups on Windows for now
282 302 raise KeyError()
283 303
284 from win32 import *
304 def isexec(f):
305 return False
306
307 class cachestat(object):
308 def __init__(self, path):
309 pass
310
311 def cacheable(self):
312 return False
285 313
286 314 expandglobs = True
@@ -10,14 +10,13 b' from i18n import _'
10 10 from node import bin, hex
11 11 import changegroup as changegroupmod
12 12 import repo, error, encoding, util, store
13 import pushkey as pushkeymod
14 13
15 14 # abstract batching support
16 15
17 16 class future(object):
18 17 '''placeholder for a value to be set later'''
19 18 def set(self, value):
20 if hasattr(self, 'value'):
19 if util.safehasattr(self, 'value'):
21 20 raise error.RepoError("future is already set")
22 21 self.value = value
23 22
@@ -58,8 +57,9 b' class remotebatch(batcher):'
58 57 req, rsp = [], []
59 58 for name, args, opts, resref in self.calls:
60 59 mtd = getattr(self.remote, name)
61 if hasattr(mtd, 'batchable'):
62 batchable = getattr(mtd, 'batchable')(mtd.im_self, *args, **opts)
60 batchablefn = getattr(mtd, 'batchable', None)
61 if batchablefn is not None:
62 batchable = batchablefn(mtd.im_self, *args, **opts)
63 63 encargsorres, encresref = batchable.next()
64 64 if encresref:
65 65 req.append((name, encargsorres,))
@@ -334,6 +334,10 b' class pusherr(object):'
334 334 def __init__(self, res):
335 335 self.res = res
336 336
337 class ooberror(object):
338 def __init__(self, message):
339 self.message = message
340
337 341 def dispatch(repo, proto, command):
338 342 func, spec = commands[command]
339 343 args = proto.getargs(spec)
@@ -375,6 +379,8 b' def batch(repo, proto, cmds, others):'
375 379 result = func(repo, proto, *[data[k] for k in keys])
376 380 else:
377 381 result = func(repo, proto)
382 if isinstance(result, ooberror):
383 return result
378 384 res.append(escapearg(result))
379 385 return ';'.join(res)
380 386
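
Annotation: the wireproto hunk above introduces ooberror so a batched command can abort the whole batch with an out-of-band message instead of having its result escaped and joined with the others. A hypothetical, self-contained dispatcher showing that control flow (runbatch and the callables are made up for illustration; argument escaping is omitted):

    class ooberror(object):
        def __init__(self, message):
            self.message = message

    def runbatch(commands):
        # commands: callables returning either a string result or an ooberror
        results = []
        for cmd in commands:
            result = cmd()
            if isinstance(result, ooberror):
                return result      # stop immediately; callers see the raw message
            results.append(result)
        return ';'.join(results)
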
@@ -454,7 +460,7 b' def hello(repo, proto):'
454 460 return "capabilities: %s\n" % (capabilities(repo, proto))
455 461
456 462 def listkeys(repo, proto, namespace):
457 d = pushkeymod.list(repo, encoding.tolocal(namespace)).items()
463 d = repo.listkeys(encoding.tolocal(namespace)).items()
458 464 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
459 465 for k, v in d])
460 466 return t
@@ -484,9 +490,8 b' def pushkey(repo, proto, namespace, key,'
484 490 else:
485 491 new = encoding.tolocal(new) # normal path
486 492
487 r = pushkeymod.push(repo,
488 encoding.tolocal(namespace), encoding.tolocal(key),
489 encoding.tolocal(old), new)
493 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
494 encoding.tolocal(old), new)
490 495 return '%s\n' % int(r)
491 496
492 497 def _allowstream(ui):
@@ -5,7 +5,7 b''
5 5 # 'python setup.py --help' for more options
6 6
7 7 import sys, platform
8 if not hasattr(sys, 'version_info') or sys.version_info < (2, 4, 0, 'final'):
8 if getattr(sys, 'version_info', (0, 0, 0)) < (2, 4, 0, 'final'):
9 9 raise SystemExit("Mercurial requires Python 2.4 or later.")
10 10
11 11 if sys.version_info[0] >= 3:
@@ -342,7 +342,8 b" cmdclass = {'build_mo': hgbuildmo,"
342 342
343 343 packages = ['mercurial', 'mercurial.hgweb',
344 344 'mercurial.httpclient', 'mercurial.httpclient.tests',
345 'hgext', 'hgext.convert', 'hgext.highlight', 'hgext.zeroconf']
345 'hgext', 'hgext.convert', 'hgext.highlight', 'hgext.zeroconf',
346 'hgext.largefiles']
346 347
347 348 pymodules = []
348 349
@@ -101,6 +101,16 b' def has_inotify():'
101 101 def has_fifo():
102 102 return hasattr(os, "mkfifo")
103 103
104 def has_cacheable_fs():
105 from mercurial import util
106
107 fd, path = tempfile.mkstemp(prefix=tempprefix)
108 os.close(fd)
109 try:
110 return util.cachestat(path).cacheable()
111 finally:
112 os.remove(path)
113
104 114 def has_lsprof():
105 115 try:
106 116 import _lsprof
@@ -200,6 +210,7 b' checks = {'
200 210 "baz": (has_baz, "GNU Arch baz client"),
201 211 "bzr": (has_bzr, "Canonical's Bazaar client"),
202 212 "bzr114": (has_bzr114, "Canonical's Bazaar client >= 1.14"),
213 "cacheable": (has_cacheable_fs, "cacheable filesystem"),
203 214 "cvs": (has_cvs, "cvs client/server"),
204 215 "darcs": (has_darcs, "darcs client"),
205 216 "docutils": (has_docutils, "Docutils text processing library"),
@@ -340,10 +340,7 b' def terminate(proc):'
340 340 """Terminate subprocess (with fallback for Python versions < 2.6)"""
341 341 vlog('# Terminating process %d' % proc.pid)
342 342 try:
343 if hasattr(proc, 'terminate'):
344 proc.terminate()
345 else:
346 os.kill(proc.pid, signal.SIGTERM)
343 getattr(proc, 'terminate', lambda : os.kill(proc.pid, signal.SIGTERM))()
347 344 except OSError:
348 345 pass
349 346
@@ -524,6 +521,26 b' def escapef(m):'
524 521 def stringescape(s):
525 522 return escapesub(escapef, s)
526 523
524 def transformtst(lines):
525 inblock = False
526 for l in lines:
527 if inblock:
528 if l.startswith(' $ ') or not l.startswith(' '):
529 inblock = False
530 yield ' > EOF\n'
531 yield l
532 else:
533 yield ' > ' + l[2:]
534 else:
535 if l.startswith(' >>> '):
536 inblock = True
537 yield ' $ %s -m heredoctest <<EOF\n' % PYTHON
538 yield ' > ' + l[2:]
539 else:
540 yield l
541 if inblock:
542 yield ' > EOF\n'
543
527 544 def tsttest(test, wd, options, replacements):
528 545 t = open(test)
529 546 out = []
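
Annotation: the transformtst generator added above rewrites inline '>>>' Python blocks in .t tests into a heredoc invocation of the heredoctest helper, and the same transformation is applied to the reference output further down. A worked example of the rewrite (hedged: it assumes the standard two-space .t indentation and shows PYTHON as a plain 'python'):

    # lines as written in a .t test file
    before = [
        '  >>> x = 1 + 1\n',
        '  >>> print x\n',
        '  2\n',
        '  $ true\n',
    ]
    # what transformtst yields for them
    after = [
        '  $ python -m heredoctest <<EOF\n',
        '  > >>> x = 1 + 1\n',
        '  > >>> print x\n',
        '  > 2\n',
        '  > EOF\n',
        '  $ true\n',
    ]
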
@@ -533,7 +550,7 b' def tsttest(test, wd, options, replaceme'
533 550 pos = prepos = -1
534 551 after = {}
535 552 expected = {}
536 for n, l in enumerate(t):
553 for n, l in enumerate(transformtst(t)):
537 554 if not l.endswith('\n'):
538 555 l += '\n'
539 556 if l.startswith(' $ '): # commands
@@ -726,6 +743,7 b' def runone(options, test):'
726 743 rename(testpath + ".err", testpath)
727 744 else:
728 745 rename(testpath + ".err", testpath + ".out")
746 result('p', test)
729 747 return
730 748 result('f', (test, msg))
731 749
@@ -835,7 +853,7 b' def runone(options, test):'
835 853 refout = None # to match "out is None"
836 854 elif os.path.exists(ref):
837 855 f = open(ref, "r")
838 refout = splitnewlines(f.read())
856 refout = list(transformtst(splitnewlines(f.read())))
839 857 f.close()
840 858 else:
841 859 refout = []
@@ -1,6 +1,5 b''
1 1 try:
2 2 import coverage
3 if hasattr(coverage, 'process_startup'):
4 coverage.process_startup()
3 getattr(coverage, 'process_startup', lambda: None)()
5 4 except ImportError:
6 5 pass
@@ -121,7 +121,6 b' Extension disabled for lack of a hook'
121 121 updating the branch cache
122 122 checking for updated bookmarks
123 123 repository tip rolled back to revision 0 (undo push)
124 working directory now based on revision 0
125 124 0:6675d58eff77
126 125
127 126
@@ -179,7 +178,6 b' Extension disabled for lack of acl.sourc'
179 178 updating the branch cache
180 179 checking for updated bookmarks
181 180 repository tip rolled back to revision 0 (undo push)
182 working directory now based on revision 0
183 181 0:6675d58eff77
184 182
185 183
@@ -234,20 +232,20 b' No [acl.allow]/[acl.deny]'
234 232 files: 3/3 chunks (100.00%)
235 233 added 3 changesets with 3 changes to 3 files
236 234 calling hook pretxnchangegroup.acl: hgext.acl.hook
235 acl: checking access for user "fred"
237 236 acl: acl.allow.branches not enabled
238 237 acl: acl.deny.branches not enabled
239 238 acl: acl.allow not enabled
240 239 acl: acl.deny not enabled
241 240 acl: branch access granted: "ef1ea85a6374" on branch "default"
242 acl: allowing changeset ef1ea85a6374
241 acl: path access granted: "ef1ea85a6374"
243 242 acl: branch access granted: "f9cafe1212c8" on branch "default"
244 acl: allowing changeset f9cafe1212c8
243 acl: path access granted: "f9cafe1212c8"
245 244 acl: branch access granted: "911600dab2ae" on branch "default"
246 acl: allowing changeset 911600dab2ae
245 acl: path access granted: "911600dab2ae"
247 246 updating the branch cache
248 247 checking for updated bookmarks
249 248 repository tip rolled back to revision 0 (undo push)
250 working directory now based on revision 0
251 249 0:6675d58eff77
252 250
253 251
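
Annotation: the acl transcripts in this file reflect new hook wording: the hook now announces which user is being checked, per-changeset "allowing changeset X" lines become "path access granted", and denials name the user, the offending path and the changeset. A rough sketch of the new denial wording (illustrative only, not the hook's actual code):

    def acldenial(user, path, node, denied=False):
        verb = 'denied' if denied else 'not allowed'
        return ('acl: user "%s" %s on "%s" (changeset "%s")'
                % (user, verb, path, node))

    # acldenial('fred', 'foo/file.txt', 'ef1ea85a6374')
    # -> 'acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")'
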
@@ -302,16 +300,16 b' Empty [acl.allow]'
302 300 files: 3/3 chunks (100.00%)
303 301 added 3 changesets with 3 changes to 3 files
304 302 calling hook pretxnchangegroup.acl: hgext.acl.hook
303 acl: checking access for user "fred"
305 304 acl: acl.allow.branches not enabled
306 305 acl: acl.deny.branches not enabled
307 306 acl: acl.allow enabled, 0 entries for user fred
308 307 acl: acl.deny not enabled
309 308 acl: branch access granted: "ef1ea85a6374" on branch "default"
310 acl: user fred not allowed on foo/file.txt
311 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
309 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
312 310 transaction abort!
313 311 rollback completed
314 abort: acl: access denied for changeset ef1ea85a6374
312 abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
315 313 no rollback information available
316 314 0:6675d58eff77
317 315
@@ -367,20 +365,20 b' fred is allowed inside foo/'
367 365 files: 3/3 chunks (100.00%)
368 366 added 3 changesets with 3 changes to 3 files
369 367 calling hook pretxnchangegroup.acl: hgext.acl.hook
368 acl: checking access for user "fred"
370 369 acl: acl.allow.branches not enabled
371 370 acl: acl.deny.branches not enabled
372 371 acl: acl.allow enabled, 1 entries for user fred
373 372 acl: acl.deny not enabled
374 373 acl: branch access granted: "ef1ea85a6374" on branch "default"
375 acl: allowing changeset ef1ea85a6374
374 acl: path access granted: "ef1ea85a6374"
376 375 acl: branch access granted: "f9cafe1212c8" on branch "default"
377 acl: allowing changeset f9cafe1212c8
376 acl: path access granted: "f9cafe1212c8"
378 377 acl: branch access granted: "911600dab2ae" on branch "default"
379 acl: user fred not allowed on quux/file.py
380 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
378 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
381 379 transaction abort!
382 380 rollback completed
383 abort: acl: access denied for changeset 911600dab2ae
381 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
384 382 no rollback information available
385 383 0:6675d58eff77
386 384
@@ -437,16 +435,16 b' Empty [acl.deny]'
437 435 files: 3/3 chunks (100.00%)
438 436 added 3 changesets with 3 changes to 3 files
439 437 calling hook pretxnchangegroup.acl: hgext.acl.hook
438 acl: checking access for user "barney"
440 439 acl: acl.allow.branches not enabled
441 440 acl: acl.deny.branches not enabled
442 441 acl: acl.allow enabled, 0 entries for user barney
443 442 acl: acl.deny enabled, 0 entries for user barney
444 443 acl: branch access granted: "ef1ea85a6374" on branch "default"
445 acl: user barney not allowed on foo/file.txt
446 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
444 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
447 445 transaction abort!
448 446 rollback completed
449 abort: acl: access denied for changeset ef1ea85a6374
447 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
450 448 no rollback information available
451 449 0:6675d58eff77
452 450
@@ -504,20 +502,20 b' fred is allowed inside foo/, but not foo'
504 502 files: 3/3 chunks (100.00%)
505 503 added 3 changesets with 3 changes to 3 files
506 504 calling hook pretxnchangegroup.acl: hgext.acl.hook
505 acl: checking access for user "fred"
507 506 acl: acl.allow.branches not enabled
508 507 acl: acl.deny.branches not enabled
509 508 acl: acl.allow enabled, 1 entries for user fred
510 509 acl: acl.deny enabled, 1 entries for user fred
511 510 acl: branch access granted: "ef1ea85a6374" on branch "default"
512 acl: allowing changeset ef1ea85a6374
511 acl: path access granted: "ef1ea85a6374"
513 512 acl: branch access granted: "f9cafe1212c8" on branch "default"
514 acl: allowing changeset f9cafe1212c8
513 acl: path access granted: "f9cafe1212c8"
515 514 acl: branch access granted: "911600dab2ae" on branch "default"
516 acl: user fred not allowed on quux/file.py
517 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
515 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
518 516 transaction abort!
519 517 rollback completed
520 abort: acl: access denied for changeset 911600dab2ae
518 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
521 519 no rollback information available
522 520 0:6675d58eff77
523 521
@@ -576,18 +574,18 b' fred is allowed inside foo/, but not foo'
576 574 files: 3/3 chunks (100.00%)
577 575 added 3 changesets with 3 changes to 3 files
578 576 calling hook pretxnchangegroup.acl: hgext.acl.hook
577 acl: checking access for user "fred"
579 578 acl: acl.allow.branches not enabled
580 579 acl: acl.deny.branches not enabled
581 580 acl: acl.allow enabled, 1 entries for user fred
582 581 acl: acl.deny enabled, 2 entries for user fred
583 582 acl: branch access granted: "ef1ea85a6374" on branch "default"
584 acl: allowing changeset ef1ea85a6374
583 acl: path access granted: "ef1ea85a6374"
585 584 acl: branch access granted: "f9cafe1212c8" on branch "default"
586 acl: user fred denied on foo/Bar/file.txt
587 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
585 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
588 586 transaction abort!
589 587 rollback completed
590 abort: acl: access denied for changeset f9cafe1212c8
588 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
591 589 no rollback information available
592 590 0:6675d58eff77
593 591
@@ -645,16 +643,16 b' fred is allowed inside foo/, but not foo'
645 643 files: 3/3 chunks (100.00%)
646 644 added 3 changesets with 3 changes to 3 files
647 645 calling hook pretxnchangegroup.acl: hgext.acl.hook
646 acl: checking access for user "barney"
648 647 acl: acl.allow.branches not enabled
649 648 acl: acl.deny.branches not enabled
650 649 acl: acl.allow enabled, 0 entries for user barney
651 650 acl: acl.deny enabled, 0 entries for user barney
652 651 acl: branch access granted: "ef1ea85a6374" on branch "default"
653 acl: user barney not allowed on foo/file.txt
654 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
652 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
655 653 transaction abort!
656 654 rollback completed
657 abort: acl: access denied for changeset ef1ea85a6374
655 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
658 656 no rollback information available
659 657 0:6675d58eff77
660 658
@@ -716,20 +714,20 b' barney is allowed everywhere'
716 714 files: 3/3 chunks (100.00%)
717 715 added 3 changesets with 3 changes to 3 files
718 716 calling hook pretxnchangegroup.acl: hgext.acl.hook
717 acl: checking access for user "barney"
719 718 acl: acl.allow.branches not enabled
720 719 acl: acl.deny.branches not enabled
721 720 acl: acl.allow enabled, 1 entries for user barney
722 721 acl: acl.deny enabled, 0 entries for user barney
723 722 acl: branch access granted: "ef1ea85a6374" on branch "default"
724 acl: allowing changeset ef1ea85a6374
723 acl: path access granted: "ef1ea85a6374"
725 724 acl: branch access granted: "f9cafe1212c8" on branch "default"
726 acl: allowing changeset f9cafe1212c8
725 acl: path access granted: "f9cafe1212c8"
727 726 acl: branch access granted: "911600dab2ae" on branch "default"
728 acl: allowing changeset 911600dab2ae
727 acl: path access granted: "911600dab2ae"
729 728 updating the branch cache
730 729 checking for updated bookmarks
731 730 repository tip rolled back to revision 0 (undo push)
732 working directory now based on revision 0
733 731 0:6675d58eff77
734 732
735 733
@@ -791,20 +789,20 b' wilma can change files with a .txt exten'
791 789 files: 3/3 chunks (100.00%)
792 790 added 3 changesets with 3 changes to 3 files
793 791 calling hook pretxnchangegroup.acl: hgext.acl.hook
792 acl: checking access for user "wilma"
794 793 acl: acl.allow.branches not enabled
795 794 acl: acl.deny.branches not enabled
796 795 acl: acl.allow enabled, 1 entries for user wilma
797 796 acl: acl.deny enabled, 0 entries for user wilma
798 797 acl: branch access granted: "ef1ea85a6374" on branch "default"
799 acl: allowing changeset ef1ea85a6374
798 acl: path access granted: "ef1ea85a6374"
800 799 acl: branch access granted: "f9cafe1212c8" on branch "default"
801 acl: allowing changeset f9cafe1212c8
800 acl: path access granted: "f9cafe1212c8"
802 801 acl: branch access granted: "911600dab2ae" on branch "default"
803 acl: user wilma not allowed on quux/file.py
804 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
802 error: pretxnchangegroup.acl hook failed: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
805 803 transaction abort!
806 804 rollback completed
807 abort: acl: access denied for changeset 911600dab2ae
805 abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
808 806 no rollback information available
809 807 0:6675d58eff77
810 808
@@ -869,6 +867,7 b' file specified by acl.config does not ex'
869 867 files: 3/3 chunks (100.00%)
870 868 added 3 changesets with 3 changes to 3 files
871 869 calling hook pretxnchangegroup.acl: hgext.acl.hook
870 acl: checking access for user "barney"
872 871 error: pretxnchangegroup.acl hook raised an exception: [Errno 2] No such file or directory: '../acl.config'
873 872 transaction abort!
874 873 rollback completed
@@ -941,20 +940,20 b' betty is allowed inside foo/ by a acl.co'
941 940 files: 3/3 chunks (100.00%)
942 941 added 3 changesets with 3 changes to 3 files
943 942 calling hook pretxnchangegroup.acl: hgext.acl.hook
943 acl: checking access for user "betty"
944 944 acl: acl.allow.branches not enabled
945 945 acl: acl.deny.branches not enabled
946 946 acl: acl.allow enabled, 1 entries for user betty
947 947 acl: acl.deny enabled, 0 entries for user betty
948 948 acl: branch access granted: "ef1ea85a6374" on branch "default"
949 acl: allowing changeset ef1ea85a6374
949 acl: path access granted: "ef1ea85a6374"
950 950 acl: branch access granted: "f9cafe1212c8" on branch "default"
951 acl: allowing changeset f9cafe1212c8
951 acl: path access granted: "f9cafe1212c8"
952 952 acl: branch access granted: "911600dab2ae" on branch "default"
953 acl: user betty not allowed on quux/file.py
954 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
953 error: pretxnchangegroup.acl hook failed: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
955 954 transaction abort!
956 955 rollback completed
957 abort: acl: access denied for changeset 911600dab2ae
956 abort: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
958 957 no rollback information available
959 958 0:6675d58eff77
960 959
@@ -1025,20 +1024,20 b' acl.config can set only [acl.allow]/[acl'
1025 1024 files: 3/3 chunks (100.00%)
1026 1025 added 3 changesets with 3 changes to 3 files
1027 1026 calling hook pretxnchangegroup.acl: hgext.acl.hook
1027 acl: checking access for user "barney"
1028 1028 acl: acl.allow.branches not enabled
1029 1029 acl: acl.deny.branches not enabled
1030 1030 acl: acl.allow enabled, 1 entries for user barney
1031 1031 acl: acl.deny enabled, 0 entries for user barney
1032 1032 acl: branch access granted: "ef1ea85a6374" on branch "default"
1033 acl: allowing changeset ef1ea85a6374
1033 acl: path access granted: "ef1ea85a6374"
1034 1034 acl: branch access granted: "f9cafe1212c8" on branch "default"
1035 acl: allowing changeset f9cafe1212c8
1035 acl: path access granted: "f9cafe1212c8"
1036 1036 acl: branch access granted: "911600dab2ae" on branch "default"
1037 acl: allowing changeset 911600dab2ae
1037 acl: path access granted: "911600dab2ae"
1038 1038 updating the branch cache
1039 1039 checking for updated bookmarks
1040 1040 repository tip rolled back to revision 0 (undo push)
1041 working directory now based on revision 0
1042 1041 0:6675d58eff77
1043 1042
1044 1043
@@ -1101,20 +1100,20 b' fred is always allowed'
1101 1100 files: 3/3 chunks (100.00%)
1102 1101 added 3 changesets with 3 changes to 3 files
1103 1102 calling hook pretxnchangegroup.acl: hgext.acl.hook
1103 acl: checking access for user "fred"
1104 1104 acl: acl.allow.branches not enabled
1105 1105 acl: acl.deny.branches not enabled
1106 1106 acl: acl.allow enabled, 1 entries for user fred
1107 1107 acl: acl.deny not enabled
1108 1108 acl: branch access granted: "ef1ea85a6374" on branch "default"
1109 acl: allowing changeset ef1ea85a6374
1109 acl: path access granted: "ef1ea85a6374"
1110 1110 acl: branch access granted: "f9cafe1212c8" on branch "default"
1111 acl: allowing changeset f9cafe1212c8
1111 acl: path access granted: "f9cafe1212c8"
1112 1112 acl: branch access granted: "911600dab2ae" on branch "default"
1113 acl: allowing changeset 911600dab2ae
1113 acl: path access granted: "911600dab2ae"
1114 1114 updating the branch cache
1115 1115 checking for updated bookmarks
1116 1116 repository tip rolled back to revision 0 (undo push)
1117 working directory now based on revision 0
1118 1117 0:6675d58eff77
1119 1118
1120 1119
@@ -1173,18 +1172,18 b' no one is allowed inside foo/Bar/'
1173 1172 files: 3/3 chunks (100.00%)
1174 1173 added 3 changesets with 3 changes to 3 files
1175 1174 calling hook pretxnchangegroup.acl: hgext.acl.hook
1175 acl: checking access for user "fred"
1176 1176 acl: acl.allow.branches not enabled
1177 1177 acl: acl.deny.branches not enabled
1178 1178 acl: acl.allow enabled, 1 entries for user fred
1179 1179 acl: acl.deny enabled, 1 entries for user fred
1180 1180 acl: branch access granted: "ef1ea85a6374" on branch "default"
1181 acl: allowing changeset ef1ea85a6374
1181 acl: path access granted: "ef1ea85a6374"
1182 1182 acl: branch access granted: "f9cafe1212c8" on branch "default"
1183 acl: user fred denied on foo/Bar/file.txt
1184 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
1183 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1185 1184 transaction abort!
1186 1185 rollback completed
1187 abort: acl: access denied for changeset f9cafe1212c8
1186 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1188 1187 no rollback information available
1189 1188 0:6675d58eff77
1190 1189
@@ -1247,21 +1246,21 b' OS-level groups'
1247 1246 files: 3/3 chunks (100.00%)
1248 1247 added 3 changesets with 3 changes to 3 files
1249 1248 calling hook pretxnchangegroup.acl: hgext.acl.hook
1249 acl: checking access for user "fred"
1250 1250 acl: acl.allow.branches not enabled
1251 1251 acl: acl.deny.branches not enabled
1252 1252 acl: "group1" not defined in [acl.groups]
1253 1253 acl: acl.allow enabled, 1 entries for user fred
1254 1254 acl: acl.deny not enabled
1255 1255 acl: branch access granted: "ef1ea85a6374" on branch "default"
1256 acl: allowing changeset ef1ea85a6374
1256 acl: path access granted: "ef1ea85a6374"
1257 1257 acl: branch access granted: "f9cafe1212c8" on branch "default"
1258 acl: allowing changeset f9cafe1212c8
1258 acl: path access granted: "f9cafe1212c8"
1259 1259 acl: branch access granted: "911600dab2ae" on branch "default"
1260 acl: allowing changeset 911600dab2ae
1260 acl: path access granted: "911600dab2ae"
1261 1261 updating the branch cache
1262 1262 checking for updated bookmarks
1263 1263 repository tip rolled back to revision 0 (undo push)
1264 working directory now based on revision 0
1265 1264 0:6675d58eff77
1266 1265
1267 1266
@@ -1320,6 +1319,7 b' OS-level groups'
1320 1319 files: 3/3 chunks (100.00%)
1321 1320 added 3 changesets with 3 changes to 3 files
1322 1321 calling hook pretxnchangegroup.acl: hgext.acl.hook
1322 acl: checking access for user "fred"
1323 1323 acl: acl.allow.branches not enabled
1324 1324 acl: acl.deny.branches not enabled
1325 1325 acl: "group1" not defined in [acl.groups]
@@ -1327,13 +1327,12 b' OS-level groups'
1327 1327 acl: "group1" not defined in [acl.groups]
1328 1328 acl: acl.deny enabled, 1 entries for user fred
1329 1329 acl: branch access granted: "ef1ea85a6374" on branch "default"
1330 acl: allowing changeset ef1ea85a6374
1330 acl: path access granted: "ef1ea85a6374"
1331 1331 acl: branch access granted: "f9cafe1212c8" on branch "default"
1332 acl: user fred denied on foo/Bar/file.txt
1333 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
1332 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1334 1333 transaction abort!
1335 1334 rollback completed
1336 abort: acl: access denied for changeset f9cafe1212c8
1335 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1337 1336 no rollback information available
1338 1337 0:6675d58eff77
1339 1338
@@ -1441,22 +1440,22 b' No branch acls specified'
1441 1440 files: 4/4 chunks (100.00%)
1442 1441 added 4 changesets with 4 changes to 4 files (+1 heads)
1443 1442 calling hook pretxnchangegroup.acl: hgext.acl.hook
1443 acl: checking access for user "astro"
1444 1444 acl: acl.allow.branches not enabled
1445 1445 acl: acl.deny.branches not enabled
1446 1446 acl: acl.allow not enabled
1447 1447 acl: acl.deny not enabled
1448 1448 acl: branch access granted: "ef1ea85a6374" on branch "default"
1449 acl: allowing changeset ef1ea85a6374
1449 acl: path access granted: "ef1ea85a6374"
1450 1450 acl: branch access granted: "f9cafe1212c8" on branch "default"
1451 acl: allowing changeset f9cafe1212c8
1451 acl: path access granted: "f9cafe1212c8"
1452 1452 acl: branch access granted: "911600dab2ae" on branch "default"
1453 acl: allowing changeset 911600dab2ae
1453 acl: path access granted: "911600dab2ae"
1454 1454 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1455 acl: allowing changeset e8fc755d4d82
1455 acl: path access granted: "e8fc755d4d82"
1456 1456 updating the branch cache
1457 1457 checking for updated bookmarks
1458 1458 repository tip rolled back to revision 2 (undo push)
1459 working directory now based on revision 2
1460 1459 2:fb35475503ef
1461 1460
1462 1461
@@ -1521,16 +1520,17 b' Branch acl deny test'
1521 1520 files: 4/4 chunks (100.00%)
1522 1521 added 4 changesets with 4 changes to 4 files (+1 heads)
1523 1522 calling hook pretxnchangegroup.acl: hgext.acl.hook
1523 acl: checking access for user "astro"
1524 1524 acl: acl.allow.branches not enabled
1525 1525 acl: acl.deny.branches enabled, 1 entries for user astro
1526 1526 acl: acl.allow not enabled
1527 1527 acl: acl.deny not enabled
1528 1528 acl: branch access granted: "ef1ea85a6374" on branch "default"
1529 acl: allowing changeset ef1ea85a6374
1529 acl: path access granted: "ef1ea85a6374"
1530 1530 acl: branch access granted: "f9cafe1212c8" on branch "default"
1531 acl: allowing changeset f9cafe1212c8
1531 acl: path access granted: "f9cafe1212c8"
1532 1532 acl: branch access granted: "911600dab2ae" on branch "default"
1533 acl: allowing changeset 911600dab2ae
1533 acl: path access granted: "911600dab2ae"
1534 1534 error: pretxnchangegroup.acl hook failed: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1535 1535 transaction abort!
1536 1536 rollback completed
@@ -1598,6 +1598,7 b' Branch acl empty allow test'
1598 1598 files: 4/4 chunks (100.00%)
1599 1599 added 4 changesets with 4 changes to 4 files (+1 heads)
1600 1600 calling hook pretxnchangegroup.acl: hgext.acl.hook
1601 acl: checking access for user "astro"
1601 1602 acl: acl.allow.branches enabled, 0 entries for user astro
1602 1603 acl: acl.deny.branches not enabled
1603 1604 acl: acl.allow not enabled
@@ -1671,6 +1672,7 b' Branch acl allow other'
1671 1672 files: 4/4 chunks (100.00%)
1672 1673 added 4 changesets with 4 changes to 4 files (+1 heads)
1673 1674 calling hook pretxnchangegroup.acl: hgext.acl.hook
1675 acl: checking access for user "astro"
1674 1676 acl: acl.allow.branches enabled, 0 entries for user astro
1675 1677 acl: acl.deny.branches not enabled
1676 1678 acl: acl.allow not enabled
@@ -1738,22 +1740,22 b' Branch acl allow other'
1738 1740 files: 4/4 chunks (100.00%)
1739 1741 added 4 changesets with 4 changes to 4 files (+1 heads)
1740 1742 calling hook pretxnchangegroup.acl: hgext.acl.hook
1743 acl: checking access for user "george"
1741 1744 acl: acl.allow.branches enabled, 1 entries for user george
1742 1745 acl: acl.deny.branches not enabled
1743 1746 acl: acl.allow not enabled
1744 1747 acl: acl.deny not enabled
1745 1748 acl: branch access granted: "ef1ea85a6374" on branch "default"
1746 acl: allowing changeset ef1ea85a6374
1749 acl: path access granted: "ef1ea85a6374"
1747 1750 acl: branch access granted: "f9cafe1212c8" on branch "default"
1748 acl: allowing changeset f9cafe1212c8
1751 acl: path access granted: "f9cafe1212c8"
1749 1752 acl: branch access granted: "911600dab2ae" on branch "default"
1750 acl: allowing changeset 911600dab2ae
1753 acl: path access granted: "911600dab2ae"
1751 1754 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1752 acl: allowing changeset e8fc755d4d82
1755 acl: path access granted: "e8fc755d4d82"
1753 1756 updating the branch cache
1754 1757 checking for updated bookmarks
1755 1758 repository tip rolled back to revision 2 (undo push)
1756 working directory now based on revision 2
1757 1759 2:fb35475503ef
1758 1760
1759 1761
@@ -1823,22 +1825,22 b' push foobar into the remote'
1823 1825 files: 4/4 chunks (100.00%)
1824 1826 added 4 changesets with 4 changes to 4 files (+1 heads)
1825 1827 calling hook pretxnchangegroup.acl: hgext.acl.hook
1828 acl: checking access for user "george"
1826 1829 acl: acl.allow.branches enabled, 1 entries for user george
1827 1830 acl: acl.deny.branches not enabled
1828 1831 acl: acl.allow not enabled
1829 1832 acl: acl.deny not enabled
1830 1833 acl: branch access granted: "ef1ea85a6374" on branch "default"
1831 acl: allowing changeset ef1ea85a6374
1834 acl: path access granted: "ef1ea85a6374"
1832 1835 acl: branch access granted: "f9cafe1212c8" on branch "default"
1833 acl: allowing changeset f9cafe1212c8
1836 acl: path access granted: "f9cafe1212c8"
1834 1837 acl: branch access granted: "911600dab2ae" on branch "default"
1835 acl: allowing changeset 911600dab2ae
1838 acl: path access granted: "911600dab2ae"
1836 1839 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1837 acl: allowing changeset e8fc755d4d82
1840 acl: path access granted: "e8fc755d4d82"
1838 1841 updating the branch cache
1839 1842 checking for updated bookmarks
1840 1843 repository tip rolled back to revision 2 (undo push)
1841 working directory now based on revision 2
1842 1844 2:fb35475503ef
1843 1845
1844 1846 Branch acl conflicting deny
@@ -1907,6 +1909,7 b' Branch acl conflicting deny'
1907 1909 files: 4/4 chunks (100.00%)
1908 1910 added 4 changesets with 4 changes to 4 files (+1 heads)
1909 1911 calling hook pretxnchangegroup.acl: hgext.acl.hook
1912 acl: checking access for user "george"
1910 1913 acl: acl.allow.branches not enabled
1911 1914 acl: acl.deny.branches enabled, 1 entries for user george
1912 1915 acl: acl.allow not enabled
@@ -4,6 +4,8 b''
4 4 > graphlog=
5 5 >
6 6 > [alias]
7 > # should clobber ci but not commit (issue2993)
8 > ci = version
7 9 > myinit = init
8 10 > cleanstatus = status -c
9 11 > unknown = bargle
@@ -113,7 +115,7 b' no usage'
113 115 no rollback information available
114 116
115 117 $ echo foo > foo
116 $ hg ci -Amfoo
118 $ hg commit -Amfoo
117 119 adding foo
118 120
119 121
@@ -195,7 +197,7 b' simple shell aliases'
195 197 $ hg echo2 foo
196 198
197 199 $ echo bar > bar
198 $ hg ci -qA -m bar
200 $ hg commit -qA -m bar
199 201 $ hg count .
200 202 1
201 203 $ hg count 'branch(default)'
@@ -251,7 +253,7 b' shell aliases with global options'
251 253 $ hg --cwd .. count 'branch(default)'
252 254 2
253 255 $ hg echo --cwd ..
254 --cwd ..
256
255 257
256 258
257 259 repo specific shell aliases
@@ -305,7 +307,7 b' invalid arguments'
305 307
306 308 $ hg rt foo
307 309 hg rt: invalid arguments
308 hg rt
310 hg rt
309 311
310 312 alias for: hg root
311 313
@@ -12,22 +12,21 b' def test1_simple():'
12 12 assert basename in glob.glob('.foo-*')
13 13
14 14 file.write('argh\n')
15 file.rename()
15 file.close()
16 16
17 17 assert os.path.isfile('foo')
18 18 assert basename not in glob.glob('.foo-*')
19 19 print 'OK'
20 20
21 # close() removes the temp file but does not make the write
22 # permanent -- essentially discards your work (WTF?!)
23 def test2_close():
21 # discard() removes the temp file without making the write permanent
22 def test2_discard():
24 23 if os.path.exists('foo'):
25 24 os.remove('foo')
26 25 file = atomictempfile('foo')
27 26 (dir, basename) = os.path.split(file._tempname)
28 27
29 28 file.write('yo\n')
30 file.close()
29 file.discard()
31 30
32 31 assert not os.path.isfile('foo')
33 32 assert basename not in os.listdir('.')
@@ -45,5 +44,5 b' def test3_oops():'
45 44
46 45 if __name__ == '__main__':
47 46 test1_simple()
48 test2_close()
47 test2_discard()
49 48 test3_oops()
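
Annotation: this test tracks the atomictempfile API change: rename() is gone, close() now makes the write permanent, and discard() (also called from __del__, per the first hunk in this section) throws the temporary file away. A usage sketch under the new contract (assumes a Mercurial checkout of this era is importable; Python 2 API):

    from mercurial.util import atomictempfile

    f = atomictempfile('settings.conf')
    f.write('answer = 42\n')
    f.close()        # atomically renames the temp file over settings.conf

    g = atomictempfile('settings.conf')
    g.write('scratch\n')
    g.discard()      # removes the temp file; settings.conf keeps its old contents
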
@@ -182,7 +182,7 b' with --merge'
182 182 backout of merge should fail
183 183
184 184 $ hg backout 4
185 abort: cannot backout a merge changeset without --parent
185 abort: cannot backout a merge changeset
186 186 [255]
187 187
188 188 backout of merge with bad parent should fail
@@ -377,7 +377,7 b' reproduce AssertionError, issue1445'
377 377 date: Thu Jan 01 00:00:06 1970 +0000
378 378 summary: msg 6
379 379
380 $ hg log -r "bisected(good)"
380 $ hg log -r "bisect(good)"
381 381 changeset: 0:b99c7b9c8e11
382 382 user: test
383 383 date: Thu Jan 01 00:00:00 1970 +0000
@@ -388,13 +388,13 b' reproduce AssertionError, issue1445'
388 388 date: Thu Jan 01 00:00:05 1970 +0000
389 389 summary: msg 5
390 390
391 $ hg log -r "bisected(bad)"
391 $ hg log -r "bisect(bad)"
392 392 changeset: 6:a3d5c6fdf0d3
393 393 user: test
394 394 date: Thu Jan 01 00:00:06 1970 +0000
395 395 summary: msg 6
396 396
397 $ hg log -r "bisected(skip)"
397 $ hg log -r "bisect(skip)"
398 398 changeset: 1:5cd978ea5149
399 399 user: test
400 400 date: Thu Jan 01 00:00:01 1970 +0000
@@ -416,6 +416,15 b' reproduce AssertionError, issue1445'
416 416 summary: msg 4
417 417
418 418
419 test legacy bisected() keyword
420
421 $ hg log -r "bisected(bad)"
422 changeset: 6:a3d5c6fdf0d3
423 user: test
424 date: Thu Jan 01 00:00:06 1970 +0000
425 summary: msg 6
426
427
419 428 $ set +e
420 429
421 430 test invalid command
@@ -252,6 +252,25 b' complex bisect test 1 # first bad rev i'
252 252 $ hg bisect -b 17 # -> update to rev 6
253 253 Testing changeset 6:a214d5d3811a (15 changesets remaining, ~3 tests)
254 254 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
255 $ hg log -q -r 'bisect(pruned)'
256 0:33b1f9bc8bc5
257 17:228c06deef46
258 $ hg log -q -r 'bisect(untested)'
259 1:4ca5088da217
260 2:051e12f87bf1
261 3:0950834f0a9c
262 4:5c668c22234f
263 5:385a529b6670
264 6:a214d5d3811a
265 8:dab8161ac8fc
266 9:3c77083deb4a
267 10:429fcd26f52d
268 11:82ca6f06eccd
269 12:9f259202bbe7
270 13:b0a32c86eb31
271 15:857b178a7cf3
272 16:609d82a7ebae
273 $ hg log -q -r 'bisect(ignored)'
255 274 $ hg bisect -g # -> update to rev 13
256 275 Testing changeset 13:b0a32c86eb31 (9 changesets remaining, ~3 tests)
257 276 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -271,6 +290,58 b' complex bisect test 1 # first bad rev i'
271 290 date: Thu Jan 01 00:00:09 1970 +0000
272 291 summary: 9
273 292
293 $ hg log -q -r 'bisect(range)'
294 0:33b1f9bc8bc5
295 1:4ca5088da217
296 2:051e12f87bf1
297 3:0950834f0a9c
298 4:5c668c22234f
299 5:385a529b6670
300 6:a214d5d3811a
301 8:dab8161ac8fc
302 9:3c77083deb4a
303 10:429fcd26f52d
304 11:82ca6f06eccd
305 12:9f259202bbe7
306 13:b0a32c86eb31
307 15:857b178a7cf3
308 16:609d82a7ebae
309 17:228c06deef46
310 $ hg log -q -r 'bisect(pruned)'
311 0:33b1f9bc8bc5
312 1:4ca5088da217
313 2:051e12f87bf1
314 3:0950834f0a9c
315 4:5c668c22234f
316 5:385a529b6670
317 6:a214d5d3811a
318 8:dab8161ac8fc
319 9:3c77083deb4a
320 10:429fcd26f52d
321 13:b0a32c86eb31
322 15:857b178a7cf3
323 16:609d82a7ebae
324 17:228c06deef46
325 18:d42e18c7bc9b
326 $ hg log -q -r 'bisect(untested)'
327 11:82ca6f06eccd
328 12:9f259202bbe7
329 $ hg log -q -r 'bisect(goods)'
330 0:33b1f9bc8bc5
331 1:4ca5088da217
332 2:051e12f87bf1
333 3:0950834f0a9c
334 4:5c668c22234f
335 5:385a529b6670
336 6:a214d5d3811a
337 8:dab8161ac8fc
338 $ hg log -q -r 'bisect(bads)'
339 9:3c77083deb4a
340 10:429fcd26f52d
341 15:857b178a7cf3
342 16:609d82a7ebae
343 17:228c06deef46
344 18:d42e18c7bc9b
274 345
275 346 complex bisect test 2 # first good rev is 13
276 347
@@ -282,9 +353,31 b' complex bisect test 2 # first good rev '
282 353 $ hg bisect -s # -> update to rev 10
283 354 Testing changeset 10:429fcd26f52d (13 changesets remaining, ~3 tests)
284 355 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
356 $ hg log -q -r 'bisect(pruned)'
357 0:33b1f9bc8bc5
358 1:4ca5088da217
359 6:a214d5d3811a
360 18:d42e18c7bc9b
285 361 $ hg bisect -b # -> update to rev 12
286 362 Testing changeset 12:9f259202bbe7 (5 changesets remaining, ~2 tests)
287 363 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
364 $ hg log -q -r 'bisect(pruned)'
365 0:33b1f9bc8bc5
366 1:4ca5088da217
367 2:051e12f87bf1
368 3:0950834f0a9c
369 4:5c668c22234f
370 5:385a529b6670
371 6:a214d5d3811a
372 8:dab8161ac8fc
373 9:3c77083deb4a
374 10:429fcd26f52d
375 18:d42e18c7bc9b
376 $ hg log -q -r 'bisect(untested)'
377 11:82ca6f06eccd
378 12:9f259202bbe7
379 13:b0a32c86eb31
380 15:857b178a7cf3
288 381 $ hg bisect -b # -> update to rev 13
289 382 Testing changeset 13:b0a32c86eb31 (3 changesets remaining, ~1 tests)
290 383 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -295,6 +388,21 b' complex bisect test 2 # first good rev '
295 388 date: Thu Jan 01 00:00:13 1970 +0000
296 389 summary: 13
297 390
391 $ hg log -q -r 'bisect(range)'
392 1:4ca5088da217
393 2:051e12f87bf1
394 3:0950834f0a9c
395 4:5c668c22234f
396 5:385a529b6670
397 6:a214d5d3811a
398 8:dab8161ac8fc
399 9:3c77083deb4a
400 10:429fcd26f52d
401 11:82ca6f06eccd
402 12:9f259202bbe7
403 13:b0a32c86eb31
404 15:857b178a7cf3
405 18:d42e18c7bc9b
298 406
299 407 complex bisect test 3
300 408
@@ -306,6 +414,11 b' 10,9,13 are skipped an might be the firs'
306 414 $ hg bisect -b 16 # -> update to rev 6
307 415 Testing changeset 6:a214d5d3811a (13 changesets remaining, ~3 tests)
308 416 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
417 $ hg log -q -r 'bisect(pruned)'
418 0:33b1f9bc8bc5
419 1:4ca5088da217
420 16:609d82a7ebae
421 17:228c06deef46
309 422 $ hg bisect -g # -> update to rev 13
310 423 Testing changeset 13:b0a32c86eb31 (8 changesets remaining, ~3 tests)
311 424 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -315,12 +428,25 b' 10,9,13 are skipped an might be the firs'
315 428 $ hg bisect -s # -> update to rev 12
316 429 Testing changeset 12:9f259202bbe7 (8 changesets remaining, ~3 tests)
317 430 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
431 $ hg log -q -r 'bisect(pruned)'
432 0:33b1f9bc8bc5
433 1:4ca5088da217
434 2:051e12f87bf1
435 3:0950834f0a9c
436 4:5c668c22234f
437 5:385a529b6670
438 6:a214d5d3811a
439 10:429fcd26f52d
440 13:b0a32c86eb31
441 16:609d82a7ebae
442 17:228c06deef46
318 443 $ hg bisect -g # -> update to rev 9
319 444 Testing changeset 9:3c77083deb4a (5 changesets remaining, ~2 tests)
320 445 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
321 446 $ hg bisect -s # -> update to rev 15
322 447 Testing changeset 15:857b178a7cf3 (5 changesets remaining, ~2 tests)
323 448 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
449 $ hg log -q -r 'bisect(ignored)'
324 450 $ hg bisect -b
325 451 Due to skipped revisions, the first bad revision could be any of:
326 452 changeset: 9:3c77083deb4a
@@ -347,6 +473,22 b' 10,9,13 are skipped an might be the firs'
347 473 date: Thu Jan 01 00:00:15 1970 +0000
348 474 summary: merge 10,13
349 475
476 $ hg log -q -r 'bisect(range)'
477 1:4ca5088da217
478 2:051e12f87bf1
479 3:0950834f0a9c
480 4:5c668c22234f
481 5:385a529b6670
482 6:a214d5d3811a
483 8:dab8161ac8fc
484 9:3c77083deb4a
485 10:429fcd26f52d
486 11:82ca6f06eccd
487 12:9f259202bbe7
488 13:b0a32c86eb31
489 15:857b178a7cf3
490 16:609d82a7ebae
491 $ hg log -q -r 'bisect(ignored)'
350 492
351 493 complex bisect test 4
352 494
@@ -364,9 +506,40 b' 15,16 are skipped an might be the first '
364 506 $ hg bisect -b # -> update to rev 15
365 507 Testing changeset 15:857b178a7cf3 (3 changesets remaining, ~1 tests)
366 508 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
509 $ hg log -q -r 'bisect(pruned)'
510 0:33b1f9bc8bc5
511 1:4ca5088da217
512 2:051e12f87bf1
513 3:0950834f0a9c
514 4:5c668c22234f
515 5:385a529b6670
516 6:a214d5d3811a
517 8:dab8161ac8fc
518 9:3c77083deb4a
519 10:429fcd26f52d
520 11:82ca6f06eccd
521 12:9f259202bbe7
522 13:b0a32c86eb31
523 17:228c06deef46
367 524 $ hg bisect -s # -> update to rev 16
368 525 Testing changeset 16:609d82a7ebae (3 changesets remaining, ~1 tests)
369 526 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
527 $ hg log -q -r 'bisect(pruned)'
528 0:33b1f9bc8bc5
529 1:4ca5088da217
530 2:051e12f87bf1
531 3:0950834f0a9c
532 4:5c668c22234f
533 5:385a529b6670
534 6:a214d5d3811a
535 8:dab8161ac8fc
536 9:3c77083deb4a
537 10:429fcd26f52d
538 11:82ca6f06eccd
539 12:9f259202bbe7
540 13:b0a32c86eb31
541 15:857b178a7cf3
542 17:228c06deef46
370 543 $ hg bisect -s
371 544 Due to skipped revisions, the first good revision could be any of:
372 545 changeset: 15:857b178a7cf3
@@ -386,6 +559,33 b' 15,16 are skipped an might be the first '
386 559 date: Thu Jan 01 00:00:17 1970 +0000
387 560 summary: 17
388 561
562 $ hg log -q -r 'bisect(range)'
563 8:dab8161ac8fc
564 9:3c77083deb4a
565 10:429fcd26f52d
566 11:82ca6f06eccd
567 12:9f259202bbe7
568 13:b0a32c86eb31
569 15:857b178a7cf3
570 16:609d82a7ebae
571 17:228c06deef46
572 $ hg log -q -r 'bisect(pruned)'
573 0:33b1f9bc8bc5
574 1:4ca5088da217
575 2:051e12f87bf1
576 3:0950834f0a9c
577 4:5c668c22234f
578 5:385a529b6670
579 6:a214d5d3811a
580 8:dab8161ac8fc
581 9:3c77083deb4a
582 10:429fcd26f52d
583 11:82ca6f06eccd
584 12:9f259202bbe7
585 13:b0a32c86eb31
586 15:857b178a7cf3
587 16:609d82a7ebae
588 17:228c06deef46
389 589
390 590 test unrelated revs:
391 591
@@ -394,6 +594,15 b' test unrelated revs:'
394 594 $ hg bisect -g 14
395 595 abort: starting revisions are not directly related
396 596 [255]
597 $ hg log -q -r 'bisect(range)'
598 $ hg log -q -r 'bisect(pruned)'
599 0:33b1f9bc8bc5
600 1:4ca5088da217
601 2:051e12f87bf1
602 3:0950834f0a9c
603 4:5c668c22234f
604 7:50c76098bbf2
605 14:faa450606157
397 606 $ hg bisect --reset
398 607
399 608 end at merge: 17 bad, 11 good (but 9 is first bad)
@@ -403,6 +612,14 b' end at merge: 17 bad, 11 good (but 9 is '
403 612 $ hg bisect -g 11
404 613 Testing changeset 13:b0a32c86eb31 (5 changesets remaining, ~2 tests)
405 614 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
615 $ hg log -q -r 'bisect(ignored)'
616 2:051e12f87bf1
617 3:0950834f0a9c
618 4:5c668c22234f
619 5:385a529b6670
620 6:a214d5d3811a
621 9:3c77083deb4a
622 10:429fcd26f52d
406 623 $ hg bisect -g
407 624 Testing changeset 15:857b178a7cf3 (3 changesets remaining, ~1 tests)
408 625 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -418,12 +635,69 b' end at merge: 17 bad, 11 good (but 9 is '
418 635 Not all ancestors of this changeset have been checked.
419 636 Use bisect --extend to continue the bisection from
420 637 the common ancestor, dab8161ac8fc.
638 $ hg log -q -r 'bisect(range)'
639 11:82ca6f06eccd
640 12:9f259202bbe7
641 13:b0a32c86eb31
642 15:857b178a7cf3
643 16:609d82a7ebae
644 17:228c06deef46
645 $ hg log -q -r 'bisect(pruned)'
646 0:33b1f9bc8bc5
647 1:4ca5088da217
648 8:dab8161ac8fc
649 11:82ca6f06eccd
650 12:9f259202bbe7
651 13:b0a32c86eb31
652 15:857b178a7cf3
653 16:609d82a7ebae
654 17:228c06deef46
655 18:d42e18c7bc9b
656 $ hg log -q -r 'bisect(untested)'
657 $ hg log -q -r 'bisect(ignored)'
658 2:051e12f87bf1
659 3:0950834f0a9c
660 4:5c668c22234f
661 5:385a529b6670
662 6:a214d5d3811a
663 9:3c77083deb4a
664 10:429fcd26f52d
421 665 $ hg bisect --extend
422 666 Extending search to changeset 8:dab8161ac8fc
423 667 2 files updated, 0 files merged, 2 files removed, 0 files unresolved
668 $ hg log -q -r 'bisect(untested)'
669 $ hg log -q -r 'bisect(ignored)'
670 2:051e12f87bf1
671 3:0950834f0a9c
672 4:5c668c22234f
673 5:385a529b6670
674 6:a214d5d3811a
675 9:3c77083deb4a
676 10:429fcd26f52d
424 677 $ hg bisect -g # dab8161ac8fc
425 678 Testing changeset 9:3c77083deb4a (3 changesets remaining, ~1 tests)
426 679 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
680 $ hg log -q -r 'bisect(untested)'
681 9:3c77083deb4a
682 10:429fcd26f52d
683 $ hg log -q -r 'bisect(ignored)'
684 2:051e12f87bf1
685 3:0950834f0a9c
686 4:5c668c22234f
687 5:385a529b6670
688 6:a214d5d3811a
689 $ hg log -q -r 'bisect(goods)'
690 0:33b1f9bc8bc5
691 1:4ca5088da217
692 8:dab8161ac8fc
693 11:82ca6f06eccd
694 12:9f259202bbe7
695 13:b0a32c86eb31
696 $ hg log -q -r 'bisect(bads)'
697 15:857b178a7cf3
698 16:609d82a7ebae
699 17:228c06deef46
700 18:d42e18c7bc9b
427 701 $ hg bisect -b
428 702 The first bad revision is:
429 703 changeset: 9:3c77083deb4a
@@ -431,3 +705,91 b' end at merge: 17 bad, 11 good (but 9 is '
431 705 date: Thu Jan 01 00:00:09 1970 +0000
432 706 summary: 9
433 707
708 $ hg log -q -r 'bisect(range)'
709 8:dab8161ac8fc
710 9:3c77083deb4a
711 10:429fcd26f52d
712 11:82ca6f06eccd
713 12:9f259202bbe7
714 13:b0a32c86eb31
715 15:857b178a7cf3
716 16:609d82a7ebae
717 17:228c06deef46
718 $ hg log -q -r 'bisect(pruned)'
719 0:33b1f9bc8bc5
720 1:4ca5088da217
721 8:dab8161ac8fc
722 9:3c77083deb4a
723 10:429fcd26f52d
724 11:82ca6f06eccd
725 12:9f259202bbe7
726 13:b0a32c86eb31
727 15:857b178a7cf3
728 16:609d82a7ebae
729 17:228c06deef46
730 18:d42e18c7bc9b
731 $ hg log -q -r 'bisect(untested)'
732 $ hg log -q -r 'bisect(ignored)'
733 2:051e12f87bf1
734 3:0950834f0a9c
735 4:5c668c22234f
736 5:385a529b6670
737 6:a214d5d3811a
738 $ hg log -q -r 'bisect(goods)'
739 0:33b1f9bc8bc5
740 1:4ca5088da217
741 8:dab8161ac8fc
742 11:82ca6f06eccd
743 12:9f259202bbe7
744 13:b0a32c86eb31
745 $ hg log -q -r 'bisect(bads)'
746 9:3c77083deb4a
747 10:429fcd26f52d
748 15:857b178a7cf3
749 16:609d82a7ebae
750 17:228c06deef46
751 18:d42e18c7bc9b
752
753 user adds irrelevant but consistent information (here: -g 2) to bisect state
754
755 $ hg bisect -r
756 $ hg bisect -b 13
757 $ hg bisect -g 8
758 Testing changeset 11:82ca6f06eccd (3 changesets remaining, ~1 tests)
759 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
760 $ hg log -q -r 'bisect(untested)'
761 11:82ca6f06eccd
762 12:9f259202bbe7
763 $ hg bisect -g 2
764 Testing changeset 11:82ca6f06eccd (3 changesets remaining, ~1 tests)
765 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
766 $ hg log -q -r 'bisect(untested)'
767 11:82ca6f06eccd
768 12:9f259202bbe7
769 $ hg bisect -b
770 The first bad revision is:
771 changeset: 11:82ca6f06eccd
772 parent: 8:dab8161ac8fc
773 user: test
774 date: Thu Jan 01 00:00:11 1970 +0000
775 summary: 11
776
777 $ hg log -q -r 'bisect(range)'
778 8:dab8161ac8fc
779 11:82ca6f06eccd
780 12:9f259202bbe7
781 13:b0a32c86eb31
782 $ hg log -q -r 'bisect(pruned)'
783 0:33b1f9bc8bc5
784 1:4ca5088da217
785 2:051e12f87bf1
786 8:dab8161ac8fc
787 11:82ca6f06eccd
788 12:9f259202bbe7
789 13:b0a32c86eb31
790 14:faa450606157
791 15:857b178a7cf3
792 16:609d82a7ebae
793 17:228c06deef46
794 18:d42e18c7bc9b
795 $ hg log -q -r 'bisect(untested)'
@@ -239,9 +239,10 b' the bookmark extension should be ignored'
239 239 test summary
240 240
241 241 $ hg summary
242 parent: 2:db815d6d32e6 tip Y Z x y
242 parent: 2:db815d6d32e6 tip
243 243 2
244 244 branch: default
245 bookmarks: *Z Y x y
245 246 commit: (clean)
246 247 update: 1 new changesets, 2 branch heads (merge)
247 248
@@ -342,3 +343,19 b' create bundle with two heads'
342 343 * Z 3:125c9a1d6df6
343 344 x y 2:db815d6d32e6
344 345
346 test wrongly formatted bookmark
347
348 $ echo '' >> .hg/bookmarks
349 $ hg bookmarks
350 X2 1:925d80f479bb
351 Y 2:db815d6d32e6
352 * Z 3:125c9a1d6df6
353 x y 2:db815d6d32e6
354 $ echo "Ican'thasformatedlines" >> .hg/bookmarks
355 $ hg bookmarks
356 malformed line in .hg/bookmarks: "Ican'thasformatedlines"
357 X2 1:925d80f479bb
358 Y 2:db815d6d32e6
359 * Z 3:125c9a1d6df6
360 x y 2:db815d6d32e6
361
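
Annotation: the bookmarks test above shows that blank and malformed lines in .hg/bookmarks are now skipped with a warning rather than breaking the listing. A small sketch of that tolerant parse (hypothetical helper; the real reader lives in mercurial/bookmarks.py and decodes names through the encoding layer):

    import sys

    def readbookmarks(lines, report=sys.stderr.write):
        """Parse '.hg/bookmarks'-style lines ('<hex node> <name>'), skipping
        and reporting anything that does not split into two fields."""
        marks = {}
        for line in lines:
            line = line.strip()
            if not line:
                continue
            try:
                sha, refspec = line.split(' ', 1)
            except ValueError:
                report('malformed line in .hg/bookmarks: %r\n' % line)
                continue
            marks[refspec] = sha
        return marks
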
@@ -154,7 +154,6 b''
154 154 4 files, 9 changesets, 7 total revisions
155 155 $ hg rollback
156 156 repository tip rolled back to revision 4 (undo pull)
157 working directory now based on revision -1
158 157 $ cd ..
159 158
160 159 should fail
@@ -232,7 +231,6 b' revision 8'
232 231 4 files, 9 changesets, 7 total revisions
233 232 $ hg rollback
234 233 repository tip rolled back to revision 2 (undo unbundle)
235 working directory now based on revision 2
236 234
237 235 revision 2
238 236
@@ -257,7 +255,6 b' revision 4'
257 255 2 files, 5 changesets, 5 total revisions
258 256 $ hg rollback
259 257 repository tip rolled back to revision 2 (undo unbundle)
260 working directory now based on revision 2
261 258 $ hg unbundle ../test-bundle-branch2.hg
262 259 adding changesets
263 260 adding manifests
@@ -277,7 +274,6 b' revision 6'
277 274 3 files, 7 changesets, 6 total revisions
278 275 $ hg rollback
279 276 repository tip rolled back to revision 2 (undo unbundle)
280 working directory now based on revision 2
281 277 $ hg unbundle ../test-bundle-cset-7.hg
282 278 adding changesets
283 279 adding manifests
@@ -90,7 +90,6 b' Rollback empty'
90 90
91 91 $ hg -R empty rollback
92 92 repository tip rolled back to revision -1 (undo pull)
93 working directory now based on revision -1
94 93
95 94 Pull full.hg into empty again (using --cwd)
96 95
@@ -121,7 +120,6 b' Rollback empty'
121 120
122 121 $ hg -R empty rollback
123 122 repository tip rolled back to revision -1 (undo pull)
124 working directory now based on revision -1
125 123
126 124 Pull full.hg into empty again (using -R)
127 125
@@ -219,7 +217,6 b' Rollback empty'
219 217
220 218 $ hg rollback
221 219 repository tip rolled back to revision -1 (undo pull)
222 working directory now based on revision -1
223 220 $ cd ..
224 221
225 222 Log -R bundle:empty+full.hg
@@ -1,14 +1,6 b''
1 1 $ "$TESTDIR/hghave" pyflakes || exit 80
2 2 $ cd $(dirname $TESTDIR)
3 3 $ pyflakes mercurial hgext 2>&1 | $TESTDIR/filterpyflakes.py
4 mercurial/hgweb/server.py:*: 'activeCount' imported but unused (glob)
5 mercurial/commands.py:*: 'base85' imported but unused (glob)
6 mercurial/commands.py:*: 'bdiff' imported but unused (glob)
7 mercurial/commands.py:*: 'mpatch' imported but unused (glob)
8 mercurial/commands.py:*: 'osutil' imported but unused (glob)
9 4 hgext/inotify/linux/__init__.py:*: 'from _inotify import *' used; unable to detect undefined names (glob)
10 mercurial/util.py:*: 'from posix import *' used; unable to detect undefined names (glob)
11 mercurial/windows.py:*: 'from win32 import *' used; unable to detect undefined names (glob)
12 mercurial/util.py:*: 'from windows import *' used; unable to detect undefined names (glob)
13 5
14 6
@@ -154,6 +154,30 b' def hookoutput(server):'
154 154 'hooks.pre-identify=python:test-commandserver.hook', 'id'],
155 155 input=cStringIO.StringIO('some input'))
156 156
157 def outsidechanges(server):
158 readchannel(server)
159 os.system('echo a >> a && hg ci -Am2')
160 runcommand(server, ['tip'])
161
162 def bookmarks(server):
163 readchannel(server)
164 runcommand(server, ['bookmarks'])
165
166 # changes .hg/bookmarks
167 os.system('hg bookmark -i bm1')
168 os.system('hg bookmark -i bm2')
169 runcommand(server, ['bookmarks'])
170
171 # changes .hg/bookmarks.current
172 os.system('hg upd bm1 -q')
173 runcommand(server, ['bookmarks'])
174
175 def tagscache(server):
176 readchannel(server)
177 runcommand(server, ['id', '-t', '-r', '0'])
178 os.system('hg tag -r 0 foo')
179 runcommand(server, ['id', '-t', '-r', '0'])
180
157 181 if __name__ == '__main__':
158 182 os.system('hg init')
159 183
@@ -169,3 +193,6 b" if __name__ == '__main__':"
169 193 hgrc.close()
170 194 check(localhgrc)
171 195 check(hookoutput)
196 check(outsidechanges)
197 check(bookmarks)
198 check(tagscache)
@@ -52,3 +52,16 b' ui.slash=True'
52 52 hook talking
53 53 now try to read something: 'some input'
54 54 eff892de26ec tip
55 changeset: 1:d3a0a68be6de
56 tag: tip
57 user: test
58 date: Thu Jan 01 00:00:00 1970 +0000
59 summary: 2
60
61 no bookmarks set
62 bm1 1:d3a0a68be6de
63 bm2 1:d3a0a68be6de
64 * bm1 1:d3a0a68be6de
65 bm2 1:d3a0a68be6de
66
67 foo
@@ -27,7 +27,7 b' Explicit --authors'
27 27 sorting...
28 28 converting...
29 29 0 foo
30 Writing author map file new/.hg/authormap
30 Writing author map file $TESTTMP/new/.hg/authormap
31 31 $ cat new/.hg/authormap
32 32 user name=Long User Name
33 33 $ hg -Rnew log
@@ -44,7 +44,7 b' Implicit .hg/authormap'
44 44 $ hg init new
45 45 $ mv authormap.txt new/.hg/authormap
46 46 $ hg convert orig new
47 Ignoring bad line in author map file new/.hg/authormap: this line is ignored
47 Ignoring bad line in author map file $TESTTMP/new/.hg/authormap: this line is ignored
48 48 scanning source...
49 49 sorting...
50 50 converting...
@@ -112,7 +112,6 b' convert fresh repo with --filemap'
112 112 1 import
113 113 filtering out empty revision
114 114 repository tip rolled back to revision 0 (undo commit)
115 working directory now based on revision -1
116 115 0 ci0
117 116 updating tags
118 117 $ hgcat b/c
@@ -86,8 +86,7 b' final file versions in this repo:'
86 86 bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644 quux
87 87 $ hg debugrename copied
88 88 copied renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd
89 $ echo
90
89
91 90 $ cd ..
92 91 $ splitrepo()
93 92 > {
@@ -198,8 +198,6 b' full conversion'
198 198 354ae8da6e890359ef49ade27b68bbc361f3ca88 644 baz
199 199 9277c9cc8dd4576fc01a17939b4351e5ada93466 644 foo
200 200 88dfeab657e8cf2cef3dec67b914f49791ae76b1 644 quux
201 $ echo
202
203 201
204 202 test binary conversion (issue 1359)
205 203
@@ -226,8 +224,6 b' convert binary file'
226 224 $ python -c 'print len(file("b", "rb").read())'
227 225 4096
228 226 $ cd ..
229 $ echo
230
231 227
232 228 test author vs committer
233 229
@@ -167,6 +167,7 b" Test convert progress bar'"
167 167 > [progress]
168 168 > assume-tty = 1
169 169 > delay = 0
170 > changedelay = 0
170 171 > format = topic bar number
171 172 > refresh = 0
172 173 > width = 60
@@ -219,8 +219,9 b' Copy'
219 219 newlink
220 220
221 221 $ hg --cwd a rm b
222 $ echo % remove
223 % remove
222
223 Remove
224
224 225 $ hg --cwd a ci -d '4 0' -m 'remove a file'
225 226 $ hg --cwd a tip -q
226 227 4:07b2e34a5b17
@@ -249,18 +249,18 b''
249 249
250 250 options:
251 251
252 -s --source-type TYPE source repository type
253 -d --dest-type TYPE destination repository type
254 -r --rev REV import up to target revision REV
255 -A --authormap FILE remap usernames using this file
256 --filemap FILE remap file names using contents of file
257 --splicemap FILE splice synthesized history into place
258 --branchmap FILE change branch names while converting
259 --branchsort try to sort changesets by branches
260 --datesort try to sort changesets by date
261 --sourcesort preserve source changesets order
252 -s --source-type TYPE source repository type
253 -d --dest-type TYPE destination repository type
254 -r --rev REV import up to target revision REV
255 -A --authormap FILE remap usernames using this file
256 --filemap FILE remap file names using contents of file
257 --splicemap FILE splice synthesized history into place
258 --branchmap FILE change branch names while converting
259 --branchsort try to sort changesets by branches
260 --datesort try to sort changesets by date
261 --sourcesort preserve source changesets order
262 262
263 use "hg -v help convert" to show global options
263 use "hg -v help convert" to show more info
264 264 $ hg init a
265 265 $ cd a
266 266 $ echo a > a
@@ -17,6 +17,7 b' Show all commands except debug commands'
17 17 diff
18 18 export
19 19 forget
20 graft
20 21 grep
21 22 heads
22 23 help
@@ -196,7 +197,7 b' Show all commands + options'
196 197 forget: include, exclude
197 198 init: ssh, remotecmd, insecure
198 199 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, hidden, patch, git, limit, no-merges, stat, style, template, include, exclude
199 merge: force, tool, rev, preview
200 merge: force, rev, preview, tool
200 201 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
201 202 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
202 203 remove: after, force, include, exclude
@@ -206,7 +207,7 b' Show all commands + options'
206 207 update: clean, check, date, rev
207 208 addremove: similarity, include, exclude, dry-run
208 209 archive: no-decode, prefix, rev, type, subrepos, include, exclude
209 backout: merge, parent, tool, rev, include, exclude, message, logfile, date, user
210 backout: merge, parent, rev, tool, include, exclude, message, logfile, date, user
210 211 bisect: reset, good, bad, skip, extend, command, noupdate
211 212 bookmarks: force, rev, delete, rename, inactive
212 213 branch: force, clean
@@ -242,11 +243,12 b' Show all commands + options'
242 243 debugsub: rev
243 244 debugwalk: include, exclude
244 245 debugwireargs: three, four, five, ssh, remotecmd, insecure
246 graft: continue, edit, currentdate, currentuser, date, user, tool
245 247 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
246 248 heads: rev, topo, active, closed, style, template
247 249 help: extension, command
248 250 identify: rev, num, id, branch, tags, bookmarks
249 import: strip, base, force, no-commit, bypass, exact, import-branch, message, logfile, date, user, similarity
251 import: strip, base, edit, force, no-commit, bypass, exact, import-branch, message, logfile, date, user, similarity
250 252 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd, insecure, subrepos
251 253 locate: rev, print0, fullpath, include, exclude
252 254 manifest: rev, all
@@ -255,9 +257,9 b' Show all commands + options'
255 257 paths:
256 258 recover:
257 259 rename: after, force, include, exclude, dry-run
258 resolve: all, list, mark, unmark, tool, no-status, include, exclude
260 resolve: all, list, mark, unmark, no-status, tool, include, exclude
259 261 revert: all, date, rev, no-backup, include, exclude, dry-run
260 rollback: dry-run
262 rollback: dry-run, force
261 263 root:
262 264 showconfig: untrusted
263 265 tag: force, local, rev, remove, edit, message, date, user
@@ -94,8 +94,7 b' record'
94 94 a
95 95 c
96 96 \x1b[0;33mrecord this change to 'a'? [Ynsfdaq?]\x1b[0m (esc)
97 $ echo
98
97
99 98 $ echo "[extensions]" >> $HGRCPATH
100 99 $ echo "mq=" >> $HGRCPATH
101 100 $ hg rollback
@@ -123,5 +122,3 b' qrecord'
123 122 a
124 123 c
125 124 \x1b[0;33mrecord this change to 'a'? [Ynsfdaq?]\x1b[0m (esc)
126 $ echo
127
@@ -25,11 +25,11 b' Missing arg:'
25 25
26 26 options:
27 27
28 -o --output FORMAT print output to file with formatted name
29 -r --rev REV print the given revision
30 --decode apply any matching decode filter
31 -I --include PATTERN [+] include names matching the given patterns
32 -X --exclude PATTERN [+] exclude names matching the given patterns
28 -o --output FORMAT print output to file with formatted name
29 -r --rev REV print the given revision
30 --decode apply any matching decode filter
31 -I --include PATTERN [+] include names matching the given patterns
32 -X --exclude PATTERN [+] exclude names matching the given patterns
33 33
34 34 [+] marked option can be specified multiple times
35 35
@@ -33,3 +33,6 b' doctest.testmod(mercurial.hgweb.hgwebdir'
33 33
34 34 import hgext.convert.cvsps
35 35 doctest.testmod(hgext.convert.cvsps)
36
37 import mercurial.revset
38 doctest.testmod(mercurial.revset)
@@ -19,9 +19,15 b' hgrc.close()'
19 19 u = ui.ui()
20 20 extensions.loadall(u)
21 21
22 globalshort = set()
23 globallong = set()
24 for option in commands.globalopts:
25 option[0] and globalshort.add(option[0])
26 option[1] and globallong.add(option[1])
27
22 28 for cmd, entry in commands.table.iteritems():
23 seenshort = set()
24 seenlong = set()
29 seenshort = globalshort.copy()
30 seenlong = globallong.copy()
25 31 for option in entry[1]:
26 32 if (option[0] and option[0] in seenshort) or \
27 33 (option[1] and option[1] in seenlong):
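The change above seeds each command's seen-option sets with the global short and long flags before scanning the command's own options, so a per-command flag that collides with a global one is reported as well. A minimal standalone sketch of that check, assuming a simplified (short, long) option-table shape rather than Mercurial's real tables:

    # Sketch only: options are (shortflag, longflag) pairs; the real
    # Mercurial tables carry more fields per option. The 'quick' flag
    # below is a contrived collision for the demo.
    globalopts = [('R', 'repository'), ('q', 'quiet'), ('v', 'verbose')]
    cmdtable = {
        'add': [('I', 'include'), ('X', 'exclude'), ('q', 'quick')],
    }

    globalshort = set(s for s, l in globalopts if s)
    globallong = set(l for s, l in globalopts if l)

    for cmd, opts in cmdtable.items():
        seenshort = globalshort.copy()
        seenlong = globallong.copy()
        for short, long in opts:
            if (short and short in seenshort) or (long and long in seenlong):
                print("%s: duplicate option -%s/--%s" % (cmd, short, long))
            short and seenshort.add(short)
            long and seenlong.add(long)
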
@@ -46,20 +46,20 b' alignment of option descriptions in help'
46 46 check alignment of option descriptions in help
47 47
48 48 $ hg help showoptlist
49 hg showoptlist
49 hg showoptlist
50 50
51 51 dummy command to show option descriptions
52 52
53 53 options:
54 54
55 -s --opt1 \xe7\x9f\xad\xe5\x90\x8d short width \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d (esc)
56 -m --opt2 MIDDLE_ middle width MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_
57 MIDDLE_ MIDDLE_ MIDDLE_
58 -l --opt3 \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d long width \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc)
59 \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc)
60 \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc)
55 -s --opt1 \xe7\x9f\xad\xe5\x90\x8d short width \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d (esc)
56 -m --opt2 MIDDLE_ middle width MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_
57 MIDDLE_ MIDDLE_ MIDDLE_
58 -l --opt3 \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d long width \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc)
59 \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc)
60 \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc)
61 61
62 use "hg -v help showoptlist" to show global options
62 use "hg -v help showoptlist" to show more info
63 63
64 64
65 65 $ rm -f s; touch s
@@ -57,7 +57,7 b' define commands to display help text'
57 57 (1-1) display Japanese full-width characters in cp932
58 58
59 59 $ COLUMNS=60 hg --encoding cp932 --config extensions.show=./show.py help show_full_ja
60 hg show_full_ja
60 hg show_full_ja
61 61
62 62 \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc)
63 63
@@ -67,12 +67,12 b' define commands to display help text'
67 67 \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf\x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf\x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc)
68 68 \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc)
69 69
70 use "hg -v help show_full_ja" to show global options
70 use "hg -v help show_full_ja" to show more info
71 71
72 72 (1-2) display Japanese full-width characters in utf-8
73 73
74 74 $ COLUMNS=60 hg --encoding utf-8 --config extensions.show=./show.py help show_full_ja
75 hg show_full_ja
75 hg show_full_ja
76 76
77 77 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc)
78 78
@@ -82,13 +82,13 b' define commands to display help text'
82 82 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc)
83 83 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc)
84 84
85 use "hg -v help show_full_ja" to show global options
85 use "hg -v help show_full_ja" to show more info
86 86
87 87
88 88 (1-3) display Japanese half-width characters in cp932
89 89
90 90 $ COLUMNS=60 hg --encoding cp932 --config extensions.show=./show.py help show_half_ja
91 hg show_half_ja
91 hg show_half_ja
92 92
93 93 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc)
94 94
@@ -98,12 +98,12 b' define commands to display help text'
98 98 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc)
99 99 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc)
100 100
101 use "hg -v help show_half_ja" to show global options
101 use "hg -v help show_half_ja" to show more info
102 102
103 103 (1-4) display Japanese half-width characters in utf-8
104 104
105 105 $ COLUMNS=60 hg --encoding utf-8 --config extensions.show=./show.py help show_half_ja
106 hg show_half_ja
106 hg show_half_ja
107 107
108 108 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc)
109 109
@@ -113,7 +113,7 b' define commands to display help text'
113 113 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc)
114 114 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc)
115 115
116 use "hg -v help show_half_ja" to show global options
116 use "hg -v help show_half_ja" to show more info
117 117
118 118
119 119
@@ -124,7 +124,7 b' define commands to display help text'
124 124 (2-1-1) display Japanese ambiguous-width characters in cp932
125 125
126 126 $ COLUMNS=60 hg --encoding cp932 --config extensions.show=./show.py help show_ambig_ja
127 hg show_ambig_ja
127 hg show_ambig_ja
128 128
129 129 \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
130 130
@@ -134,12 +134,12 b' define commands to display help text'
134 134 \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
135 135 \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
136 136
137 use "hg -v help show_ambig_ja" to show global options
137 use "hg -v help show_ambig_ja" to show more info
138 138
139 139 (2-1-2) display Japanese ambiguous-width characters in utf-8
140 140
141 141 $ COLUMNS=60 hg --encoding utf-8 --config extensions.show=./show.py help show_ambig_ja
142 hg show_ambig_ja
142 hg show_ambig_ja
143 143
144 144 \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
145 145
@@ -149,12 +149,12 b' define commands to display help text'
149 149 \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
150 150 \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
151 151
152 use "hg -v help show_ambig_ja" to show global options
152 use "hg -v help show_ambig_ja" to show more info
153 153
154 154 (2-1-3) display Russian ambiguous-width characters in cp1251
155 155
156 156 $ COLUMNS=60 hg --encoding cp1251 --config extensions.show=./show.py help show_ambig_ru
157 hg show_ambig_ru
157 hg show_ambig_ru
158 158
159 159 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
160 160
@@ -164,12 +164,12 b' define commands to display help text'
164 164 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
165 165 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
166 166
167 use "hg -v help show_ambig_ru" to show global options
167 use "hg -v help show_ambig_ru" to show more info
168 168
169 169 (2-1-4) display Russian ambiguous-width characters in utf-8
170 170
171 171 $ COLUMNS=60 hg --encoding utf-8 --config extensions.show=./show.py help show_ambig_ru
172 hg show_ambig_ru
172 hg show_ambig_ru
173 173
174 174 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
175 175
@@ -179,7 +179,7 b' define commands to display help text'
179 179 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
180 180 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
181 181
182 use "hg -v help show_ambig_ru" to show global options
182 use "hg -v help show_ambig_ru" to show more info
183 183
184 184
185 185 (2-2) treat width of ambiguous characters as wide
@@ -187,7 +187,7 b' define commands to display help text'
187 187 (2-2-1) display Japanese ambiguous-width characters in cp932
188 188
189 189 $ COLUMNS=60 HGENCODINGAMBIGUOUS=wide hg --encoding cp932 --config extensions.show=./show.py help show_ambig_ja
190 hg show_ambig_ja
190 hg show_ambig_ja
191 191
192 192 \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
193 193
@@ -200,12 +200,12 b' define commands to display help text'
200 200 \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
201 201 \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
202 202
203 use "hg -v help show_ambig_ja" to show global options
203 use "hg -v help show_ambig_ja" to show more info
204 204
205 205 (2-2-2) display Japanese ambiguous-width characters in utf-8
206 206
207 207 $ COLUMNS=60 HGENCODINGAMBIGUOUS=wide hg --encoding utf-8 --config extensions.show=./show.py help show_ambig_ja
208 hg show_ambig_ja
208 hg show_ambig_ja
209 209
210 210 \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
211 211
@@ -218,12 +218,12 b' define commands to display help text'
218 218 \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
219 219 \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
220 220
221 use "hg -v help show_ambig_ja" to show global options
221 use "hg -v help show_ambig_ja" to show more info
222 222
223 223 (2-2-3) display Russian ambiguous-width characters in cp1251
224 224
225 225 $ COLUMNS=60 HGENCODINGAMBIGUOUS=wide hg --encoding cp1251 --config extensions.show=./show.py help show_ambig_ru
226 hg show_ambig_ru
226 hg show_ambig_ru
227 227
228 228 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
229 229 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
@@ -236,12 +236,12 b' define commands to display help text'
236 236 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
237 237 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
238 238
239 use "hg -v help show_ambig_ru" to show global options
239 use "hg -v help show_ambig_ru" to show more info
240 240
241 241 (2-2-4) display Russian ambiguous-width characters in utf-8
242 242
243 243 $ COLUMNS=60 HGENCODINGAMBIGUOUS=wide hg --encoding utf-8 --config extensions.show=./show.py help show_ambig_ru
244 hg show_ambig_ru
244 hg show_ambig_ru
245 245
246 246 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
247 247 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
@@ -254,4 +254,4 b' define commands to display help text'
254 254 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
255 255 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
256 256
257 use "hg -v help show_ambig_ru" to show global options
257 use "hg -v help show_ambig_ru" to show more info
@@ -161,7 +161,6 b' We can fix the head and push again'
161 161 added 3 changesets with 3 changes to 2 files (+1 heads)
162 162 $ hg -R ../main rollback
163 163 repository tip rolled back to revision 5 (undo push)
164 working directory now based on revision -1
165 164
166 165 Test it still fails with checkallhook
167 166
@@ -441,3 +441,82 b' Test handling of a broken .hgeol file:'
441 441 warning: ignoring .hgeol file due to parse error at .hgeol:1: bad
442 442 $ hg status
443 443 ? .hgeol.orig
444
445 Test eol.only-consistent can be specified in .hgeol
446
447 $ cd $TESTTMP
448 $ hg init only-consistent
449 $ cd only-consistent
450 $ printf "first\nsecond\r\n" > a.txt
451 $ hg add a.txt
452 $ cat > .hgeol << EOF
453 > [eol]
454 > only-consistent = True
455 > EOF
456 $ hg commit -m 'inconsistent'
457 abort: inconsistent newline style in a.txt
458
459 [255]
460 $ cat > .hgeol << EOF
461 > [eol]
462 > only-consistent = False
463 > EOF
464 $ hg commit -m 'consistent'
465
466
467 Test trailing newline
468
469 $ cat >> $HGRCPATH <<EOF
470 > [extensions]
471 > eol=
472 > EOF
473
474 setup repository
475
476 $ cd $TESTTMP
477 $ hg init trailing
478 $ cd trailing
479 $ cat > .hgeol <<EOF
480 > [patterns]
481 > **.txt = native
482 > [eol]
483 > fix-trailing-newline = False
484 > EOF
485
486 add text without trailing newline
487
488 $ printf "first\nsecond" > a.txt
489 $ hg commit --addremove -m 'checking in'
490 adding .hgeol
491 adding a.txt
492 $ rm a.txt
493 $ hg update -C -q
494 $ cat a.txt
495 first
496 second (no-eol)
497
498 $ cat > .hgeol <<EOF
499 > [patterns]
500 > **.txt = native
501 > [eol]
502 > fix-trailing-newline = True
503 > EOF
504 $ printf "third\nfourth" > a.txt
505 $ hg commit -m 'checking in with newline fix'
506 $ rm a.txt
507 $ hg update -C -q
508 $ cat a.txt
509 third
510 fourth
511
512 append a line without trailing newline
513
514 $ printf "fifth" >> a.txt
515 $ hg commit -m 'adding another line'
516 $ rm a.txt
517 $ hg update -C -q
518 $ cat a.txt
519 third
520 fourth
521 fifth
522
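The new test above exercises the eol extension's fix-trailing-newline setting: with it off, a file committed without a final newline round-trips unchanged (hence the "(no-eol)" marker), and with it on, the missing newline is added when the file is committed. A rough illustration of that effect, not the extension's actual filter code:

    # Illustration only: the effect of fix-trailing-newline = True on
    # committed file contents.
    def fix_trailing_newline(data):
        # append a newline when the file does not already end with one
        if data and not data.endswith(b'\n'):
            data += b'\n'
        return data

    assert fix_trailing_newline(b'third\nfourth') == b'third\nfourth\n'
    assert fix_trailing_newline(b'first\nsecond\n') == b'first\nsecond\n'
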
@@ -7,7 +7,7 b''
7 7 > hg ci -m "foo-$i"
8 8 > done
9 9
10 $ for out in "%nof%N" "%%%H" "%b-%R" "%h" "%r"; do
10 $ for out in "%nof%N" "%%%H" "%b-%R" "%h" "%r" "%m"; do
11 11 > echo
12 12 > echo "# foo-$out.patch"
13 13 > hg export -v -o "foo-$out.patch" 2:tip
@@ -77,6 +77,19 b''
77 77 foo-09.patch
78 78 foo-10.patch
79 79 foo-11.patch
80
81 # foo-%m.patch
82 exporting patches:
83 foo-foo_2.patch
84 foo-foo_3.patch
85 foo-foo_4.patch
86 foo-foo_5.patch
87 foo-foo_6.patch
88 foo-foo_7.patch
89 foo-foo_8.patch
90 foo-foo_9.patch
91 foo-foo_10.patch
92 foo-foo_11.patch
80 93
81 94 Exporting 4 changesets to a file:
82 95
@@ -108,3 +121,11 b' Exporting revision -2 to a file:'
108 121 foo-9
109 122 +foo-10
110 123
124 Checking if only alphanumeric characters are used in the file name (%m option):
125
126 $ echo "line" >> foo
127 $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_\`abcdefghijklmnopqrstuvwxyz{|}~"
128 $ hg export -v -o %m.patch tip
129 exporting patch:
130 ____________0123456789_______ABCDEFGHIJKLMNOPQRSTUVWXYZ______abcdefghijklmnopqrstuvwxyz____.patch
131
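As the expected output above shows, the %m key in `hg export -o` derives the patch file name from the first line of the commit message, keeping alphanumerics and mapping every other character to an underscore. A hedged re-implementation of that mapping for illustration only, not Mercurial's own code:

    import re

    def message_to_filename(desc):
        # take the first line of the description and replace every
        # non-word character with an underscore, as %m appears to do
        firstline = desc.splitlines()[0] if desc else ''
        return re.sub(r'\W', '_', firstline)

    print(message_to_filename('foo-2'))   # foo_2
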
@@ -39,15 +39,15 b' Should diff cloned directories:'
39 39
40 40 options:
41 41
42 -o --option OPT [+] pass option to comparison program
43 -r --rev REV [+] revision
44 -c --change REV change made by revision
45 -I --include PATTERN [+] include names matching the given patterns
46 -X --exclude PATTERN [+] exclude names matching the given patterns
42 -o --option OPT [+] pass option to comparison program
43 -r --rev REV [+] revision
44 -c --change REV change made by revision
45 -I --include PATTERN [+] include names matching the given patterns
46 -X --exclude PATTERN [+] exclude names matching the given patterns
47 47
48 48 [+] marked option can be specified multiple times
49 49
50 use "hg -v help falabala" to show global options
50 use "hg -v help falabala" to show more info
51 51
52 52 $ hg ci -d '0 0' -mtest1
53 53
@@ -182,23 +182,24 b" Check hgweb's load order:"
182 182 yet another foo command
183 183
184 184 global options:
185 -R --repository REPO repository root directory or name of overlay bundle
186 file
187 --cwd DIR change working directory
188 -y --noninteractive do not prompt, automatically pick the first choice
189 for all prompts
190 -q --quiet suppress output
191 -v --verbose enable additional output
192 --config CONFIG [+] set/override config option (use 'section.name=value')
193 --debug enable debugging output
194 --debugger start debugger
195 --encoding ENCODE set the charset encoding (default: ascii)
196 --encodingmode MODE set the charset encoding mode (default: strict)
197 --traceback always print a traceback on exception
198 --time time how long the command takes
199 --profile print command execution profile
200 --version output version information and exit
201 -h --help display help and exit
185
186 -R --repository REPO repository root directory or name of overlay bundle
187 file
188 --cwd DIR change working directory
189 -y --noninteractive do not prompt, automatically pick the first choice for
190 all prompts
191 -q --quiet suppress output
192 -v --verbose enable additional output
193 --config CONFIG [+] set/override config option (use 'section.name=value')
194 --debug enable debugging output
195 --debugger start debugger
196 --encoding ENCODE set the charset encoding (default: ascii)
197 --encodingmode MODE set the charset encoding mode (default: strict)
198 --traceback always print a traceback on exception
199 --time time how long the command takes
200 --profile print command execution profile
201 --version output version information and exit
202 -h --help display help and exit
202 203
203 204 [+] marked option can be specified multiple times
204 205
@@ -213,23 +214,24 b" Check hgweb's load order:"
213 214 yet another foo command
214 215
215 216 global options:
216 -R --repository REPO repository root directory or name of overlay bundle
217 file
218 --cwd DIR change working directory
219 -y --noninteractive do not prompt, automatically pick the first choice
220 for all prompts
221 -q --quiet suppress output
222 -v --verbose enable additional output
223 --config CONFIG [+] set/override config option (use 'section.name=value')
224 --debug enable debugging output
225 --debugger start debugger
226 --encoding ENCODE set the charset encoding (default: ascii)
227 --encodingmode MODE set the charset encoding mode (default: strict)
228 --traceback always print a traceback on exception
229 --time time how long the command takes
230 --profile print command execution profile
231 --version output version information and exit
232 -h --help display help and exit
217
218 -R --repository REPO repository root directory or name of overlay bundle
219 file
220 --cwd DIR change working directory
221 -y --noninteractive do not prompt, automatically pick the first choice for
222 all prompts
223 -q --quiet suppress output
224 -v --verbose enable additional output
225 --config CONFIG [+] set/override config option (use 'section.name=value')
226 --debug enable debugging output
227 --debugger start debugger
228 --encoding ENCODE set the charset encoding (default: ascii)
229 --encodingmode MODE set the charset encoding mode (default: strict)
230 --traceback always print a traceback on exception
231 --time time how long the command takes
232 --profile print command execution profile
233 --version output version information and exit
234 -h --help display help and exit
233 235
234 236 [+] marked option can be specified multiple times
235 237 $ echo 'debugextension = !' >> $HGRCPATH
@@ -260,16 +262,16 b' Extension module help vs command help:'
260 262
261 263 options:
262 264
263 -p --program CMD comparison program to run
264 -o --option OPT [+] pass option to comparison program
265 -r --rev REV [+] revision
266 -c --change REV change made by revision
267 -I --include PATTERN [+] include names matching the given patterns
268 -X --exclude PATTERN [+] exclude names matching the given patterns
265 -p --program CMD comparison program to run
266 -o --option OPT [+] pass option to comparison program
267 -r --rev REV [+] revision
268 -c --change REV change made by revision
269 -I --include PATTERN [+] include names matching the given patterns
270 -X --exclude PATTERN [+] exclude names matching the given patterns
269 271
270 272 [+] marked option can be specified multiple times
271 273
272 use "hg -v help extdiff" to show global options
274 use "hg -v help extdiff" to show more info
273 275
274 276 $ hg help --extension extdiff
275 277 extdiff extension - command to allow external programs to compare revisions
@@ -371,7 +373,7 b' Test help topic with same name as extens'
371 373
372 374 multirevs command
373 375
374 use "hg -v help multirevs" to show global options
376 use "hg -v help multirevs" to show more info
375 377
376 378 $ hg multirevs
377 379 hg multirevs: invalid arguments
@@ -1,5 +1,3 b''
1 adjust to non-default HGPORT, e.g. with run-tests.py -j
2
3 1 $ echo "[extensions]" >> $HGRCPATH
4 2 $ echo "fetch=" >> $HGRCPATH
5 3
@@ -7,7 +5,7 b' test fetch with default branches only'
7 5
8 6 $ hg init a
9 7 $ echo a > a/a
10 $ hg --cwd a commit -d '1 0' -Ama
8 $ hg --cwd a commit -Ama
11 9 adding a
12 10 $ hg clone a b
13 11 updating to branch default
@@ -16,10 +14,10 b' test fetch with default branches only'
16 14 updating to branch default
17 15 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
18 16 $ echo b > a/b
19 $ hg --cwd a commit -d '2 0' -Amb
17 $ hg --cwd a commit -Amb
20 18 adding b
21 19 $ hg --cwd a parents -q
22 1:97d72e5f12c7
20 1:d2ae7f538514
23 21
24 22 should pull one change
25 23
@@ -32,9 +30,9 b' should pull one change'
32 30 added 1 changesets with 1 changes to 1 files
33 31 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
34 32 $ hg --cwd b parents -q
35 1:97d72e5f12c7
33 1:d2ae7f538514
36 34 $ echo c > c/c
37 $ hg --cwd c commit -d '3 0' -Amc
35 $ hg --cwd c commit -Amc
38 36 adding c
39 37 $ hg clone c d
40 38 updating to branch default
@@ -48,39 +46,37 b' repo, because the path of the repo will '
48 46 message, making every commit appear different.
49 47 should merge c into a
50 48
51 $ hg --cwd c fetch -d '4 0' -m 'automated merge' ../a
49 $ hg --cwd c fetch -d '0 0' -m 'automated merge' ../a
52 50 pulling from ../a
53 51 searching for changes
54 52 adding changesets
55 53 adding manifests
56 54 adding file changes
57 55 added 1 changesets with 1 changes to 1 files (+1 heads)
58 updating to 2:97d72e5f12c7
56 updating to 2:d2ae7f538514
59 57 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
60 merging with 1:5e056962225c
58 merging with 1:d36c0562f908
61 59 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 new changeset 3:cd3a41621cf0 merges remote changes with local
60 new changeset 3:a323a0c43ec4 merges remote changes with local
63 61 $ ls c
64 62 a
65 63 b
66 64 c
67 $ netstat -tnap 2>/dev/null | grep $HGPORT | grep LISTEN
68 [1]
69 65 $ hg --cwd a serve -a localhost -p $HGPORT -d --pid-file=hg.pid
70 66 $ cat a/hg.pid >> "$DAEMON_PIDS"
71 67
72 68 fetch over http, no auth
73 69
74 $ hg --cwd d fetch -d '5 0' http://localhost:$HGPORT/
70 $ hg --cwd d fetch http://localhost:$HGPORT/
75 71 pulling from http://localhost:$HGPORT/
76 72 searching for changes
77 73 adding changesets
78 74 adding manifests
79 75 adding file changes
80 76 added 1 changesets with 1 changes to 1 files (+1 heads)
81 updating to 2:97d72e5f12c7
77 updating to 2:d2ae7f538514
82 78 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
83 merging with 1:5e056962225c
79 merging with 1:d36c0562f908
84 80 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
85 81 new changeset 3:* merges remote changes with local (glob)
86 82 $ hg --cwd d tip --template '{desc}\n'
@@ -88,16 +84,16 b' fetch over http, no auth'
88 84
89 85 fetch over http with auth (should be hidden in desc)
90 86
91 $ hg --cwd e fetch -d '5 0' http://user:password@localhost:$HGPORT/
87 $ hg --cwd e fetch http://user:password@localhost:$HGPORT/
92 88 pulling from http://user:***@localhost:$HGPORT/
93 89 searching for changes
94 90 adding changesets
95 91 adding manifests
96 92 adding file changes
97 93 added 1 changesets with 1 changes to 1 files (+1 heads)
98 updating to 2:97d72e5f12c7
94 updating to 2:d2ae7f538514
99 95 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
100 merging with 1:5e056962225c
96 merging with 1:d36c0562f908
101 97 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
102 98 new changeset 3:* merges remote changes with local (glob)
103 99 $ hg --cwd e tip --template '{desc}\n'
@@ -109,17 +105,17 b' fetch over http with auth (should be hid'
109 105 updating to branch default
110 106 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 107 $ echo f > f/f
112 $ hg --cwd f ci -d '6 0' -Amf
108 $ hg --cwd f ci -Amf
113 109 adding f
114 110 $ echo g > g/g
115 $ hg --cwd g ci -d '6 0' -Amg
111 $ hg --cwd g ci -Amg
116 112 adding g
117 113 $ hg clone -q f h
118 114 $ hg clone -q g i
119 115
120 116 should merge f into g
121 117
122 $ hg --cwd g fetch -d '7 0' --switch -m 'automated merge' ../f
118 $ hg --cwd g fetch -d '0 0' --switch -m 'automated merge' ../f
123 119 pulling from ../f
124 120 searching for changes
125 121 adding changesets
@@ -127,9 +123,9 b' should merge f into g'
127 123 adding file changes
128 124 added 1 changesets with 1 changes to 1 files (+1 heads)
129 125 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 merging with 3:cc6a3744834d
126 merging with 3:6343ca3eff20
131 127 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
132 new changeset 4:55aa4f32ec59 merges remote changes with local
128 new changeset 4:f7faa0b7d3c6 merges remote changes with local
133 129 $ rm i/g
134 130
135 131 should abort, because i is modified
@@ -142,21 +138,19 b' test fetch with named branches'
142 138
143 139 $ hg init nbase
144 140 $ echo base > nbase/a
145 $ hg -R nbase ci -d '1 0' -Am base
141 $ hg -R nbase ci -Am base
146 142 adding a
147 143 $ hg -R nbase branch a
148 144 marked working directory as branch a
149 145 $ echo a > nbase/a
150 $ hg -R nbase ci -d '2 0' -m a
146 $ hg -R nbase ci -m a
151 147 $ hg -R nbase up -C 0
152 148 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
153 149 $ hg -R nbase branch b
154 150 marked working directory as branch b
155 151 $ echo b > nbase/b
156 $ hg -R nbase ci -Ad '3 0' -m b
152 $ hg -R nbase ci -Am b
157 153 adding b
158 $ echo
159
160 154
161 155 pull in change on foreign branch
162 156
@@ -169,10 +163,10 b' pull in change on foreign branch'
169 163 $ hg -R n1 up -C a
170 164 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
171 165 $ echo aa > n1/a
172 $ hg -R n1 ci -d '4 0' -m a1
166 $ hg -R n1 ci -m a1
173 167 $ hg -R n2 up -C b
174 168 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
175 $ hg -R n2 fetch -d '9 0' -m 'merge' n1
169 $ hg -R n2 fetch -m 'merge' n1
176 170 pulling from n1
177 171 searching for changes
178 172 adding changesets
@@ -185,8 +179,6 b' parent should be 2 (no automatic update)'
185 179 $ hg -R n2 parents --template '{rev}\n'
186 180 2
187 181 $ rm -fr n1 n2
188 $ echo
189
190 182
191 183 pull in changes on both foreign and local branches
192 184
@@ -199,14 +191,14 b' pull in changes on both foreign and loca'
199 191 $ hg -R n1 up -C a
200 192 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
201 193 $ echo aa > n1/a
202 $ hg -R n1 ci -d '4 0' -m a1
194 $ hg -R n1 ci -m a1
203 195 $ hg -R n1 up -C b
204 196 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
205 197 $ echo bb > n1/b
206 $ hg -R n1 ci -d '5 0' -m b1
198 $ hg -R n1 ci -m b1
207 199 $ hg -R n2 up -C b
208 200 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
209 $ hg -R n2 fetch -d '9 0' -m 'merge' n1
201 $ hg -R n2 fetch -m 'merge' n1
210 202 pulling from n1
211 203 searching for changes
212 204 adding changesets
@@ -220,8 +212,6 b' parent should be 4 (fast forward)'
220 212 $ hg -R n2 parents --template '{rev}\n'
221 213 4
222 214 $ rm -fr n1 n2
223 $ echo
224
225 215
226 216 pull changes on foreign (2 new heads) and local (1 new head) branches
227 217 with a local change
@@ -235,33 +225,33 b' with a local change'
235 225 $ hg -R n1 up -C a
236 226 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
237 227 $ echo a1 > n1/a
238 $ hg -R n1 ci -d '4 0' -m a1
228 $ hg -R n1 ci -m a1
239 229 $ hg -R n1 up -C b
240 230 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
241 231 $ echo bb > n1/b
242 $ hg -R n1 ci -d '5 0' -m b1
232 $ hg -R n1 ci -m b1
243 233 $ hg -R n1 up -C 1
244 234 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
245 235 $ echo a2 > n1/a
246 $ hg -R n1 ci -d '6 0' -m a2
236 $ hg -R n1 ci -m a2
247 237 created new head
248 238 $ hg -R n2 up -C b
249 239 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
250 240 $ echo change >> n2/c
251 $ hg -R n2 ci -Ad '7 0' -m local
241 $ hg -R n2 ci -A -m local
252 242 adding c
253 $ hg -R n2 fetch -d '9 0' -m 'merge' n1
243 $ hg -R n2 fetch -d '0 0' -m 'merge' n1
254 244 pulling from n1
255 245 searching for changes
256 246 adding changesets
257 247 adding manifests
258 248 adding file changes
259 249 added 3 changesets with 3 changes to 2 files (+2 heads)
260 updating to 5:708c6cce3d26
250 updating to 5:3c4a837a864f
261 251 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
262 merging with 3:d83427717b1f
252 merging with 3:1267f84a9ea5
263 253 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
264 new changeset 7:48f1a33f52af merges remote changes with local
254 new changeset 7:2cf2a1261f21 merges remote changes with local
265 255
266 256 parent should be 7 (new merge changeset)
267 257
@@ -283,21 +273,21 b' heads) with a local change'
283 273 $ hg -R n1 merge b
284 274 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
285 275 (branch merge, don't forget to commit)
286 $ hg -R n1 ci -d '4 0' -m merge
276 $ hg -R n1 ci -m merge
287 277 $ hg -R n1 up -C 2
288 278 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
289 279 $ echo c > n1/a
290 $ hg -R n1 ci -d '5 0' -m c
280 $ hg -R n1 ci -m c
291 281 $ hg -R n1 up -C 2
292 282 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
293 283 $ echo cc > n1/a
294 $ hg -R n1 ci -d '6 0' -m cc
284 $ hg -R n1 ci -m cc
295 285 created new head
296 286 $ hg -R n2 up -C b
297 287 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
298 288 $ echo change >> n2/b
299 $ hg -R n2 ci -Ad '7 0' -m local
300 $ hg -R n2 fetch -d '9 0' -m 'merge' n1
289 $ hg -R n2 ci -A -m local
290 $ hg -R n2 fetch -m 'merge' n1
301 291 pulling from n1
302 292 searching for changes
303 293 adding changesets
@@ -326,7 +316,7 b' pull in change on different branch than '
326 316 $ hg -R n1 ci -m next
327 317 $ hg -R n2 branch topic
328 318 marked working directory as branch topic
329 $ hg -R n2 fetch -d '0 0' -m merge n1
319 $ hg -R n2 fetch -m merge n1
330 320 abort: working dir not at branch tip (use "hg update" to check out branch tip)
331 321 [255]
332 322
@@ -393,8 +383,6 b' test issue1726'
393 383 new changeset 3:* merges remote changes with local (glob)
394 384 $ hg --cwd i1726r2 heads default --template '{rev}\n'
395 385 3
396 $ echo
397
398 386
399 387 test issue2047
400 388
@@ -415,4 +403,3 b' test issue2047'
415 403 adding manifests
416 404 adding file changes
417 405 added 1 changesets with 1 changes to 1 files
418 $ "$TESTDIR/killdaemons.py"
@@ -296,6 +296,7 b' Testing -h/--help:'
296 296 diff diff repository (or selected files)
297 297 export dump the header and diffs for one or more changesets
298 298 forget forget the specified files on the next commit
299 graft copy changes from other branches onto the current branch
299 300 grep search for a pattern in specified files and revisions
300 301 heads show current repository heads or show branch heads
301 302 help show help for a given topic or a help overview
@@ -377,6 +378,7 b' Testing -h/--help:'
377 378 diff diff repository (or selected files)
378 379 export dump the header and diffs for one or more changesets
379 380 forget forget the specified files on the next commit
381 graft copy changes from other branches onto the current branch
380 382 grep search for a pattern in specified files and revisions
381 383 heads show current repository heads or show branch heads
382 384 help show help for a given topic or a help overview
@@ -66,6 +66,7 b' Short help:'
66 66 diff diff repository (or selected files)
67 67 export dump the header and diffs for one or more changesets
68 68 forget forget the specified files on the next commit
69 graft copy changes from other branches onto the current branch
69 70 grep search for a pattern in specified files and revisions
70 71 heads show current repository heads or show branch heads
71 72 help show help for a given topic or a help overview
@@ -141,6 +142,7 b' Short help:'
141 142 diff diff repository (or selected files)
142 143 export dump the header and diffs for one or more changesets
143 144 forget forget the specified files on the next commit
145 graft copy changes from other branches onto the current branch
144 146 grep search for a pattern in specified files and revisions
145 147 heads show current repository heads or show branch heads
146 148 help show help for a given topic or a help overview
@@ -199,12 +201,7 b' Short help:'
199 201 Test short command list with verbose option
200 202
201 203 $ hg -v help shortlist
202 Mercurial Distributed SCM (version *) (glob)
203 (see http://mercurial.selenic.com for more information)
204
205 Copyright (C) 2005-2011 Matt Mackall and others
206 This is free software; see the source for copying conditions. There is NO
207 warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
204 Mercurial Distributed SCM
208 205
209 206 basic commands:
210 207
@@ -244,23 +241,24 b' Test short command list with verbose opt'
244 241 update working directory (or switch revisions)
245 242
246 243 global options:
247 -R --repository REPO repository root directory or name of overlay bundle
248 file
249 --cwd DIR change working directory
250 -y --noninteractive do not prompt, automatically pick the first choice
251 for all prompts
252 -q --quiet suppress output
253 -v --verbose enable additional output
254 --config CONFIG [+] set/override config option (use 'section.name=value')
255 --debug enable debugging output
256 --debugger start debugger
257 --encoding ENCODE set the charset encoding (default: ascii)
258 --encodingmode MODE set the charset encoding mode (default: strict)
259 --traceback always print a traceback on exception
260 --time time how long the command takes
261 --profile print command execution profile
262 --version output version information and exit
263 -h --help display help and exit
244
245 -R --repository REPO repository root directory or name of overlay bundle
246 file
247 --cwd DIR change working directory
248 -y --noninteractive do not prompt, automatically pick the first choice for
249 all prompts
250 -q --quiet suppress output
251 -v --verbose enable additional output
252 --config CONFIG [+] set/override config option (use 'section.name=value')
253 --debug enable debugging output
254 --debugger start debugger
255 --encoding ENCODE set the charset encoding (default: ascii)
256 --encodingmode MODE set the charset encoding mode (default: strict)
257 --traceback always print a traceback on exception
258 --time time how long the command takes
259 --profile print command execution profile
260 --version output version information and exit
261 -h --help display help and exit
264 262
265 263 [+] marked option can be specified multiple times
266 264
@@ -280,18 +278,16 b' Test short command list with verbose opt'
280 278
281 279 Returns 0 if all files are successfully added.
282 280
283 use "hg -v help add" to show verbose help
284
285 281 options:
286 282
287 -I --include PATTERN [+] include names matching the given patterns
288 -X --exclude PATTERN [+] exclude names matching the given patterns
289 -S --subrepos recurse into subrepositories
290 -n --dry-run do not perform actions, just print output
283 -I --include PATTERN [+] include names matching the given patterns
284 -X --exclude PATTERN [+] exclude names matching the given patterns
285 -S --subrepos recurse into subrepositories
286 -n --dry-run do not perform actions, just print output
291 287
292 288 [+] marked option can be specified multiple times
293 289
294 use "hg -v help add" to show global options
290 use "hg -v help add" to show more info
295 291
296 292 Verbose help for add
297 293
@@ -323,30 +319,32 b' Verbose help for add'
323 319
324 320 options:
325 321
326 -I --include PATTERN [+] include names matching the given patterns
327 -X --exclude PATTERN [+] exclude names matching the given patterns
328 -S --subrepos recurse into subrepositories
329 -n --dry-run do not perform actions, just print output
322 -I --include PATTERN [+] include names matching the given patterns
323 -X --exclude PATTERN [+] exclude names matching the given patterns
324 -S --subrepos recurse into subrepositories
325 -n --dry-run do not perform actions, just print output
326
327 [+] marked option can be specified multiple times
330 328
331 329 global options:
332 -R --repository REPO repository root directory or name of overlay bundle
333 file
334 --cwd DIR change working directory
335 -y --noninteractive do not prompt, automatically pick the first choice
336 for all prompts
337 -q --quiet suppress output
338 -v --verbose enable additional output
339 --config CONFIG [+] set/override config option (use
340 'section.name=value')
341 --debug enable debugging output
342 --debugger start debugger
343 --encoding ENCODE set the charset encoding (default: ascii)
344 --encodingmode MODE set the charset encoding mode (default: strict)
345 --traceback always print a traceback on exception
346 --time time how long the command takes
347 --profile print command execution profile
348 --version output version information and exit
349 -h --help display help and exit
330
331 -R --repository REPO repository root directory or name of overlay bundle
332 file
333 --cwd DIR change working directory
334 -y --noninteractive do not prompt, automatically pick the first choice for
335 all prompts
336 -q --quiet suppress output
337 -v --verbose enable additional output
338 --config CONFIG [+] set/override config option (use 'section.name=value')
339 --debug enable debugging output
340 --debugger start debugger
341 --encoding ENCODE set the charset encoding (default: ascii)
342 --encodingmode MODE set the charset encoding mode (default: strict)
343 --traceback always print a traceback on exception
344 --time time how long the command takes
345 --profile print command execution profile
346 --version output version information and exit
347 -h --help display help and exit
350 348
351 349 [+] marked option can be specified multiple times
352 350
@@ -359,32 +357,6 b' Test help option with version option'
359 357 Copyright (C) 2005-2011 Matt Mackall and others
360 358 This is free software; see the source for copying conditions. There is NO
361 359 warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
362
363 hg add [OPTION]... [FILE]...
364
365 add the specified files on the next commit
366
367 Schedule files to be version controlled and added to the repository.
368
369 The files will be added to the repository at the next commit. To undo an
370 add before that, see "hg forget".
371
372 If no names are given, add all files to the repository.
373
374 Returns 0 if all files are successfully added.
375
376 use "hg -v help add" to show verbose help
377
378 options:
379
380 -I --include PATTERN [+] include names matching the given patterns
381 -X --exclude PATTERN [+] exclude names matching the given patterns
382 -S --subrepos recurse into subrepositories
383 -n --dry-run do not perform actions, just print output
384
385 [+] marked option can be specified multiple times
386
387 use "hg -v help add" to show global options
388 360
389 361 $ hg add --skjdfks
390 362 hg add: option --skjdfks not recognized
@@ -394,10 +366,10 b' Test help option with version option'
394 366
395 367 options:
396 368
397 -I --include PATTERN [+] include names matching the given patterns
398 -X --exclude PATTERN [+] exclude names matching the given patterns
399 -S --subrepos recurse into subrepositories
400 -n --dry-run do not perform actions, just print output
369 -I --include PATTERN [+] include names matching the given patterns
370 -X --exclude PATTERN [+] exclude names matching the given patterns
371 -S --subrepos recurse into subrepositories
372 -n --dry-run do not perform actions, just print output
401 373
402 374 [+] marked option can be specified multiple times
403 375
@@ -430,7 +402,7 b' Test command without options'
430 402
431 403 Returns 0 on success, 1 if errors are encountered.
432 404
433 use "hg -v help verify" to show global options
405 use "hg -v help verify" to show more info
434 406
435 407 $ hg help diff
436 408 hg diff [OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...
@@ -465,25 +437,25 b' Test command without options'
465 437
466 438 options:
467 439
468 -r --rev REV [+] revision
469 -c --change REV change made by revision
470 -a --text treat all files as text
471 -g --git use git extended diff format
472 --nodates omit dates from diff headers
473 -p --show-function show which function each change is in
474 --reverse produce a diff that undoes the changes
475 -w --ignore-all-space ignore white space when comparing lines
476 -b --ignore-space-change ignore changes in the amount of white space
477 -B --ignore-blank-lines ignore changes whose lines are all blank
478 -U --unified NUM number of lines of context to show
479 --stat output diffstat-style summary of changes
480 -I --include PATTERN [+] include names matching the given patterns
481 -X --exclude PATTERN [+] exclude names matching the given patterns
482 -S --subrepos recurse into subrepositories
440 -r --rev REV [+] revision
441 -c --change REV change made by revision
442 -a --text treat all files as text
443 -g --git use git extended diff format
444 --nodates omit dates from diff headers
445 -p --show-function show which function each change is in
446 --reverse produce a diff that undoes the changes
447 -w --ignore-all-space ignore white space when comparing lines
448 -b --ignore-space-change ignore changes in the amount of white space
449 -B --ignore-blank-lines ignore changes whose lines are all blank
450 -U --unified NUM number of lines of context to show
451 --stat output diffstat-style summary of changes
452 -I --include PATTERN [+] include names matching the given patterns
453 -X --exclude PATTERN [+] exclude names matching the given patterns
454 -S --subrepos recurse into subrepositories
483 455
484 456 [+] marked option can be specified multiple times
485 457
486 use "hg -v help diff" to show global options
458 use "hg -v help diff" to show more info
487 459
488 460 $ hg help status
489 461 hg status [OPTION]... [FILE]...
@@ -527,26 +499,26 b' Test command without options'
527 499
528 500 options:
529 501
530 -A --all show status of all files
531 -m --modified show only modified files
532 -a --added show only added files
533 -r --removed show only removed files
534 -d --deleted show only deleted (but tracked) files
535 -c --clean show only files without changes
536 -u --unknown show only unknown (not tracked) files
537 -i --ignored show only ignored files
538 -n --no-status hide status prefix
539 -C --copies show source of copied files
540 -0 --print0 end filenames with NUL, for use with xargs
541 --rev REV [+] show difference from revision
542 --change REV list the changed files of a revision
543 -I --include PATTERN [+] include names matching the given patterns
544 -X --exclude PATTERN [+] exclude names matching the given patterns
545 -S --subrepos recurse into subrepositories
502 -A --all show status of all files
503 -m --modified show only modified files
504 -a --added show only added files
505 -r --removed show only removed files
506 -d --deleted show only deleted (but tracked) files
507 -c --clean show only files without changes
508 -u --unknown show only unknown (not tracked) files
509 -i --ignored show only ignored files
510 -n --no-status hide status prefix
511 -C --copies show source of copied files
512 -0 --print0 end filenames with NUL, for use with xargs
513 --rev REV [+] show difference from revision
514 --change REV list the changed files of a revision
515 -I --include PATTERN [+] include names matching the given patterns
516 -X --exclude PATTERN [+] exclude names matching the given patterns
517 -S --subrepos recurse into subrepositories
546 518
547 519 [+] marked option can be specified multiple times
548 520
549 use "hg -v help status" to show global options
521 use "hg -v help status" to show more info
550 522
551 523 $ hg -q help status
552 524 hg status [OPTION]... [FILE]...
@@ -630,7 +602,7 b' Test command with no help text'
630 602
631 603 (no help text available)
632 604
633 use "hg -v help nohelp" to show global options
605 use "hg -v help nohelp" to show more info
634 606
635 607 Test that default list of commands omits extension commands
636 608
@@ -656,6 +628,7 b' Test that default list of commands omits'
656 628 diff diff repository (or selected files)
657 629 export dump the header and diffs for one or more changesets
658 630 forget forget the specified files on the next commit
631 graft copy changes from other branches onto the current branch
659 632 grep search for a pattern in specified files and revisions
660 633 heads show current repository heads or show branch heads
661 634 help show help for a given topic or a help overview
@@ -520,8 +520,6 b' hgweb fileannotate, raw'
520 520 $ echo "" >> b
521 521 $ echo "" >> b
522 522 $ diff -u b a
523 $ echo
524
525 523
526 524 hgweb filerevision, raw
527 525
@@ -531,8 +529,6 b' hgweb filerevision, raw'
531 529 $ echo "" >> b
532 530 $ hg cat primes.py >> b
533 531 $ diff -u b a
534 $ echo
535
536 532
537 533 hgweb highlightcss friendly
538 534
@@ -277,7 +277,6 b' outgoing hooks can see env vars'
277 277 (run 'hg update' to get a working copy)
278 278 $ hg rollback
279 279 repository tip rolled back to revision 3 (undo pull)
280 working directory now based on revision 0
281 280
282 281 preoutgoing hook can prevent outgoing changes
283 282
@@ -61,8 +61,7 b' Test --user, --date and --message'
61 61 @ 0:07f494440405 test 0 0 - default - adda
62 62
63 63 $ hg rollback
64 repository tip rolled back to revision 1 (undo commit)
65 working directory now based on revision 0
64 repository tip rolled back to revision 1 (undo import)
66 65
67 66 Test --import-branch
68 67
@@ -74,8 +73,7 b' Test --import-branch'
74 73 @ 0:07f494440405 test 0 0 - default - adda
75 74
76 75 $ hg rollback
77 repository tip rolled back to revision 1 (undo commit)
78 working directory now based on revision 0
76 repository tip rolled back to revision 1 (undo import)
79 77
80 78 Test --strip
81 79
@@ -97,8 +95,7 b' Test --strip'
97 95 > EOF
98 96 applying patch from stdin
99 97 $ hg rollback
100 repository tip rolled back to revision 1 (undo commit)
101 working directory now based on revision 0
98 repository tip rolled back to revision 1 (undo import)
102 99
103 100 Test unsupported combinations
104 101
@@ -174,7 +171,6 b' Test applying multiple patches'
174 171 $ hg import --bypass ../patch1.diff ../patch2.diff
175 172 applying ../patch1.diff
176 173 applying ../patch2.diff
177 applied 16581080145e
178 174 $ shortlog
179 175 o 3:bc8ca3f8a7c4 test 0 0 - default - addf
180 176 |
@@ -199,7 +195,6 b' Test applying multiple patches with --ex'
199 195 $ hg import --bypass --exact ../patch1.diff ../patch2.diff
200 196 applying ../patch1.diff
201 197 applying ../patch2.diff
202 applied 16581080145e
203 198 $ shortlog
204 199 o 3:d60cb8989666 test 0 0 - foo - addf
205 200 |
@@ -199,7 +199,6 b' import two patches in one stream'
199 199 $ hg init b
200 200 $ hg --cwd a export 0:tip | hg --cwd b import -
201 201 applying patch from stdin
202 applied 80971e65b431
203 202 $ hg --cwd a id
204 203 1d4bd90af0e4 tip
205 204 $ hg --cwd b id
@@ -356,15 +355,20 b' patches: import patch1 patch2; rollback'
356 355 $ hg clone -qr0 a b
357 356 $ hg --cwd b parents --template 'parent: {rev}\n'
358 357 parent: 0
359 $ hg --cwd b import ../patch1 ../patch2
358 $ hg --cwd b import -v ../patch1 ../patch2
360 359 applying ../patch1
360 patching file a
361 a
362 created 1d4bd90af0e4
361 363 applying ../patch2
362 applied 1d4bd90af0e4
364 patching file a
365 a
366 created 6d019af21222
363 367 $ hg --cwd b rollback
364 repository tip rolled back to revision 1 (undo commit)
365 working directory now based on revision 1
368 repository tip rolled back to revision 0 (undo import)
369 working directory now based on revision 0
366 370 $ hg --cwd b parents --template 'parent: {rev}\n'
367 parent: 1
371 parent: 0
368 372 $ rm -r b
369 373
370 374
@@ -433,6 +437,7 b' Test fuzziness (ambiguous patch location'
433 437 applying fuzzy-tip.patch
434 438 patching file a
435 439 Hunk #1 succeeded at 1 with fuzz 2 (offset -2 lines).
440 applied to working directory
436 441 $ hg revert -a
437 442 reverting a
438 443
@@ -449,6 +454,7 b' test fuzziness with eol=auto'
449 454 applying fuzzy-tip.patch
450 455 patching file a
451 456 Hunk #1 succeeded at 1 with fuzz 2 (offset -2 lines).
457 applied to working directory
452 458 $ cd ..
453 459
454 460
@@ -651,6 +657,7 b' test import with similarity and git and '
651 657 removing a
652 658 adding b
653 659 recording removal of a as rename to b (88% similar)
660 applied to working directory
654 661 $ hg st -C
655 662 A b
656 663 a
@@ -665,6 +672,7 b' test import with similarity and git and '
665 672 patching file b
666 673 removing a
667 674 adding b
675 applied to working directory
668 676 $ hg st -C
669 677 A b
670 678 R a
@@ -680,6 +688,7 b' Issue1495: add empty file from the end o'
680 688 adding a
681 689 $ hg ci -m "commit"
682 690 $ cat > a.patch <<EOF
691 > add a, b
683 692 > diff --git a/a b/a
684 693 > --- a/a
685 694 > +++ b/a
@@ -690,9 +699,25 b' Issue1495: add empty file from the end o'
690 699 > EOF
691 700 $ hg import --no-commit a.patch
692 701 applying a.patch
702
703 apply a good patch followed by an empty patch (mainly to ensure
704 that dirstate is *not* updated when import crashes)
705 $ hg update -q -C .
706 $ rm b
707 $ touch empty.patch
708 $ hg import a.patch empty.patch
709 applying a.patch
710 applying empty.patch
711 transaction abort!
712 rollback completed
713 abort: empty.patch: no diffs found
714 [255]
715 $ hg tip --template '{rev} {desc|firstline}\n'
716 0 commit
717 $ hg -q status
718 M a
693 719 $ cd ..
694 720
695
696 721 create file when source is not /dev/null
697 722
698 723 $ cat > create.patch <<EOF
@@ -19,8 +19,8 b" creating 'local'"
19 19 store created
20 20 00changelog.i created
21 21 revlogv1
22 fncache
22 23 store
23 fncache
24 24 dotencode
25 25 $ echo this > local/foo
26 26 $ hg ci --cwd local -A -m "init"
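The repository-creation output now reports fncache before store. The requirements recorded for a new repository can also be inspected directly; a minimal sketch, assuming a freshly created repo named demo (sort is used so the check does not depend on the order of lines in the requires file):

  $ hg init demo
  $ sort demo/.hg/requires
  dotencode
  fncache
  revlogv1
  store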
@@ -48,8 +48,8 b' creating repo with format.dotencode=fals'
48 48 store created
49 49 00changelog.i created
50 50 revlogv1
51 fncache
51 52 store
52 fncache
53 53
54 54 test failure
55 55
@@ -145,8 +145,8 b" creating 'local/sub/repo'"
145 145 store created
146 146 00changelog.i created
147 147 revlogv1
148 fncache
148 149 store
149 fncache
150 150 dotencode
151 151
152 152 prepare test of init of url configured from paths
@@ -162,8 +162,8 b' init should (for consistency with clone)'
162 162 store created
163 163 00changelog.i created
164 164 revlogv1
165 fncache
165 166 store
166 fncache
167 167 dotencode
168 168
169 169 verify that clone also expand urls
@@ -175,8 +175,8 b' verify that clone also expand urls'
175 175 store created
176 176 00changelog.i created
177 177 revlogv1
178 fncache
178 179 store
179 fncache
180 180 dotencode
181 181
182 182 clone bookmarks
@@ -2,7 +2,7 b' hg debuginstall'
2 2 $ hg debuginstall
3 3 Checking encoding (ascii)...
4 4 Checking installed modules (*/mercurial)... (glob)
5 Checking templates...
5 Checking templates (*/mercurial/templates)... (glob)
6 6 Checking commit editor...
7 7 Checking username...
8 8 No problems detected
@@ -11,7 +11,7 b' hg debuginstall with no username'
11 11 $ HGUSER= hg debuginstall
12 12 Checking encoding (ascii)...
13 13 Checking installed modules (*/mercurial)... (glob)
14 Checking templates...
14 Checking templates (*/mercurial/templates)... (glob)
15 15 Checking commit editor...
16 16 Checking username...
17 17 no username supplied (see "hg help config")
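debuginstall now reports the templates directory it checked, which helps when diagnosing a broken or partial installation. It can also serve as a scripted smoke test; a minimal sketch, assuming hg is on PATH and that debuginstall exits non-zero when it detects problems (as in the missing-username case exercised by this test):

  $ hg debuginstall >install.log 2>&1 || echo "problems detected; see install.log"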
@@ -825,7 +825,7 b' Imported patch should not be rejected'
825 825 ignore $Id$
826 826
827 827 $ hg rollback
828 repository tip rolled back to revision 2 (undo commit)
828 repository tip rolled back to revision 2 (undo import)
829 829 working directory now based on revision 2
830 830 $ hg update --clean
831 831 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
Additional modified files omitted: the requested commit or file is too big and content was truncated.