##// END OF EJS Templates
merge default into stable for 2.0 code freeze
Matt Mackall -
r15273:38408275 merge 2.0-rc stable
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,47
#!/usr/bin/env python
#
# Dumps output generated by Mercurial's command server in a formatted style to a
# given file or stderr if '-' is specified. Output is also written in its raw
# format to stdout.
#
# $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py -
# o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8'

import sys, struct

if len(sys.argv) != 2:
    print 'usage: debugcmdserver.py FILE'
    sys.exit(1)

# Command-server output frame header: one channel byte followed by a
# big-endian unsigned 32-bit payload length.
outputfmt = '>cI'
outputfmtsize = struct.calcsize(outputfmt)

# '-' sends the formatted log to stderr; any other argument appends to
# that file (opened lazily here, closed at the bottom of the script).
if sys.argv[1] == '-':
    log = sys.stderr
else:
    log = open(sys.argv[1], 'a')
def read(size):
    '''Read up to size bytes from stdin, tee them verbatim to stdout,
    and return them. Raise EOFError once stdin is exhausted.'''
    chunk = sys.stdin.read(size)
    if not chunk:
        raise EOFError()
    sys.stdout.write(chunk)
    sys.stdout.flush()
    return chunk
31
try:
    while True:
        # Each frame: fixed-size header (channel letter + length), then
        # for output channels, 'length' bytes of payload.
        header = read(outputfmtsize)
        channel, length = struct.unpack(outputfmt, header)
        log.write('%s, %-4d' % (channel, length))
        if channel in 'IL':
            # Input channels ('I'nput / 'L'ine) carry a read request, not
            # data, so there is no payload to consume here.
            log.write(' -> waiting for input\n')
        else:
            data = read(length)
            log.write(' -> %r\n' % data)
        log.flush()
except EOFError:
    # Server closed its end of the pipe: normal termination.
    pass
finally:
    if log != sys.stderr:
        log.close()
@@ -0,0 +1,4
1 Greg Ward, author of the original bfiles extension
2 Na'Tosha Bard of Unity Technologies
3 Fog Creek Software
4 Special thanks to the University of Toronto and the UCOSP program
@@ -0,0 +1,94
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''track large binary files

Large binary files tend to be not very compressible, not very
diffable, and not at all mergeable. Such files are not handled
efficiently by Mercurial's storage format (revlog), which is based on
compressed binary deltas; storing large binary files as regular
Mercurial files wastes bandwidth and disk space and increases
Mercurial's memory usage. The largefiles extension addresses these
problems by adding a centralized client-server layer on top of
Mercurial: largefiles live in a *central store* out on the network
somewhere, and you only fetch the revisions that you need when you
need them.

largefiles works by maintaining a "standin file" in .hglf/ for each
largefile. The standins are small (41 bytes: an SHA-1 hash plus
newline) and are tracked by Mercurial. Largefile revisions are
identified by the SHA-1 hash of their contents, which is written to
the standin. largefiles uses that revision ID to get/put largefile
revisions from/to the central store. This saves both disk space and
bandwidth, since you don't need to retrieve all historical revisions
of large files when you clone or pull.

To start a new repository or add new large binary files, just add
--large to your ``hg add`` command. For example::

  $ dd if=/dev/urandom of=randomdata count=2000
  $ hg add --large randomdata
  $ hg commit -m 'add randomdata as a largefile'

When you push a changeset that adds/modifies largefiles to a remote
repository, its largefile revisions will be uploaded along with it.
Note that the remote Mercurial must also have the largefiles extension
enabled for this to work.

When you pull a changeset that affects largefiles from a remote
repository, Mercurial behaves as normal. However, when you update to
such a revision, any largefiles needed by that revision are downloaded
and cached (if they have never been downloaded before). This means
that network access may be required to update to changesets you have
not previously updated to.

If you already have large files tracked by Mercurial without the
largefiles extension, you will need to convert your repository in
order to benefit from largefiles. This is done with the 'hg lfconvert'
command::

  $ hg lfconvert --size 10 oldrepo newrepo

In repositories that already have largefiles in them, any new file
over 10MB will automatically be added as a largefile. To change this
threshold, set ``largefiles.size`` in your Mercurial config file to
the minimum size in megabytes to track as a largefile, or use the
--lfsize option to the add command (also in megabytes)::

  [largefiles]
  size = 2

  $ hg add --lfsize 2

The ``largefiles.patterns`` config option allows you to specify a list
of filename patterns (see ``hg help patterns``) that should always be
tracked as largefiles::

  [largefiles]
  patterns =
    *.jpg
    re:.*\.(png|bmp)$
    library.zip
    content/audio/*

Files that match one of these patterns will be added as largefiles
regardless of their size.
'''

from mercurial import commands

import lfcommands
import reposetup
import uisetup

# Standard extension hook points: Mercurial looks these names up at
# module level when loading the extension.
reposetup = reposetup.reposetup
uisetup = uisetup.uisetup

# lfconvert works on paths, not on an existing repo.
commands.norepo += " lfconvert"

cmdtable = lfcommands.cmdtable
@@ -0,0 +1,202
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''base class for store implementations and store-related utility code'''
10
11 import os
12 import tempfile
13 import binascii
14 import re
15
16 from mercurial import util, node, hg
17 from mercurial.i18n import _
18
19 import lfutil
20
class StoreError(Exception):
    '''Raised when there is a problem getting files from or putting
    files to a central store.'''

    def __init__(self, filename, hash, url, detail):
        # Keep everything the caller may want for a diagnostic message.
        self.filename = filename
        self.hash = hash
        self.url = url
        self.detail = detail

    def longmessage(self):
        # Verbose, user-facing form: name the failing URL when one is
        # known, otherwise point at the missing hgrc path configuration.
        if self.url:
            return ('%s: %s\n'
                    '(failed URL: %s)\n'
                    % (self.filename, self.detail, self.url))
        return ('%s: %s\n'
                '(no default or default-push path set in hgrc)\n'
                % (self.filename, self.detail))

    def __str__(self):
        return "%s: %s" % (self.url, self.detail)
42
43 class basestore(object):
44 def __init__(self, ui, repo, url):
45 self.ui = ui
46 self.repo = repo
47 self.url = url
48
49 def put(self, source, hash):
50 '''Put source file into the store under <filename>/<hash>.'''
51 raise NotImplementedError('abstract method')
52
53 def exists(self, hash):
54 '''Check to see if the store contains the given hash.'''
55 raise NotImplementedError('abstract method')
56
57 def get(self, files):
58 '''Get the specified largefiles from the store and write to local
59 files under repo.root. files is a list of (filename, hash)
60 tuples. Return (success, missing), lists of files successfuly
61 downloaded and those not found in the store. success is a list
62 of (filename, hash) tuples; missing is a list of filenames that
63 we could not get. (The detailed error message will already have
64 been presented to the user, so missing is just supplied as a
65 summary.)'''
66 success = []
67 missing = []
68 ui = self.ui
69
70 at = 0
71 for filename, hash in files:
72 ui.progress(_('getting largefiles'), at, unit='lfile',
73 total=len(files))
74 at += 1
75 ui.note(_('getting %s:%s\n') % (filename, hash))
76
77 cachefilename = lfutil.cachepath(self.repo, hash)
78 cachedir = os.path.dirname(cachefilename)
79
80 # No need to pass mode='wb' to fdopen(), since mkstemp() already
81 # opened the file in binary mode.
82 (tmpfd, tmpfilename) = tempfile.mkstemp(
83 dir=cachedir, prefix=os.path.basename(filename))
84 tmpfile = os.fdopen(tmpfd, 'w')
85
86 try:
87 hhash = binascii.hexlify(self._getfile(tmpfile, filename, hash))
88 except StoreError, err:
89 ui.warn(err.longmessage())
90 hhash = ""
91
92 if hhash != hash:
93 if hhash != "":
94 ui.warn(_('%s: data corruption (expected %s, got %s)\n')
95 % (filename, hash, hhash))
96 tmpfile.close() # no-op if it's already closed
97 os.remove(tmpfilename)
98 missing.append(filename)
99 continue
100
101 if os.path.exists(cachefilename): # Windows
102 os.remove(cachefilename)
103 os.rename(tmpfilename, cachefilename)
104 lfutil.linktosystemcache(self.repo, hash)
105 success.append((filename, hhash))
106
107 ui.progress(_('getting largefiles'), None)
108 return (success, missing)
109
110 def verify(self, revs, contents=False):
111 '''Verify the existence (and, optionally, contents) of every big
112 file revision referenced by every changeset in revs.
113 Return 0 if all is well, non-zero on any errors.'''
114 write = self.ui.write
115 failed = False
116
117 write(_('searching %d changesets for largefiles\n') % len(revs))
118 verified = set() # set of (filename, filenode) tuples
119
120 for rev in revs:
121 cctx = self.repo[rev]
122 cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))
123
124 failed = lfutil.any_(self._verifyfile(
125 cctx, cset, contents, standin, verified) for standin in cctx)
126
127 num_revs = len(verified)
128 num_lfiles = len(set([fname for (fname, fnode) in verified]))
129 if contents:
130 write(_('verified contents of %d revisions of %d largefiles\n')
131 % (num_revs, num_lfiles))
132 else:
133 write(_('verified existence of %d revisions of %d largefiles\n')
134 % (num_revs, num_lfiles))
135
136 return int(failed)
137
138 def _getfile(self, tmpfile, filename, hash):
139 '''Fetch one revision of one file from the store and write it
140 to tmpfile. Compute the hash of the file on-the-fly as it
141 downloads and return the binary hash. Close tmpfile. Raise
142 StoreError if unable to download the file (e.g. it does not
143 exist in the store).'''
144 raise NotImplementedError('abstract method')
145
146 def _verifyfile(self, cctx, cset, contents, standin, verified):
147 '''Perform the actual verification of a file in the store.
148 '''
149 raise NotImplementedError('abstract method')
150
import localstore, wirestore

# Map each supported URL scheme to the candidate store classes for it;
# _openstore() tries the entries in order.
_storeprovider = {
    'file': [localstore.localstore],
    'http': [wirestore.wirestore],
    'https': [wirestore.wirestore],
    'ssh': [wirestore.wirestore],
}

# Matches a leading "scheme://" prefix, capturing the scheme name.
_scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://')
161
# During clone this function is passed the src's ui object
# but it needs the dest's ui object so it can read out of
# the config file. Use repo.ui instead.
def _openstore(repo, remote=None, put=False):
    '''Return a store object for the given remote (or, when remote is
    None, for repo's default-push/default path, falling back to repo
    itself). Raise util.Abort for unsupported URL schemes or when no
    provider accepts the remote.'''
    ui = repo.ui

    if not remote:
        path = (getattr(repo, 'lfpullsource', None) or
                ui.expandpath('default-push', 'default'))

        # ui.expandpath() leaves 'default-push' and 'default' alone if
        # they cannot be expanded: fallback to the empty string,
        # meaning the current directory.
        if path == 'default-push' or path == 'default':
            path = ''
            remote = repo
        else:
            remote = hg.peer(repo, {}, path)

    # The path could be a scheme so use Mercurial's normal functionality
    # to resolve the scheme to a repository and use its path
    path = util.safehasattr(remote, 'url') and remote.url() or remote.path

    match = _scheme_re.match(path)
    if not match: # regular filesystem path
        scheme = 'file'
    else:
        scheme = match.group(1)

    try:
        storeproviders = _storeprovider[scheme]
    except KeyError:
        raise util.Abort(_('unsupported URL scheme %r') % scheme)

    for class_obj in storeproviders:
        try:
            return class_obj(ui, repo, remote)
        except lfutil.storeprotonotcapable:
            pass

    # Fix: the original passed path as an extra positional argument to
    # util.Abort, which never interpolates it into the message; format
    # it in explicitly so the user sees which path failed.
    raise util.Abort(_('%s does not appear to be a largefile store') % path)
@@ -0,0 +1,49
1 = largefiles - manage large binary files =
This extension is based on Greg Ward's bfiles extension, which can be found
at http://mercurial.selenic.com/wiki/BfilesExtension.
4
5 == The largefile store ==
6
7 largefile stores are, in the typical use case, centralized servers that have
8 every past revision of a given binary file. Each largefile is identified by
9 its sha1 hash, and all interactions with the store take one of the following
10 forms.
11
-Download a largefile with this hash
-Upload a largefile with this hash
-Check if the store has a largefile with this hash
15
16 largefiles stores can take one of two forms:
17
18 -Directories on a network file share
19 -Mercurial wireproto servers, either via ssh or http (hgweb)
20
21 == The Local Repository ==
22
23 The local repository has a largefile cache in .hg/largefiles which holds a
24 subset of the largefiles needed. On a clone only the largefiles at tip are
25 downloaded. When largefiles are downloaded from the central store, a copy is
26 saved in this store.
27
28 == The Global Cache ==
29
30 largefiles in a local repository cache are hardlinked to files in the global
31 cache. Before a file is downloaded we check if it is in the global cache.
32
33 == Implementation Details ==
34
35 Each largefile has a standin which is in .hglf. The standin is tracked by
36 Mercurial. The standin contains the SHA1 hash of the largefile. When a
37 largefile is added/removed/copied/renamed/etc the same operation is applied to
38 the standin. Thus the history of the standin is the history of the largefile.
39
40 For performance reasons, the contents of a standin are only updated before a
41 commit. Standins are added/removed/copied/renamed from add/remove/copy/rename
42 Mercurial commands but their contents will not be updated. The contents of a
43 standin will always be the hash of the largefile as of the last commit. To
44 support some commands (revert) some standins are temporarily updated but will
45 be changed back after the command is finished.
46
47 A Mercurial dirstate object tracks the state of the largefiles. The dirstate
48 uses the last modified time and current size to detect if a file has changed
49 (without reading the entire contents of the file).
@@ -0,0 +1,481
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
11 import os
12 import shutil
13
14 from mercurial import util, match as match_, hg, node, context, error
15 from mercurial.i18n import _
16
17 import lfutil
18 import basestore
19
20 # -- Commands ----------------------------------------------------------
21
22 def lfconvert(ui, src, dest, *pats, **opts):
23 '''convert a normal repository to a largefiles repository
24
25 Convert repository SOURCE to a new repository DEST, identical to
26 SOURCE except that certain files will be converted as largefiles:
27 specifically, any file that matches any PATTERN *or* whose size is
28 above the minimum size threshold is converted as a largefile. The
29 size used to determine whether or not to track a file as a
30 largefile is the size of the first version of the file. The
31 minimum size can be specified either with --size or in
32 configuration as ``largefiles.size``.
33
34 After running this command you will need to make sure that
35 largefiles is enabled anywhere you intend to push the new
36 repository.
37
38 Use --tonormal to convert largefiles back to normal files; after
39 this, the DEST repository can be used without largefiles at all.'''
40
41 if opts['tonormal']:
42 tolfile = False
43 else:
44 tolfile = True
45 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
46 try:
47 rsrc = hg.repository(ui, src)
48 if not rsrc.local():
49 raise util.Abort(_('%s is not a local Mercurial repo') % src)
50 except error.RepoError, err:
51 ui.traceback()
52 raise util.Abort(err.args[0])
53 if os.path.exists(dest):
54 if not os.path.isdir(dest):
55 raise util.Abort(_('destination %s already exists') % dest)
56 elif os.listdir(dest):
57 raise util.Abort(_('destination %s is not empty') % dest)
58 try:
59 ui.status(_('initializing destination %s\n') % dest)
60 rdst = hg.repository(ui, dest, create=True)
61 if not rdst.local():
62 raise util.Abort(_('%s is not a local Mercurial repo') % dest)
63 except error.RepoError:
64 ui.traceback()
65 raise util.Abort(_('%s is not a repo') % dest)
66
67 success = False
68 try:
69 # Lock destination to prevent modification while it is converted to.
70 # Don't need to lock src because we are just reading from its history
71 # which can't change.
72 dst_lock = rdst.lock()
73
74 # Get a list of all changesets in the source. The easy way to do this
75 # is to simply walk the changelog, using changelog.nodesbewteen().
76 # Take a look at mercurial/revlog.py:639 for more details.
77 # Use a generator instead of a list to decrease memory usage
78 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
79 rsrc.heads())[0])
80 revmap = {node.nullid: node.nullid}
81 if tolfile:
82 lfiles = set()
83 normalfiles = set()
84 if not pats:
85 pats = ui.config(lfutil.longname, 'patterns', default=())
86 if pats:
87 pats = pats.split(' ')
88 if pats:
89 matcher = match_.match(rsrc.root, '', list(pats))
90 else:
91 matcher = None
92
93 lfiletohash = {}
94 for ctx in ctxs:
95 ui.progress(_('converting revisions'), ctx.rev(),
96 unit=_('revision'), total=rsrc['tip'].rev())
97 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
98 lfiles, normalfiles, matcher, size, lfiletohash)
99 ui.progress(_('converting revisions'), None)
100
101 if os.path.exists(rdst.wjoin(lfutil.shortname)):
102 shutil.rmtree(rdst.wjoin(lfutil.shortname))
103
104 for f in lfiletohash.keys():
105 if os.path.isfile(rdst.wjoin(f)):
106 os.unlink(rdst.wjoin(f))
107 try:
108 os.removedirs(os.path.dirname(rdst.wjoin(f)))
109 except OSError:
110 pass
111
112 else:
113 for ctx in ctxs:
114 ui.progress(_('converting revisions'), ctx.rev(),
115 unit=_('revision'), total=rsrc['tip'].rev())
116 _addchangeset(ui, rsrc, rdst, ctx, revmap)
117
118 ui.progress(_('converting revisions'), None)
119 success = True
120 finally:
121 if not success:
122 # we failed, remove the new directory
123 shutil.rmtree(rdst.root)
124 dst_lock.release()
125
def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    '''Convert one changeset from the largefiles repo rsrc into the
    normal repo rdst, expanding standins back into real file contents.
    revmap maps already-converted source nodes to destination nodes
    and is updated with the newly committed changeset.'''
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        # For merges, compare the manifest against both parents as well:
        # ctx.files() alone does not cover everything that changed.
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    def getfilectx(repo, memctx, f):
        # memctx callback: supply the destination content for file f.
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            # The standin's content is the largefile's hash; resolve it
            # to the actual data via the local cache.
            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
            if f == '.hgtags':
                # Rewrite tag targets through revmap so tags point at
                # the converted nodes.
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()
201
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Convert one changeset of the normal repo rsrc into the largefiles
    repo rdst, turning files that qualify (via matcher/size) into
    standin-tracked largefiles. lfiles, normalfiles and lfiletohash are
    shared across calls and accumulate classification state; revmap is
    updated with the newly committed node.'''
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        # For merges, compare the manifest against both parents as well:
        # ctx.files() alone does not cover everything that changed.
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the lfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('Renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                # NOTE(review): 'renamed' is only bound in the
                # classification branch above; if f was classified in an
                # earlier changeset this reads a stale (or unbound)
                # value — confirm and tighten.
                if 'l' in ctx.filectx(f).flags():
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                fullpath = rdst.wjoin(f)
                lfutil.createdir(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    # Fix: pre-bind fd so that if open() itself raises,
                    # the finally block reports the real error instead of
                    # a NameError on fd.
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        # memctx callback: supply the destination content for file f
        # (the largefile's hash for standins, raw data otherwise).
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname], 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                # Rewrite tag targets through revmap so tags point at
                # the converted nodes.
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
        getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()
317
318 def _islfile(file, ctx, matcher, size):
319 '''Return true if file should be considered a largefile, i.e.
320 matcher matches it or it is larger than size.'''
321 # never store special .hg* files as largefiles
322 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
323 return False
324 if matcher and matcher(file):
325 return True
326 try:
327 return ctx.filectx(file).size() >= size * 1024 * 1024
328 except error.LookupError:
329 return False
330
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    # Don't upload locally. All largefiles are in the system wide cache
    # so the other repo can just get them from there.
    if not files or rdst.local():
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    # Only the hashes the store does not already have need uploading.
    files = [h for h in files if not store.exists(h)]
    for at, hash in enumerate(files):
        ui.progress(_('uploading largefiles'), at, unit='largefile',
            total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
    ui.progress(_('uploading largefiles'), None)
354
def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store. With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    # A concrete list (not an iterator) is required: store.verify()
    # takes len() of it.
    revs = range(len(repo)) if all else ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)
370
def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        # The standin's content is the expected hash of the largefile.
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile)) or
            expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
            not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])
397
def updatelfiles(ui, repo, filelist=None, printmessage=True):
    '''Update working-copy largefiles to match their standins, limited
    to the files in filelist when given. Status messages are printed
    unless printmessage is False.'''
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        # Consider both tracked largefiles and anything already known
        # to the largefiles dirstate (e.g. removed files).
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
            cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
            # increment the appropriate counter according to _updatelfile's
            # return value
            updated += i > 0 and i or 0
            removed -= i < 0 and i or 0
            if printmessage and (removed or updated) and not printed:
                # Announce work lazily: only once something actually changed.
                ui.status(_('getting changed largefiles\n'))
                printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()
429
def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        if os.path.exists(absstandin+'.orig'):
            # The standin was backed up (e.g. by revert); keep a matching
            # backup of the largefile before we overwrite it below.
            shutil.copyfile(abslfile, abslfile+'.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if (expecthash != '' and
            (not os.path.exists(abslfile) or
            expecthash != lfutil.hashfile(abslfile))):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                return None # don't try to set the mode or update the dirstate
            ret = 1
        # Mirror the standin's permission bits onto the largefile.
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        # No standin means the largefile was removed in this revision.
        if os.path.exists(abslfile):
            os.unlink(abslfile)
            ret = -1
    # Propagate the standin's dirstate status to the largefiles dirstate.
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret
468
469 # -- hg commands declarations ------------------------------------------------
470
# Command table picked up by Mercurial's extension loader: maps each
# command name to (callback, options, synopsis).
cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                   ('', 'tonormal', False,
                    _('convert from a largefiles repo to a normal repo')),
                   ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }
@@ -0,0 +1,448
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''largefiles utility code: must not import other modules in this package.'''
10
11 import os
12 import errno
13 import shutil
14 import stat
15 import hashlib
16
17 from mercurial import dirstate, httpconnection, match as match_, util, scmutil
18 from mercurial.i18n import _
19
20 shortname = '.hglf'
21 longname = 'largefiles'
22
23
24 # -- Portability wrappers ----------------------------------------------
25
def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
    '''Portability wrapper: walk the dirstate with an empty subrepo list
    and the given unknown/ignored flags.'''
    subrepos = []
    return dirstate.walk(matcher, subrepos, unknown, ignored)
28
def repo_add(repo, list):
    '''Portability wrapper: schedule files for addition via the working
    context.'''
    wctx = repo[None]
    return wctx.add(list)
32
33 def repo_remove(repo, list, unlink=False):
34 def remove(list, unlink):
35 wlock = repo.wlock()
36 try:
37 if unlink:
38 for f in list:
39 try:
40 util.unlinkpath(repo.wjoin(f))
41 except OSError, inst:
42 if inst.errno != errno.ENOENT:
43 raise
44 repo[None].forget(list)
45 finally:
46 wlock.release()
47 return remove(list, unlink=unlink)
48
def repo_forget(repo, list):
    '''Portability wrapper: forget files via the working context.'''
    wctx = repo[None]
    return wctx.forget(list)
52
def findoutgoing(repo, remote, force):
    '''Return the changesets present locally but missing from remote.'''
    # imported here, not at module top: lfutil must not import other
    # modules in this package and keeps mercurial imports minimal
    from mercurial import discovery
    common, _anyinc, _heads = discovery.findcommonincoming(repo,
        remote, force=force)
    return repo.changelog.findmissing(common)
58
59 # -- Private worker functions ------------------------------------------
60
def getminsize(ui, assumelfiles, opt, default=10):
    '''Return the minimum largefile size, in megabytes, as a float.

    opt is the value of the --lfsize style command-line option (may be
    empty).  When it is empty and assumelfiles is true, fall back to the
    'largefiles.size' config value (or default).  Raises util.Abort when
    the value is not a number or no usable size was supplied at all.'''
    lfsize = opt
    if not lfsize and assumelfiles:
        lfsize = ui.config(longname, 'size', default=default)
    if lfsize:
        try:
            lfsize = float(lfsize)
        except ValueError:
            raise util.Abort(_('largefiles: size must be number (not %s)\n')
                             % lfsize)
    # an empty-string option with assumelfiles false used to slip through
    # the old "is None" check and be returned verbatim; treat it as
    # "no size specified" as well
    if lfsize is None or lfsize == '':
        raise util.Abort(_('minimum size for largefiles must be specified'))
    return lfsize
74
def link(src, dest):
    '''Hardlink src to dest, degrading to a permission-preserving copy
    on filesystems without hardlink support.'''
    try:
        util.oslink(src, dest)
    except OSError:
        # if hardlinks fail, fallback on copy
        shutil.copyfile(src, dest)
        os.chmod(dest, os.stat(src).st_mode)
81 os.chmod(dest, os.stat(src).st_mode)
82
def systemcachepath(ui, hash):
    '''Return the path of hash inside the per-user (system-wide) cache,
    honouring the largefiles.systemcache config option when set.'''
    configured = ui.config(longname, 'systemcache', None)
    if configured:
        return os.path.join(configured, hash)
    if os.name == 'nt':
        # prefer the local appdata directory, fall back to APPDATA
        appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
        return os.path.join(appdata, longname, hash)
    if os.name == 'posix':
        return os.path.join(os.getenv('HOME'), '.' + longname, hash)
    raise util.Abort(_('unknown operating system: %s\n') % os.name)
96
def insystemcache(ui, hash):
    '''True if the given hash is present in the system-wide cache.'''
    return os.path.exists(systemcachepath(ui, hash))
99
def findfile(repo, hash):
    '''Return the cached path of the largefile with the given hash,
    checking the repo-local cache before the system-wide one; None when
    the file is in neither cache.'''
    if incache(repo, hash):
        repo.ui.note(_('Found %s in cache\n') % hash)
        return cachepath(repo, hash)
    elif insystemcache(repo.ui, hash):
        repo.ui.note(_('Found %s in system cache\n') % hash)
        return systemcachepath(repo.ui, hash)
    return None
108
class largefiles_dirstate(dirstate.dirstate):
    '''dirstate subclass that funnels every path through unixpath()
    before delegating, since the lfdirstate stores '/'-separated,
    normalized paths.'''
    def __getitem__(self, key):
        return super(largefiles_dirstate, self).__getitem__(unixpath(key))
    def normal(self, f):
        return super(largefiles_dirstate, self).normal(unixpath(f))
    def remove(self, f):
        return super(largefiles_dirstate, self).remove(unixpath(f))
    def add(self, f):
        return super(largefiles_dirstate, self).add(unixpath(f))
    def drop(self, f):
        return super(largefiles_dirstate, self).drop(unixpath(f))
    def forget(self, f):
        return super(largefiles_dirstate, self).forget(unixpath(f))
122
123 def openlfdirstate(ui, repo):
124 '''
125 Return a dirstate object that tracks largefiles: i.e. its root is
126 the repo root, but it is saved in .hg/largefiles/dirstate.
127 '''
128 admin = repo.join(longname)
129 opener = scmutil.opener(admin)
130 if util.safehasattr(repo.dirstate, '_validate'):
131 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
132 repo.dirstate._validate)
133 else:
134 lfdirstate = largefiles_dirstate(opener, ui, repo.root)
135
136 # If the largefiles dirstate does not exist, populate and create
137 # it. This ensures that we create it on the first meaningful
138 # largefiles operation in a new clone. It also gives us an easy
139 # way to forcibly rebuild largefiles state:
140 # rm .hg/largefiles/dirstate && hg status
141 # Or even, if things are really messed up:
142 # rm -rf .hg/largefiles && hg status
143 if not os.path.exists(os.path.join(admin, 'dirstate')):
144 util.makedirs(admin)
145 matcher = getstandinmatcher(repo)
146 for standin in dirstate_walk(repo.dirstate, matcher):
147 lfile = splitstandin(standin)
148 hash = readstandin(repo, lfile)
149 lfdirstate.normallookup(lfile)
150 try:
151 if hash == hashfile(lfile):
152 lfdirstate.normal(lfile)
153 except IOError, err:
154 if err.errno != errno.ENOENT:
155 raise
156
157 lfdirstate.write()
158
159 return lfdirstate
160
def lfdirstate_status(lfdirstate, repo, rev):
    '''Run a status check against lfdirstate under the repo wlock.

    Entries whose state is "unsure" are resolved by comparing the hash
    recorded in the standin at rev with the hash of the working copy;
    returns the usual 7-tuple of status lists (without the unsure one).'''
    wlock = repo.wlock()
    try:
        match = match_.always(repo.root, repo.getcwd())
        s = lfdirstate.status(match, [], False, False, False)
        unsure, modified, added, removed, missing, unknown, ignored, clean = s
        for lfile in unsure:
            # standins store the hex hash of the largefile's contents
            if repo[rev][standin(lfile)].data().strip() != \
                    hashfile(repo.wjoin(lfile)):
                modified.append(lfile)
            else:
                clean.append(lfile)
                # record the resolution so the next status is cheap
                lfdirstate.normal(lfile)
        lfdirstate.write()
    finally:
        wlock.release()
    return (modified, added, removed, missing, unknown, ignored, clean)
178
def listlfiles(repo, rev=None, matcher=None):
    '''return a list of largefiles in the working copy or the
    specified changeset'''
    if matcher is None:
        matcher = getstandinmatcher(repo)

    lfiles = []
    for f in repo[rev].walk(matcher):
        # ignore unknown files in working directory
        if rev is not None or repo.dirstate[f] != '?':
            lfiles.append(splitstandin(f))
    return lfiles
190
def incache(repo, hash):
    '''True if the given hash is present in the repo-local cache.'''
    return os.path.exists(cachepath(repo, hash))
193
def createdir(dir):
    '''Create directory dir (and any missing parents) unless it already
    exists.'''
    if os.path.exists(dir):
        return
    os.makedirs(dir)
197
def cachepath(repo, hash):
    '''Return the path of hash inside the repo-local largefiles cache
    (under .hg).'''
    return repo.join(os.path.join(longname, hash))
200
def copyfromcache(repo, hash, filename):
    '''Copy the specified largefile from the repo or system cache to
    filename in the repository. Return true on success or false if the
    file was not found in either cache (which should not happened:
    this is meant to be called only after ensuring that the needed
    largefile exists in the cache).'''
    cached = findfile(repo, hash)
    if cached is None:
        return False
    absdest = repo.wjoin(filename)
    util.makedirs(os.path.dirname(absdest))
    shutil.copy(cached, absdest)
    return True
213
def copytocache(repo, rev, file, uploaded=False):
    '''Store the working copy of file in the repo-local cache, keyed by
    the hash recorded in its standin, unless it is already cached.'''
    # note: rev and uploaded are accepted for interface compatibility
    # but not used here
    hash = readstandin(repo, file)
    if incache(repo, hash):
        return
    copytocacheabsolute(repo, repo.wjoin(file), hash)
219
def copytocacheabsolute(repo, file, hash):
    '''Store the file at the given absolute path in the repo-local
    cache under hash, hardlinking to/from the system-wide cache when
    possible.'''
    dest = cachepath(repo, hash)
    createdir(os.path.dirname(dest))
    if insystemcache(repo.ui, hash):
        link(systemcachepath(repo.ui, hash), dest)
    else:
        shutil.copyfile(file, dest)
        os.chmod(dest, os.stat(file).st_mode)
        linktosystemcache(repo, hash)
228
def linktosystemcache(repo, hash):
    '''Mirror a repo-local cache entry into the system-wide cache.'''
    createdir(os.path.dirname(systemcachepath(repo.ui, hash)))
    link(cachepath(repo, hash), systemcachepath(repo.ui, hash))
232
def getstandinmatcher(repo, pats=[], opts={}):
    '''Return a match object that applies pats to the standin directory'''
    standindir = repo.pathto(shortname)
    if pats:
        # patterns supplied: search standin directory relative to current dir
        cwd = repo.getcwd()
        if os.path.isabs(cwd):
            # cwd is an absolute path for hg -R <reponame>
            # work relative to the repository root in this case
            cwd = ''
        standinpats = [os.path.join(standindir, cwd, p) for p in pats]
        return getmatcher(repo, standinpats, opts, showbad=False)
    if os.path.isdir(standindir):
        # no patterns: match everything below the standin directory
        return getmatcher(repo, [standindir], opts, showbad=False)
    # no patterns and no standin dir: return matcher that matches nothing
    match = match_.match(repo.root, None, [], exact=True)
    match.matchfn = lambda f: False
    return match
253
def getmatcher(repo, pats=[], opts={}, showbad=True):
    '''Wrapper around scmutil.match() that adds showbad: if false,
    neuter the match object's bad() method so it does not print any
    warnings about missing files or directories.'''
    m = scmutil.match(repo[None], pats, opts)
    if showbad:
        return m
    # silence complaints about nonexistent files or directories
    m.bad = lambda f, msg: None
    return m
263
def composestandinmatcher(repo, rmatcher):
    '''Return a matcher that accepts standins corresponding to the
    files accepted by rmatcher. Pass the list of files in the matcher
    as the paths specified by the user.'''
    smatcher = getstandinmatcher(repo, rmatcher.files())
    basematch = smatcher.matchfn
    def composed_matchfn(f):
        # accept f only if it is a standin whose largefile rmatcher accepts
        return basematch(f) and rmatcher.matchfn(splitstandin(f))
    smatcher.matchfn = composed_matchfn
    return smatcher
275
def standin(filename):
    '''Return the repo-relative, slash-separated path to the standin
    for the specified big file.'''
    # Notes:
    # 1) Most callers want an absolute path, but _create_standin() needs
    #    it repo-relative so lfadd() can pass it to repo_add().  So leave
    #    it up to the caller to use repo.wjoin() to get an absolute path.
    # 2) Join with '/' because that's what dirstate always uses, even on
    #    Windows. Change existing separator to '/' first in case we are
    #    passed filenames from an external source (like the command line).
    return shortname + '/' + filename.replace(os.sep, '/')
287
def isstandin(filename):
    '''Return true if filename is a big file standin. filename must be
    in Mercurial's internal form (slash-separated).'''
    # purely textual prefix test; the remainder of the path is not checked
    return filename.startswith(shortname + '/')
292
def splitstandin(filename):
    '''Return the largefile path encoded in the given standin path, or
    None when filename is not a standin.  Split on '/' because that is
    what dirstate always uses, even on Windows; local separators are
    normalized first in case the name comes from an external source
    (like the command line).'''
    parts = filename.replace(os.sep, '/').split('/', 1)
    if len(parts) != 2 or parts[0] != shortname:
        return None
    return parts[1]
302
def updatestandin(repo, standin):
    '''Rewrite standin with the current hash and exec bit of its
    largefile; a no-op when the largefile is absent from the working
    directory.'''
    lfile = repo.wjoin(splitstandin(standin))
    if not os.path.exists(lfile):
        return
    hash = hashfile(lfile)
    executable = getexecutable(lfile)
    writestandin(repo, standin, hash, executable)
309
def readstandin(repo, filename, node=None):
    '''read hex hash from standin for filename at given node, or working
    directory if no node is given'''
    # strip() drops the trailing newline appended by writehash()
    return repo[node][standin(filename)].data().strip()
314
def writestandin(repo, standin, hash, executable):
    '''write hash to <repo.root>/<standin>'''
    # executable selects the permission bits applied by writehash()
    writehash(hash, repo.wjoin(standin), executable)
318
def copyandhash(instream, outfile):
    '''Read bytes from instream (iterable) and write them to outfile,
    computing the SHA-1 hash of the data along the way. Close outfile
    when done and return the binary hash.'''
    hasher = util.sha1('')
    for chunk in instream:
        hasher.update(chunk)
        outfile.write(chunk)

    # Blecch: closing a file that somebody else opened is rude and
    # wrong. But it's so darn convenient and practical! After all,
    # outfile was opened just to copy and hash.
    outfile.close()
    return hasher.digest()
334
def hashrepofile(repo, file):
    '''Return the hex hash of the repo-relative file's working copy.'''
    return hashfile(repo.wjoin(file))
337
def hashfile(file):
    '''Return the hex SHA-1 hash of the given file's contents, or the
    empty string when the file does not exist.'''
    if not os.path.exists(file):
        return ''
    hasher = util.sha1('')
    fd = open(file, 'rb')
    try:
        # close the descriptor even if reading fails; blockstream()
        # also closes it on a clean run, and a second close() is a
        # harmless no-op
        for data in blockstream(fd):
            hasher.update(data)
    finally:
        fd.close()
    return hasher.hexdigest()
347
class limitreader(object):
    '''File-like wrapper exposing at most `limit` bytes of f.'''
    def __init__(self, f, limit):
        # f: underlying file object; limit: remaining byte budget
        self.f = f
        self.limit = limit

    def read(self, length):
        '''Read up to length bytes, never exceeding the remaining
        limit; returns '' once the limit is exhausted.'''
        if not self.limit:
            return ''
        if length > self.limit:
            length = self.limit
        self.limit -= length
        return self.f.read(length)

    def close(self):
        '''Deliberately a no-op: the underlying file is not ours.'''
        pass
362
def blockstream(infile, blocksize=128 * 1024):
    """Generator that yields blocks of data from infile and closes infile."""
    while True:
        chunk = infile.read(blocksize)
        if not chunk:
            break
        yield chunk
    # same blecch as copyandhash() above
    infile.close()
372
def readhash(filename):
    '''Return the 40-byte hex hash stored at the start of filename;
    abort when the file is too short to contain one.'''
    rfile = open(filename, 'rb')
    try:
        # ensure the file is closed even if read() raises
        hash = rfile.read(40)
    finally:
        rfile.close()
    if len(hash) < 40:
        raise util.Abort(_('bad hash in \'%s\' (only %d bytes long)')
                         % (filename, len(hash)))
    return hash
381
def writehash(hash, filename, executable):
    '''Write hash plus a trailing newline to filename, creating parent
    directories as needed, replacing any existing file, and applying
    the mode implied by the executable flag.'''
    util.makedirs(os.path.dirname(filename))
    if os.path.exists(filename):
        os.unlink(filename)
    wfile = open(filename, 'wb')
    try:
        wfile.write(hash + '\n')
    finally:
        wfile.close()
    if os.path.exists(filename):
        os.chmod(filename, getmode(executable))
395
def getexecutable(filename):
    '''Truthy when the user, group and other execute bits are all set
    on filename.'''
    mode = os.stat(filename).st_mode
    return ((stat.S_IXUSR & mode) and
            (stat.S_IXGRP & mode) and
            (stat.S_IXOTH & mode))
401
def getmode(executable):
    '''Return the permission bits for a largefile: 0755 when it should
    be executable, 0644 otherwise.'''
    if executable:
        return 0755
    else:
        return 0644
407
def urljoin(first, second, *arg):
    '''Join URL fragments with single '/' separators, collapsing any
    doubled slash at each seam.'''
    def join(left, right):
        if not left.endswith('/'):
            left += '/'
        if right.startswith('/'):
            right = right[1:]
        return left + right

    url = first
    for piece in (second,) + arg:
        url = join(url, piece)
    return url
420
def hexsha1(data):
    """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
    object data"""
    digest = hashlib.sha1()
    for chunk in util.filechunkiter(data):
        digest.update(chunk)
    return digest.hexdigest()
428
def httpsendfile(ui, filename):
    '''Return an httpsendfile object that streams filename opened in
    binary mode.'''
    return httpconnection.httpsendfile(ui, filename, 'rb')
431
def unixpath(path):
    '''Return a version of path normalized for use with the lfdirstate.'''
    # collapse redundant separators/up-references, then force '/'
    normalized = os.path.normpath(path)
    return normalized.replace(os.sep, '/')
435
def islfilesrepo(repo):
    '''True when the repo declares the largefiles requirement and its
    store actually contains at least one standin file.'''
    return ('largefiles' in repo.requirements and
        any_(shortname + '/' in f[0] for f in repo.store.datafiles()))
439
def any_(gen):
    '''True if any element of gen is truthy (hand-rolled equivalent of
    the any() builtin; presumably kept for old-Python compatibility --
    TODO confirm).'''
    result = False
    for item in gen:
        if item:
            result = True
            break
    return result
445
class storeprotonotcapable(BaseException):
    '''Raised when no store class supports any of the given store types.'''
    def __init__(self, storetypes):
        # storetypes: the store types that would have been acceptable
        self.storetypes = storetypes
@@ -0,0 +1,71
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''store class for local filesystem'''
10
11 import os
12
13 from mercurial import util
14 from mercurial.i18n import _
15
16 import lfutil
17 import basestore
18
class localstore(basestore.basestore):
    '''Because there is a system-wide cache, the local store always
    uses that cache. Since the cache is updated elsewhere, we can
    just read from it here as if it were the store.'''

    def __init__(self, ui, repo, remote):
        # the "store" URL is the remote repo's .hg/largefiles directory
        url = os.path.join(remote.path, '.hg', lfutil.longname)
        super(localstore, self).__init__(ui, repo, util.expandpath(url))

    def put(self, source, filename, hash):
        '''Any file that is put must already be in the system-wide
        cache so do nothing.'''
        return

    def exists(self, hash):
        # presence in the system-wide cache counts as presence in store
        return lfutil.insystemcache(self.repo.ui, hash)

    def _getfile(self, tmpfile, filename, hash):
        '''Return the system-cache path for hash; raise StoreError when
        the file is not available locally.'''
        if lfutil.insystemcache(self.ui, hash):
            return lfutil.systemcachepath(self.ui, hash)
        raise basestore.StoreError(filename, hash, '',
            _("Can't get file locally"))

    def _verifyfile(self, cctx, cset, contents, standin, verified):
        '''Verify one standin at changeset cset; return True on failure.

        Non-standins and (filename, filenode) pairs already in verified
        are skipped.  With contents set, the cached file is re-hashed
        and compared against the hash recorded in the standin.'''
        filename = lfutil.splitstandin(standin)
        if not filename:
            return False
        fctx = cctx[standin]
        key = (filename, fctx.filenode())
        if key in verified:
            return False

        # the standin's first 40 bytes are the expected content hash
        expecthash = fctx.data()[0:40]
        verified.add(key)
        if not lfutil.insystemcache(self.ui, expecthash):
            self.ui.warn(
                _('changeset %s: %s missing\n'
                  ' (looked for hash %s)\n')
                % (cset, filename, expecthash))
            return True # failed

        if contents:
            storepath = lfutil.systemcachepath(self.ui, expecthash)
            actualhash = lfutil.hashfile(storepath)
            if actualhash != expecthash:
                self.ui.warn(
                    _('changeset %s: %s: contents differ\n'
                      ' (%s:\n'
                      ' expected hash %s,\n'
                      ' but got %s)\n')
                    % (cset, filename, storepath, expecthash, actualhash))
                return True # failed
        return False
This diff has been collapsed as it changes many lines, (830 lines changed) Show them Hide them
@@ -0,0 +1,830
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10
11 import os
12 import copy
13
14 from mercurial import hg, commands, util, cmdutil, match as match_, node, \
15 archival, error, merge
16 from mercurial.i18n import _
17 from mercurial.node import hex
18 from hgext import rebase
19 import lfutil
20
21 try:
22 from mercurial import scmutil
23 except ImportError:
24 pass
25
26 import lfutil
27 import lfcommands
28
def installnormalfilesmatchfn(manifest):
    '''overrides scmutil.match so that the matcher it returns will ignore all
    largefiles'''
    oldmatch = None # for the closure
    def override_match(repo, pats=[], opts={}, globbed=False,
            default='relpath'):
        match = oldmatch(repo, pats, opts, globbed, default)
        m = copy.copy(match)
        # "not a largefile": neither a standin itself nor a file whose
        # standin is tracked by the given manifest
        notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
                manifest)
        m._files = filter(notlfile, m._files)
        m._fmap = set(m._files)
        orig_matchfn = m.matchfn
        # and/or chain: original match result for non-largefiles,
        # None (falsy) for largefiles
        m.matchfn = lambda f: notlfile(f) and orig_matchfn(f) or None
        return m
    oldmatch = installmatchfn(override_match)
45
def installmatchfn(f):
    '''Install f as scmutil.match, stashing the previous matcher on f
    itself (as f.oldmatch) so restorematchfn() can undo the change;
    returns the previous matcher.'''
    oldmatch = scmutil.match
    f.oldmatch = oldmatch
    scmutil.match = f
    return oldmatch
51
def restorematchfn():
    '''restores scmutil.match to what it was before installnormalfilesmatchfn
    was called. no-op if scmutil.match is its original function.

    Note that n calls to installnormalfilesmatchfn will require n calls to
    restore matchfn to reverse'''
    # installmatchfn() stashed the previous matcher on the wrapper
    scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)
59
60 # -- Wrappers: modify existing commands --------------------------------
61
62 # Add works by going through the files that the user wanted to add and
63 # checking if they should be added as largefiles. Then it makes a new
64 # matcher which matches only the normal files and runs the original
65 # version of add.
# Add works by going through the files that the user wanted to add and
# checking if they should be added as largefiles. Then it makes a new
# matcher which matches only the normal files and runs the original
# version of add.
def override_add(orig, ui, repo, *pats, **opts):
    large = opts.pop('large', None)
    lfsize = lfutil.getminsize(
        ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))

    # build the optional pattern matcher from largefiles.patterns config
    lfmatcher = None
    if os.path.exists(repo.wjoin(lfutil.shortname)):
        lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
        if lfpats:
            lfmatcher = match_.match(repo.root, '', list(lfpats))

    lfnames = []
    m = scmutil.match(repo[None], pats, opts)
    m.bad = lambda x, y: None
    wctx = repo[None]
    for f in repo.walk(m):
        exact = m.exact(f)
        lfile = lfutil.standin(f) in wctx
        nfile = f in wctx
        exists = lfile or nfile

        # Don't warn the user when they attempt to add a normal tracked file.
        # The normal add code will do that for us.
        if exact and exists:
            if lfile:
                ui.warn(_('%s already a largefile\n') % f)
            continue

        if exact or not exists:
            # a file becomes a largefile when forced (--large), big
            # enough (--lfsize / config), or matching the config patterns
            abovemin = (lfsize and
                os.path.getsize(repo.wjoin(f)) >= lfsize * 1024 * 1024)
            if large or abovemin or (lfmatcher and lfmatcher(f)):
                lfnames.append(f)
                if ui.verbose or not exact:
                    ui.status(_('adding %s as a largefile\n') % m.rel(f))

    bad = []
    standins = []

    # Need to lock, otherwise there could be a race condition between
    # when standins are created and added to the repo.
    wlock = repo.wlock()
    try:
        if not opts.get('dry_run'):
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for f in lfnames:
                standinname = lfutil.standin(f)
                # the standin is created empty; its hash is filled in
                # at commit time
                lfutil.writestandin(repo, standinname, hash='',
                    executable=lfutil.getexecutable(repo.wjoin(f)))
                standins.append(standinname)
                if lfdirstate[f] == 'r':
                    lfdirstate.normallookup(f)
                else:
                    lfdirstate.add(f)
            lfdirstate.write()
            bad += [lfutil.splitstandin(f)
                    for f in lfutil.repo_add(repo, standins)
                    if f in m.files()]
    finally:
        wlock.release()

    # run the original add on the remaining (normal) files only
    installnormalfilesmatchfn(repo[None].manifest())
    result = orig(ui, repo, *pats, **opts)
    restorematchfn()

    return (result == 1 or bad) and 1 or 0
132
def override_remove(orig, ui, repo, *pats, **opts):
    '''Wrapped remove: let the original command handle normal files,
    then warn about / unlink / forget the matched largefiles here.'''
    manifest = repo[None].manifest()
    # run the original remove against normal files only
    installnormalfilesmatchfn(manifest)
    orig(ui, repo, *pats, **opts)
    restorematchfn()

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))
    m = scmutil.match(repo[None], pats, opts)
    try:
        repo.lfstatus = True
        s = repo.status(match=m, clean=True)
    finally:
        repo.lfstatus = False
    # keep only files that really are largefiles (have a standin)
    modified, added, deleted, clean = [[f for f in list
                                       if lfutil.standin(f) in manifest]
                                      for list in [s[0], s[1], s[3], s[6]]]

    def warn(files, reason):
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    # decide what gets deleted and what merely gets forgotten, matching
    # the semantics of plain "hg remove"
    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        remove, forget = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in sorted(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race inbetween.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in remove:
            if not after:
                os.unlink(repo.wjoin(f))
                # prune now-empty directories the largefile lived in
                currentdir = os.path.split(f)[0]
                while currentdir and not os.listdir(repo.wjoin(currentdir)):
                    os.rmdir(repo.wjoin(currentdir))
                    currentdir = os.path.split(currentdir)[0]
            lfdirstate.remove(f)
        lfdirstate.write()

        # translate to standins and apply the same operations to them
        forget = [lfutil.standin(f) for f in forget]
        remove = [lfutil.standin(f) for f in remove]
        lfutil.repo_forget(repo, forget)
        lfutil.repo_remove(repo, remove, unlink=True)
    finally:
        wlock.release()
192
def override_status(orig, ui, repo, *pats, **opts):
    '''Run the wrapped status command with repo.lfstatus switched on so
    largefiles are reported correctly; always switch it back off.'''
    repo.lfstatus = True
    try:
        return orig(ui, repo, *pats, **opts)
    finally:
        repo.lfstatus = False
199
def override_log(orig, ui, repo, *pats, **opts):
    '''Run the wrapped log command with repo.lfstatus switched on so
    largefiles are reported correctly; always switch it back off.

    Returns the wrapped command's result (it was previously dropped),
    matching override_status so callers see the command's return code.'''
    try:
        repo.lfstatus = True
        return orig(ui, repo, *pats, **opts)
    finally:
        repo.lfstatus = False
206
def override_verify(orig, ui, repo, *pats, **opts):
    '''Wrapped verify: strip the largefile-specific options, run the
    normal verify, then (with --large) verify the largefiles too.'''
    large = opts.pop('large', False)
    verifyall = opts.pop('lfa', False)
    verifycontents = opts.pop('lfc', False)

    result = orig(ui, repo, *pats, **opts)
    if not large:
        return result
    return result or lfcommands.verifylfiles(ui, repo, verifyall,
                                             verifycontents)
216
# Override needs to refresh standins so that update's normal merge
# will go through properly. Then the other update hook (overriding repo.update)
# will get the new files. Filemerge is also overriden so that the merge
# will merge standins correctly.
def override_update(orig, ui, repo, *pats, **opts):
    lfdirstate = lfutil.openlfdirstate(ui, repo)
    s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
        False, False)
    (unsure, modified, added, removed, missing, unknown, ignored, clean) = s

    # Need to lock between the standins getting updated and their
    # largefiles getting updated
    wlock = repo.wlock()
    try:
        if opts['check']:
            # --check: abort on uncommitted largefile changes; resolve
            # "unsure" entries by re-hashing the working copy
            mod = len(modified) > 0
            for lfile in unsure:
                standin = lfutil.standin(lfile)
                if repo['.'][standin].data().strip() != \
                        lfutil.hashfile(repo.wjoin(lfile)):
                    mod = True
                else:
                    lfdirstate.normal(lfile)
            lfdirstate.write()
            if mod:
                raise util.Abort(_('uncommitted local changes'))
        # XXX handle removed differently
        if not opts['clean']:
            # refresh standins of changed largefiles so the merge sees
            # their current hashes
            for lfile in unsure + modified + added:
                lfutil.updatestandin(repo, lfutil.standin(lfile))
    finally:
        wlock.release()
    return orig(ui, repo, *pats, **opts)
250
# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits, and copy/rename +
# edit without prompting the user.
def override_filemerge(origfn, repo, mynode, orig, fcd, fco, fca):
    # Use better variable names here. Because this is a wrapper we cannot
    # change the variable names in the function declaration.
    fcdest, fcother, fcancestor = fcd, fco, fca
    if not lfutil.isstandin(orig):
        # not a largefile: normal filemerge
        return origfn(repo, mynode, orig, fcdest, fcother, fcancestor)
    else:
        if not fcother.cmp(fcdest): # files identical?
            return None

        # backwards, use working dir parent as ancestor
        if fcancestor == fcother:
            fcancestor = fcdest.parents()[0]

        if orig != fcother.path():
            repo.ui.status(_('merging %s and %s to %s\n')
                % (lfutil.splitstandin(orig),
                   lfutil.splitstandin(fcother.path()),
                   lfutil.splitstandin(fcdest.path())))
        else:
            repo.ui.status(_('merging %s\n')
                % lfutil.splitstandin(fcdest.path()))

        # other side unchanged since the ancestor: keep ours
        if fcancestor.path() != fcother.path() and fcother.data() == \
                fcancestor.data():
            return 0
        # our side unchanged since the ancestor: take theirs
        if fcancestor.path() != fcdest.path() and fcdest.data() == \
                fcancestor.data():
            repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
            return 0

        # genuine conflict: ask which standin to keep
        if repo.ui.promptchoice(_('largefile %s has a merge conflict\n'
                                'keep (l)ocal or take (o)ther?') %
                                lfutil.splitstandin(orig),
                                (_('&Local'), _('&Other')), 0) == 0:
            return 0
        else:
            repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
            return 0
293
# Copy first changes the matchers to match standins instead of
# largefiles. Then it overrides util.copyfile in that function it
# checks if the destination largefile already exists. It also keeps a
# list of copied files so that the largefiles can be copied and the
# dirstate updated.
def override_copy(orig, ui, repo, pats, opts, rename=False):
    # doesn't remove largefile on rename
    if len(pats) < 2:
        # this isn't legal, let the original function deal with it
        return orig(ui, repo, pats, opts, rename)

    def makestandin(relpath):
        # translate a user-supplied path to the corresponding standin
        # path, kept relative to the current working directory
        path = scmutil.canonpath(repo.root, repo.getcwd(), relpath)
        return os.path.join(os.path.relpath('.', repo.getcwd()),
            lfutil.standin(path))

    fullpats = scmutil.expandpats(pats)
    dest = fullpats[-1]

    if os.path.isdir(dest):
        if not os.path.isdir(makestandin(dest)):
            os.makedirs(makestandin(dest))
    # This could copy both lfiles and normal files in one command,
    # but we don't want to do that. First replace their matcher to
    # only match normal files and run it, then replace it to just
    # match largefiles and run it again.
    nonormalfiles = False
    nolfiles = False
    try:
        installnormalfilesmatchfn(repo[None].manifest())
        result = orig(ui, repo, pats, opts, rename)
    except util.Abort, e:
        # "no files to copy" just means everything matched was a
        # largefile; remember that and carry on
        if str(e) != 'no files to copy':
            raise e
        else:
            nonormalfiles = True
            result = 0
    finally:
        restorematchfn()

    # The first rename can cause our current working directory to be removed.
    # In that case there is nothing left to copy/rename so just quit.
    try:
        repo.getcwd()
    except OSError:
        return result

    try:
        # When we call orig below it creates the standins but we don't add them
        # to the dir state until later so lock during that time.
        # NOTE(review): wlock is assigned inside this try; if wlock()
        # itself raised, the finally below would hit a NameError on
        # wlock.release() -- confirm and consider hoisting.
        wlock = repo.wlock()

        manifest = repo[None].manifest()
        oldmatch = None # for the closure
        def override_match(repo, pats=[], opts={}, globbed=False,
                default='relpath'):
            newpats = []
            # The patterns were previously mangled to add the standin
            # directory; we need to remove that now
            for pat in pats:
                if match_.patkind(pat) is None and lfutil.shortname in pat:
                    newpats.append(pat.replace(lfutil.shortname, ''))
                else:
                    newpats.append(pat)
            match = oldmatch(repo, newpats, opts, globbed, default)
            m = copy.copy(match)
            lfile = lambda f: lfutil.standin(f) in manifest
            m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
            m._fmap = set(m._files)
            orig_matchfn = m.matchfn
            m.matchfn = lambda f: (lfutil.isstandin(f) and
                                   lfile(lfutil.splitstandin(f)) and
                                   orig_matchfn(lfutil.splitstandin(f)) or
                                   None)
            return m
        oldmatch = installmatchfn(override_match)
        listpats = []
        for pat in pats:
            if match_.patkind(pat) is not None:
                listpats.append(pat)
            else:
                listpats.append(makestandin(pat))

        try:
            # intercept util.copyfile so we both veto overwriting an
            # existing largefile (without --force) and record every
            # copied standin for the dirstate pass below
            origcopyfile = util.copyfile
            copiedfiles = []
            def override_copyfile(src, dest):
                if lfutil.shortname in src and lfutil.shortname in dest:
                    destlfile = dest.replace(lfutil.shortname, '')
                    if not opts['force'] and os.path.exists(destlfile):
                        raise IOError('',
                            _('destination largefile already exists'))
                copiedfiles.append((src, dest))
                origcopyfile(src, dest)

            util.copyfile = override_copyfile
            result += orig(ui, repo, listpats, opts, rename)
        finally:
            util.copyfile = origcopyfile

        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for (src, dest) in copiedfiles:
            if lfutil.shortname in src and lfutil.shortname in dest:
                # mirror the standin copy/rename onto the largefile
                srclfile = src.replace(lfutil.shortname, '')
                destlfile = dest.replace(lfutil.shortname, '')
                destlfiledir = os.path.dirname(destlfile) or '.'
                if not os.path.isdir(destlfiledir):
                    os.makedirs(destlfiledir)
                if rename:
                    os.rename(srclfile, destlfile)
                    lfdirstate.remove(os.path.relpath(srclfile,
                        repo.root))
                else:
                    util.copyfile(srclfile, destlfile)
                    lfdirstate.add(os.path.relpath(destlfile,
                        repo.root))
        lfdirstate.write()
    except util.Abort, e:
        if str(e) != 'no files to copy':
            raise e
        else:
            nolfiles = True
    finally:
        restorematchfn()
        wlock.release()

    if nolfiles and nonormalfiles:
        raise util.Abort(_('no files to copy'))

    return result
424
425 # When the user calls revert, we have to be careful to not revert any
426 # changes to other largefiles accidentally. This means we have to keep
427 # track of the largefiles that are being reverted so we only pull down
428 # the necessary largefiles.
429 #
430 # Standins are only updated (to match the hash of largefiles) before
431 # commits. Update the standins then run the original revert, changing
432 # the matcher to hit standins instead of largefiles. Based on the
433 # resulting standins update the largefiles. Then return the standins
434 # to their proper state
def override_revert(orig, ui, repo, *pats, **opts):
    # Wrapper for 'hg revert'.  Standins are refreshed to match their
    # largefiles, the original revert is run against the standins, and
    # the largefiles are then updated from the resulting standins.
    # (Removed a leftover no-op statement that evaluated scmutil.match
    # without using the result.)
    #
    # Because we put the standins in a bad state (by updating them)
    # and then return them to a correct state we need to lock to
    # prevent others from changing them in their incorrect state.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        (modified, added, removed, missing, unknown, ignored, clean) = \
            lfutil.lfdirstate_status(lfdirstate, repo, repo['.'].rev())
        # Refresh standins of modified largefiles so revert sees the
        # current largefile contents.
        for lfile in modified:
            lfutil.updatestandin(repo, lfutil.standin(lfile))

        try:
            ctx = repo[opts.get('rev')]
            oldmatch = None # for the closure
            def override_match(ctxorrepo, pats=[], opts={}, globbed=False,
                    default='relpath'):
                if util.safehasattr(ctxorrepo, 'match'):
                    ctx0 = ctxorrepo
                else:
                    ctx0 = ctxorrepo[None]
                match = oldmatch(ctxorrepo, pats, opts, globbed, default)
                m = copy.copy(match)
                def tostandin(f):
                    # Divert matches from largefiles to their standins;
                    # drop names whose standin only exists in the
                    # working copy.
                    if lfutil.standin(f) in ctx0 or lfutil.standin(f) in ctx:
                        return lfutil.standin(f)
                    elif lfutil.standin(f) in repo[None]:
                        return None
                    return f
                m._files = [tostandin(f) for f in m._files]
                m._files = [f for f in m._files if f is not None]
                m._fmap = set(m._files)
                orig_matchfn = m.matchfn
                def matchfn(f):
                    if lfutil.isstandin(f):
                        # We need to keep track of what largefiles are being
                        # matched so we know which ones to update later --
                        # otherwise we accidentally revert changes to other
                        # largefiles. This is repo-specific, so duckpunch the
                        # repo object to keep the list of largefiles for us
                        # later.
                        if orig_matchfn(lfutil.splitstandin(f)) and \
                                (f in repo[None] or f in ctx):
                            lfileslist = getattr(repo, '_lfilestoupdate', [])
                            lfileslist.append(lfutil.splitstandin(f))
                            repo._lfilestoupdate = lfileslist
                            return True
                        else:
                            return False
                    return orig_matchfn(f)
                m.matchfn = matchfn
                return m
            oldmatch = installmatchfn(override_match)
            matches = override_match(repo[None], pats, opts)
            orig(ui, repo, *pats, **opts)
        finally:
            restorematchfn()
            lfileslist = getattr(repo, '_lfilestoupdate', [])
            lfcommands.updatelfiles(ui, repo, filelist=lfileslist,
                                    printmessage=False)

            # empty out the largefiles list so we start fresh next time
            repo._lfilestoupdate = []
            for lfile in modified:
                if lfile in lfileslist:
                    if os.path.exists(repo.wjoin(lfutil.standin(lfile))) and lfile\
                            in repo['.']:
                        # Put the standin back in the pre-revert state
                        # (matching the '.' revision's largefile).
                        lfutil.writestandin(repo, lfutil.standin(lfile),
                            repo['.'][lfile].data().strip(),
                            'x' in repo['.'][lfile].flags())
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        # Largefiles added since 'rev' that the revert matched must be
        # dropped and their standins deleted.
        for lfile in added:
            standin = lfutil.standin(lfile)
            if standin not in ctx and (standin in matches or opts.get('all')):
                if lfile in lfdirstate:
                    lfdirstate.drop(lfile)
                util.unlinkpath(repo.wjoin(standin))
        lfdirstate.write()
    finally:
        wlock.release()
516
def hg_update(orig, repo, node):
    # Run the wrapped update first, then bring the largefiles in the
    # working directory into sync with the new parent's standins.
    res = orig(repo, node)
    # XXX check if it worked first
    lfcommands.updatelfiles(repo.ui, repo)
    return res
522
def hg_clean(orig, repo, node, show_stats=True):
    # Clean checkout via the wrapped implementation, then refresh the
    # largefiles to match the restored standins.
    res = orig(repo, node, show_stats)
    lfcommands.updatelfiles(repo.ui, repo)
    return res
527
def hg_merge(orig, repo, node, force=None, remind=True):
    # Merge through the wrapped implementation, then update largefiles
    # so the working copy reflects the merged standins.
    res = orig(repo, node, force, remind)
    lfcommands.updatelfiles(repo.ui, repo)
    return res
532
# When we rebase a repository with remotely changed largefiles, we need to
# take some extra care so that the largefiles are correctly updated in the
# working copy
def override_pull(orig, ui, repo, source=None, **opts):
    # Wrapper for 'hg pull'.  Records the pull source on the repo (so
    # largefile downloads know where to go) and, for 'pull --rebase',
    # drives the pull + rebase itself with the rebasing flag set.
    if opts.get('rebase', False):
        repo._isrebasing = True
        try:
            if opts.get('update'):
                del opts['update']
                ui.debug('--update and --rebase are not compatible, ignoring '
                         'the update flag\n')
            del opts['rebase']
            cmdutil.bailifchanged(repo)
            # Remember how many revisions we had, to detect below
            # whether the pull brought anything in.
            revsprepull = len(repo)
            origpostincoming = commands.postincoming
            def _dummy(*args, **kwargs):
                pass
            # Suppress the post-pull working copy update; the rebase
            # below takes care of the working copy itself.
            commands.postincoming = _dummy
            repo.lfpullsource = source
            if not source:
                source = 'default'
            try:
                result = commands.pull(ui, repo, source, **opts)
            finally:
                # Always restore the real postincoming hook.
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            if revspostpull > revsprepull:
                # Something came in: rebase onto it.
                result = result or rebase.rebase(ui, repo)
        finally:
            repo._isrebasing = False
    else:
        repo.lfpullsource = source
        if not source:
            source = 'default'
        result = orig(ui, repo, source, **opts)
    return result
569
def override_rebase(orig, ui, repo, **opts):
    # Mark the repo as rebasing while the wrapped rebase runs, so other
    # largefiles hooks (e.g. commit) know to refresh largefiles first.
    # The flag is cleared even if the rebase fails.
    repo._isrebasing = True
    try:
        orig(ui, repo, **opts)
    finally:
        repo._isrebasing = False
576
def override_archive(orig, repo, dest, node, kind, decode=True, matchfn=None,
        prefix=None, mtime=None, subrepos=None):
    # Replacement for archival.archive() that resolves standins and
    # writes the real largefile contents into the archive.
    #
    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == 'files':
        if prefix:
            raise util.Abort(
                _('cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        # Emit one archive member, honouring the match filter and
        # optional keyword/eol decoding.
        if matchfn and not matchfn(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):
        def metadata():
            # Build the .hg_archival.txt contents: repo/node/branch
            # plus either global tags or latest-tag information.
            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
                hex(repo.changelog.node(0)), hex(node), ctx.branch())

            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                           if repo.tagtype(t) == 'global')
            if not tags:
                repo.ui.pushbuffer()
                opts = {'template': '{latesttag}\n{latesttagdistance}',
                        'style': '', 'patch': None, 'git': None}
                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                ltags, dist = repo.ui.popbuffer().split('\n')
                tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
                tags += 'latesttagdistance: %s\n' % dist

            return base + tags

        write('.hg_archival.txt', 0644, False, metadata)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            # Substitute the cached largefile for the standin.  The
            # closure is safe because write() is called before the
            # next iteration rebinds 'path'.
            path = lfutil.findfile(repo, getdata().strip())
            f = lfutil.splitstandin(f)

            def getdatafn():
                # NOTE(review): if open() raises, 'fd' is unbound and
                # the finally clause raises NameError — consider
                # initializing fd to None first.
                try:
                    fd = open(path, 'rb')
                    return fd.read()
                finally:
                    fd.close()

            getdata = getdatafn
        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)

    if subrepos:
        for subpath in ctx.substate:
            sub = ctx.sub(subpath)
            try:
                sub.archive(repo.ui, archiver, prefix)
            except TypeError:
                # Older subrepo.archive() signatures lack the ui arg.
                sub.archive(archiver, prefix)

    archiver.done()
651
# If a largefile is modified, the change is not reflected in its
# standin until a commit. cmdutil.bailifchanged() raises an exception
# if the repo has uncommitted changes. Wrap it to also check if
# largefiles were changed. This is used by bisect and backout.
def override_bailifchanged(orig, repo):
    orig(repo)
    # Re-run status with largefile awareness switched on and abort if
    # any of modified/added/removed/deleted is non-empty.
    repo.lfstatus = True
    dirty = any(repo.status()[:4])
    repo.lfstatus = False
    if dirty:
        raise util.Abort(_('outstanding uncommitted changes'))
663
# Fetch doesn't use cmdutil.bail_if_changed so override it to add the check
def override_fetch(orig, ui, repo, *pats, **opts):
    # Abort when any largefile has uncommitted changes; only then hand
    # control to the wrapped fetch.
    repo.lfstatus = True
    dirty = any(repo.status()[:4])
    repo.lfstatus = False
    if dirty:
        raise util.Abort(_('outstanding uncommitted changes'))
    return orig(ui, repo, *pats, **opts)
672
def override_forget(orig, ui, repo, *pats, **opts):
    # Wrapper for 'hg forget'.  Normal files are forgotten by the
    # wrapped command (with largefiles filtered out of the matcher);
    # matching largefiles are then forgotten here via the lfdirstate
    # and their standins.
    installnormalfilesmatchfn(repo[None].manifest())
    orig(ui, repo, *pats, **opts)
    restorematchfn()
    m = scmutil.match(repo[None], pats, opts)

    try:
        repo.lfstatus = True
        s = repo.status(match=m, clean=True)
    finally:
        repo.lfstatus = False
    # modified + added + missing + clean, restricted to tracked
    # largefiles (those whose standin is in the working manifest).
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]

    for f in forget:
        if lfutil.standin(f) not in repo.dirstate and not \
                os.path.isdir(m.rel(lfutil.standin(f))):
            ui.warn(_('not removing %s: file is already untracked\n')
                    % m.rel(f))

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race inbetween.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in forget:
            # Added-but-uncommitted largefiles are simply dropped;
            # committed ones are marked removed.
            if lfdirstate[f] == 'a':
                lfdirstate.drop(f)
            else:
                lfdirstate.remove(f)
        lfdirstate.write()
        lfutil.repo_remove(repo, [lfutil.standin(f) for f in forget],
            unlink=True)
    finally:
        wlock.release()
712
def getoutgoinglfiles(ui, repo, dest=None, **opts):
    # Return the set of standin names for largefiles that would be
    # pushed to 'dest', or None if the remote repo cannot be reached.
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    if revs:
        revs = [repo.lookup(r) for r in revs]

    remoteui = hg.remoteui

    try:
        remote = hg.repository(remoteui(repo, opts), dest)
    except error.RepoError:
        return None
    outgoing = lfutil.findoutgoing(repo, remote, False)
    if not outgoing:
        return None
    outgoing = repo.changelog.nodesbetween(outgoing, revs)[0]
    if opts.get('newest_first'):
        outgoing.reverse()

    toupload = set()
    for n in outgoing:
        parents = [p for p in repo.changelog.parents(n) if p != node.nullid]
        ctx = repo[n]
        files = set(ctx.files())
        if len(parents) == 2:
            # For a merge, also consider every file that differs from
            # either parent, since ctx.files() under-reports merges.
            mc = ctx.manifest()
            mp1 = ctx.parents()[0].manifest()
            mp2 = ctx.parents()[1].manifest()
            for f in mp1:
                if f not in mc:
                    files.add(f)
            for f in mp2:
                if f not in mc:
                    files.add(f)
            for f in mc:
                if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                    files.add(f)
        toupload |= set([f for f in files
                         if lfutil.isstandin(f) and f in ctx])
    return toupload
754
def override_outgoing(orig, ui, repo, dest=None, **opts):
    # Normal outgoing output first.
    orig(ui, repo, dest, **opts)

    if not opts.pop('large', None):
        return
    # --large: additionally list the largefiles that would be uploaded.
    toupload = getoutgoinglfiles(ui, repo, dest, **opts)
    if toupload is None:
        ui.status(_('largefiles: No remote repo\n'))
        return
    ui.status(_('largefiles to upload:\n'))
    for standin in toupload:
        ui.status(lfutil.splitstandin(standin) + '\n')
    ui.status('\n')
767
def override_summary(orig, ui, repo, *pats, **opts):
    # Normal summary output first.
    orig(ui, repo, *pats, **opts)

    if not opts.pop('large', None):
        return
    # --large: additionally report how many largefiles are pending.
    toupload = getoutgoinglfiles(ui, repo, None, **opts)
    if toupload is None:
        ui.status(_('largefiles: No remote repo\n'))
    else:
        ui.status(_('largefiles: %d to upload\n') % len(toupload))
777
def override_addremove(orig, ui, repo, *pats, **opts):
    # Check if the parent or child has largefiles; if so, disallow
    # addremove. If there is a symlink in the manifest then getting
    # the manifest throws an exception: catch it and let addremove
    # deal with it.
    def manifestfiles(rev):
        # Manifests are only iterable, so materialize one as a set;
        # a failing manifest read counts as "no files".
        try:
            return set(repo[rev].manifest())
        except util.Abort:
            return set()

    for fname in manifestfiles('tip') | manifestfiles(None):
        if fname.startswith(lfutil.shortname):
            raise util.Abort(
                _('addremove cannot be run on a repo with largefiles'))

    return orig(ui, repo, *pats, **opts)
799
# Calling purge with --all will cause the largefiles to be deleted.
# Override repo.status to prevent this from happening.
def override_purge(orig, ui, repo, *dirs, **opts):
    # Temporarily replace repo.status with a version that reports
    # largefiles tracked by the lfdirstate as neither unknown nor
    # ignored, so purge leaves them alone.  The original status method
    # is restored even if the wrapped purge raises (previously an
    # exception left the repo with the patched status forever).
    oldstatus = repo.status
    def override_status(node1='.', node2=None, match=None, ignored=False,
                        clean=False, unknown=False, listsubrepos=False):
        r = oldstatus(node1, node2, match, ignored, clean, unknown,
                      listsubrepos)
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        modified, added, removed, deleted, unknown, ignored, clean = r
        # Only files truly unknown to the lfdirstate stay listed.
        unknown = [f for f in unknown if lfdirstate[f] == '?']
        ignored = [f for f in ignored if lfdirstate[f] == '?']
        return modified, added, removed, deleted, unknown, ignored, clean
    repo.status = override_status
    try:
        orig(ui, repo, *dirs, **opts)
    finally:
        repo.status = oldstatus
816
def override_rollback(orig, ui, repo, **opts):
    # Roll back through the wrapped command, then put the standins and
    # the largefiles dirstate back in sync with the restored parent.
    result = orig(ui, repo, **opts)
    merge.update(repo, node=None, branchmerge=False, force=True,
                 partial=lfutil.isstandin)
    lfdirstate = lfutil.openlfdirstate(ui, repo)
    lfiles = lfutil.listlfiles(repo)
    oldlfiles = lfutil.listlfiles(repo, repo[None].parents()[0].rev())
    for lfile in lfiles:
        # Largefiles that already existed before the rollback only
        # need a state refresh; the rest must be re-added.
        if lfile in oldlfiles:
            lfdirstate.normallookup(lfile)
        else:
            lfdirstate.add(lfile)
    lfdirstate.write()
    return result
@@ -0,0 +1,160
1 # Copyright 2011 Fog Creek Software
2 #
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
5
6 import os
7 import tempfile
8 import urllib2
9
10 from mercurial import error, httprepo, util, wireproto
11 from mercurial.i18n import _
12
13 import lfutil
14
15 LARGEFILES_REQUIRED_MSG = ('\nThis repository uses the largefiles extension.'
16 '\n\nPlease enable it in your Mercurial config '
17 'file.\n')
18
def putlfile(repo, proto, sha):
    '''Put a largefile into a repository's local cache and into the
    system cache.'''
    tmp = None
    proto.redirect()
    try:
        try:
            # Spool the upload to a temp file so it can be hashed
            # before being accepted into the store.
            tmp = tempfile.NamedTemporaryFile(mode='wb+',
                                              prefix='hg-putlfile-')
            proto.getfile(tmp)
            tmp.seek(0)
            # Reject content that does not hash to the sha the client
            # claimed for it.
            if lfutil.hexsha1(tmp) != sha:
                return wireproto.pushres(1)
            lfutil.copytocacheabsolute(repo, tmp.name, sha)
        except IOError:
            repo.ui.warn(
                _('error: could not put received data into largefile store'))
            return wireproto.pushres(1)
    finally:
        if tmp:
            tmp.close()

    return wireproto.pushres(0)
41
def getlfile(repo, proto, sha):
    '''Stream out the largefile named by sha from the repository-local
    cache or system cache.'''
    filename = lfutil.findfile(repo, sha)
    if not filename:
        raise util.Abort(_('requested largefile %s not present in cache') % sha)
    fp = open(filename, 'rb')
    length = os.fstat(fp.fileno()).st_size

    # Since we can't set an HTTP content-length header here, and
    # Mercurial core provides no way to give the length of a streamres
    # (and reading the entire file into RAM would be ill-advised), we
    # just send the length on the first line of the response, like the
    # ssh proto does for string responses.
    def stream():
        yield '%d\n' % length
        for chunk in fp:
            yield chunk
    return wireproto.streamres(stream())
61
def statlfile(repo, proto, sha):
    '''Return '2\n' if the largefile is missing, '1\n' if it has a
    mismatched checksum, or '0\n' if it is in good condition'''
    filename = lfutil.findfile(repo, sha)
    if not filename:
        return '2\n'
    fp = None
    try:
        fp = open(filename, 'rb')
        if lfutil.hexsha1(fp) == sha:
            return '0\n'
        return '1\n'
    finally:
        if fp:
            fp.close()
75
def wirereposetup(ui, repo):
    # Graft the largefiles client-side wire commands onto a remote
    # (wire) repository object.
    class lfileswirerepository(repo.__class__):
        def putlfile(self, sha, fd):
            # Upload one largefile; returns 0 on success, 1 on failure.
            #
            # unfortunately, httprepository._callpush tries to convert its
            # input file-like into a bundle before sending it, so we can't use
            # it ...
            if issubclass(self.__class__, httprepo.httprepository):
                try:
                    return int(self._call('putlfile', data=fd, sha=sha,
                        headers={'content-type':'application/mercurial-0.1'}))
                except (ValueError, urllib2.HTTPError):
                    return 1
            # ... but we can't use sshrepository._call because the data=
            # argument won't get sent, and _callpush does exactly what we want
            # in this case: send the data straight through
            else:
                try:
                    ret, output = self._callpush("putlfile", fd, sha=sha)
                    if ret == "":
                        raise error.ResponseError(_('putlfile failed:'),
                                output)
                    return int(ret)
                except IOError:
                    return 1
                except ValueError:
                    raise error.ResponseError(
                        _('putlfile failed (unexpected response):'), ret)

        def getlfile(self, sha):
            # Returns (length, stream); the server sends the payload
            # length on the first line of the stream.
            stream = self._callstream("getlfile", sha=sha)
            length = stream.readline()
            try:
                length = int(length)
            except ValueError:
                self._abort(error.ResponseError(_("unexpected response:"),
                                                length))
            return (length, stream)

        def statlfile(self, sha):
            # Returns the server's stat code: 0 good, 1 corrupt, 2 missing.
            try:
                return int(self._call("statlfile", sha=sha))
            except (ValueError, urllib2.HTTPError):
                # If the server returns anything but an integer followed
                # by a newline, it's not speaking our language; if we get
                # an HTTP error, we can't be sure the largefile is present;
                # either way, consider it missing.
                return 2

    repo.__class__ = lfileswirerepository
125
# advertise the largefiles=serve capability
def capabilities(repo, proto):
    # Append our capability to whatever the wrapped implementation
    # reports.
    caps = capabilities_orig(repo, proto)
    return caps + ' largefiles=serve'
129
# duplicate what Mercurial's new out-of-band errors mechanism does, because
# clients old and new alike both handle it well
def webproto_refuseclient(self, message):
    # HTTP transport: deliver the refusal as an application/hg-error
    # response body.
    self.req.header([('Content-Type', 'application/hg-error')])
    return message
135
def sshproto_refuseclient(self, message):
    # ssh transport: write the refusal to the client's stderr channel
    # and an empty response on stdout, mimicking an out-of-band error.
    self.ui.write_err('%s\n-\n' % message)
    self.fout.write('\n')
    self.fout.flush()

    return ''
142
def heads(repo, proto):
    # Refuse to serve 'heads' for a largefiles repo, so clients missing
    # the extension get a clear error instead of an unusable clone.
    if not lfutil.islfilesrepo(repo):
        return wireproto.heads(repo, proto)
    return wireproto.ooberror(LARGEFILES_REQUIRED_MSG)
147
def sshrepo_callstream(self, cmd, **args):
    # Against a largefiles-aware server, route 'heads' to 'lheads'
    # (plain 'heads' is refused there), including inside batch requests.
    if self.capable('largefiles'):
        if cmd == 'heads':
            cmd = 'lheads'
        elif cmd == 'batch':
            args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
    return ssh_oldcallstream(self, cmd, **args)
154
def httprepo_callstream(self, cmd, **args):
    # Same rewriting as the ssh variant: largefiles-aware servers only
    # answer 'lheads', both standalone and batched.
    if self.capable('largefiles'):
        if cmd == 'heads':
            cmd = 'lheads'
        elif cmd == 'batch':
            args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
    return http_oldcallstream(self, cmd, **args)
@@ -0,0 +1,106
1 # Copyright 2010-2011 Fog Creek Software
2 # Copyright 2010-2011 Unity Technologies
3 #
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
6
7 '''remote largefile store; the base class for servestore'''
8
9 import urllib2
10
11 from mercurial import util
12 from mercurial.i18n import _
13
14 import lfutil
15 import basestore
16
17 class remotestore(basestore.basestore):
18 '''a largefile store accessed over a network'''
19 def __init__(self, ui, repo, url):
20 super(remotestore, self).__init__(ui, repo, url)
21
22 def put(self, source, hash):
23 if self._verify(hash):
24 return
25 if self.sendfile(source, hash):
26 raise util.Abort(
27 _('remotestore: could not put %s to remote store %s')
28 % (source, self.url))
29 self.ui.debug(
30 _('remotestore: put %s to remote store %s') % (source, self.url))
31
32 def exists(self, hash):
33 return self._verify(hash)
34
35 def sendfile(self, filename, hash):
36 self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
37 fd = None
38 try:
39 try:
40 fd = lfutil.httpsendfile(self.ui, filename)
41 except IOError, e:
42 raise util.Abort(
43 _('remotestore: could not open file %s: %s')
44 % (filename, str(e)))
45 return self._put(hash, fd)
46 finally:
47 if fd:
48 fd.close()
49
50 def _getfile(self, tmpfile, filename, hash):
51 # quit if the largefile isn't there
52 stat = self._stat(hash)
53 if stat == 1:
54 raise util.Abort(_('remotestore: largefile %s is invalid') % hash)
55 elif stat == 2:
56 raise util.Abort(_('remotestore: largefile %s is missing') % hash)
57
58 try:
59 length, infile = self._get(hash)
60 except urllib2.HTTPError, e:
61 # 401s get converted to util.Aborts; everything else is fine being
62 # turned into a StoreError
63 raise basestore.StoreError(filename, hash, self.url, str(e))
64 except urllib2.URLError, e:
65 # This usually indicates a connection problem, so don't
66 # keep trying with the other files... they will probably
67 # all fail too.
68 raise util.Abort('%s: %s' % (self.url, e.reason))
69 except IOError, e:
70 raise basestore.StoreError(filename, hash, self.url, str(e))
71
72 # Mercurial does not close its SSH connections after writing a stream
73 if length is not None:
74 infile = lfutil.limitreader(infile, length)
75 return lfutil.copyandhash(lfutil.blockstream(infile), tmpfile)
76
77 def _verify(self, hash):
78 return not self._stat(hash)
79
80 def _verifyfile(self, cctx, cset, contents, standin, verified):
81 filename = lfutil.splitstandin(standin)
82 if not filename:
83 return False
84 fctx = cctx[standin]
85 key = (filename, fctx.filenode())
86 if key in verified:
87 return False
88
89 verified.add(key)
90
91 stat = self._stat(hash)
92 if not stat:
93 return False
94 elif stat == 1:
95 self.ui.warn(
96 _('changeset %s: %s: contents differ\n')
97 % (cset, filename))
98 return True # failed
99 elif stat == 2:
100 self.ui.warn(
101 _('changeset %s: %s missing\n')
102 % (cset, filename))
103 return True # failed
104 else:
105 raise RuntimeError('verify failed: unexpected response from '
106 'statlfile (%r)' % stat)
@@ -0,0 +1,416
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''setup for largefiles repositories: reposetup'''
10 import copy
11 import types
12 import os
13 import re
14
15 from mercurial import context, error, manifest, match as match_, \
16 node, util
17 from mercurial.i18n import _
18
19 import lfcommands
20 import proto
21 import lfutil
22
23 def reposetup(ui, repo):
24 # wire repositories should be given new wireproto functions but not the
25 # other largefiles modifications
26 if not repo.local():
27 return proto.wirereposetup(ui, repo)
28
29 for name in ('status', 'commitctx', 'commit', 'push'):
30 method = getattr(repo, name)
31 #if not (isinstance(method, types.MethodType) and
32 # method.im_func is repo.__class__.commitctx.im_func):
33 if (isinstance(method, types.FunctionType) and
34 method.func_name == 'wrap'):
35 ui.warn(_('largefiles: repo method %r appears to have already been'
36 ' wrapped by another extension: '
37 'largefiles may behave incorrectly\n')
38 % name)
39
40 class lfiles_repo(repo.__class__):
41 lfstatus = False
        def status_nolfiles(self, *args, **kwargs):
            # Plain status() from the base repo class, bypassing all
            # largefiles processing.
            return super(lfiles_repo, self).status(*args, **kwargs)
44
        # When lfstatus is set, return a context that gives the names
        # of largefiles instead of their corresponding standins and
        # identifies the largefiles as always binary, regardless of
        # their actual contents.
        def __getitem__(self, changeid):
            ctx = super(lfiles_repo, self).__getitem__(changeid)
            if self.lfstatus:
                class lfiles_manifestdict(manifest.manifestdict):
                    def __contains__(self, filename):
                        # A name is "in" the manifest if either the
                        # file itself or its standin is tracked.
                        if super(lfiles_manifestdict,
                                self).__contains__(filename):
                            return True
                        return super(lfiles_manifestdict,
                            self).__contains__(lfutil.shortname+'/' + filename)
                class lfiles_ctx(ctx.__class__):
                    def files(self):
                        # Strip the standin directory prefix so
                        # largefiles show their visible names.
                        filenames = super(lfiles_ctx, self).files()
                        return [re.sub('^\\'+lfutil.shortname+'/', '',
                            filename) for filename in filenames]
                    def manifest(self):
                        man1 = super(lfiles_ctx, self).manifest()
                        man1.__class__ = lfiles_manifestdict
                        return man1
                    def filectx(self, path, fileid=None, filelog=None):
                        # Fall back to the standin when the visible
                        # name is not found.
                        try:
                            result = super(lfiles_ctx, self).filectx(path,
                                fileid, filelog)
                        except error.LookupError:
                            # Adding a null character will cause Mercurial to
                            # identify this as a binary file.
                            result = super(lfiles_ctx, self).filectx(
                                lfutil.shortname + '/' + path, fileid,
                                filelog)
                            olddata = result.data
                            result.data = lambda: olddata() + '\0'
                        return result
                ctx.__class__ = lfiles_ctx
            return ctx
83
        # Figure out the status of big files and insert them into the
        # appropriate list in the result. Also removes standin files
        # from the listing. Revert to the original status if
        # self.lfstatus is False.
        def status(self, node1='.', node2=None, match=None, ignored=False,
                clean=False, unknown=False, listsubrepos=False):
            # Returns the usual 7-tuple: (modified, added, removed,
            # deleted, unknown, ignored, clean).  The duplicated super()
            # calls below exist because older Mercurial versions lack
            # the listsubrepos argument (TypeError fallback).
            listignored, listclean, listunknown = ignored, clean, unknown
            if not self.lfstatus:
                try:
                    return super(lfiles_repo, self).status(node1, node2, match,
                        listignored, listclean, listunknown, listsubrepos)
                except TypeError:
                    return super(lfiles_repo, self).status(node1, node2, match,
                        listignored, listclean, listunknown)
            else:
                # some calls in this function rely on the old version of status
                self.lfstatus = False
                if isinstance(node1, context.changectx):
                    ctx1 = node1
                else:
                    ctx1 = repo[node1]
                if isinstance(node2, context.changectx):
                    ctx2 = node2
                else:
                    ctx2 = repo[node2]
                working = ctx2.rev() is None
                parentworking = working and ctx1 == self['.']

                def inctx(file, ctx):
                    # True if 'file' exists in the given context.
                    try:
                        if ctx.rev() is None:
                            return file in ctx.manifest()
                        ctx[file]
                        return True
                    except KeyError:
                        return False

                if match is None:
                    match = match_.always(self.root, self.getcwd())

                # Create a copy of match that matches standins instead
                # of largefiles.
                def tostandin(file):
                    if inctx(lfutil.standin(file), ctx2):
                        return lfutil.standin(file)
                    return file

                m = copy.copy(match)
                m._files = [tostandin(f) for f in m._files]

                # get ignored, clean, and unknown but remove them
                # later if they were not asked for
                try:
                    result = super(lfiles_repo, self).status(node1, node2, m,
                        True, True, True, listsubrepos)
                except TypeError:
                    result = super(lfiles_repo, self).status(node1, node2, m,
                        True, True, True)
                if working:
                    # hold the wlock while we read largefiles and
                    # update the lfdirstate
                    wlock = repo.wlock()
                    try:
                        # Any non-largefiles that were explicitly listed must be
                        # taken out or lfdirstate.status will report an error.
                        # The status of these files was already computed using
                        # super's status.
                        lfdirstate = lfutil.openlfdirstate(ui, self)
                        match._files = [f for f in match._files if f in
                            lfdirstate]
                        s = lfdirstate.status(match, [], listignored,
                                listclean, listunknown)
                        (unsure, modified, added, removed, missing, unknown,
                                ignored, clean) = s
                        if parentworking:
                            # Resolve "unsure" largefiles by comparing
                            # hashes against the parent's standins.
                            for lfile in unsure:
                                if ctx1[lfutil.standin(lfile)].data().strip() \
                                        != lfutil.hashfile(self.wjoin(lfile)):
                                    modified.append(lfile)
                                else:
                                    clean.append(lfile)
                                    lfdirstate.normal(lfile)
                            lfdirstate.write()
                        else:
                            # Diffing against an arbitrary revision: the
                            # lfdirstate classification cannot be trusted,
                            # so re-check each candidate against ctx1.
                            tocheck = unsure + modified + added + clean
                            modified, added, clean = [], [], []

                            for lfile in tocheck:
                                standin = lfutil.standin(lfile)
                                if inctx(standin, ctx1):
                                    if ctx1[standin].data().strip() != \
                                            lfutil.hashfile(self.wjoin(lfile)):
                                        modified.append(lfile)
                                    else:
                                        clean.append(lfile)
                                else:
                                    added.append(lfile)
                    finally:
                        wlock.release()

                    # Largefiles tracked in ctx1 but gone from the
                    # lfdirstate have been removed.
                    for standin in ctx1.manifest():
                        if not lfutil.isstandin(standin):
                            continue
                        lfile = lfutil.splitstandin(standin)
                        if not match(lfile):
                            continue
                        if lfile not in lfdirstate:
                            removed.append(lfile)
                    # Handle unknown and ignored differently
                    lfiles = (modified, added, removed, missing, [], [], clean)
                    result = list(result)
                    # Unknown files
                    result[4] = [f for f in unknown
                                 if (repo.dirstate[f] == '?' and
                                     not lfutil.isstandin(f))]
                    # Ignored files must be ignored by both the dirstate and
                    # lfdirstate
                    result[5] = set(ignored).intersection(set(result[5]))
                    # combine normal files and largefiles
                    normals = [[fn for fn in filelist
                                if not lfutil.isstandin(fn)]
                               for filelist in result]
                    result = [sorted(list1 + list2)
                              for (list1, list2) in zip(normals, lfiles)]
                else:
                    # Pure history query: just translate standin names
                    # back to their visible largefile names.
                    def toname(f):
                        if lfutil.isstandin(f):
                            return lfutil.splitstandin(f)
                        return f
                    result = [[toname(f) for f in items] for items in result]

                if not listunknown:
                    result[4] = []
                if not listignored:
                    result[5] = []
                if not listclean:
                    result[6] = []
                self.lfstatus = True
                return result
223
224 # As part of committing, copy all of the largefiles into the
225 # cache.
226 def commitctx(self, *args, **kwargs):
227 node = super(lfiles_repo, self).commitctx(*args, **kwargs)
228 ctx = self[node]
229 for filename in ctx.files():
230 if lfutil.isstandin(filename) and filename in ctx.manifest():
231 realfile = lfutil.splitstandin(filename)
232 lfutil.copytocache(self, ctx.node(), realfile)
233
234 return node
235
236 # Before commit, largefile standins have not had their
237 # contents updated to reflect the hash of their largefile.
238 # Do that here.
239 def commit(self, text="", user=None, date=None, match=None,
240 force=False, editor=False, extra={}):
241 orig = super(lfiles_repo, self).commit
242
243 wlock = repo.wlock()
244 try:
245 if getattr(repo, "_isrebasing", False):
246 # We have to take the time to pull down the new
247 # largefiles now. Otherwise if we are rebasing,
248 # any largefiles that were modified in the
249 # destination changesets get overwritten, either
250 # by the rebase or in the first commit after the
251 # rebase.
252 lfcommands.updatelfiles(repo.ui, repo)
253 # Case 1: user calls commit with no specific files or
254 # include/exclude patterns: refresh and commit all files that
255 # are "dirty".
256 if ((match is None) or
257 (not match.anypats() and not match.files())):
258 # Spend a bit of time here to get a list of files we know
259 # are modified so we can compare only against those.
260 # It can cost a lot of time (several seconds)
261 # otherwise to update all standins if the largefiles are
262 # large.
263 lfdirstate = lfutil.openlfdirstate(ui, self)
264 dirtymatch = match_.always(repo.root, repo.getcwd())
265 s = lfdirstate.status(dirtymatch, [], False, False, False)
266 modifiedfiles = []
267 for i in s:
268 modifiedfiles.extend(i)
269 lfiles = lfutil.listlfiles(self)
270 # this only loops through largefiles that exist (not
271 # removed/renamed)
272 for lfile in lfiles:
273 if lfile in modifiedfiles:
274 if os.path.exists(self.wjoin(lfutil.standin(lfile))):
275 # this handles the case where a rebase is being
276 # performed and the working copy is not updated
277 # yet.
278 if os.path.exists(self.wjoin(lfile)):
279 lfutil.updatestandin(self,
280 lfutil.standin(lfile))
281 lfdirstate.normal(lfile)
282 for lfile in lfdirstate:
283 if lfile in modifiedfiles:
284 if not os.path.exists(
285 repo.wjoin(lfutil.standin(lfile))):
286 lfdirstate.drop(lfile)
287 lfdirstate.write()
288
289 return orig(text=text, user=user, date=date, match=match,
290 force=force, editor=editor, extra=extra)
291
292 for f in match.files():
293 if lfutil.isstandin(f):
294 raise util.Abort(
295 _('file "%s" is a largefile standin') % f,
296 hint=('commit the largefile itself instead'))
297
298 # Case 2: user calls commit with specified patterns: refresh
299 # any matching big files.
300 smatcher = lfutil.composestandinmatcher(self, match)
301 standins = lfutil.dirstate_walk(self.dirstate, smatcher)
302
303 # No matching big files: get out of the way and pass control to
304 # the usual commit() method.
305 if not standins:
306 return orig(text=text, user=user, date=date, match=match,
307 force=force, editor=editor, extra=extra)
308
309 # Refresh all matching big files. It's possible that the
310 # commit will end up failing, in which case the big files will
311 # stay refreshed. No harm done: the user modified them and
312 # asked to commit them, so sooner or later we're going to
313 # refresh the standins. Might as well leave them refreshed.
314 lfdirstate = lfutil.openlfdirstate(ui, self)
315 for standin in standins:
316 lfile = lfutil.splitstandin(standin)
317 if lfdirstate[lfile] <> 'r':
318 lfutil.updatestandin(self, standin)
319 lfdirstate.normal(lfile)
320 else:
321 lfdirstate.drop(lfile)
322 lfdirstate.write()
323
324 # Cook up a new matcher that only matches regular files or
325 # standins corresponding to the big files requested by the
326 # user. Have to modify _files to prevent commit() from
327 # complaining "not tracked" for big files.
328 lfiles = lfutil.listlfiles(repo)
329 match = copy.copy(match)
330 orig_matchfn = match.matchfn
331
332 # Check both the list of largefiles and the list of
333 # standins because if a largefile was removed, it
334 # won't be in the list of largefiles at this point
335 match._files += sorted(standins)
336
337 actualfiles = []
338 for f in match._files:
339 fstandin = lfutil.standin(f)
340
341 # ignore known largefiles and standins
342 if f in lfiles or fstandin in standins:
343 continue
344
345 # append directory separator to avoid collisions
346 if not fstandin.endswith(os.sep):
347 fstandin += os.sep
348
349 # prevalidate matching standin directories
350 if lfutil.any_(st for st in match._files
351 if st.startswith(fstandin)):
352 continue
353 actualfiles.append(f)
354 match._files = actualfiles
355
356 def matchfn(f):
357 if orig_matchfn(f):
358 return f not in lfiles
359 else:
360 return f in standins
361
362 match.matchfn = matchfn
363 return orig(text=text, user=user, date=date, match=match,
364 force=force, editor=editor, extra=extra)
365 finally:
366 wlock.release()
367
        def push(self, remote, force=False, revs=None, newbranch=False):
            # Upload the largefiles referenced by the outgoing
            # changesets before pushing the changesets themselves, so
            # the remote never sees a standin it cannot resolve.
            o = lfutil.findoutgoing(repo, remote, force)
            if o:
                toupload = set()
                o = repo.changelog.nodesbetween(o, revs)[0]
                for n in o:
                    parents = [p for p in repo.changelog.parents(n)
                               if p != node.nullid]
                    ctx = repo[n]
                    files = set(ctx.files())
                    if len(parents) == 2:
                        # For merges, also consider files that differ
                        # from either parent; ctx.files() under-reports
                        # merge changesets.
                        mc = ctx.manifest()
                        mp1 = ctx.parents()[0].manifest()
                        mp2 = ctx.parents()[1].manifest()
                        for f in mp1:
                            if f not in mc:
                                files.add(f)
                        for f in mp2:
                            if f not in mc:
                                files.add(f)
                        for f in mc:
                            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f,
                                    None):
                                files.add(f)

                    # The standin's data is the sha naming the largefile
                    # to upload.
                    toupload = toupload.union(
                        set([ctx[f].data().strip()
                             for f in files
                             if lfutil.isstandin(f) and f in ctx]))
                lfcommands.uploadlfiles(ui, self, remote, toupload)
            return super(lfiles_repo, self).push(remote, force, revs,
                newbranch)
400
401 repo.__class__ = lfiles_repo
402
    def checkrequireslfiles(ui, repo, **kwargs):
        # Add 'largefiles' to the repo requirements as soon as any
        # standin appears in the store, so clients without the
        # extension refuse to operate on the repo instead of
        # corrupting it.
        if 'largefiles' not in repo.requirements and lfutil.any_(
                lfutil.shortname+'/' in f[0] for f in repo.store.datafiles()):
            # workaround bug in Mercurial 1.9 whereby requirements is
            # a list on newly-cloned repos
            repo.requirements = set(repo.requirements)

            repo.requirements |= set(['largefiles'])
            repo._writerequirements()
414
415 ui.setconfig('hooks', 'changegroup.lfiles', checkrequireslfiles)
416 ui.setconfig('hooks', 'commit.lfiles', checkrequireslfiles)
@@ -0,0 +1,138
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''setup for largefiles extension: uisetup'''
10
11 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
12 httprepo, localrepo, sshrepo, sshserver, util, wireproto
13 from mercurial.i18n import _
14 from mercurial.hgweb import hgweb_mod, protocol
15
16 import overrides
17 import proto
18
19 def uisetup(ui):
20 # Disable auto-status for some commands which assume that all
21 # files in the result are under Mercurial's control
22
23 entry = extensions.wrapcommand(commands.table, 'add',
24 overrides.override_add)
25 addopt = [('', 'large', None, _('add as largefile')),
26 ('', 'lfsize', '', _('add all files above this size (in megabytes)'
27 'as largefiles (default: 10)'))]
28 entry[1].extend(addopt)
29
30 entry = extensions.wrapcommand(commands.table, 'addremove',
31 overrides.override_addremove)
32 entry = extensions.wrapcommand(commands.table, 'remove',
33 overrides.override_remove)
34 entry = extensions.wrapcommand(commands.table, 'forget',
35 overrides.override_forget)
36 entry = extensions.wrapcommand(commands.table, 'status',
37 overrides.override_status)
38 entry = extensions.wrapcommand(commands.table, 'log',
39 overrides.override_log)
40 entry = extensions.wrapcommand(commands.table, 'rollback',
41 overrides.override_rollback)
42 entry = extensions.wrapcommand(commands.table, 'verify',
43 overrides.override_verify)
44
45 verifyopt = [('', 'large', None, _('verify largefiles')),
46 ('', 'lfa', None,
47 _('verify all revisions of largefiles not just current')),
48 ('', 'lfc', None,
49 _('verify largefile contents not just existence'))]
50 entry[1].extend(verifyopt)
51
52 entry = extensions.wrapcommand(commands.table, 'outgoing',
53 overrides.override_outgoing)
54 outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
55 entry[1].extend(outgoingopt)
56 entry = extensions.wrapcommand(commands.table, 'summary',
57 overrides.override_summary)
58 summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
59 entry[1].extend(summaryopt)
60
61 entry = extensions.wrapcommand(commands.table, 'update',
62 overrides.override_update)
63 entry = extensions.wrapcommand(commands.table, 'pull',
64 overrides.override_pull)
65 entry = extensions.wrapfunction(filemerge, 'filemerge',
66 overrides.override_filemerge)
67 entry = extensions.wrapfunction(cmdutil, 'copy',
68 overrides.override_copy)
69
70 # Backout calls revert so we need to override both the command and the
71 # function
72 entry = extensions.wrapcommand(commands.table, 'revert',
73 overrides.override_revert)
74 entry = extensions.wrapfunction(commands, 'revert',
75 overrides.override_revert)
76
77 # clone uses hg._update instead of hg.update even though they are the
78 # same function... so wrap both of them)
79 extensions.wrapfunction(hg, 'update', overrides.hg_update)
80 extensions.wrapfunction(hg, '_update', overrides.hg_update)
81 extensions.wrapfunction(hg, 'clean', overrides.hg_clean)
82 extensions.wrapfunction(hg, 'merge', overrides.hg_merge)
83
84 extensions.wrapfunction(archival, 'archive', overrides.override_archive)
85 if util.safehasattr(cmdutil, 'bailifchanged'):
86 extensions.wrapfunction(cmdutil, 'bailifchanged',
87 overrides.override_bailifchanged)
88 else:
89 extensions.wrapfunction(cmdutil, 'bail_if_changed',
90 overrides.override_bailifchanged)
91
92 # create the new wireproto commands ...
93 wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
94 wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
95 wireproto.commands['statlfile'] = (proto.statlfile, 'sha')
96
97 # ... and wrap some existing ones
98 wireproto.commands['capabilities'] = (proto.capabilities, '')
99 wireproto.commands['heads'] = (proto.heads, '')
100 wireproto.commands['lheads'] = (wireproto.heads, '')
101
102 # make putlfile behave the same as push and {get,stat}lfile behave
103 # the same as pull w.r.t. permissions checks
104 hgweb_mod.perms['putlfile'] = 'push'
105 hgweb_mod.perms['getlfile'] = 'pull'
106 hgweb_mod.perms['statlfile'] = 'pull'
107
108 # the hello wireproto command uses wireproto.capabilities, so it won't see
109 # our largefiles capability unless we replace the actual function as well.
110 proto.capabilities_orig = wireproto.capabilities
111 wireproto.capabilities = proto.capabilities
112
113 # these let us reject non-largefiles clients and make them display
114 # our error messages
115 protocol.webproto.refuseclient = proto.webproto_refuseclient
116 sshserver.sshserver.refuseclient = proto.sshproto_refuseclient
117
118 # can't do this in reposetup because it needs to have happened before
119 # wirerepo.__init__ is called
120 proto.ssh_oldcallstream = sshrepo.sshrepository._callstream
121 proto.http_oldcallstream = httprepo.httprepository._callstream
122 sshrepo.sshrepository._callstream = proto.sshrepo_callstream
123 httprepo.httprepository._callstream = proto.httprepo_callstream
124
125 # don't die on seeing a repo with the largefiles requirement
126 localrepo.localrepository.supported |= set(['largefiles'])
127
128 # override some extensions' stuff as well
129 for name, module in extensions.extensions():
130 if name == 'fetch':
131 extensions.wrapcommand(getattr(module, 'cmdtable'), 'fetch',
132 overrides.override_fetch)
133 if name == 'purge':
134 extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
135 overrides.override_purge)
136 if name == 'rebase':
137 extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
138 overrides.override_rebase)
@@ -0,0 +1,51
1 Largefiles allows for tracking large, incompressible binary files in Mercurial
2 without requiring excessive bandwidth for clones and pulls. Files added as
3 largefiles are not tracked directly by Mercurial; rather, their revisions are
4 identified by a checksum, and Mercurial tracks these checksums. This way, when
5 you clone a repository or pull in changesets, the large files in older
6 revisions of the repository are not needed, and only the ones needed to update
7 to the current version are downloaded. This saves both disk space and
8 bandwidth.
9
10 If you are starting a new repository or adding new large binary files, using
11 largefiles for them is as easy as adding '--large' to your hg add command. For
12 example:
13
14 $ dd if=/dev/urandom of=thisfileislarge count=2000
15 $ hg add --large thisfileislarge
16 $ hg commit -m 'add thisfileislarge, which is large, as a largefile'
17
18 When you push a changeset that affects largefiles to a remote repository, its
19 largefile revisions will be uploaded along with it. Note that the remote
20 Mercurial must also have the largefiles extension enabled for this to work.
21
22 When you pull a changeset that affects largefiles from a remote repository,
23 nothing different from Mercurial's normal behavior happens. However, when you
24 update to such a revision, any largefiles needed by that revision are
25 downloaded and cached if they have never been downloaded before. This means
26 that network access is required to update to a revision you have not yet updated
27 to.
28
29 If you already have large files tracked by Mercurial without the largefiles
30 extension, you will need to convert your repository in order to benefit from
31 largefiles. This is done with the 'hg lfconvert' command:
32
33 $ hg lfconvert --size 10 oldrepo newrepo
34
35 By default, in repositories that already have largefiles in them, any new file
36 over 10MB will automatically be added as largefiles. To change this
37 threshold, set [largefiles].size in your Mercurial config file to the minimum
38 size in megabytes to track as a largefile, or use the --lfsize option to the
39 add command (also in megabytes):
40
41 [largefiles]
42 size = 2
43
44 $ hg add --lfsize 2
45
46 The [largefiles].patterns config option allows you to specify specific
47 space-separated filename patterns (in shell glob syntax) that should always be
48 tracked as largefiles:
49
50 [largefiles]
51 patterns = *.jpg *.{png,bmp} library.zip content/audio/*
@@ -0,0 +1,29
1 # Copyright 2010-2011 Fog Creek Software
2 #
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
5
6 '''largefile store working over Mercurial's wire protocol'''
7
8 import lfutil
9 import remotestore
10
11 class wirestore(remotestore.remotestore):
12 def __init__(self, ui, repo, remote):
13 cap = remote.capable('largefiles')
14 if not cap:
15 raise lfutil.storeprotonotcapable([])
16 storetypes = cap.split(',')
17 if not 'serve' in storetypes:
18 raise lfutil.storeprotonotcapable(storetypes)
19 self.remote = remote
20 super(wirestore, self).__init__(ui, repo, remote.url())
21
22 def _put(self, hash, fd):
23 return self.remote.putlfile(hash, fd)
24
25 def _get(self, hash):
26 return self.remote.getlfile(hash)
27
28 def _stat(self, hash):
29 return self.remote.statlfile(hash)
1 NO CONTENT: new file 100644, binary diff hidden
NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,384 +1,386
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # check-code - a style and portability checker for Mercurial
3 # check-code - a style and portability checker for Mercurial
4 #
4 #
5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 import re, glob, os, sys
10 import re, glob, os, sys
11 import keyword
11 import keyword
12 import optparse
12 import optparse
13
13
14 def repquote(m):
14 def repquote(m):
15 t = re.sub(r"\w", "x", m.group('text'))
15 t = re.sub(r"\w", "x", m.group('text'))
16 t = re.sub(r"[^\sx]", "o", t)
16 t = re.sub(r"[^\sx]", "o", t)
17 return m.group('quote') + t + m.group('quote')
17 return m.group('quote') + t + m.group('quote')
18
18
19 def reppython(m):
19 def reppython(m):
20 comment = m.group('comment')
20 comment = m.group('comment')
21 if comment:
21 if comment:
22 return "#" * len(comment)
22 return "#" * len(comment)
23 return repquote(m)
23 return repquote(m)
24
24
25 def repcomment(m):
25 def repcomment(m):
26 return m.group(1) + "#" * len(m.group(2))
26 return m.group(1) + "#" * len(m.group(2))
27
27
28 def repccomment(m):
28 def repccomment(m):
29 t = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
29 t = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
30 return m.group(1) + t + "*/"
30 return m.group(1) + t + "*/"
31
31
32 def repcallspaces(m):
32 def repcallspaces(m):
33 t = re.sub(r"\n\s+", "\n", m.group(2))
33 t = re.sub(r"\n\s+", "\n", m.group(2))
34 return m.group(1) + t
34 return m.group(1) + t
35
35
36 def repinclude(m):
36 def repinclude(m):
37 return m.group(1) + "<foo>"
37 return m.group(1) + "<foo>"
38
38
39 def rephere(m):
39 def rephere(m):
40 t = re.sub(r"\S", "x", m.group(2))
40 t = re.sub(r"\S", "x", m.group(2))
41 return m.group(1) + t
41 return m.group(1) + t
42
42
43
43
44 testpats = [
44 testpats = [
45 [
45 [
46 (r'(pushd|popd)', "don't use 'pushd' or 'popd', use 'cd'"),
46 (r'(pushd|popd)', "don't use 'pushd' or 'popd', use 'cd'"),
47 (r'\W\$?\(\([^\)]*\)\)', "don't use (()) or $(()), use 'expr'"),
47 (r'\W\$?\(\([^\)]*\)\)', "don't use (()) or $(()), use 'expr'"),
48 (r'^function', "don't use 'function', use old style"),
48 (r'^function', "don't use 'function', use old style"),
49 (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
49 (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
50 (r'echo.*\\n', "don't use 'echo \\n', use printf"),
50 (r'echo.*\\n', "don't use 'echo \\n', use printf"),
51 (r'echo -n', "don't use 'echo -n', use printf"),
51 (r'echo -n', "don't use 'echo -n', use printf"),
52 (r'^diff.*-\w*N', "don't use 'diff -N'"),
52 (r'^diff.*-\w*N', "don't use 'diff -N'"),
53 (r'(^| )wc[^|]*$', "filter wc output"),
53 (r'(^| )wc[^|]*$', "filter wc output"),
54 (r'head -c', "don't use 'head -c', use 'dd'"),
54 (r'head -c', "don't use 'head -c', use 'dd'"),
55 (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
55 (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
56 (r'printf.*\\\d\d\d', "don't use 'printf \NNN', use Python"),
56 (r'printf.*\\\d\d\d', "don't use 'printf \NNN', use Python"),
57 (r'printf.*\\x', "don't use printf \\x, use Python"),
57 (r'printf.*\\x', "don't use printf \\x, use Python"),
58 (r'\$\(.*\)', "don't use $(expr), use `expr`"),
58 (r'\$\(.*\)', "don't use $(expr), use `expr`"),
59 (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
59 (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
60 (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
60 (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
61 "use egrep for extended grep syntax"),
61 "use egrep for extended grep syntax"),
62 (r'/bin/', "don't use explicit paths for tools"),
62 (r'/bin/', "don't use explicit paths for tools"),
63 (r'\$PWD', "don't use $PWD, use `pwd`"),
63 (r'\$PWD', "don't use $PWD, use `pwd`"),
64 (r'[^\n]\Z', "no trailing newline"),
64 (r'[^\n]\Z', "no trailing newline"),
65 (r'export.*=', "don't export and assign at once"),
65 (r'export.*=', "don't export and assign at once"),
66 ('^([^"\']|("[^"]*")|(\'[^\']*\'))*\\^', "^ must be quoted"),
66 ('^([^"\']|("[^"]*")|(\'[^\']*\'))*\\^', "^ must be quoted"),
67 (r'^source\b', "don't use 'source', use '.'"),
67 (r'^source\b', "don't use 'source', use '.'"),
68 (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
68 (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
69 (r'ls\s+[^|-]+\s+-', "options to 'ls' must come before filenames"),
69 (r'ls\s+[^|-]+\s+-', "options to 'ls' must come before filenames"),
70 (r'[^>]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
70 (r'[^>]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
71 (r'stop\(\)', "don't use 'stop' as a shell function name"),
71 (r'stop\(\)', "don't use 'stop' as a shell function name"),
72 ],
72 ],
73 # warnings
73 # warnings
74 []
74 []
75 ]
75 ]
76
76
77 testfilters = [
77 testfilters = [
78 (r"( *)(#([^\n]*\S)?)", repcomment),
78 (r"( *)(#([^\n]*\S)?)", repcomment),
79 (r"<<(\S+)((.|\n)*?\n\1)", rephere),
79 (r"<<(\S+)((.|\n)*?\n\1)", rephere),
80 ]
80 ]
81
81
82 uprefix = r"^ \$ "
82 uprefix = r"^ \$ "
83 uprefixc = r"^ > "
83 uprefixc = r"^ > "
84 utestpats = [
84 utestpats = [
85 [
85 [
86 (r'^(\S| $ ).*(\S\s+|^\s+)\n', "trailing whitespace on non-output"),
86 (r'^(\S| $ ).*(\S\s+|^\s+)\n', "trailing whitespace on non-output"),
87 (uprefix + r'.*\|\s*sed', "use regex test output patterns instead of sed"),
87 (uprefix + r'.*\|\s*sed', "use regex test output patterns instead of sed"),
88 (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
88 (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
89 (uprefix + r'.*\$\?', "explicit exit code checks unnecessary"),
89 (uprefix + r'.*\$\?', "explicit exit code checks unnecessary"),
90 (uprefix + r'.*\|\| echo.*(fail|error)',
90 (uprefix + r'.*\|\| echo.*(fail|error)',
91 "explicit exit code checks unnecessary"),
91 "explicit exit code checks unnecessary"),
92 (uprefix + r'set -e', "don't use set -e"),
92 (uprefix + r'set -e', "don't use set -e"),
93 (uprefixc + r'( *)\t', "don't use tabs to indent"),
93 (uprefixc + r'( *)\t', "don't use tabs to indent"),
94 ],
94 ],
95 # warnings
95 # warnings
96 []
96 []
97 ]
97 ]
98
98
99 for i in [0, 1]:
99 for i in [0, 1]:
100 for p, m in testpats[i]:
100 for p, m in testpats[i]:
101 if p.startswith('^'):
101 if p.startswith('^'):
102 p = uprefix + p[1:]
102 p = uprefix + p[1:]
103 else:
103 else:
104 p = uprefix + p
104 p = uprefix + p
105 utestpats[i].append((p, m))
105 utestpats[i].append((p, m))
106
106
107 utestfilters = [
107 utestfilters = [
108 (r"( *)(#([^\n]*\S)?)", repcomment),
108 (r"( *)(#([^\n]*\S)?)", repcomment),
109 ]
109 ]
110
110
111 pypats = [
111 pypats = [
112 [
112 [
113 (r'^\s*def\s*\w+\s*\(.*,\s*\(',
113 (r'^\s*def\s*\w+\s*\(.*,\s*\(',
114 "tuple parameter unpacking not available in Python 3+"),
114 "tuple parameter unpacking not available in Python 3+"),
115 (r'lambda\s*\(.*,.*\)',
115 (r'lambda\s*\(.*,.*\)',
116 "tuple parameter unpacking not available in Python 3+"),
116 "tuple parameter unpacking not available in Python 3+"),
117 (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
117 (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
118 (r'\breduce\s*\(.*', "reduce is not available in Python 3+"),
118 (r'\breduce\s*\(.*', "reduce is not available in Python 3+"),
119 (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
119 (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
120 (r'^\s*\t', "don't use tabs"),
120 (r'^\s*\t', "don't use tabs"),
121 (r'\S;\s*\n', "semicolon"),
121 (r'\S;\s*\n', "semicolon"),
122 (r'\w,\w', "missing whitespace after ,"),
122 (r'\w,\w', "missing whitespace after ,"),
123 (r'\w[+/*\-<>]\w', "missing whitespace in expression"),
123 (r'\w[+/*\-<>]\w', "missing whitespace in expression"),
124 (r'^\s+\w+=\w+[^,)]$', "missing whitespace in assignment"),
124 (r'^\s+\w+=\w+[^,)]$', "missing whitespace in assignment"),
125 (r'.{85}', "line too long"),
125 (r'.{85}', "line too long"),
126 (r'[^\n]\Z', "no trailing newline"),
126 (r'[^\n]\Z', "no trailing newline"),
127 (r'(\S\s+|^\s+)\n', "trailing whitespace"),
127 (r'(\S\s+|^\s+)\n', "trailing whitespace"),
128 # (r'^\s+[^_ ][^_. ]+_[^_]+\s*=', "don't use underbars in identifiers"),
128 # (r'^\s+[^_ ][^_. ]+_[^_]+\s*=', "don't use underbars in identifiers"),
129 # (r'\w*[a-z][A-Z]\w*\s*=', "don't use camelcase in identifiers"),
129 # (r'\w*[a-z][A-Z]\w*\s*=', "don't use camelcase in identifiers"),
130 (r'^\s*(if|while|def|class|except|try)\s[^[]*:\s*[^\]#\s]+',
130 (r'^\s*(if|while|def|class|except|try)\s[^[]*:\s*[^\]#\s]+',
131 "linebreak after :"),
131 "linebreak after :"),
132 (r'class\s[^( ]+:', "old-style class, use class foo(object)"),
132 (r'class\s[^( ]+:', "old-style class, use class foo(object)"),
133 (r'class\s[^( ]+\(\):',
133 (r'class\s[^( ]+\(\):',
134 "class foo() not available in Python 2.4, use class foo(object)"),
134 "class foo() not available in Python 2.4, use class foo(object)"),
135 (r'\b(%s)\(' % '|'.join(keyword.kwlist),
135 (r'\b(%s)\(' % '|'.join(keyword.kwlist),
136 "Python keyword is not a function"),
136 "Python keyword is not a function"),
137 (r',]', "unneeded trailing ',' in list"),
137 (r',]', "unneeded trailing ',' in list"),
138 # (r'class\s[A-Z][^\(]*\((?!Exception)',
138 # (r'class\s[A-Z][^\(]*\((?!Exception)',
139 # "don't capitalize non-exception classes"),
139 # "don't capitalize non-exception classes"),
140 # (r'in range\(', "use xrange"),
140 # (r'in range\(', "use xrange"),
141 # (r'^\s*print\s+', "avoid using print in core and extensions"),
141 # (r'^\s*print\s+', "avoid using print in core and extensions"),
142 (r'[\x80-\xff]', "non-ASCII character literal"),
142 (r'[\x80-\xff]', "non-ASCII character literal"),
143 (r'("\')\.format\(', "str.format() not available in Python 2.4"),
143 (r'("\')\.format\(', "str.format() not available in Python 2.4"),
144 (r'^\s*with\s+', "with not available in Python 2.4"),
144 (r'^\s*with\s+', "with not available in Python 2.4"),
145 (r'\.isdisjoint\(', "set.isdisjoint not available in Python 2.4"),
145 (r'\.isdisjoint\(', "set.isdisjoint not available in Python 2.4"),
146 (r'^\s*except.* as .*:', "except as not available in Python 2.4"),
146 (r'^\s*except.* as .*:', "except as not available in Python 2.4"),
147 (r'^\s*os\.path\.relpath', "relpath not available in Python 2.4"),
147 (r'^\s*os\.path\.relpath', "relpath not available in Python 2.4"),
148 (r'(?<!def)\s+(any|all|format)\(',
148 (r'(?<!def)\s+(any|all|format)\(',
149 "any/all/format not available in Python 2.4"),
149 "any/all/format not available in Python 2.4"),
150 (r'(?<!def)\s+(callable)\(',
150 (r'(?<!def)\s+(callable)\(',
151 "callable not available in Python 3, use hasattr(f, '__call__')"),
151 "callable not available in Python 3, use getattr(f, '__call__', None)"),
152 (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
152 (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
153 (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
153 (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
154 "gratuitous whitespace after Python keyword"),
154 "gratuitous whitespace after Python keyword"),
155 (r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"),
155 (r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"),
156 # (r'\s\s=', "gratuitous whitespace before ="),
156 # (r'\s\s=', "gratuitous whitespace before ="),
157 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
157 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
158 "missing whitespace around operator"),
158 "missing whitespace around operator"),
159 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\s',
159 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\s',
160 "missing whitespace around operator"),
160 "missing whitespace around operator"),
161 (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
161 (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
162 "missing whitespace around operator"),
162 "missing whitespace around operator"),
163 (r'[^+=*/!<>&| -](\s=|=\s)[^= ]',
163 (r'[^+=*/!<>&| -](\s=|=\s)[^= ]',
164 "wrong whitespace around ="),
164 "wrong whitespace around ="),
165 (r'raise Exception', "don't raise generic exceptions"),
165 (r'raise Exception', "don't raise generic exceptions"),
166 (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
166 (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
167 (r' [=!]=\s+(True|False|None)',
167 (r' [=!]=\s+(True|False|None)',
168 "comparison with singleton, use 'is' or 'is not' instead"),
168 "comparison with singleton, use 'is' or 'is not' instead"),
169 (r'^\s*(while|if) [01]:',
169 (r'^\s*(while|if) [01]:',
170 "use True/False for constant Boolean expression"),
170 "use True/False for constant Boolean expression"),
171 (r'(?<!def)\s+hasattr',
172 'hasattr(foo, bar) is broken, use util.safehasattr(foo, bar) instead'),
171 (r'opener\([^)]*\).read\(',
173 (r'opener\([^)]*\).read\(',
172 "use opener.read() instead"),
174 "use opener.read() instead"),
173 (r'opener\([^)]*\).write\(',
175 (r'opener\([^)]*\).write\(',
174 "use opener.write() instead"),
176 "use opener.write() instead"),
175 (r'[\s\(](open|file)\([^)]*\)\.read\(',
177 (r'[\s\(](open|file)\([^)]*\)\.read\(',
176 "use util.readfile() instead"),
178 "use util.readfile() instead"),
177 (r'[\s\(](open|file)\([^)]*\)\.write\(',
179 (r'[\s\(](open|file)\([^)]*\)\.write\(',
178 "use util.readfile() instead"),
180 "use util.readfile() instead"),
179 (r'^[\s\(]*(open(er)?|file)\([^)]*\)',
181 (r'^[\s\(]*(open(er)?|file)\([^)]*\)',
180 "always assign an opened file to a variable, and close it afterwards"),
182 "always assign an opened file to a variable, and close it afterwards"),
181 (r'[\s\(](open|file)\([^)]*\)\.',
183 (r'[\s\(](open|file)\([^)]*\)\.',
182 "always assign an opened file to a variable, and close it afterwards"),
184 "always assign an opened file to a variable, and close it afterwards"),
183 (r'(?i)descendent', "the proper spelling is descendAnt"),
185 (r'(?i)descendent', "the proper spelling is descendAnt"),
184 (r'\.debug\(\_', "don't mark debug messages for translation"),
186 (r'\.debug\(\_', "don't mark debug messages for translation"),
185 ],
187 ],
186 # warnings
188 # warnings
187 [
189 [
188 (r'.{81}', "warning: line over 80 characters"),
190 (r'.{81}', "warning: line over 80 characters"),
189 (r'^\s*except:$', "warning: naked except clause"),
191 (r'^\s*except:$', "warning: naked except clause"),
190 (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
192 (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
191 "warning: unwrapped ui message"),
193 "warning: unwrapped ui message"),
192 ]
194 ]
193 ]
195 ]
194
196
195 pyfilters = [
197 pyfilters = [
196 (r"""(?msx)(?P<comment>\#.*?$)|
198 (r"""(?msx)(?P<comment>\#.*?$)|
197 ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
199 ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
198 (?P<text>(([^\\]|\\.)*?))
200 (?P<text>(([^\\]|\\.)*?))
199 (?P=quote))""", reppython),
201 (?P=quote))""", reppython),
200 ]
202 ]
201
203
202 cpats = [
204 cpats = [
203 [
205 [
204 (r'//', "don't use //-style comments"),
206 (r'//', "don't use //-style comments"),
205 (r'^ ', "don't use spaces to indent"),
207 (r'^ ', "don't use spaces to indent"),
206 (r'\S\t', "don't use tabs except for indent"),
208 (r'\S\t', "don't use tabs except for indent"),
207 (r'(\S\s+|^\s+)\n', "trailing whitespace"),
209 (r'(\S\s+|^\s+)\n', "trailing whitespace"),
208 (r'.{85}', "line too long"),
210 (r'.{85}', "line too long"),
209 (r'(while|if|do|for)\(', "use space after while/if/do/for"),
211 (r'(while|if|do|for)\(', "use space after while/if/do/for"),
210 (r'return\(', "return is not a function"),
212 (r'return\(', "return is not a function"),
211 (r' ;', "no space before ;"),
213 (r' ;', "no space before ;"),
212 (r'\w+\* \w+', "use int *foo, not int* foo"),
214 (r'\w+\* \w+', "use int *foo, not int* foo"),
213 (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
215 (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
214 (r'\S+ (\+\+|--)', "use foo++, not foo ++"),
216 (r'\S+ (\+\+|--)', "use foo++, not foo ++"),
215 (r'\w,\w', "missing whitespace after ,"),
217 (r'\w,\w', "missing whitespace after ,"),
216 (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
218 (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
217 (r'^#\s+\w', "use #foo, not # foo"),
219 (r'^#\s+\w', "use #foo, not # foo"),
218 (r'[^\n]\Z', "no trailing newline"),
220 (r'[^\n]\Z', "no trailing newline"),
219 (r'^\s*#import\b', "use only #include in standard C code"),
221 (r'^\s*#import\b', "use only #include in standard C code"),
220 ],
222 ],
221 # warnings
223 # warnings
222 []
224 []
223 ]
225 ]
224
226
225 cfilters = [
227 cfilters = [
226 (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
228 (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
227 (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
229 (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
228 (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
230 (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
229 (r'(\()([^)]+\))', repcallspaces),
231 (r'(\()([^)]+\))', repcallspaces),
230 ]
232 ]
231
233
232 inutilpats = [
234 inutilpats = [
233 [
235 [
234 (r'\bui\.', "don't use ui in util"),
236 (r'\bui\.', "don't use ui in util"),
235 ],
237 ],
236 # warnings
238 # warnings
237 []
239 []
238 ]
240 ]
239
241
240 inrevlogpats = [
242 inrevlogpats = [
241 [
243 [
242 (r'\brepo\.', "don't use repo in revlog"),
244 (r'\brepo\.', "don't use repo in revlog"),
243 ],
245 ],
244 # warnings
246 # warnings
245 []
247 []
246 ]
248 ]
247
249
248 checks = [
250 checks = [
249 ('python', r'.*\.(py|cgi)$', pyfilters, pypats),
251 ('python', r'.*\.(py|cgi)$', pyfilters, pypats),
250 ('test script', r'(.*/)?test-[^.~]*$', testfilters, testpats),
252 ('test script', r'(.*/)?test-[^.~]*$', testfilters, testpats),
251 ('c', r'.*\.c$', cfilters, cpats),
253 ('c', r'.*\.c$', cfilters, cpats),
252 ('unified test', r'.*\.t$', utestfilters, utestpats),
254 ('unified test', r'.*\.t$', utestfilters, utestpats),
253 ('layering violation repo in revlog', r'mercurial/revlog\.py', pyfilters,
255 ('layering violation repo in revlog', r'mercurial/revlog\.py', pyfilters,
254 inrevlogpats),
256 inrevlogpats),
255 ('layering violation ui in util', r'mercurial/util\.py', pyfilters,
257 ('layering violation ui in util', r'mercurial/util\.py', pyfilters,
256 inutilpats),
258 inutilpats),
257 ]
259 ]
258
260
259 class norepeatlogger(object):
261 class norepeatlogger(object):
260 def __init__(self):
262 def __init__(self):
261 self._lastseen = None
263 self._lastseen = None
262
264
263 def log(self, fname, lineno, line, msg, blame):
265 def log(self, fname, lineno, line, msg, blame):
264 """print error related a to given line of a given file.
266 """print error related a to given line of a given file.
265
267
266 The faulty line will also be printed but only once in the case
268 The faulty line will also be printed but only once in the case
267 of multiple errors.
269 of multiple errors.
268
270
269 :fname: filename
271 :fname: filename
270 :lineno: line number
272 :lineno: line number
271 :line: actual content of the line
273 :line: actual content of the line
272 :msg: error message
274 :msg: error message
273 """
275 """
274 msgid = fname, lineno, line
276 msgid = fname, lineno, line
275 if msgid != self._lastseen:
277 if msgid != self._lastseen:
276 if blame:
278 if blame:
277 print "%s:%d (%s):" % (fname, lineno, blame)
279 print "%s:%d (%s):" % (fname, lineno, blame)
278 else:
280 else:
279 print "%s:%d:" % (fname, lineno)
281 print "%s:%d:" % (fname, lineno)
280 print " > %s" % line
282 print " > %s" % line
281 self._lastseen = msgid
283 self._lastseen = msgid
282 print " " + msg
284 print " " + msg
283
285
284 _defaultlogger = norepeatlogger()
286 _defaultlogger = norepeatlogger()
285
287
286 def getblame(f):
288 def getblame(f):
287 lines = []
289 lines = []
288 for l in os.popen('hg annotate -un %s' % f):
290 for l in os.popen('hg annotate -un %s' % f):
289 start, line = l.split(':', 1)
291 start, line = l.split(':', 1)
290 user, rev = start.split()
292 user, rev = start.split()
291 lines.append((line[1:-1], user, rev))
293 lines.append((line[1:-1], user, rev))
292 return lines
294 return lines
293
295
294 def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
296 def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
295 blame=False, debug=False):
297 blame=False, debug=False):
296 """checks style and portability of a given file
298 """checks style and portability of a given file
297
299
298 :f: filepath
300 :f: filepath
299 :logfunc: function used to report error
301 :logfunc: function used to report error
300 logfunc(filename, linenumber, linecontent, errormessage)
302 logfunc(filename, linenumber, linecontent, errormessage)
301 :maxerr: number of error to display before arborting.
303 :maxerr: number of error to display before arborting.
302 Set to None (default) to report all errors
304 Set to None (default) to report all errors
303
305
304 return True if no error is found, False otherwise.
306 return True if no error is found, False otherwise.
305 """
307 """
306 blamecache = None
308 blamecache = None
307 result = True
309 result = True
308 for name, match, filters, pats in checks:
310 for name, match, filters, pats in checks:
309 if debug:
311 if debug:
310 print name, f
312 print name, f
311 fc = 0
313 fc = 0
312 if not re.match(match, f):
314 if not re.match(match, f):
313 if debug:
315 if debug:
314 print "Skipping %s for %s it doesn't match %s" % (
316 print "Skipping %s for %s it doesn't match %s" % (
315 name, match, f)
317 name, match, f)
316 continue
318 continue
317 fp = open(f)
319 fp = open(f)
318 pre = post = fp.read()
320 pre = post = fp.read()
319 fp.close()
321 fp.close()
320 if "no-" + "check-code" in pre:
322 if "no-" + "check-code" in pre:
321 if debug:
323 if debug:
322 print "Skipping %s for %s it has no- and check-code" % (
324 print "Skipping %s for %s it has no- and check-code" % (
323 name, f)
325 name, f)
324 break
326 break
325 for p, r in filters:
327 for p, r in filters:
326 post = re.sub(p, r, post)
328 post = re.sub(p, r, post)
327 if warnings:
329 if warnings:
328 pats = pats[0] + pats[1]
330 pats = pats[0] + pats[1]
329 else:
331 else:
330 pats = pats[0]
332 pats = pats[0]
331 # print post # uncomment to show filtered version
333 # print post # uncomment to show filtered version
332 z = enumerate(zip(pre.splitlines(), post.splitlines(True)))
334 z = enumerate(zip(pre.splitlines(), post.splitlines(True)))
333 if debug:
335 if debug:
334 print "Checking %s for %s" % (name, f)
336 print "Checking %s for %s" % (name, f)
335 for n, l in z:
337 for n, l in z:
336 if "check-code" + "-ignore" in l[0]:
338 if "check-code" + "-ignore" in l[0]:
337 if debug:
339 if debug:
338 print "Skipping %s for %s:%s (check-code -ignore)" % (
340 print "Skipping %s for %s:%s (check-code -ignore)" % (
339 name, f, n)
341 name, f, n)
340 continue
342 continue
341 for p, msg in pats:
343 for p, msg in pats:
342 if re.search(p, l[1]):
344 if re.search(p, l[1]):
343 bd = ""
345 bd = ""
344 if blame:
346 if blame:
345 bd = 'working directory'
347 bd = 'working directory'
346 if not blamecache:
348 if not blamecache:
347 blamecache = getblame(f)
349 blamecache = getblame(f)
348 if n < len(blamecache):
350 if n < len(blamecache):
349 bl, bu, br = blamecache[n]
351 bl, bu, br = blamecache[n]
350 if bl == l[0]:
352 if bl == l[0]:
351 bd = '%s@%s' % (bu, br)
353 bd = '%s@%s' % (bu, br)
352 logfunc(f, n + 1, l[0], msg, bd)
354 logfunc(f, n + 1, l[0], msg, bd)
353 fc += 1
355 fc += 1
354 result = False
356 result = False
355 if maxerr is not None and fc >= maxerr:
357 if maxerr is not None and fc >= maxerr:
356 print " (too many errors, giving up)"
358 print " (too many errors, giving up)"
357 break
359 break
358 return result
360 return result
359
361
360 if __name__ == "__main__":
362 if __name__ == "__main__":
361 parser = optparse.OptionParser("%prog [options] [files]")
363 parser = optparse.OptionParser("%prog [options] [files]")
362 parser.add_option("-w", "--warnings", action="store_true",
364 parser.add_option("-w", "--warnings", action="store_true",
363 help="include warning-level checks")
365 help="include warning-level checks")
364 parser.add_option("-p", "--per-file", type="int",
366 parser.add_option("-p", "--per-file", type="int",
365 help="max warnings per file")
367 help="max warnings per file")
366 parser.add_option("-b", "--blame", action="store_true",
368 parser.add_option("-b", "--blame", action="store_true",
367 help="use annotate to generate blame info")
369 help="use annotate to generate blame info")
368 parser.add_option("", "--debug", action="store_true",
370 parser.add_option("", "--debug", action="store_true",
369 help="show debug information")
371 help="show debug information")
370
372
371 parser.set_defaults(per_file=15, warnings=False, blame=False, debug=False)
373 parser.set_defaults(per_file=15, warnings=False, blame=False, debug=False)
372 (options, args) = parser.parse_args()
374 (options, args) = parser.parse_args()
373
375
374 if len(args) == 0:
376 if len(args) == 0:
375 check = glob.glob("*")
377 check = glob.glob("*")
376 else:
378 else:
377 check = args
379 check = args
378
380
379 for f in check:
381 for f in check:
380 ret = 0
382 ret = 0
381 if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
383 if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
382 blame=options.blame, debug=options.debug):
384 blame=options.blame, debug=options.debug):
383 ret = 1
385 ret = 1
384 sys.exit(ret)
386 sys.exit(ret)
@@ -1,373 +1,373
1 #
1 #
2 # This is an experimental py3k-enabled mercurial setup script.
2 # This is an experimental py3k-enabled mercurial setup script.
3 #
3 #
4 # 'python setup.py install', or
4 # 'python setup.py install', or
5 # 'python setup.py --help' for more options
5 # 'python setup.py --help' for more options
6
6
7 from distutils.command.build_py import build_py_2to3
7 from distutils.command.build_py import build_py_2to3
8 from lib2to3.refactor import get_fixers_from_package as getfixers
8 from lib2to3.refactor import get_fixers_from_package as getfixers
9
9
10 import sys
10 import sys
11 if not hasattr(sys, 'version_info') or sys.version_info < (2, 4, 0, 'final'):
11 if getattr(sys, 'version_info', (0, 0, 0)) < (2, 4, 0, 'final'):
12 raise SystemExit("Mercurial requires Python 2.4 or later.")
12 raise SystemExit("Mercurial requires Python 2.4 or later.")
13
13
14 if sys.version_info[0] >= 3:
14 if sys.version_info[0] >= 3:
15 def b(s):
15 def b(s):
16 '''A helper function to emulate 2.6+ bytes literals using string
16 '''A helper function to emulate 2.6+ bytes literals using string
17 literals.'''
17 literals.'''
18 return s.encode('latin1')
18 return s.encode('latin1')
19 else:
19 else:
20 def b(s):
20 def b(s):
21 '''A helper function to emulate 2.6+ bytes literals using string
21 '''A helper function to emulate 2.6+ bytes literals using string
22 literals.'''
22 literals.'''
23 return s
23 return s
24
24
25 # Solaris Python packaging brain damage
25 # Solaris Python packaging brain damage
26 try:
26 try:
27 import hashlib
27 import hashlib
28 sha = hashlib.sha1()
28 sha = hashlib.sha1()
29 except:
29 except:
30 try:
30 try:
31 import sha
31 import sha
32 except:
32 except:
33 raise SystemExit(
33 raise SystemExit(
34 "Couldn't import standard hashlib (incomplete Python install).")
34 "Couldn't import standard hashlib (incomplete Python install).")
35
35
36 try:
36 try:
37 import zlib
37 import zlib
38 except:
38 except:
39 raise SystemExit(
39 raise SystemExit(
40 "Couldn't import standard zlib (incomplete Python install).")
40 "Couldn't import standard zlib (incomplete Python install).")
41
41
42 try:
42 try:
43 import bz2
43 import bz2
44 except:
44 except:
45 raise SystemExit(
45 raise SystemExit(
46 "Couldn't import standard bz2 (incomplete Python install).")
46 "Couldn't import standard bz2 (incomplete Python install).")
47
47
48 import os, subprocess, time
48 import os, subprocess, time
49 import shutil
49 import shutil
50 import tempfile
50 import tempfile
51 from distutils import log
51 from distutils import log
52 from distutils.core import setup, Extension
52 from distutils.core import setup, Extension
53 from distutils.dist import Distribution
53 from distutils.dist import Distribution
54 from distutils.command.build import build
54 from distutils.command.build import build
55 from distutils.command.build_ext import build_ext
55 from distutils.command.build_ext import build_ext
56 from distutils.command.build_py import build_py
56 from distutils.command.build_py import build_py
57 from distutils.spawn import spawn, find_executable
57 from distutils.spawn import spawn, find_executable
58 from distutils.ccompiler import new_compiler
58 from distutils.ccompiler import new_compiler
59 from distutils.errors import CCompilerError
59 from distutils.errors import CCompilerError
60
60
61 scripts = ['hg']
61 scripts = ['hg']
62 if os.name == 'nt':
62 if os.name == 'nt':
63 scripts.append('contrib/win32/hg.bat')
63 scripts.append('contrib/win32/hg.bat')
64
64
65 # simplified version of distutils.ccompiler.CCompiler.has_function
65 # simplified version of distutils.ccompiler.CCompiler.has_function
66 # that actually removes its temporary files.
66 # that actually removes its temporary files.
67 def hasfunction(cc, funcname):
67 def hasfunction(cc, funcname):
68 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
68 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
69 devnull = oldstderr = None
69 devnull = oldstderr = None
70 try:
70 try:
71 try:
71 try:
72 fname = os.path.join(tmpdir, 'funcname.c')
72 fname = os.path.join(tmpdir, 'funcname.c')
73 f = open(fname, 'w')
73 f = open(fname, 'w')
74 f.write('int main(void) {\n')
74 f.write('int main(void) {\n')
75 f.write(' %s();\n' % funcname)
75 f.write(' %s();\n' % funcname)
76 f.write('}\n')
76 f.write('}\n')
77 f.close()
77 f.close()
78 # Redirect stderr to /dev/null to hide any error messages
78 # Redirect stderr to /dev/null to hide any error messages
79 # from the compiler.
79 # from the compiler.
80 # This will have to be changed if we ever have to check
80 # This will have to be changed if we ever have to check
81 # for a function on Windows.
81 # for a function on Windows.
82 devnull = open('/dev/null', 'w')
82 devnull = open('/dev/null', 'w')
83 oldstderr = os.dup(sys.stderr.fileno())
83 oldstderr = os.dup(sys.stderr.fileno())
84 os.dup2(devnull.fileno(), sys.stderr.fileno())
84 os.dup2(devnull.fileno(), sys.stderr.fileno())
85 objects = cc.compile([fname], output_dir=tmpdir)
85 objects = cc.compile([fname], output_dir=tmpdir)
86 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
86 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
87 except:
87 except:
88 return False
88 return False
89 return True
89 return True
90 finally:
90 finally:
91 if oldstderr is not None:
91 if oldstderr is not None:
92 os.dup2(oldstderr, sys.stderr.fileno())
92 os.dup2(oldstderr, sys.stderr.fileno())
93 if devnull is not None:
93 if devnull is not None:
94 devnull.close()
94 devnull.close()
95 shutil.rmtree(tmpdir)
95 shutil.rmtree(tmpdir)
96
96
97 # py2exe needs to be installed to work
97 # py2exe needs to be installed to work
98 try:
98 try:
99 import py2exe
99 import py2exe
100 py2exeloaded = True
100 py2exeloaded = True
101
101
102 # Help py2exe to find win32com.shell
102 # Help py2exe to find win32com.shell
103 try:
103 try:
104 import modulefinder
104 import modulefinder
105 import win32com
105 import win32com
106 for p in win32com.__path__[1:]: # Take the path to win32comext
106 for p in win32com.__path__[1:]: # Take the path to win32comext
107 modulefinder.AddPackagePath("win32com", p)
107 modulefinder.AddPackagePath("win32com", p)
108 pn = "win32com.shell"
108 pn = "win32com.shell"
109 __import__(pn)
109 __import__(pn)
110 m = sys.modules[pn]
110 m = sys.modules[pn]
111 for p in m.__path__[1:]:
111 for p in m.__path__[1:]:
112 modulefinder.AddPackagePath(pn, p)
112 modulefinder.AddPackagePath(pn, p)
113 except ImportError:
113 except ImportError:
114 pass
114 pass
115
115
116 except ImportError:
116 except ImportError:
117 py2exeloaded = False
117 py2exeloaded = False
118 pass
118 pass
119
119
120 def runcmd(cmd, env):
120 def runcmd(cmd, env):
121 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
121 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
122 stderr=subprocess.PIPE, env=env)
122 stderr=subprocess.PIPE, env=env)
123 out, err = p.communicate()
123 out, err = p.communicate()
124 # If root is executing setup.py, but the repository is owned by
124 # If root is executing setup.py, but the repository is owned by
125 # another user (as in "sudo python setup.py install") we will get
125 # another user (as in "sudo python setup.py install") we will get
126 # trust warnings since the .hg/hgrc file is untrusted. That is
126 # trust warnings since the .hg/hgrc file is untrusted. That is
127 # fine, we don't want to load it anyway. Python may warn about
127 # fine, we don't want to load it anyway. Python may warn about
128 # a missing __init__.py in mercurial/locale, we also ignore that.
128 # a missing __init__.py in mercurial/locale, we also ignore that.
129 err = [e for e in err.splitlines()
129 err = [e for e in err.splitlines()
130 if not e.startswith(b('Not trusting file')) \
130 if not e.startswith(b('Not trusting file')) \
131 and not e.startswith(b('warning: Not importing'))]
131 and not e.startswith(b('warning: Not importing'))]
132 if err:
132 if err:
133 return ''
133 return ''
134 return out
134 return out
135
135
136 version = ''
136 version = ''
137
137
138 if os.path.isdir('.hg'):
138 if os.path.isdir('.hg'):
139 # Execute hg out of this directory with a custom environment which
139 # Execute hg out of this directory with a custom environment which
140 # includes the pure Python modules in mercurial/pure. We also take
140 # includes the pure Python modules in mercurial/pure. We also take
141 # care to not use any hgrc files and do no localization.
141 # care to not use any hgrc files and do no localization.
142 pypath = ['mercurial', os.path.join('mercurial', 'pure')]
142 pypath = ['mercurial', os.path.join('mercurial', 'pure')]
143 env = {'PYTHONPATH': os.pathsep.join(pypath),
143 env = {'PYTHONPATH': os.pathsep.join(pypath),
144 'HGRCPATH': '',
144 'HGRCPATH': '',
145 'LANGUAGE': 'C'}
145 'LANGUAGE': 'C'}
146 if 'LD_LIBRARY_PATH' in os.environ:
146 if 'LD_LIBRARY_PATH' in os.environ:
147 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
147 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
148 if 'SystemRoot' in os.environ:
148 if 'SystemRoot' in os.environ:
149 # Copy SystemRoot into the custom environment for Python 2.6
149 # Copy SystemRoot into the custom environment for Python 2.6
150 # under Windows. Otherwise, the subprocess will fail with
150 # under Windows. Otherwise, the subprocess will fail with
151 # error 0xc0150004. See: http://bugs.python.org/issue3440
151 # error 0xc0150004. See: http://bugs.python.org/issue3440
152 env['SystemRoot'] = os.environ['SystemRoot']
152 env['SystemRoot'] = os.environ['SystemRoot']
153 cmd = [sys.executable, 'hg', 'id', '-i', '-t']
153 cmd = [sys.executable, 'hg', 'id', '-i', '-t']
154 l = runcmd(cmd, env).split()
154 l = runcmd(cmd, env).split()
155 while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
155 while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
156 l.pop()
156 l.pop()
157 if len(l) > 1: # tag found
157 if len(l) > 1: # tag found
158 version = l[-1]
158 version = l[-1]
159 if l[0].endswith('+'): # propagate the dirty status to the tag
159 if l[0].endswith('+'): # propagate the dirty status to the tag
160 version += '+'
160 version += '+'
161 elif len(l) == 1: # no tag found
161 elif len(l) == 1: # no tag found
162 cmd = [sys.executable, 'hg', 'parents', '--template',
162 cmd = [sys.executable, 'hg', 'parents', '--template',
163 '{latesttag}+{latesttagdistance}-']
163 '{latesttag}+{latesttagdistance}-']
164 version = runcmd(cmd, env) + l[0]
164 version = runcmd(cmd, env) + l[0]
165 if version.endswith('+'):
165 if version.endswith('+'):
166 version += time.strftime('%Y%m%d')
166 version += time.strftime('%Y%m%d')
167 elif os.path.exists('.hg_archival.txt'):
167 elif os.path.exists('.hg_archival.txt'):
168 kw = dict([[t.strip() for t in l.split(':', 1)]
168 kw = dict([[t.strip() for t in l.split(':', 1)]
169 for l in open('.hg_archival.txt')])
169 for l in open('.hg_archival.txt')])
170 if 'tag' in kw:
170 if 'tag' in kw:
171 version = kw['tag']
171 version = kw['tag']
172 elif 'latesttag' in kw:
172 elif 'latesttag' in kw:
173 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
173 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
174 else:
174 else:
175 version = kw.get('node', '')[:12]
175 version = kw.get('node', '')[:12]
176
176
177 if version:
177 if version:
178 f = open("mercurial/__version__.py", "w")
178 f = open("mercurial/__version__.py", "w")
179 f.write('# this file is autogenerated by setup.py\n')
179 f.write('# this file is autogenerated by setup.py\n')
180 f.write('version = "%s"\n' % version)
180 f.write('version = "%s"\n' % version)
181 f.close()
181 f.close()
182
182
183
183
184 try:
184 try:
185 from mercurial import __version__
185 from mercurial import __version__
186 version = __version__.version
186 version = __version__.version
187 except ImportError:
187 except ImportError:
188 version = 'unknown'
188 version = 'unknown'
189
189
190 class hgbuildmo(build):
190 class hgbuildmo(build):
191
191
192 description = "build translations (.mo files)"
192 description = "build translations (.mo files)"
193
193
194 def run(self):
194 def run(self):
195 if not find_executable('msgfmt'):
195 if not find_executable('msgfmt'):
196 self.warn("could not find msgfmt executable, no translations "
196 self.warn("could not find msgfmt executable, no translations "
197 "will be built")
197 "will be built")
198 return
198 return
199
199
200 podir = 'i18n'
200 podir = 'i18n'
201 if not os.path.isdir(podir):
201 if not os.path.isdir(podir):
202 self.warn("could not find %s/ directory" % podir)
202 self.warn("could not find %s/ directory" % podir)
203 return
203 return
204
204
205 join = os.path.join
205 join = os.path.join
206 for po in os.listdir(podir):
206 for po in os.listdir(podir):
207 if not po.endswith('.po'):
207 if not po.endswith('.po'):
208 continue
208 continue
209 pofile = join(podir, po)
209 pofile = join(podir, po)
210 modir = join('locale', po[:-3], 'LC_MESSAGES')
210 modir = join('locale', po[:-3], 'LC_MESSAGES')
211 mofile = join(modir, 'hg.mo')
211 mofile = join(modir, 'hg.mo')
212 mobuildfile = join('mercurial', mofile)
212 mobuildfile = join('mercurial', mofile)
213 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
213 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
214 if sys.platform != 'sunos5':
214 if sys.platform != 'sunos5':
215 # msgfmt on Solaris does not know about -c
215 # msgfmt on Solaris does not know about -c
216 cmd.append('-c')
216 cmd.append('-c')
217 self.mkpath(join('mercurial', modir))
217 self.mkpath(join('mercurial', modir))
218 self.make_file([pofile], mobuildfile, spawn, (cmd,))
218 self.make_file([pofile], mobuildfile, spawn, (cmd,))
219
219
220 # Insert hgbuildmo first so that files in mercurial/locale/ are found
220 # Insert hgbuildmo first so that files in mercurial/locale/ are found
221 # when build_py is run next.
221 # when build_py is run next.
222 build.sub_commands.insert(0, ('build_mo', None))
222 build.sub_commands.insert(0, ('build_mo', None))
223 # We also need build_ext before build_py. Otherwise, when 2to3 is called (in
223 # We also need build_ext before build_py. Otherwise, when 2to3 is called (in
224 # build_py), it will not find osutil & friends, thinking that those modules are
224 # build_py), it will not find osutil & friends, thinking that those modules are
225 # global and, consequently, making a mess, now that all module imports are
225 # global and, consequently, making a mess, now that all module imports are
226 # global.
226 # global.
227 build.sub_commands.insert(1, ('build_ext', None))
227 build.sub_commands.insert(1, ('build_ext', None))
228
228
229 Distribution.pure = 0
229 Distribution.pure = 0
230 Distribution.global_options.append(('pure', None, "use pure (slow) Python "
230 Distribution.global_options.append(('pure', None, "use pure (slow) Python "
231 "code instead of C extensions"))
231 "code instead of C extensions"))
232
232
233 class hgbuildext(build_ext):
233 class hgbuildext(build_ext):
234
234
235 def build_extension(self, ext):
235 def build_extension(self, ext):
236 try:
236 try:
237 build_ext.build_extension(self, ext)
237 build_ext.build_extension(self, ext)
238 except CCompilerError:
238 except CCompilerError:
239 if not hasattr(ext, 'optional') or not ext.optional:
239 if getattr(ext, 'optional', False):
240 raise
240 raise
241 log.warn("Failed to build optional extension '%s' (skipping)",
241 log.warn("Failed to build optional extension '%s' (skipping)",
242 ext.name)
242 ext.name)
243
243
244 class hgbuildpy(build_py_2to3):
244 class hgbuildpy(build_py_2to3):
245 fixer_names = sorted(set(getfixers("lib2to3.fixes") +
245 fixer_names = sorted(set(getfixers("lib2to3.fixes") +
246 getfixers("hgfixes")))
246 getfixers("hgfixes")))
247
247
248 def finalize_options(self):
248 def finalize_options(self):
249 build_py.finalize_options(self)
249 build_py.finalize_options(self)
250
250
251 if self.distribution.pure:
251 if self.distribution.pure:
252 if self.py_modules is None:
252 if self.py_modules is None:
253 self.py_modules = []
253 self.py_modules = []
254 for ext in self.distribution.ext_modules:
254 for ext in self.distribution.ext_modules:
255 if ext.name.startswith("mercurial."):
255 if ext.name.startswith("mercurial."):
256 self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
256 self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
257 self.distribution.ext_modules = []
257 self.distribution.ext_modules = []
258
258
259 def find_modules(self):
259 def find_modules(self):
260 modules = build_py.find_modules(self)
260 modules = build_py.find_modules(self)
261 for module in modules:
261 for module in modules:
262 if module[0] == "mercurial.pure":
262 if module[0] == "mercurial.pure":
263 if module[1] != "__init__":
263 if module[1] != "__init__":
264 yield ("mercurial", module[1], module[2])
264 yield ("mercurial", module[1], module[2])
265 else:
265 else:
266 yield module
266 yield module
267
267
268 def run(self):
268 def run(self):
269 # In the build_py_2to3 class, self.updated_files = [], but I couldn't
269 # In the build_py_2to3 class, self.updated_files = [], but I couldn't
270 # see when that variable was updated to point to the updated files, as
270 # see when that variable was updated to point to the updated files, as
271 # its names suggests. Thus, I decided to just find_all_modules and feed
271 # its names suggests. Thus, I decided to just find_all_modules and feed
272 # them to 2to3. Unfortunately, subsequent calls to setup3k.py will
272 # them to 2to3. Unfortunately, subsequent calls to setup3k.py will
273 # incur in 2to3 analysis overhead.
273 # incur in 2to3 analysis overhead.
274 self.updated_files = [i[2] for i in self.find_all_modules()]
274 self.updated_files = [i[2] for i in self.find_all_modules()]
275
275
276 # Base class code
276 # Base class code
277 if self.py_modules:
277 if self.py_modules:
278 self.build_modules()
278 self.build_modules()
279 if self.packages:
279 if self.packages:
280 self.build_packages()
280 self.build_packages()
281 self.build_package_data()
281 self.build_package_data()
282
282
283 # 2to3
283 # 2to3
284 self.run_2to3(self.updated_files)
284 self.run_2to3(self.updated_files)
285
285
286 # Remaining base class code
286 # Remaining base class code
287 self.byte_compile(self.get_outputs(include_bytecode=0))
287 self.byte_compile(self.get_outputs(include_bytecode=0))
288
288
289 cmdclass = {'build_mo': hgbuildmo,
289 cmdclass = {'build_mo': hgbuildmo,
290 'build_ext': hgbuildext,
290 'build_ext': hgbuildext,
291 'build_py': hgbuildpy}
291 'build_py': hgbuildpy}
292
292
293 packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
293 packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
294 'hgext.highlight', 'hgext.zeroconf']
294 'hgext.highlight', 'hgext.zeroconf']
295
295
296 pymodules = []
296 pymodules = []
297
297
298 extmodules = [
298 extmodules = [
299 Extension('mercurial.base85', ['mercurial/base85.c']),
299 Extension('mercurial.base85', ['mercurial/base85.c']),
300 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
300 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
301 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
301 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
302 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
302 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
303 Extension('mercurial.parsers', ['mercurial/parsers.c']),
303 Extension('mercurial.parsers', ['mercurial/parsers.c']),
304 ]
304 ]
305
305
306 # disable osutil.c under windows + python 2.4 (issue1364)
306 # disable osutil.c under windows + python 2.4 (issue1364)
307 if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
307 if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
308 pymodules.append('mercurial.pure.osutil')
308 pymodules.append('mercurial.pure.osutil')
309 else:
309 else:
310 extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
310 extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
311
311
312 if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
312 if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
313 # The inotify extension is only usable with Linux 2.6 kernels.
313 # The inotify extension is only usable with Linux 2.6 kernels.
314 # You also need a reasonably recent C library.
314 # You also need a reasonably recent C library.
315 # In any case, if it fails to build the error will be skipped ('optional').
315 # In any case, if it fails to build the error will be skipped ('optional').
316 cc = new_compiler()
316 cc = new_compiler()
317 if hasfunction(cc, 'inotify_add_watch'):
317 if hasfunction(cc, 'inotify_add_watch'):
318 inotify = Extension('hgext.inotify.linux._inotify',
318 inotify = Extension('hgext.inotify.linux._inotify',
319 ['hgext/inotify/linux/_inotify.c'],
319 ['hgext/inotify/linux/_inotify.c'],
320 ['mercurial'])
320 ['mercurial'])
321 inotify.optional = True
321 inotify.optional = True
322 extmodules.append(inotify)
322 extmodules.append(inotify)
323 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
323 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
324
324
325 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
325 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
326 'help/*.txt']}
326 'help/*.txt']}
327
327
328 def ordinarypath(p):
328 def ordinarypath(p):
329 return p and p[0] != '.' and p[-1] != '~'
329 return p and p[0] != '.' and p[-1] != '~'
330
330
331 for root in ('templates',):
331 for root in ('templates',):
332 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
332 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
333 curdir = curdir.split(os.sep, 1)[1]
333 curdir = curdir.split(os.sep, 1)[1]
334 dirs[:] = filter(ordinarypath, dirs)
334 dirs[:] = filter(ordinarypath, dirs)
335 for f in filter(ordinarypath, files):
335 for f in filter(ordinarypath, files):
336 f = os.path.join(curdir, f)
336 f = os.path.join(curdir, f)
337 packagedata['mercurial'].append(f)
337 packagedata['mercurial'].append(f)
338
338
339 datafiles = []
339 datafiles = []
340 setupversion = version
340 setupversion = version
341 extra = {}
341 extra = {}
342
342
343 if py2exeloaded:
343 if py2exeloaded:
344 extra['console'] = [
344 extra['console'] = [
345 {'script':'hg',
345 {'script':'hg',
346 'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
346 'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
347 'product_version':version}]
347 'product_version':version}]
348
348
349 if os.name == 'nt':
349 if os.name == 'nt':
350 # Windows binary file versions for exe/dll files must have the
350 # Windows binary file versions for exe/dll files must have the
351 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
351 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
352 setupversion = version.split('+', 1)[0]
352 setupversion = version.split('+', 1)[0]
353
353
354 setup(name='mercurial',
354 setup(name='mercurial',
355 version=setupversion,
355 version=setupversion,
356 author='Matt Mackall',
356 author='Matt Mackall',
357 author_email='mpm@selenic.com',
357 author_email='mpm@selenic.com',
358 url='http://mercurial.selenic.com/',
358 url='http://mercurial.selenic.com/',
359 description='Scalable distributed SCM',
359 description='Scalable distributed SCM',
360 license='GNU GPLv2+',
360 license='GNU GPLv2+',
361 scripts=scripts,
361 scripts=scripts,
362 packages=packages,
362 packages=packages,
363 py_modules=pymodules,
363 py_modules=pymodules,
364 ext_modules=extmodules,
364 ext_modules=extmodules,
365 data_files=datafiles,
365 data_files=datafiles,
366 package_data=packagedata,
366 package_data=packagedata,
367 cmdclass=cmdclass,
367 cmdclass=cmdclass,
368 options=dict(py2exe=dict(packages=['hgext', 'email']),
368 options=dict(py2exe=dict(packages=['hgext', 'email']),
369 bdist_mpkg=dict(zipdist=True,
369 bdist_mpkg=dict(zipdist=True,
370 license='COPYING',
370 license='COPYING',
371 readme='contrib/macosx/Readme.html',
371 readme='contrib/macosx/Readme.html',
372 welcome='contrib/macosx/Welcome.html')),
372 welcome='contrib/macosx/Welcome.html')),
373 **extra)
373 **extra)
@@ -1,95 +1,95
1 # An example WSGI script for IIS/isapi-wsgi to export multiple hgweb repos
1 # An example WSGI script for IIS/isapi-wsgi to export multiple hgweb repos
2 # Copyright 2010 Sune Foldager <cryo@cyanite.org>
2 # Copyright 2010 Sune Foldager <cryo@cyanite.org>
3 #
3 #
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6 #
6 #
7 # Requirements:
7 # Requirements:
8 # - Python 2.6
8 # - Python 2.6
9 # - PyWin32 build 214 or newer
9 # - PyWin32 build 214 or newer
10 # - Mercurial installed from source (python setup.py install)
10 # - Mercurial installed from source (python setup.py install)
11 # - IIS 7
11 # - IIS 7
12 #
12 #
13 # Earlier versions will in general work as well, but the PyWin32 version is
13 # Earlier versions will in general work as well, but the PyWin32 version is
14 # necessary for win32traceutil to work correctly.
14 # necessary for win32traceutil to work correctly.
15 #
15 #
16 #
16 #
17 # Installation and use:
17 # Installation and use:
18 #
18 #
19 # - Download the isapi-wsgi source and run python setup.py install:
19 # - Download the isapi-wsgi source and run python setup.py install:
20 # http://code.google.com/p/isapi-wsgi/
20 # http://code.google.com/p/isapi-wsgi/
21 #
21 #
22 # - Run this script (i.e. python hgwebdir_wsgi.py) to get a shim dll. The
22 # - Run this script (i.e. python hgwebdir_wsgi.py) to get a shim dll. The
23 # shim is identical for all scripts, so you can just copy and rename one
23 # shim is identical for all scripts, so you can just copy and rename one
24 # from an earlier run, if you wish.
24 # from an earlier run, if you wish.
25 #
25 #
26 # - Setup an IIS application where your hgwebdir is to be served from.
26 # - Setup an IIS application where your hgwebdir is to be served from.
27 # On 64-bit systems, make sure it's assigned a 32-bit app pool.
27 # On 64-bit systems, make sure it's assigned a 32-bit app pool.
28 #
28 #
29 # - In the application, setup a wildcard script handler mapping of type
29 # - In the application, setup a wildcard script handler mapping of type
30 # IpsapiModule with the shim dll as its executable. This file MUST reside
30 # IpsapiModule with the shim dll as its executable. This file MUST reside
31 # in the same directory as the shim. Remove all other handlers, if you wish.
31 # in the same directory as the shim. Remove all other handlers, if you wish.
32 #
32 #
33 # - Make sure the ISAPI and CGI restrictions (configured globally on the
33 # - Make sure the ISAPI and CGI restrictions (configured globally on the
34 # web server) includes the shim dll, to allow it to run.
34 # web server) includes the shim dll, to allow it to run.
35 #
35 #
36 # - Adjust the configuration variables below to match your needs.
36 # - Adjust the configuration variables below to match your needs.
37 #
37 #
38
38
39 # Configuration file location
39 # Configuration file location
40 hgweb_config = r'c:\src\iis\hg\hgweb.config'
40 hgweb_config = r'c:\src\iis\hg\hgweb.config'
41
41
42 # Global settings for IIS path translation
42 # Global settings for IIS path translation
43 path_strip = 0 # Strip this many path elements off (when using url rewrite)
43 path_strip = 0 # Strip this many path elements off (when using url rewrite)
44 path_prefix = 1 # This many path elements are prefixes (depends on the
44 path_prefix = 1 # This many path elements are prefixes (depends on the
45 # virtual path of the IIS application).
45 # virtual path of the IIS application).
46
46
47 import sys
47 import sys
48
48
49 # Adjust python path if this is not a system-wide install
49 # Adjust python path if this is not a system-wide install
50 #sys.path.insert(0, r'c:\path\to\python\lib')
50 #sys.path.insert(0, r'c:\path\to\python\lib')
51
51
52 # Enable tracing. Run 'python -m win32traceutil' to debug
52 # Enable tracing. Run 'python -m win32traceutil' to debug
53 if hasattr(sys, 'isapidllhandle'):
53 if getattr(sys, 'isapidllhandle', None) is not None:
54 import win32traceutil
54 import win32traceutil
55
55
56 # To serve pages in local charset instead of UTF-8, remove the two lines below
56 # To serve pages in local charset instead of UTF-8, remove the two lines below
57 import os
57 import os
58 os.environ['HGENCODING'] = 'UTF-8'
58 os.environ['HGENCODING'] = 'UTF-8'
59
59
60
60
61 import isapi_wsgi
61 import isapi_wsgi
62 from mercurial import demandimport; demandimport.enable()
62 from mercurial import demandimport; demandimport.enable()
63 from mercurial.hgweb.hgwebdir_mod import hgwebdir
63 from mercurial.hgweb.hgwebdir_mod import hgwebdir
64
64
65 # Example tweak: Replace isapi_wsgi's handler to provide better error message
65 # Example tweak: Replace isapi_wsgi's handler to provide better error message
66 # Other stuff could also be done here, like logging errors etc.
66 # Other stuff could also be done here, like logging errors etc.
67 class WsgiHandler(isapi_wsgi.IsapiWsgiHandler):
67 class WsgiHandler(isapi_wsgi.IsapiWsgiHandler):
68 error_status = '500 Internal Server Error' # less silly error message
68 error_status = '500 Internal Server Error' # less silly error message
69
69
70 isapi_wsgi.IsapiWsgiHandler = WsgiHandler
70 isapi_wsgi.IsapiWsgiHandler = WsgiHandler
71
71
72 # Only create the hgwebdir instance once
72 # Only create the hgwebdir instance once
73 application = hgwebdir(hgweb_config)
73 application = hgwebdir(hgweb_config)
74
74
75 def handler(environ, start_response):
75 def handler(environ, start_response):
76
76
77 # Translate IIS's weird URLs
77 # Translate IIS's weird URLs
78 url = environ['SCRIPT_NAME'] + environ['PATH_INFO']
78 url = environ['SCRIPT_NAME'] + environ['PATH_INFO']
79 paths = url[1:].split('/')[path_strip:]
79 paths = url[1:].split('/')[path_strip:]
80 script_name = '/' + '/'.join(paths[:path_prefix])
80 script_name = '/' + '/'.join(paths[:path_prefix])
81 path_info = '/'.join(paths[path_prefix:])
81 path_info = '/'.join(paths[path_prefix:])
82 if path_info:
82 if path_info:
83 path_info = '/' + path_info
83 path_info = '/' + path_info
84 environ['SCRIPT_NAME'] = script_name
84 environ['SCRIPT_NAME'] = script_name
85 environ['PATH_INFO'] = path_info
85 environ['PATH_INFO'] = path_info
86
86
87 return application(environ, start_response)
87 return application(environ, start_response)
88
88
89 def __ExtensionFactory__():
89 def __ExtensionFactory__():
90 return isapi_wsgi.ISAPISimpleHandler(handler)
90 return isapi_wsgi.ISAPISimpleHandler(handler)
91
91
92 if __name__=='__main__':
92 if __name__=='__main__':
93 from isapi.install import *
93 from isapi.install import *
94 params = ISAPIParameters()
94 params = ISAPIParameters()
95 HandleCommandLine(params)
95 HandleCommandLine(params)
@@ -1,1010 +1,1023
1 #compdef hg
1 #compdef hg
2
2
3 # Zsh completion script for mercurial. Rename this file to _hg and copy
3 # Zsh completion script for mercurial. Rename this file to _hg and copy
4 # it into your zsh function path (/usr/share/zsh/site-functions for
4 # it into your zsh function path (/usr/share/zsh/site-functions for
5 # instance)
5 # instance)
6 #
6 #
7 # If you do not want to install it globally, you can copy it somewhere
7 # If you do not want to install it globally, you can copy it somewhere
8 # else and add that directory to $fpath. This must be done before
8 # else and add that directory to $fpath. This must be done before
9 # compinit is called. If the file is copied to ~/.zsh.d, your ~/.zshrc
9 # compinit is called. If the file is copied to ~/.zsh.d, your ~/.zshrc
10 # file could look like this:
10 # file could look like this:
11 #
11 #
12 # fpath=("$HOME/.zsh.d" $fpath)
12 # fpath=("$HOME/.zsh.d" $fpath)
13 # autoload -U compinit
13 # autoload -U compinit
14 # compinit
14 # compinit
15 #
15 #
16 # Copyright (C) 2005, 2006 Steve Borho <steve@borho.org>
16 # Copyright (C) 2005, 2006 Steve Borho <steve@borho.org>
17 # Copyright (C) 2006-10 Brendan Cully <brendan@kublai.com>
17 # Copyright (C) 2006-10 Brendan Cully <brendan@kublai.com>
18 #
18 #
19 # Permission is hereby granted, without written agreement and without
19 # Permission is hereby granted, without written agreement and without
20 # licence or royalty fees, to use, copy, modify, and distribute this
20 # licence or royalty fees, to use, copy, modify, and distribute this
21 # software and to distribute modified versions of this software for any
21 # software and to distribute modified versions of this software for any
22 # purpose, provided that the above copyright notice and the following
22 # purpose, provided that the above copyright notice and the following
23 # two paragraphs appear in all copies of this software.
23 # two paragraphs appear in all copies of this software.
24 #
24 #
25 # In no event shall the authors be liable to any party for direct,
25 # In no event shall the authors be liable to any party for direct,
26 # indirect, special, incidental, or consequential damages arising out of
26 # indirect, special, incidental, or consequential damages arising out of
27 # the use of this software and its documentation, even if the authors
27 # the use of this software and its documentation, even if the authors
28 # have been advised of the possibility of such damage.
28 # have been advised of the possibility of such damage.
29 #
29 #
30 # The authors specifically disclaim any warranties, including, but not
30 # The authors specifically disclaim any warranties, including, but not
31 # limited to, the implied warranties of merchantability and fitness for
31 # limited to, the implied warranties of merchantability and fitness for
32 # a particular purpose. The software provided hereunder is on an "as
32 # a particular purpose. The software provided hereunder is on an "as
33 # is" basis, and the authors have no obligation to provide maintenance,
33 # is" basis, and the authors have no obligation to provide maintenance,
34 # support, updates, enhancements, or modifications.
34 # support, updates, enhancements, or modifications.
35
35
36 emulate -LR zsh
36 emulate -LR zsh
37 setopt extendedglob
37 setopt extendedglob
38
38
39 local curcontext="$curcontext" state line
39 local curcontext="$curcontext" state line
40 typeset -A _hg_cmd_globals
40 typeset -A _hg_cmd_globals
41
41
42 _hg() {
42 _hg() {
43 local cmd _hg_root
43 local cmd _hg_root
44 integer i=2
44 integer i=2
45 _hg_cmd_globals=()
45 _hg_cmd_globals=()
46
46
47 while (( i < $#words ))
47 while (( i < $#words ))
48 do
48 do
49 case "$words[$i]" in
49 case "$words[$i]" in
50 -R|--repository)
50 -R|--repository)
51 eval _hg_root="$words[$i+1]"
51 eval _hg_root="$words[$i+1]"
52 _hg_cmd_globals+=("$words[$i]" "$_hg_root")
52 _hg_cmd_globals+=("$words[$i]" "$_hg_root")
53 (( i += 2 ))
53 (( i += 2 ))
54 continue
54 continue
55 ;;
55 ;;
56 -R*)
56 -R*)
57 _hg_cmd_globals+="$words[$i]"
57 _hg_cmd_globals+="$words[$i]"
58 eval _hg_root="${words[$i]#-R}"
58 eval _hg_root="${words[$i]#-R}"
59 (( i++ ))
59 (( i++ ))
60 continue
60 continue
61 ;;
61 ;;
62 --cwd|--config)
62 --cwd|--config)
63 # pass along arguments to hg completer
63 # pass along arguments to hg completer
64 _hg_cmd_globals+=("$words[$i]" "$words[$i+1]")
64 _hg_cmd_globals+=("$words[$i]" "$words[$i+1]")
65 (( i += 2 ))
65 (( i += 2 ))
66 continue
66 continue
67 ;;
67 ;;
68 -*)
68 -*)
69 # skip option
69 # skip option
70 (( i++ ))
70 (( i++ ))
71 continue
71 continue
72 ;;
72 ;;
73 esac
73 esac
74 if [[ -z "$cmd" ]]
74 if [[ -z "$cmd" ]]
75 then
75 then
76 cmd="$words[$i]"
76 cmd="$words[$i]"
77 words[$i]=()
77 words[$i]=()
78 (( CURRENT-- ))
78 (( CURRENT-- ))
79 fi
79 fi
80 (( i++ ))
80 (( i++ ))
81 done
81 done
82
82
83 if [[ -z "$cmd" ]]
83 if [[ -z "$cmd" ]]
84 then
84 then
85 _arguments -s -w : $_hg_global_opts \
85 _arguments -s -w : $_hg_global_opts \
86 ':mercurial command:_hg_commands'
86 ':mercurial command:_hg_commands'
87 return
87 return
88 fi
88 fi
89
89
90 # resolve abbreviations and aliases
90 # resolve abbreviations and aliases
91 if ! (( $+functions[_hg_cmd_${cmd}] ))
91 if ! (( $+functions[_hg_cmd_${cmd}] ))
92 then
92 then
93 local cmdexp
93 local cmdexp
94 (( $#_hg_cmd_list )) || _hg_get_commands
94 (( $#_hg_cmd_list )) || _hg_get_commands
95
95
96 cmdexp=$_hg_cmd_list[(r)${cmd}*]
96 cmdexp=$_hg_cmd_list[(r)${cmd}*]
97 if [[ $cmdexp == $_hg_cmd_list[(R)${cmd}*] ]]
97 if [[ $cmdexp == $_hg_cmd_list[(R)${cmd}*] ]]
98 then
98 then
99 # might be nice to rewrite the command line with the expansion
99 # might be nice to rewrite the command line with the expansion
100 cmd="$cmdexp"
100 cmd="$cmdexp"
101 fi
101 fi
102 if [[ -n $_hg_alias_list[$cmd] ]]
102 if [[ -n $_hg_alias_list[$cmd] ]]
103 then
103 then
104 cmd=$_hg_alias_list[$cmd]
104 cmd=$_hg_alias_list[$cmd]
105 fi
105 fi
106 fi
106 fi
107
107
108 curcontext="${curcontext%:*:*}:hg-${cmd}:"
108 curcontext="${curcontext%:*:*}:hg-${cmd}:"
109
109
110 zstyle -s ":completion:$curcontext:" cache-policy update_policy
110 zstyle -s ":completion:$curcontext:" cache-policy update_policy
111
111
112 if [[ -z "$update_policy" ]]
112 if [[ -z "$update_policy" ]]
113 then
113 then
114 zstyle ":completion:$curcontext:" cache-policy _hg_cache_policy
114 zstyle ":completion:$curcontext:" cache-policy _hg_cache_policy
115 fi
115 fi
116
116
117 if (( $+functions[_hg_cmd_${cmd}] ))
117 if (( $+functions[_hg_cmd_${cmd}] ))
118 then
118 then
119 _hg_cmd_${cmd}
119 _hg_cmd_${cmd}
120 else
120 else
121 # complete unknown commands normally
121 # complete unknown commands normally
122 _arguments -s -w : $_hg_global_opts \
122 _arguments -s -w : $_hg_global_opts \
123 '*:files:_hg_files'
123 '*:files:_hg_files'
124 fi
124 fi
125 }
125 }
126
126
127 _hg_cache_policy() {
127 _hg_cache_policy() {
128 typeset -a old
128 typeset -a old
129
129
130 # cache for a minute
130 # cache for a minute
131 old=( "$1"(mm+10) )
131 old=( "$1"(mm+10) )
132 (( $#old )) && return 0
132 (( $#old )) && return 0
133
133
134 return 1
134 return 1
135 }
135 }
136
136
137 _hg_get_commands() {
137 _hg_get_commands() {
138 typeset -ga _hg_cmd_list
138 typeset -ga _hg_cmd_list
139 typeset -gA _hg_alias_list
139 typeset -gA _hg_alias_list
140 local hline cmd cmdalias
140 local hline cmd cmdalias
141
141
142 _call_program hg hg debugcomplete -v | while read -A hline
142 _call_program hg hg debugcomplete -v | while read -A hline
143 do
143 do
144 cmd=$hline[1]
144 cmd=$hline[1]
145 _hg_cmd_list+=($cmd)
145 _hg_cmd_list+=($cmd)
146
146
147 for cmdalias in $hline[2,-1]
147 for cmdalias in $hline[2,-1]
148 do
148 do
149 _hg_cmd_list+=($cmdalias)
149 _hg_cmd_list+=($cmdalias)
150 _hg_alias_list+=($cmdalias $cmd)
150 _hg_alias_list+=($cmdalias $cmd)
151 done
151 done
152 done
152 done
153 }
153 }
154
154
155 _hg_commands() {
155 _hg_commands() {
156 (( $#_hg_cmd_list )) || _hg_get_commands
156 (( $#_hg_cmd_list )) || _hg_get_commands
157 _describe -t commands 'mercurial command' _hg_cmd_list
157 _describe -t commands 'mercurial command' _hg_cmd_list
158 }
158 }
159
159
160 _hg_revrange() {
160 _hg_revrange() {
161 compset -P 1 '*:'
161 compset -P 1 '*:'
162 _hg_labels "$@"
162 _hg_labels "$@"
163 }
163 }
164
164
165 _hg_labels() {
165 _hg_labels() {
166 _hg_tags "$@"
166 _hg_tags "$@"
167 _hg_bookmarks "$@"
167 _hg_bookmarks "$@"
168 _hg_branches "$@"
168 }
169 }
169
170
170 _hg_tags() {
171 _hg_tags() {
171 typeset -a tags
172 typeset -a tags
172 local tag rev
173 local tag rev
173
174
174 _hg_cmd tags | while read tag
175 _hg_cmd tags | while read tag
175 do
176 do
176 tags+=(${tag/ # [0-9]#:*})
177 tags+=(${tag/ # [0-9]#:*})
177 done
178 done
178 (( $#tags )) && _describe -t tags 'tags' tags
179 (( $#tags )) && _describe -t tags 'tags' tags
179 }
180 }
180
181
181 _hg_bookmarks() {
182 _hg_bookmarks() {
182 typeset -a bookmark bookmarks
183 typeset -a bookmark bookmarks
183
184
184 _hg_cmd bookmarks | while read -A bookmark
185 _hg_cmd bookmarks | while read -A bookmark
185 do
186 do
186 if test -z ${bookmark[-1]:#[0-9]*}
187 if test -z ${bookmark[-1]:#[0-9]*}
187 then
188 then
188 bookmarks+=($bookmark[-2])
189 bookmarks+=($bookmark[-2])
189 fi
190 fi
190 done
191 done
191 (( $#bookmarks )) && _describe -t bookmarks 'bookmarks' bookmarks
192 (( $#bookmarks )) && _describe -t bookmarks 'bookmarks' bookmarks
192 }
193 }
193
194
195 _hg_branches() {
196 typeset -a branches
197 local branch
198
199 _hg_cmd branches | while read branch
200 do
201 branches+=(${branch/ # [0-9]#:*})
202 done
203 (( $#branches )) && _describe -t branches 'branches' branches
204 }
205
194 # likely merge candidates
206 # likely merge candidates
195 _hg_mergerevs() {
207 _hg_mergerevs() {
196 typeset -a heads
208 typeset -a heads
197 local myrev
209 local myrev
198
210
199 heads=(${(f)"$(_hg_cmd heads --template '{rev}\\n')"})
211 heads=(${(f)"$(_hg_cmd heads --template '{rev}\\n')"})
200 # exclude own revision
212 # exclude own revision
201 myrev=$(_hg_cmd log -r . --template '{rev}\\n')
213 myrev=$(_hg_cmd log -r . --template '{rev}\\n')
202 heads=(${heads:#$myrev})
214 heads=(${heads:#$myrev})
203
215
204 (( $#heads )) && _describe -t heads 'heads' heads
216 (( $#heads )) && _describe -t heads 'heads' heads
205 }
217 }
206
218
207 _hg_files() {
219 _hg_files() {
208 if [[ -n "$_hg_root" ]]
220 if [[ -n "$_hg_root" ]]
209 then
221 then
210 [[ -d "$_hg_root/.hg" ]] || return
222 [[ -d "$_hg_root/.hg" ]] || return
211 case "$_hg_root" in
223 case "$_hg_root" in
212 /*)
224 /*)
213 _files -W $_hg_root
225 _files -W $_hg_root
214 ;;
226 ;;
215 *)
227 *)
216 _files -W $PWD/$_hg_root
228 _files -W $PWD/$_hg_root
217 ;;
229 ;;
218 esac
230 esac
219 else
231 else
220 _files
232 _files
221 fi
233 fi
222 }
234 }
223
235
224 _hg_status() {
236 _hg_status() {
225 [[ -d $PREFIX ]] || PREFIX=$PREFIX:h
237 [[ -d $PREFIX ]] || PREFIX=$PREFIX:h
226 status_files=(${(ps:\0:)"$(_hg_cmd status -0n$1 ./$PREFIX)"})
238 status_files=(${(ps:\0:)"$(_hg_cmd status -0n$1 ./$PREFIX)"})
227 }
239 }
228
240
229 _hg_unknown() {
241 _hg_unknown() {
230 typeset -a status_files
242 typeset -a status_files
231 _hg_status u
243 _hg_status u
232 _wanted files expl 'unknown files' _multi_parts / status_files
244 _wanted files expl 'unknown files' _multi_parts / status_files
233 }
245 }
234
246
235 _hg_missing() {
247 _hg_missing() {
236 typeset -a status_files
248 typeset -a status_files
237 _hg_status d
249 _hg_status d
238 _wanted files expl 'missing files' _multi_parts / status_files
250 _wanted files expl 'missing files' _multi_parts / status_files
239 }
251 }
240
252
241 _hg_modified() {
253 _hg_modified() {
242 typeset -a status_files
254 typeset -a status_files
243 _hg_status m
255 _hg_status m
244 _wanted files expl 'modified files' _multi_parts / status_files
256 _wanted files expl 'modified files' _multi_parts / status_files
245 }
257 }
246
258
247 _hg_resolve() {
259 _hg_resolve() {
248 local rstate rpath
260 local rstate rpath
249
261
250 [[ -d $PREFIX ]] || PREFIX=$PREFIX:h
262 [[ -d $PREFIX ]] || PREFIX=$PREFIX:h
251
263
252 _hg_cmd resolve -l ./$PREFIX | while read rstate rpath
264 _hg_cmd resolve -l ./$PREFIX | while read rstate rpath
253 do
265 do
254 [[ $rstate == 'R' ]] && resolved_files+=($rpath)
266 [[ $rstate == 'R' ]] && resolved_files+=($rpath)
255 [[ $rstate == 'U' ]] && unresolved_files+=($rpath)
267 [[ $rstate == 'U' ]] && unresolved_files+=($rpath)
256 done
268 done
257 }
269 }
258
270
259 _hg_resolved() {
271 _hg_resolved() {
260 typeset -a resolved_files unresolved_files
272 typeset -a resolved_files unresolved_files
261 _hg_resolve
273 _hg_resolve
262 _wanted files expl 'resolved files' _multi_parts / resolved_files
274 _wanted files expl 'resolved files' _multi_parts / resolved_files
263 }
275 }
264
276
265 _hg_unresolved() {
277 _hg_unresolved() {
266 typeset -a resolved_files unresolved_files
278 typeset -a resolved_files unresolved_files
267 _hg_resolve
279 _hg_resolve
268 _wanted files expl 'unresolved files' _multi_parts / unresolved_files
280 _wanted files expl 'unresolved files' _multi_parts / unresolved_files
269 }
281 }
270
282
271 _hg_config() {
283 _hg_config() {
272 typeset -a items
284 typeset -a items
273 items=(${${(%f)"$(_call_program hg hg showconfig)"}%%\=*})
285 items=(${${(%f)"$(_call_program hg hg showconfig)"}%%\=*})
274 (( $#items )) && _describe -t config 'config item' items
286 (( $#items )) && _describe -t config 'config item' items
275 }
287 }
276
288
277 _hg_addremove() {
289 _hg_addremove() {
278 _alternative 'files:unknown files:_hg_unknown' \
290 _alternative 'files:unknown files:_hg_unknown' \
279 'files:missing files:_hg_missing'
291 'files:missing files:_hg_missing'
280 }
292 }
281
293
282 _hg_ssh_urls() {
294 _hg_ssh_urls() {
283 if [[ -prefix */ ]]
295 if [[ -prefix */ ]]
284 then
296 then
285 if zstyle -T ":completion:${curcontext}:files" remote-access
297 if zstyle -T ":completion:${curcontext}:files" remote-access
286 then
298 then
287 local host=${PREFIX%%/*}
299 local host=${PREFIX%%/*}
288 typeset -a remdirs
300 typeset -a remdirs
289 compset -p $(( $#host + 1 ))
301 compset -p $(( $#host + 1 ))
290 local rempath=${(M)PREFIX##*/}
302 local rempath=${(M)PREFIX##*/}
291 local cacheid="hg:${host}-${rempath//\//_}"
303 local cacheid="hg:${host}-${rempath//\//_}"
292 cacheid=${cacheid%[-_]}
304 cacheid=${cacheid%[-_]}
293 compset -P '*/'
305 compset -P '*/'
294 if _cache_invalid "$cacheid" || ! _retrieve_cache "$cacheid"
306 if _cache_invalid "$cacheid" || ! _retrieve_cache "$cacheid"
295 then
307 then
296 remdirs=(${${(M)${(f)"$(_call_program files ssh -a -x $host ls -1FL "${(q)rempath}")"}##*/}%/})
308 remdirs=(${${(M)${(f)"$(_call_program files ssh -a -x $host ls -1FL "${(q)rempath}")"}##*/}%/})
297 _store_cache "$cacheid" remdirs
309 _store_cache "$cacheid" remdirs
298 fi
310 fi
299 _describe -t directories 'remote directory' remdirs -S/
311 _describe -t directories 'remote directory' remdirs -S/
300 else
312 else
301 _message 'remote directory'
313 _message 'remote directory'
302 fi
314 fi
303 else
315 else
304 if compset -P '*@'
316 if compset -P '*@'
305 then
317 then
306 _hosts -S/
318 _hosts -S/
307 else
319 else
308 _alternative 'hosts:remote host name:_hosts -S/' \
320 _alternative 'hosts:remote host name:_hosts -S/' \
309 'users:user:_users -S@'
321 'users:user:_users -S@'
310 fi
322 fi
311 fi
323 fi
312 }
324 }
313
325
314 _hg_urls() {
326 _hg_urls() {
315 if compset -P bundle://
327 if compset -P bundle://
316 then
328 then
317 _files
329 _files
318 elif compset -P ssh://
330 elif compset -P ssh://
319 then
331 then
320 _hg_ssh_urls
332 _hg_ssh_urls
321 elif [[ -prefix *: ]]
333 elif [[ -prefix *: ]]
322 then
334 then
323 _urls
335 _urls
324 else
336 else
325 local expl
337 local expl
326 compset -S '[^:]*'
338 compset -S '[^:]*'
327 _wanted url-schemas expl 'URL schema' compadd -S '' - \
339 _wanted url-schemas expl 'URL schema' compadd -S '' - \
328 http:// https:// ssh:// bundle://
340 http:// https:// ssh:// bundle://
329 fi
341 fi
330 }
342 }
331
343
332 _hg_paths() {
344 _hg_paths() {
333 typeset -a paths pnames
345 typeset -a paths pnames
334 _hg_cmd paths | while read -A pnames
346 _hg_cmd paths | while read -A pnames
335 do
347 do
336 paths+=($pnames[1])
348 paths+=($pnames[1])
337 done
349 done
338 (( $#paths )) && _describe -t path-aliases 'repository alias' paths
350 (( $#paths )) && _describe -t path-aliases 'repository alias' paths
339 }
351 }
340
352
341 _hg_remote() {
353 _hg_remote() {
342 _alternative 'path-aliases:repository alias:_hg_paths' \
354 _alternative 'path-aliases:repository alias:_hg_paths' \
343 'directories:directory:_files -/' \
355 'directories:directory:_files -/' \
344 'urls:URL:_hg_urls'
356 'urls:URL:_hg_urls'
345 }
357 }
346
358
347 _hg_clone_dest() {
359 _hg_clone_dest() {
348 _alternative 'directories:directory:_files -/' \
360 _alternative 'directories:directory:_files -/' \
349 'urls:URL:_hg_urls'
361 'urls:URL:_hg_urls'
350 }
362 }
351
363
352 # Common options
364 # Common options
353 _hg_global_opts=(
365 _hg_global_opts=(
354 '(--repository -R)'{-R+,--repository}'[repository root directory]:repository:_files -/'
366 '(--repository -R)'{-R+,--repository}'[repository root directory]:repository:_files -/'
355 '--cwd[change working directory]:new working directory:_files -/'
367 '--cwd[change working directory]:new working directory:_files -/'
356 '(--noninteractive -y)'{-y,--noninteractive}'[do not prompt, assume yes for any required answers]'
368 '(--noninteractive -y)'{-y,--noninteractive}'[do not prompt, assume yes for any required answers]'
357 '(--verbose -v)'{-v,--verbose}'[enable additional output]'
369 '(--verbose -v)'{-v,--verbose}'[enable additional output]'
358 '*--config[set/override config option]:defined config items:_hg_config'
370 '*--config[set/override config option]:defined config items:_hg_config'
359 '(--quiet -q)'{-q,--quiet}'[suppress output]'
371 '(--quiet -q)'{-q,--quiet}'[suppress output]'
360 '(--help -h)'{-h,--help}'[display help and exit]'
372 '(--help -h)'{-h,--help}'[display help and exit]'
361 '--debug[debug mode]'
373 '--debug[debug mode]'
362 '--debugger[start debugger]'
374 '--debugger[start debugger]'
363 '--encoding[set the charset encoding]'
375 '--encoding[set the charset encoding]'
364 '--encodingmode[set the charset encoding mode]'
376 '--encodingmode[set the charset encoding mode]'
365 '--lsprof[print improved command execution profile]'
377 '--lsprof[print improved command execution profile]'
366 '--traceback[print traceback on exception]'
378 '--traceback[print traceback on exception]'
367 '--time[time how long the command takes]'
379 '--time[time how long the command takes]'
368 '--profile[profile]'
380 '--profile[profile]'
369 '--version[output version information and exit]'
381 '--version[output version information and exit]'
370 )
382 )
371
383
372 _hg_pat_opts=(
384 _hg_pat_opts=(
373 '*'{-I+,--include}'[include names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/'
385 '*'{-I+,--include}'[include names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/'
374 '*'{-X+,--exclude}'[exclude names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/')
386 '*'{-X+,--exclude}'[exclude names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/')
375
387
376 _hg_diff_opts=(
388 _hg_diff_opts=(
377 '(--text -a)'{-a,--text}'[treat all files as text]'
389 '(--text -a)'{-a,--text}'[treat all files as text]'
378 '(--git -g)'{-g,--git}'[use git extended diff format]'
390 '(--git -g)'{-g,--git}'[use git extended diff format]'
379 "--nodates[omit dates from diff headers]")
391 "--nodates[omit dates from diff headers]")
380
392
381 _hg_dryrun_opts=(
393 _hg_dryrun_opts=(
382 '(--dry-run -n)'{-n,--dry-run}'[do not perform actions, just print output]')
394 '(--dry-run -n)'{-n,--dry-run}'[do not perform actions, just print output]')
383
395
384 _hg_style_opts=(
396 _hg_style_opts=(
385 '--style[display using template map file]:'
397 '--style[display using template map file]:'
386 '--template[display with template]:')
398 '--template[display with template]:')
387
399
388 _hg_commit_opts=(
400 _hg_commit_opts=(
389 '(-m --message -l --logfile --edit -e)'{-e,--edit}'[edit commit message]'
401 '(-m --message -l --logfile --edit -e)'{-e,--edit}'[edit commit message]'
390 '(-e --edit -l --logfile --message -m)'{-m+,--message}'[use <text> as commit message]:message:'
402 '(-e --edit -l --logfile --message -m)'{-m+,--message}'[use <text> as commit message]:message:'
391 '(-e --edit -m --message --logfile -l)'{-l+,--logfile}'[read the commit message from <file>]:log file:_files')
403 '(-e --edit -m --message --logfile -l)'{-l+,--logfile}'[read the commit message from <file>]:log file:_files')
392
404
393 _hg_remote_opts=(
405 _hg_remote_opts=(
394 '(--ssh -e)'{-e+,--ssh}'[specify ssh command to use]:'
406 '(--ssh -e)'{-e+,--ssh}'[specify ssh command to use]:'
395 '--remotecmd[specify hg command to run on the remote side]:')
407 '--remotecmd[specify hg command to run on the remote side]:')
396
408
397 _hg_cmd() {
409 _hg_cmd() {
398 _call_program hg HGPLAIN=1 hg "$_hg_cmd_globals[@]" "$@" 2> /dev/null
410 _call_program hg HGPLAIN=1 hg "$_hg_cmd_globals[@]" "$@" 2> /dev/null
399 }
411 }
400
412
401 _hg_cmd_add() {
413 _hg_cmd_add() {
402 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
414 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
403 '*:unknown files:_hg_unknown'
415 '*:unknown files:_hg_unknown'
404 }
416 }
405
417
406 _hg_cmd_addremove() {
418 _hg_cmd_addremove() {
407 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
419 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
408 '(--similarity -s)'{-s+,--similarity}'[guess renamed files by similarity (0<=s<=100)]:' \
420 '(--similarity -s)'{-s+,--similarity}'[guess renamed files by similarity (0<=s<=100)]:' \
409 '*:unknown or missing files:_hg_addremove'
421 '*:unknown or missing files:_hg_addremove'
410 }
422 }
411
423
412 _hg_cmd_annotate() {
424 _hg_cmd_annotate() {
413 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
425 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
414 '(--rev -r)'{-r+,--rev}'[annotate the specified revision]:revision:_hg_labels' \
426 '(--rev -r)'{-r+,--rev}'[annotate the specified revision]:revision:_hg_labels' \
415 '(--follow -f)'{-f,--follow}'[follow file copies and renames]' \
427 '(--follow -f)'{-f,--follow}'[follow file copies and renames]' \
416 '(--text -a)'{-a,--text}'[treat all files as text]' \
428 '(--text -a)'{-a,--text}'[treat all files as text]' \
417 '(--user -u)'{-u,--user}'[list the author]' \
429 '(--user -u)'{-u,--user}'[list the author]' \
418 '(--date -d)'{-d,--date}'[list the date]' \
430 '(--date -d)'{-d,--date}'[list the date]' \
419 '(--number -n)'{-n,--number}'[list the revision number (default)]' \
431 '(--number -n)'{-n,--number}'[list the revision number (default)]' \
420 '(--changeset -c)'{-c,--changeset}'[list the changeset]' \
432 '(--changeset -c)'{-c,--changeset}'[list the changeset]' \
421 '*:files:_hg_files'
433 '*:files:_hg_files'
422 }
434 }
423
435
424 _hg_cmd_archive() {
436 _hg_cmd_archive() {
425 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
437 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
426 '--no-decode[do not pass files through decoders]' \
438 '--no-decode[do not pass files through decoders]' \
427 '(--prefix -p)'{-p+,--prefix}'[directory prefix for files in archive]:' \
439 '(--prefix -p)'{-p+,--prefix}'[directory prefix for files in archive]:' \
428 '(--rev -r)'{-r+,--rev}'[revision to distribute]:revision:_hg_labels' \
440 '(--rev -r)'{-r+,--rev}'[revision to distribute]:revision:_hg_labels' \
429 '(--type -t)'{-t+,--type}'[type of distribution to create]:archive type:(files tar tbz2 tgz uzip zip)' \
441 '(--type -t)'{-t+,--type}'[type of distribution to create]:archive type:(files tar tbz2 tgz uzip zip)' \
430 '*:destination:_files'
442 '*:destination:_files'
431 }
443 }
432
444
433 _hg_cmd_backout() {
445 _hg_cmd_backout() {
434 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
446 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
435 '--merge[merge with old dirstate parent after backout]' \
447 '--merge[merge with old dirstate parent after backout]' \
436 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
448 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
437 '--parent[parent to choose when backing out merge]' \
449 '--parent[parent to choose when backing out merge]' \
438 '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
450 '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
439 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
451 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
440 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
452 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
441 '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt'
453 '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt'
442 }
454 }
443
455
444 _hg_cmd_bisect() {
456 _hg_cmd_bisect() {
445 _arguments -s -w : $_hg_global_opts \
457 _arguments -s -w : $_hg_global_opts \
446 '(-)'{-r,--reset}'[reset bisect state]' \
458 '(-)'{-r,--reset}'[reset bisect state]' \
447 '(--good -g --bad -b --skip -s --reset -r)'{-g,--good}'[mark changeset good]'::revision:_hg_labels \
459 '(--good -g --bad -b --skip -s --reset -r)'{-g,--good}'[mark changeset good]'::revision:_hg_labels \
448 '(--good -g --bad -b --skip -s --reset -r)'{-b,--bad}'[mark changeset bad]'::revision:_hg_labels \
460 '(--good -g --bad -b --skip -s --reset -r)'{-b,--bad}'[mark changeset bad]'::revision:_hg_labels \
449 '(--good -g --bad -b --skip -s --reset -r)'{-s,--skip}'[skip testing changeset]' \
461 '(--good -g --bad -b --skip -s --reset -r)'{-s,--skip}'[skip testing changeset]' \
450 '(--command -c --noupdate -U)'{-c+,--command}'[use command to check changeset state]':commands:_command_names \
462 '(--command -c --noupdate -U)'{-c+,--command}'[use command to check changeset state]':commands:_command_names \
451 '(--command -c --noupdate -U)'{-U,--noupdate}'[do not update to target]'
463 '(--command -c --noupdate -U)'{-U,--noupdate}'[do not update to target]'
452 }
464 }
453
465
454 _hg_cmd_bookmarks() {
466 _hg_cmd_bookmarks() {
455 _arguments -s -w : $_hg_global_opts \
467 _arguments -s -w : $_hg_global_opts \
456 '(--force -f)'{-f,--force}'[force]' \
468 '(--force -f)'{-f,--force}'[force]' \
457 '(--rev -r --delete -d --rename -m)'{-r+,--rev}'[revision]:revision:_hg_labels' \
469 '(--rev -r --delete -d --rename -m)'{-r+,--rev}'[revision]:revision:_hg_labels' \
458 '(--rev -r --delete -d --rename -m)'{-d,--delete}'[delete a given bookmark]' \
470 '(--rev -r --delete -d --rename -m)'{-d,--delete}'[delete a given bookmark]' \
459 '(--rev -r --delete -d --rename -m)'{-m+,--rename}'[rename a given bookmark]:bookmark:_hg_bookmarks' \
471 '(--rev -r --delete -d --rename -m)'{-m+,--rename}'[rename a given bookmark]:bookmark:_hg_bookmarks' \
460 ':bookmark:_hg_bookmarks'
472 ':bookmark:_hg_bookmarks'
461 }
473 }
462
474
463 _hg_cmd_branch() {
475 _hg_cmd_branch() {
464 _arguments -s -w : $_hg_global_opts \
476 _arguments -s -w : $_hg_global_opts \
465 '(--force -f)'{-f,--force}'[set branch name even if it shadows an existing branch]' \
477 '(--force -f)'{-f,--force}'[set branch name even if it shadows an existing branch]' \
466 '(--clean -C)'{-C,--clean}'[reset branch name to parent branch name]'
478 '(--clean -C)'{-C,--clean}'[reset branch name to parent branch name]'
467 }
479 }
468
480
469 _hg_cmd_branches() {
481 _hg_cmd_branches() {
470 _arguments -s -w : $_hg_global_opts \
482 _arguments -s -w : $_hg_global_opts \
471 '(--active -a)'{-a,--active}'[show only branches that have unmerge heads]'
483 '(--active -a)'{-a,--active}'[show only branches that have unmerge heads]'
472 }
484 }
473
485
474 _hg_cmd_bundle() {
486 _hg_cmd_bundle() {
475 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
487 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
476 '(--force -f)'{-f,--force}'[run even when remote repository is unrelated]' \
488 '(--force -f)'{-f,--force}'[run even when remote repository is unrelated]' \
477 '(2)*--base[a base changeset to specify instead of a destination]:revision:_hg_labels' \
489 '(2)*--base[a base changeset to specify instead of a destination]:revision:_hg_labels' \
478 ':output file:_files' \
490 ':output file:_files' \
479 ':destination repository:_files -/'
491 ':destination repository:_files -/'
480 }
492 }
481
493
482 _hg_cmd_cat() {
494 _hg_cmd_cat() {
483 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
495 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
484 '(--output -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \
496 '(--output -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \
485 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
497 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
486 '*:file:_hg_files'
498 '*:file:_hg_files'
487 }
499 }
488
500
489 _hg_cmd_clone() {
501 _hg_cmd_clone() {
490 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
502 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
491 '(--noupdate -U)'{-U,--noupdate}'[do not update the new working directory]' \
503 '(--noupdate -U)'{-U,--noupdate}'[do not update the new working directory]' \
492 '(--rev -r)'{-r+,--rev}'[a changeset you would like to have after cloning]:' \
504 '(--rev -r)'{-r+,--rev}'[a changeset you would like to have after cloning]:' \
493 '--uncompressed[use uncompressed transfer (fast over LAN)]' \
505 '--uncompressed[use uncompressed transfer (fast over LAN)]' \
494 ':source repository:_hg_remote' \
506 ':source repository:_hg_remote' \
495 ':destination:_hg_clone_dest'
507 ':destination:_hg_clone_dest'
496 }
508 }
497
509
498 _hg_cmd_commit() {
510 _hg_cmd_commit() {
499 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
511 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
500 '(--addremove -A)'{-A,--addremove}'[mark new/missing files as added/removed before committing]' \
512 '(--addremove -A)'{-A,--addremove}'[mark new/missing files as added/removed before committing]' \
501 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
513 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
502 '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt' \
514 '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt' \
503 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
515 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
504 '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
516 '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
505 '*:file:_hg_files'
517 '*:file:_hg_files'
506 }
518 }
507
519
508 _hg_cmd_copy() {
520 _hg_cmd_copy() {
509 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
521 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
510 '(--after -A)'{-A,--after}'[record a copy that has already occurred]' \
522 '(--after -A)'{-A,--after}'[record a copy that has already occurred]' \
511 '(--force -f)'{-f,--force}'[forcibly copy over an existing managed file]' \
523 '(--force -f)'{-f,--force}'[forcibly copy over an existing managed file]' \
512 '*:file:_hg_files'
524 '*:file:_hg_files'
513 }
525 }
514
526
515 _hg_cmd_diff() {
527 _hg_cmd_diff() {
516 typeset -A opt_args
528 typeset -A opt_args
517 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_diff_opts \
529 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_diff_opts \
518 '*'{-r,--rev}'+[revision]:revision:_hg_revrange' \
530 '*'{-r,--rev}'+[revision]:revision:_hg_revrange' \
519 '(--show-function -p)'{-p,--show-function}'[show which function each change is in]' \
531 '(--show-function -p)'{-p,--show-function}'[show which function each change is in]' \
520 '(--ignore-all-space -w)'{-w,--ignore-all-space}'[ignore white space when comparing lines]' \
532 '(--ignore-all-space -w)'{-w,--ignore-all-space}'[ignore white space when comparing lines]' \
521 '(--ignore-space-change -b)'{-b,--ignore-space-change}'[ignore changes in the amount of white space]' \
533 '(--ignore-space-change -b)'{-b,--ignore-space-change}'[ignore changes in the amount of white space]' \
522 '(--ignore-blank-lines -B)'{-B,--ignore-blank-lines}'[ignore changes whose lines are all blank]' \
534 '(--ignore-blank-lines -B)'{-B,--ignore-blank-lines}'[ignore changes whose lines are all blank]' \
523 '*:file:->diff_files'
535 '*:file:->diff_files'
524
536
525 if [[ $state == 'diff_files' ]]
537 if [[ $state == 'diff_files' ]]
526 then
538 then
527 if [[ -n $opt_args[-r] ]]
539 if [[ -n $opt_args[-r] ]]
528 then
540 then
529 _hg_files
541 _hg_files
530 else
542 else
531 _hg_modified
543 _hg_modified
532 fi
544 fi
533 fi
545 fi
534 }
546 }
535
547
536 _hg_cmd_export() {
548 _hg_cmd_export() {
537 _arguments -s -w : $_hg_global_opts $_hg_diff_opts \
549 _arguments -s -w : $_hg_global_opts $_hg_diff_opts \
538 '(--outout -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \
550 '(--outout -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \
539 '--switch-parent[diff against the second parent]' \
551 '--switch-parent[diff against the second parent]' \
540 '*:revision:_hg_labels'
552 '*:revision:_hg_labels'
541 }
553 }
542
554
543 _hg_cmd_grep() {
555 _hg_cmd_grep() {
544 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
556 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
545 '(--print0 -0)'{-0,--print0}'[end filenames with NUL]' \
557 '(--print0 -0)'{-0,--print0}'[end filenames with NUL]' \
546 '--all[print all revisions with matches]' \
558 '--all[print all revisions with matches]' \
547 '(--follow -f)'{-f,--follow}'[follow changeset or file history]' \
559 '(--follow -f)'{-f,--follow}'[follow changeset or file history]' \
548 '(--ignore-case -i)'{-i,--ignore-case}'[ignore case when matching]' \
560 '(--ignore-case -i)'{-i,--ignore-case}'[ignore case when matching]' \
549 '(--files-with-matches -l)'{-l,--files-with-matches}'[print only filenames and revs that match]' \
561 '(--files-with-matches -l)'{-l,--files-with-matches}'[print only filenames and revs that match]' \
550 '(--line-number -n)'{-n,--line-number}'[print matching line numbers]' \
562 '(--line-number -n)'{-n,--line-number}'[print matching line numbers]' \
551 '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
563 '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
552 '(--user -u)'{-u,--user}'[print user who committed change]' \
564 '(--user -u)'{-u,--user}'[print user who committed change]' \
553 '1:search pattern:' \
565 '1:search pattern:' \
554 '*:files:_hg_files'
566 '*:files:_hg_files'
555 }
567 }
556
568
557 _hg_cmd_heads() {
569 _hg_cmd_heads() {
558 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
570 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
559 '(--rev -r)'{-r+,--rev}'[show only heads which are descendants of rev]:revision:_hg_labels'
571 '(--rev -r)'{-r+,--rev}'[show only heads which are descendants of rev]:revision:_hg_labels'
560 }
572 }
561
573
562 _hg_cmd_help() {
574 _hg_cmd_help() {
563 _arguments -s -w : $_hg_global_opts \
575 _arguments -s -w : $_hg_global_opts \
564 '*:mercurial command:_hg_commands'
576 '*:mercurial command:_hg_commands'
565 }
577 }
566
578
567 _hg_cmd_identify() {
579 _hg_cmd_identify() {
568 _arguments -s -w : $_hg_global_opts \
580 _arguments -s -w : $_hg_global_opts \
569 '(--rev -r)'{-r+,--rev}'[identify the specified rev]:revision:_hg_labels' \
581 '(--rev -r)'{-r+,--rev}'[identify the specified rev]:revision:_hg_labels' \
570 '(--num -n)'{-n+,--num}'[show local revision number]' \
582 '(--num -n)'{-n+,--num}'[show local revision number]' \
571 '(--id -i)'{-i+,--id}'[show global revision id]' \
583 '(--id -i)'{-i+,--id}'[show global revision id]' \
572 '(--branch -b)'{-b+,--branch}'[show branch]' \
584 '(--branch -b)'{-b+,--branch}'[show branch]' \
573 '(--tags -t)'{-t+,--tags}'[show tags]'
585 '(--tags -t)'{-t+,--tags}'[show tags]'
574 }
586 }
575
587
576 _hg_cmd_import() {
588 _hg_cmd_import() {
577 _arguments -s -w : $_hg_global_opts \
589 _arguments -s -w : $_hg_global_opts \
578 '(--strip -p)'{-p+,--strip}'[directory strip option for patch (default: 1)]:count:' \
590 '(--strip -p)'{-p+,--strip}'[directory strip option for patch (default: 1)]:count:' \
579 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
591 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
580 '(--force -f)'{-f,--force}'[skip check for outstanding uncommitted changes]' \
592 '(--force -f)'{-f,--force}'[skip check for outstanding uncommitted changes]' \
581 '*:patch:_files'
593 '*:patch:_files'
582 }
594 }
583
595
584 _hg_cmd_incoming() {
596 _hg_cmd_incoming() {
585 _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \
597 _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \
586 '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
598 '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
587 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
599 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
588 '(--patch -p)'{-p,--patch}'[show patch]' \
600 '(--patch -p)'{-p,--patch}'[show patch]' \
589 '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_tags' \
601 '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_tags' \
590 '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
602 '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
591 '--bundle[file to store the bundles into]:bundle file:_files' \
603 '--bundle[file to store the bundles into]:bundle file:_files' \
592 ':source:_hg_remote'
604 ':source:_hg_remote'
593 }
605 }
594
606
595 _hg_cmd_init() {
607 _hg_cmd_init() {
596 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
608 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
597 ':dir:_files -/'
609 ':dir:_files -/'
598 }
610 }
599
611
600 _hg_cmd_locate() {
612 _hg_cmd_locate() {
601 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
613 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
602 '(--rev -r)'{-r+,--rev}'[search repository as it stood at revision]:revision:_hg_labels' \
614 '(--rev -r)'{-r+,--rev}'[search repository as it stood at revision]:revision:_hg_labels' \
603 '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs]' \
615 '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs]' \
604 '(--fullpath -f)'{-f,--fullpath}'[print complete paths]' \
616 '(--fullpath -f)'{-f,--fullpath}'[print complete paths]' \
605 '*:search pattern:_hg_files'
617 '*:search pattern:_hg_files'
606 }
618 }
607
619
608 _hg_cmd_log() {
620 _hg_cmd_log() {
609 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_style_opts \
621 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_style_opts \
610 '(--follow --follow-first -f)'{-f,--follow}'[follow changeset or history]' \
622 '(--follow --follow-first -f)'{-f,--follow}'[follow changeset or history]' \
611 '(-f --follow)--follow-first[only follow the first parent of merge changesets]' \
623 '(-f --follow)--follow-first[only follow the first parent of merge changesets]' \
612 '(--copies -C)'{-C,--copies}'[show copied files]' \
624 '(--copies -C)'{-C,--copies}'[show copied files]' \
613 '(--keyword -k)'{-k+,--keyword}'[search for a keyword]:' \
625 '(--keyword -k)'{-k+,--keyword}'[search for a keyword]:' \
614 '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' \
626 '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' \
615 '*'{-r,--rev}'[show the specified revision or range]:revision:_hg_revrange' \
627 '*'{-r,--rev}'[show the specified revision or range]:revision:_hg_revrange' \
616 '(--no-merges -M)'{-M,--no-merges}'[do not show merges]' \
628 '(--no-merges -M)'{-M,--no-merges}'[do not show merges]' \
617 '(--only-merges -m)'{-m,--only-merges}'[show only merges]' \
629 '(--only-merges -m)'{-m,--only-merges}'[show only merges]' \
618 '(--patch -p)'{-p,--patch}'[show patch]' \
630 '(--patch -p)'{-p,--patch}'[show patch]' \
619 '(--prune -P)'{-P+,--prune}'[do not display revision or any of its ancestors]:revision:_hg_labels' \
631 '(--prune -P)'{-P+,--prune}'[do not display revision or any of its ancestors]:revision:_hg_labels' \
632 '(--branch -b)'{-b+,--branch}'[show changesets within the given named branch]:branch:_hg_branches' \
620 '*:files:_hg_files'
633 '*:files:_hg_files'
621 }
634 }
622
635
623 _hg_cmd_manifest() {
636 _hg_cmd_manifest() {
624 _arguments -s -w : $_hg_global_opts \
637 _arguments -s -w : $_hg_global_opts \
625 ':revision:_hg_labels'
638 ':revision:_hg_labels'
626 }
639 }
627
640
628 _hg_cmd_merge() {
641 _hg_cmd_merge() {
629 _arguments -s -w : $_hg_global_opts \
642 _arguments -s -w : $_hg_global_opts \
630 '(--force -f)'{-f,--force}'[force a merge with outstanding changes]' \
643 '(--force -f)'{-f,--force}'[force a merge with outstanding changes]' \
631 '(--rev -r 1)'{-r,--rev}'[revision to merge]:revision:_hg_mergerevs' \
644 '(--rev -r 1)'{-r,--rev}'[revision to merge]:revision:_hg_mergerevs' \
632 '(--preview -P)'{-P,--preview}'[review revisions to merge (no merge is performed)]' \
645 '(--preview -P)'{-P,--preview}'[review revisions to merge (no merge is performed)]' \
633 ':revision:_hg_mergerevs'
646 ':revision:_hg_mergerevs'
634 }
647 }
635
648
636 _hg_cmd_outgoing() {
649 _hg_cmd_outgoing() {
637 _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \
650 _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \
638 '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
651 '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
639 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
652 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
640 '(--patch -p)'{-p,--patch}'[show patch]' \
653 '(--patch -p)'{-p,--patch}'[show patch]' \
641 '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]' \
654 '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]' \
642 '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
655 '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
643 ':destination:_hg_remote'
656 ':destination:_hg_remote'
644 }
657 }
645
658
646 _hg_cmd_parents() {
659 _hg_cmd_parents() {
647 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
660 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
648 '(--rev -r)'{-r+,--rev}'[show parents of the specified rev]:revision:_hg_labels' \
661 '(--rev -r)'{-r+,--rev}'[show parents of the specified rev]:revision:_hg_labels' \
649 ':last modified file:_hg_files'
662 ':last modified file:_hg_files'
650 }
663 }
651
664
652 _hg_cmd_paths() {
665 _hg_cmd_paths() {
653 _arguments -s -w : $_hg_global_opts \
666 _arguments -s -w : $_hg_global_opts \
654 ':path:_hg_paths'
667 ':path:_hg_paths'
655 }
668 }
656
669
657 _hg_cmd_pull() {
670 _hg_cmd_pull() {
658 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
671 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
659 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
672 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
660 '(--update -u)'{-u,--update}'[update to new tip if changesets were pulled]' \
673 '(--update -u)'{-u,--update}'[update to new tip if changesets were pulled]' \
661 '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:' \
674 '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:' \
662 ':source:_hg_remote'
675 ':source:_hg_remote'
663 }
676 }
664
677
665 _hg_cmd_push() {
678 _hg_cmd_push() {
666 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
679 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
667 '(--force -f)'{-f,--force}'[force push]' \
680 '(--force -f)'{-f,--force}'[force push]' \
668 '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]:revision:_hg_labels' \
681 '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]:revision:_hg_labels' \
669 ':destination:_hg_remote'
682 ':destination:_hg_remote'
670 }
683 }
671
684
672 _hg_cmd_remove() {
685 _hg_cmd_remove() {
673 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
686 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
674 '(--after -A)'{-A,--after}'[record remove that has already occurred]' \
687 '(--after -A)'{-A,--after}'[record remove that has already occurred]' \
675 '(--force -f)'{-f,--force}'[remove file even if modified]' \
688 '(--force -f)'{-f,--force}'[remove file even if modified]' \
676 '*:file:_hg_files'
689 '*:file:_hg_files'
677 }
690 }
678
691
679 _hg_cmd_rename() {
692 _hg_cmd_rename() {
680 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
693 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
681 '(--after -A)'{-A,--after}'[record a rename that has already occurred]' \
694 '(--after -A)'{-A,--after}'[record a rename that has already occurred]' \
682 '(--force -f)'{-f,--force}'[forcibly copy over an existing managed file]' \
695 '(--force -f)'{-f,--force}'[forcibly copy over an existing managed file]' \
683 '*:file:_hg_files'
696 '*:file:_hg_files'
684 }
697 }
685
698
686 _hg_cmd_resolve() {
699 _hg_cmd_resolve() {
687 local context state line
700 local context state line
688 typeset -A opt_args
701 typeset -A opt_args
689
702
690 _arguments -s -w : $_hg_global_opts \
703 _arguments -s -w : $_hg_global_opts \
691 '(--list -l --mark -m --unmark -u)'{-l,--list}'[list state of files needing merge]:*:merged files:->resolve_files' \
704 '(--list -l --mark -m --unmark -u)'{-l,--list}'[list state of files needing merge]:*:merged files:->resolve_files' \
692 '(--mark -m --list -l --unmark -u)'{-m,--mark}'[mark files as resolved]:*:unresolved files:_hg_unresolved' \
705 '(--mark -m --list -l --unmark -u)'{-m,--mark}'[mark files as resolved]:*:unresolved files:_hg_unresolved' \
693 '(--unmark -u --list -l --mark -m)'{-u,--unmark}'[unmark files as resolved]:*:resolved files:_hg_resolved' \
706 '(--unmark -u --list -l --mark -m)'{-u,--unmark}'[unmark files as resolved]:*:resolved files:_hg_resolved' \
694 '*:file:_hg_unresolved'
707 '*:file:_hg_unresolved'
695
708
696 if [[ $state == 'resolve_files' ]]
709 if [[ $state == 'resolve_files' ]]
697 then
710 then
698 _alternative 'files:resolved files:_hg_resolved' \
711 _alternative 'files:resolved files:_hg_resolved' \
699 'files:unresolved files:_hg_unresolved'
712 'files:unresolved files:_hg_unresolved'
700 fi
713 fi
701 }
714 }
702
715
703 _hg_cmd_revert() {
716 _hg_cmd_revert() {
704 local context state line
717 local context state line
705 typeset -A opt_args
718 typeset -A opt_args
706
719
707 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
720 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
708 '(--all -a :)'{-a,--all}'[revert all changes when no arguments given]' \
721 '(--all -a :)'{-a,--all}'[revert all changes when no arguments given]' \
709 '(--rev -r)'{-r+,--rev}'[revision to revert to]:revision:_hg_labels' \
722 '(--rev -r)'{-r+,--rev}'[revision to revert to]:revision:_hg_labels' \
710 '--no-backup[do not save backup copies of files]' \
723 '--no-backup[do not save backup copies of files]' \
711 '*:file:->diff_files'
724 '*:file:->diff_files'
712
725
713 if [[ $state == 'diff_files' ]]
726 if [[ $state == 'diff_files' ]]
714 then
727 then
715 if [[ -n $opt_args[-r] ]]
728 if [[ -n $opt_args[-r] ]]
716 then
729 then
717 _hg_files
730 _hg_files
718 else
731 else
719 typeset -a status_files
732 typeset -a status_files
720 _hg_status mard
733 _hg_status mard
721 _wanted files expl 'modified, added, removed or deleted file' _multi_parts / status_files
734 _wanted files expl 'modified, added, removed or deleted file' _multi_parts / status_files
722 fi
735 fi
723 fi
736 fi
724 }
737 }
725
738
726 _hg_cmd_serve() {
739 _hg_cmd_serve() {
727 _arguments -s -w : $_hg_global_opts \
740 _arguments -s -w : $_hg_global_opts \
728 '(--accesslog -A)'{-A+,--accesslog}'[name of access log file]:log file:_files' \
741 '(--accesslog -A)'{-A+,--accesslog}'[name of access log file]:log file:_files' \
729 '(--errorlog -E)'{-E+,--errorlog}'[name of error log file]:log file:_files' \
742 '(--errorlog -E)'{-E+,--errorlog}'[name of error log file]:log file:_files' \
730 '(--daemon -d)'{-d,--daemon}'[run server in background]' \
743 '(--daemon -d)'{-d,--daemon}'[run server in background]' \
731 '(--port -p)'{-p+,--port}'[listen port]:listen port:' \
744 '(--port -p)'{-p+,--port}'[listen port]:listen port:' \
732 '(--address -a)'{-a+,--address}'[interface address]:interface address:' \
745 '(--address -a)'{-a+,--address}'[interface address]:interface address:' \
733 '(--name -n)'{-n+,--name}'[name to show in web pages]:repository name:' \
746 '(--name -n)'{-n+,--name}'[name to show in web pages]:repository name:' \
734 '(--templates -t)'{-t,--templates}'[web template directory]:template dir:_files -/' \
747 '(--templates -t)'{-t,--templates}'[web template directory]:template dir:_files -/' \
735 '--style[web template style]:style' \
748 '--style[web template style]:style' \
736 '--stdio[for remote clients]' \
749 '--stdio[for remote clients]' \
737 '(--ipv6 -6)'{-6,--ipv6}'[use IPv6 in addition to IPv4]'
750 '(--ipv6 -6)'{-6,--ipv6}'[use IPv6 in addition to IPv4]'
738 }
751 }
739
752
740 _hg_cmd_showconfig() {
753 _hg_cmd_showconfig() {
741 _arguments -s -w : $_hg_global_opts \
754 _arguments -s -w : $_hg_global_opts \
742 '(--untrusted -u)'{-u+,--untrusted}'[show untrusted configuration options]' \
755 '(--untrusted -u)'{-u+,--untrusted}'[show untrusted configuration options]' \
743 ':config item:_hg_config'
756 ':config item:_hg_config'
744 }
757 }
745
758
746 _hg_cmd_status() {
759 _hg_cmd_status() {
747 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
760 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
748 '(--all -A)'{-A,--all}'[show status of all files]' \
761 '(--all -A)'{-A,--all}'[show status of all files]' \
749 '(--modified -m)'{-m,--modified}'[show only modified files]' \
762 '(--modified -m)'{-m,--modified}'[show only modified files]' \
750 '(--added -a)'{-a,--added}'[show only added files]' \
763 '(--added -a)'{-a,--added}'[show only added files]' \
751 '(--removed -r)'{-r,--removed}'[show only removed files]' \
764 '(--removed -r)'{-r,--removed}'[show only removed files]' \
752 '(--deleted -d)'{-d,--deleted}'[show only deleted (but tracked) files]' \
765 '(--deleted -d)'{-d,--deleted}'[show only deleted (but tracked) files]' \
753 '(--clean -c)'{-c,--clean}'[show only files without changes]' \
766 '(--clean -c)'{-c,--clean}'[show only files without changes]' \
754 '(--unknown -u)'{-u,--unknown}'[show only unknown files]' \
767 '(--unknown -u)'{-u,--unknown}'[show only unknown files]' \
755 '(--ignored -i)'{-i,--ignored}'[show ignored files]' \
768 '(--ignored -i)'{-i,--ignored}'[show ignored files]' \
756 '(--no-status -n)'{-n,--no-status}'[hide status prefix]' \
769 '(--no-status -n)'{-n,--no-status}'[hide status prefix]' \
757 '(--copies -C)'{-C,--copies}'[show source of copied files]' \
770 '(--copies -C)'{-C,--copies}'[show source of copied files]' \
758 '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs]' \
771 '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs]' \
759 '--rev[show difference from revision]:revision:_hg_labels' \
772 '--rev[show difference from revision]:revision:_hg_labels' \
760 '*:files:_files'
773 '*:files:_files'
761 }
774 }
762
775
763 _hg_cmd_summary() {
776 _hg_cmd_summary() {
764 _arguments -s -w : $_hg_global_opts \
777 _arguments -s -w : $_hg_global_opts \
765 '--remote[check for push and pull]'
778 '--remote[check for push and pull]'
766 }
779 }
767
780
768 _hg_cmd_tag() {
781 _hg_cmd_tag() {
769 _arguments -s -w : $_hg_global_opts \
782 _arguments -s -w : $_hg_global_opts \
770 '(--local -l)'{-l,--local}'[make the tag local]' \
783 '(--local -l)'{-l,--local}'[make the tag local]' \
771 '(--message -m)'{-m+,--message}'[message for tag commit log entry]:message:' \
784 '(--message -m)'{-m+,--message}'[message for tag commit log entry]:message:' \
772 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
785 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
773 '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
786 '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
774 '(--rev -r)'{-r+,--rev}'[revision to tag]:revision:_hg_labels' \
787 '(--rev -r)'{-r+,--rev}'[revision to tag]:revision:_hg_labels' \
775 ':tag name:'
788 ':tag name:'
776 }
789 }
777
790
778 _hg_cmd_tip() {
791 _hg_cmd_tip() {
779 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
792 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
780 '(--patch -p)'{-p,--patch}'[show patch]'
793 '(--patch -p)'{-p,--patch}'[show patch]'
781 }
794 }
782
795
783 _hg_cmd_unbundle() {
796 _hg_cmd_unbundle() {
784 _arguments -s -w : $_hg_global_opts \
797 _arguments -s -w : $_hg_global_opts \
785 '(--update -u)'{-u,--update}'[update to new tip if changesets were unbundled]' \
798 '(--update -u)'{-u,--update}'[update to new tip if changesets were unbundled]' \
786 ':files:_files'
799 ':files:_files'
787 }
800 }
788
801
789 _hg_cmd_update() {
802 _hg_cmd_update() {
790 _arguments -s -w : $_hg_global_opts \
803 _arguments -s -w : $_hg_global_opts \
791 '(--clean -C)'{-C,--clean}'[overwrite locally modified files]' \
804 '(--clean -C)'{-C,--clean}'[overwrite locally modified files]' \
792 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
805 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
793 ':revision:_hg_labels'
806 ':revision:_hg_labels'
794 }
807 }
795
808
796 ## extensions ##
809 ## extensions ##
797
810
798 # HGK
811 # HGK
799 _hg_cmd_view() {
812 _hg_cmd_view() {
800 _arguments -s -w : $_hg_global_opts \
813 _arguments -s -w : $_hg_global_opts \
801 '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' \
814 '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' \
802 ':revision range:_hg_tags'
815 ':revision range:_hg_tags'
803 }
816 }
804
817
805 # MQ
818 # MQ
806 _hg_qseries() {
819 _hg_qseries() {
807 typeset -a patches
820 typeset -a patches
808 patches=(${(f)"$(_hg_cmd qseries)"})
821 patches=(${(f)"$(_hg_cmd qseries)"})
809 (( $#patches )) && _describe -t hg-patches 'patches' patches
822 (( $#patches )) && _describe -t hg-patches 'patches' patches
810 }
823 }
811
824
812 _hg_qapplied() {
825 _hg_qapplied() {
813 typeset -a patches
826 typeset -a patches
814 patches=(${(f)"$(_hg_cmd qapplied)"})
827 patches=(${(f)"$(_hg_cmd qapplied)"})
815 if (( $#patches ))
828 if (( $#patches ))
816 then
829 then
817 patches+=(qbase qtip)
830 patches+=(qbase qtip)
818 _describe -t hg-applied-patches 'applied patches' patches
831 _describe -t hg-applied-patches 'applied patches' patches
819 fi
832 fi
820 }
833 }
821
834
822 _hg_qunapplied() {
835 _hg_qunapplied() {
823 typeset -a patches
836 typeset -a patches
824 patches=(${(f)"$(_hg_cmd qunapplied)"})
837 patches=(${(f)"$(_hg_cmd qunapplied)"})
825 (( $#patches )) && _describe -t hg-unapplied-patches 'unapplied patches' patches
838 (( $#patches )) && _describe -t hg-unapplied-patches 'unapplied patches' patches
826 }
839 }
827
840
828 # unapplied, including guarded patches
841 # unapplied, including guarded patches
829 _hg_qdeletable() {
842 _hg_qdeletable() {
830 typeset -a unapplied
843 typeset -a unapplied
831 unapplied=(${(f)"$(_hg_cmd qseries)"})
844 unapplied=(${(f)"$(_hg_cmd qseries)"})
832 for p in $(_hg_cmd qapplied)
845 for p in $(_hg_cmd qapplied)
833 do
846 do
834 unapplied=(${unapplied:#$p})
847 unapplied=(${unapplied:#$p})
835 done
848 done
836
849
837 (( $#unapplied )) && _describe -t hg-allunapplied-patches 'all unapplied patches' unapplied
850 (( $#unapplied )) && _describe -t hg-allunapplied-patches 'all unapplied patches' unapplied
838 }
851 }
839
852
840 _hg_qguards() {
853 _hg_qguards() {
841 typeset -a guards
854 typeset -a guards
842 local guard
855 local guard
843 compset -P "+|-"
856 compset -P "+|-"
844 _hg_cmd qselect -s | while read guard
857 _hg_cmd qselect -s | while read guard
845 do
858 do
846 guards+=(${guard#(+|-)})
859 guards+=(${guard#(+|-)})
847 done
860 done
848 (( $#guards )) && _describe -t hg-guards 'guards' guards
861 (( $#guards )) && _describe -t hg-guards 'guards' guards
849 }
862 }
850
863
851 _hg_qseries_opts=(
864 _hg_qseries_opts=(
852 '(--summary -s)'{-s,--summary}'[print first line of patch header]')
865 '(--summary -s)'{-s,--summary}'[print first line of patch header]')
853
866
854 _hg_cmd_qapplied() {
867 _hg_cmd_qapplied() {
855 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
868 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
856 }
869 }
857
870
858 _hg_cmd_qdelete() {
871 _hg_cmd_qdelete() {
859 _arguments -s -w : $_hg_global_opts \
872 _arguments -s -w : $_hg_global_opts \
860 '(--keep -k)'{-k,--keep}'[keep patch file]' \
873 '(--keep -k)'{-k,--keep}'[keep patch file]' \
861 '*'{-r+,--rev}'[stop managing a revision]:applied patch:_hg_revrange' \
874 '*'{-r+,--rev}'[stop managing a revision]:applied patch:_hg_revrange' \
862 '*:unapplied patch:_hg_qdeletable'
875 '*:unapplied patch:_hg_qdeletable'
863 }
876 }
864
877
865 _hg_cmd_qdiff() {
878 _hg_cmd_qdiff() {
866 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
879 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
867 '*:pattern:_hg_files'
880 '*:pattern:_hg_files'
868 }
881 }
869
882
870 _hg_cmd_qfold() {
883 _hg_cmd_qfold() {
871 _arguments -s -w : $_hg_global_opts $_h_commit_opts \
884 _arguments -s -w : $_hg_global_opts $_h_commit_opts \
872 '(--keep,-k)'{-k,--keep}'[keep folded patch files]' \
885 '(--keep,-k)'{-k,--keep}'[keep folded patch files]' \
873 '*:unapplied patch:_hg_qunapplied'
886 '*:unapplied patch:_hg_qunapplied'
874 }
887 }
875
888
876 _hg_cmd_qgoto() {
889 _hg_cmd_qgoto() {
877 _arguments -s -w : $_hg_global_opts \
890 _arguments -s -w : $_hg_global_opts \
878 '(--force -f)'{-f,--force}'[overwrite any local changes]' \
891 '(--force -f)'{-f,--force}'[overwrite any local changes]' \
879 ':patch:_hg_qseries'
892 ':patch:_hg_qseries'
880 }
893 }
881
894
882 _hg_cmd_qguard() {
895 _hg_cmd_qguard() {
883 _arguments -s -w : $_hg_global_opts \
896 _arguments -s -w : $_hg_global_opts \
884 '(--list -l)'{-l,--list}'[list all patches and guards]' \
897 '(--list -l)'{-l,--list}'[list all patches and guards]' \
885 '(--none -n)'{-n,--none}'[drop all guards]' \
898 '(--none -n)'{-n,--none}'[drop all guards]' \
886 ':patch:_hg_qseries' \
899 ':patch:_hg_qseries' \
887 '*:guards:_hg_qguards'
900 '*:guards:_hg_qguards'
888 }
901 }
889
902
890 _hg_cmd_qheader() {
903 _hg_cmd_qheader() {
891 _arguments -s -w : $_hg_global_opts \
904 _arguments -s -w : $_hg_global_opts \
892 ':patch:_hg_qseries'
905 ':patch:_hg_qseries'
893 }
906 }
894
907
895 _hg_cmd_qimport() {
908 _hg_cmd_qimport() {
896 _arguments -s -w : $_hg_global_opts \
909 _arguments -s -w : $_hg_global_opts \
897 '(--existing -e)'{-e,--existing}'[import file in patch dir]' \
910 '(--existing -e)'{-e,--existing}'[import file in patch dir]' \
898 '(--name -n 2)'{-n+,--name}'[patch file name]:name:' \
911 '(--name -n 2)'{-n+,--name}'[patch file name]:name:' \
899 '(--force -f)'{-f,--force}'[overwrite existing files]' \
912 '(--force -f)'{-f,--force}'[overwrite existing files]' \
900 '*'{-r+,--rev}'[place existing revisions under mq control]:revision:_hg_revrange' \
913 '*'{-r+,--rev}'[place existing revisions under mq control]:revision:_hg_revrange' \
901 '*:patch:_files'
914 '*:patch:_files'
902 }
915 }
903
916
904 _hg_cmd_qnew() {
917 _hg_cmd_qnew() {
905 _arguments -s -w : $_hg_global_opts $_hg_commit_opts \
918 _arguments -s -w : $_hg_global_opts $_hg_commit_opts \
906 '(--force -f)'{-f,--force}'[import uncommitted changes into patch]' \
919 '(--force -f)'{-f,--force}'[import uncommitted changes into patch]' \
907 ':patch:'
920 ':patch:'
908 }
921 }
909
922
910 _hg_cmd_qnext() {
923 _hg_cmd_qnext() {
911 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
924 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
912 }
925 }
913
926
914 _hg_cmd_qpop() {
927 _hg_cmd_qpop() {
915 _arguments -s -w : $_hg_global_opts \
928 _arguments -s -w : $_hg_global_opts \
916 '(--all -a :)'{-a,--all}'[pop all patches]' \
929 '(--all -a :)'{-a,--all}'[pop all patches]' \
917 '(--name -n)'{-n+,--name}'[queue name to pop]:' \
930 '(--name -n)'{-n+,--name}'[queue name to pop]:' \
918 '(--force -f)'{-f,--force}'[forget any local changes]' \
931 '(--force -f)'{-f,--force}'[forget any local changes]' \
919 ':patch:_hg_qapplied'
932 ':patch:_hg_qapplied'
920 }
933 }
921
934
922 _hg_cmd_qprev() {
935 _hg_cmd_qprev() {
923 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
936 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
924 }
937 }
925
938
926 _hg_cmd_qpush() {
939 _hg_cmd_qpush() {
927 _arguments -s -w : $_hg_global_opts \
940 _arguments -s -w : $_hg_global_opts \
928 '(--all -a :)'{-a,--all}'[apply all patches]' \
941 '(--all -a :)'{-a,--all}'[apply all patches]' \
929 '(--list -l)'{-l,--list}'[list patch name in commit text]' \
942 '(--list -l)'{-l,--list}'[list patch name in commit text]' \
930 '(--merge -m)'{-m+,--merge}'[merge from another queue]:' \
943 '(--merge -m)'{-m+,--merge}'[merge from another queue]:' \
931 '(--name -n)'{-n+,--name}'[merge queue name]:' \
944 '(--name -n)'{-n+,--name}'[merge queue name]:' \
932 '(--force -f)'{-f,--force}'[apply if the patch has rejects]' \
945 '(--force -f)'{-f,--force}'[apply if the patch has rejects]' \
933 '--move[reorder patch series and apply only the patch]' \
946 '--move[reorder patch series and apply only the patch]' \
934 ':patch:_hg_qunapplied'
947 ':patch:_hg_qunapplied'
935 }
948 }
936
949
937 _hg_cmd_qrefresh() {
950 _hg_cmd_qrefresh() {
938 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_commit_opts \
951 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_commit_opts \
939 '(--git -g)'{-g,--git}'[use git extended diff format]' \
952 '(--git -g)'{-g,--git}'[use git extended diff format]' \
940 '(--short -s)'{-s,--short}'[short refresh]' \
953 '(--short -s)'{-s,--short}'[short refresh]' \
941 '*:files:_hg_files'
954 '*:files:_hg_files'
942 }
955 }
943
956
944 _hg_cmd_qrename() {
957 _hg_cmd_qrename() {
945 _arguments -s -w : $_hg_global_opts \
958 _arguments -s -w : $_hg_global_opts \
946 ':patch:_hg_qseries' \
959 ':patch:_hg_qseries' \
947 ':destination:'
960 ':destination:'
948 }
961 }
949
962
950 _hg_cmd_qselect() {
963 _hg_cmd_qselect() {
951 _arguments -s -w : $_hg_global_opts \
964 _arguments -s -w : $_hg_global_opts \
952 '(--none -n :)'{-n,--none}'[disable all guards]' \
965 '(--none -n :)'{-n,--none}'[disable all guards]' \
953 '(--series -s :)'{-s,--series}'[list all guards in series file]' \
966 '(--series -s :)'{-s,--series}'[list all guards in series file]' \
954 '--pop[pop to before first guarded applied patch]' \
967 '--pop[pop to before first guarded applied patch]' \
955 '--reapply[pop and reapply patches]' \
968 '--reapply[pop and reapply patches]' \
956 '*:guards:_hg_qguards'
969 '*:guards:_hg_qguards'
957 }
970 }
958
971
959 _hg_cmd_qseries() {
972 _hg_cmd_qseries() {
960 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts \
973 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts \
961 '(--missing -m)'{-m,--missing}'[print patches not in series]'
974 '(--missing -m)'{-m,--missing}'[print patches not in series]'
962 }
975 }
963
976
964 _hg_cmd_qunapplied() {
977 _hg_cmd_qunapplied() {
965 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
978 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
966 }
979 }
967
980
968 _hg_cmd_qtop() {
981 _hg_cmd_qtop() {
969 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
982 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
970 }
983 }
971
984
972 _hg_cmd_strip() {
985 _hg_cmd_strip() {
973 _arguments -s -w : $_hg_global_opts \
986 _arguments -s -w : $_hg_global_opts \
974 '(--force -f)'{-f,--force}'[force multi-head removal]' \
987 '(--force -f)'{-f,--force}'[force multi-head removal]' \
975 '(--backup -b)'{-b,--backup}'[bundle unrelated changesets]' \
988 '(--backup -b)'{-b,--backup}'[bundle unrelated changesets]' \
976 '(--nobackup -n)'{-n,--nobackup}'[no backups]' \
989 '(--nobackup -n)'{-n,--nobackup}'[no backups]' \
977 ':revision:_hg_labels'
990 ':revision:_hg_labels'
978 }
991 }
979
992
980 # Patchbomb
993 # Patchbomb
981 _hg_cmd_email() {
994 _hg_cmd_email() {
982 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
995 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
983 '(--git -g)'{-g,--git}'[use git extended diff format]' \
996 '(--git -g)'{-g,--git}'[use git extended diff format]' \
984 '--plain[omit hg patch header]' \
997 '--plain[omit hg patch header]' \
985 '(--outgoing -o)'{-o,--outgoing}'[send changes not found in the target repository]' \
998 '(--outgoing -o)'{-o,--outgoing}'[send changes not found in the target repository]' \
986 '(--bundle -b)'{-b,--bundle}'[send changes not in target as a binary bundle]' \
999 '(--bundle -b)'{-b,--bundle}'[send changes not in target as a binary bundle]' \
987 '--bundlename[name of the bundle attachment file (default: bundle)]:' \
1000 '--bundlename[name of the bundle attachment file (default: bundle)]:' \
988 '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
1001 '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
989 '--force[run even when remote repository is unrelated (with -b/--bundle)]' \
1002 '--force[run even when remote repository is unrelated (with -b/--bundle)]' \
990 '*--base[a base changeset to specify instead of a destination (with -b/--bundle)]:revision:_hg_labels' \
1003 '*--base[a base changeset to specify instead of a destination (with -b/--bundle)]:revision:_hg_labels' \
991 '--intro[send an introduction email for a single patch]' \
1004 '--intro[send an introduction email for a single patch]' \
992 '(--inline -i --attach -a)'{-a,--attach}'[send patches as attachments]' \
1005 '(--inline -i --attach -a)'{-a,--attach}'[send patches as attachments]' \
993 '(--attach -a --inline -i)'{-i,--inline}'[send patches as inline attachments]' \
1006 '(--attach -a --inline -i)'{-i,--inline}'[send patches as inline attachments]' \
994 '*--bcc[email addresses of blind carbon copy recipients]:email:' \
1007 '*--bcc[email addresses of blind carbon copy recipients]:email:' \
995 '*'{-c+,--cc}'[email addresses of copy recipients]:email:' \
1008 '*'{-c+,--cc}'[email addresses of copy recipients]:email:' \
996 '(--diffstat -d)'{-d,--diffstat}'[add diffstat output to messages]' \
1009 '(--diffstat -d)'{-d,--diffstat}'[add diffstat output to messages]' \
997 '--date[use the given date as the sending date]:date:' \
1010 '--date[use the given date as the sending date]:date:' \
998 '--desc[use the given file as the series description]:files:_files' \
1011 '--desc[use the given file as the series description]:files:_files' \
999 '(--from -f)'{-f,--from}'[email address of sender]:email:' \
1012 '(--from -f)'{-f,--from}'[email address of sender]:email:' \
1000 '(--test -n)'{-n,--test}'[print messages that would be sent]' \
1013 '(--test -n)'{-n,--test}'[print messages that would be sent]' \
1001 '(--mbox -m)'{-m,--mbox}'[write messages to mbox file instead of sending them]:file:' \
1014 '(--mbox -m)'{-m,--mbox}'[write messages to mbox file instead of sending them]:file:' \
1002 '*--reply-to[email addresses replies should be sent to]:email:' \
1015 '*--reply-to[email addresses replies should be sent to]:email:' \
1003 '(--subject -s)'{-s,--subject}'[subject of first message (intro or single patch)]:subject:' \
1016 '(--subject -s)'{-s,--subject}'[subject of first message (intro or single patch)]:subject:' \
1004 '--in-reply-to[message identifier to reply to]:msgid:' \
1017 '--in-reply-to[message identifier to reply to]:msgid:' \
1005 '*--flag[flags to add in subject prefixes]:flag:' \
1018 '*--flag[flags to add in subject prefixes]:flag:' \
1006 '*'{-t,--to}'[email addresses of recipients]:email:' \
1019 '*'{-t,--to}'[email addresses of recipients]:email:' \
1007 ':revision:_hg_revrange'
1020 ':revision:_hg_revrange'
1008 }
1021 }
1009
1022
1010 _hg "$@"
1023 _hg "$@"
@@ -1,167 +1,168
1 import os, sys, textwrap
1 import os, sys, textwrap
2 # import from the live mercurial repo
2 # import from the live mercurial repo
3 sys.path.insert(0, "..")
3 sys.path.insert(0, "..")
4 # fall back to pure modules if required C extensions are not available
4 # fall back to pure modules if required C extensions are not available
5 sys.path.append(os.path.join('..', 'mercurial', 'pure'))
5 sys.path.append(os.path.join('..', 'mercurial', 'pure'))
6 from mercurial import demandimport; demandimport.enable()
6 from mercurial import demandimport; demandimport.enable()
7 from mercurial import encoding
7 from mercurial import encoding
8 from mercurial.commands import table, globalopts
8 from mercurial.commands import table, globalopts
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10 from mercurial.help import helptable
10 from mercurial.help import helptable
11 from mercurial import extensions
11 from mercurial import extensions
12 from mercurial import util
12
13
13 def get_desc(docstr):
14 def get_desc(docstr):
14 if not docstr:
15 if not docstr:
15 return "", ""
16 return "", ""
16 # sanitize
17 # sanitize
17 docstr = docstr.strip("\n")
18 docstr = docstr.strip("\n")
18 docstr = docstr.rstrip()
19 docstr = docstr.rstrip()
19 shortdesc = docstr.splitlines()[0].strip()
20 shortdesc = docstr.splitlines()[0].strip()
20
21
21 i = docstr.find("\n")
22 i = docstr.find("\n")
22 if i != -1:
23 if i != -1:
23 desc = docstr[i + 2:]
24 desc = docstr[i + 2:]
24 else:
25 else:
25 desc = shortdesc
26 desc = shortdesc
26
27
27 desc = textwrap.dedent(desc)
28 desc = textwrap.dedent(desc)
28
29
29 return (shortdesc, desc)
30 return (shortdesc, desc)
30
31
31 def get_opts(opts):
32 def get_opts(opts):
32 for opt in opts:
33 for opt in opts:
33 if len(opt) == 5:
34 if len(opt) == 5:
34 shortopt, longopt, default, desc, optlabel = opt
35 shortopt, longopt, default, desc, optlabel = opt
35 else:
36 else:
36 shortopt, longopt, default, desc = opt
37 shortopt, longopt, default, desc = opt
37 allopts = []
38 allopts = []
38 if shortopt:
39 if shortopt:
39 allopts.append("-%s" % shortopt)
40 allopts.append("-%s" % shortopt)
40 if longopt:
41 if longopt:
41 allopts.append("--%s" % longopt)
42 allopts.append("--%s" % longopt)
42 desc += default and _(" (default: %s)") % default or ""
43 desc += default and _(" (default: %s)") % default or ""
43 yield (", ".join(allopts), desc)
44 yield (", ".join(allopts), desc)
44
45
45 def get_cmd(cmd, cmdtable):
46 def get_cmd(cmd, cmdtable):
46 d = {}
47 d = {}
47 attr = cmdtable[cmd]
48 attr = cmdtable[cmd]
48 cmds = cmd.lstrip("^").split("|")
49 cmds = cmd.lstrip("^").split("|")
49
50
50 d['cmd'] = cmds[0]
51 d['cmd'] = cmds[0]
51 d['aliases'] = cmd.split("|")[1:]
52 d['aliases'] = cmd.split("|")[1:]
52 d['desc'] = get_desc(attr[0].__doc__)
53 d['desc'] = get_desc(attr[0].__doc__)
53 d['opts'] = list(get_opts(attr[1]))
54 d['opts'] = list(get_opts(attr[1]))
54
55
55 s = 'hg ' + cmds[0]
56 s = 'hg ' + cmds[0]
56 if len(attr) > 2:
57 if len(attr) > 2:
57 if not attr[2].startswith('hg'):
58 if not attr[2].startswith('hg'):
58 s += ' ' + attr[2]
59 s += ' ' + attr[2]
59 else:
60 else:
60 s = attr[2]
61 s = attr[2]
61 d['synopsis'] = s.strip()
62 d['synopsis'] = s.strip()
62
63
63 return d
64 return d
64
65
65 def section(ui, s):
66 def section(ui, s):
66 ui.write("%s\n%s\n\n" % (s, "-" * encoding.colwidth(s)))
67 ui.write("%s\n%s\n\n" % (s, "-" * encoding.colwidth(s)))
67
68
68 def subsection(ui, s):
69 def subsection(ui, s):
69 ui.write("%s\n%s\n\n" % (s, '"' * encoding.colwidth(s)))
70 ui.write("%s\n%s\n\n" % (s, '"' * encoding.colwidth(s)))
70
71
71 def subsubsection(ui, s):
72 def subsubsection(ui, s):
72 ui.write("%s\n%s\n\n" % (s, "." * encoding.colwidth(s)))
73 ui.write("%s\n%s\n\n" % (s, "." * encoding.colwidth(s)))
73
74
74 def subsubsubsection(ui, s):
75 def subsubsubsection(ui, s):
75 ui.write("%s\n%s\n\n" % (s, "#" * encoding.colwidth(s)))
76 ui.write("%s\n%s\n\n" % (s, "#" * encoding.colwidth(s)))
76
77
77
78
78 def show_doc(ui):
79 def show_doc(ui):
79 # print options
80 # print options
80 section(ui, _("Options"))
81 section(ui, _("Options"))
81 for optstr, desc in get_opts(globalopts):
82 for optstr, desc in get_opts(globalopts):
82 ui.write("%s\n %s\n\n" % (optstr, desc))
83 ui.write("%s\n %s\n\n" % (optstr, desc))
83
84
84 # print cmds
85 # print cmds
85 section(ui, _("Commands"))
86 section(ui, _("Commands"))
86 commandprinter(ui, table, subsection)
87 commandprinter(ui, table, subsection)
87
88
88 # print topics
89 # print topics
89 for names, sec, doc in helptable:
90 for names, sec, doc in helptable:
90 if names[0] == "config":
91 if names[0] == "config":
91 # The config help topic is included in the hgrc.5 man
92 # The config help topic is included in the hgrc.5 man
92 # page.
93 # page.
93 continue
94 continue
94 for name in names:
95 for name in names:
95 ui.write(".. _%s:\n" % name)
96 ui.write(".. _%s:\n" % name)
96 ui.write("\n")
97 ui.write("\n")
97 section(ui, sec)
98 section(ui, sec)
98 if hasattr(doc, '__call__'):
99 if util.safehasattr(doc, '__call__'):
99 doc = doc()
100 doc = doc()
100 ui.write(doc)
101 ui.write(doc)
101 ui.write("\n")
102 ui.write("\n")
102
103
103 section(ui, _("Extensions"))
104 section(ui, _("Extensions"))
104 ui.write(_("This section contains help for extensions that are distributed "
105 ui.write(_("This section contains help for extensions that are distributed "
105 "together with Mercurial. Help for other extensions is available "
106 "together with Mercurial. Help for other extensions is available "
106 "in the help system."))
107 "in the help system."))
107 ui.write("\n\n"
108 ui.write("\n\n"
108 ".. contents::\n"
109 ".. contents::\n"
109 " :class: htmlonly\n"
110 " :class: htmlonly\n"
110 " :local:\n"
111 " :local:\n"
111 " :depth: 1\n\n")
112 " :depth: 1\n\n")
112
113
113 for extensionname in sorted(allextensionnames()):
114 for extensionname in sorted(allextensionnames()):
114 mod = extensions.load(None, extensionname, None)
115 mod = extensions.load(None, extensionname, None)
115 subsection(ui, extensionname)
116 subsection(ui, extensionname)
116 ui.write("%s\n\n" % mod.__doc__)
117 ui.write("%s\n\n" % mod.__doc__)
117 cmdtable = getattr(mod, 'cmdtable', None)
118 cmdtable = getattr(mod, 'cmdtable', None)
118 if cmdtable:
119 if cmdtable:
119 subsubsection(ui, _('Commands'))
120 subsubsection(ui, _('Commands'))
120 commandprinter(ui, cmdtable, subsubsubsection)
121 commandprinter(ui, cmdtable, subsubsubsection)
121
122
122 def commandprinter(ui, cmdtable, sectionfunc):
123 def commandprinter(ui, cmdtable, sectionfunc):
123 h = {}
124 h = {}
124 for c, attr in cmdtable.items():
125 for c, attr in cmdtable.items():
125 f = c.split("|")[0]
126 f = c.split("|")[0]
126 f = f.lstrip("^")
127 f = f.lstrip("^")
127 h[f] = c
128 h[f] = c
128 cmds = h.keys()
129 cmds = h.keys()
129 cmds.sort()
130 cmds.sort()
130
131
131 for f in cmds:
132 for f in cmds:
132 if f.startswith("debug"):
133 if f.startswith("debug"):
133 continue
134 continue
134 d = get_cmd(h[f], cmdtable)
135 d = get_cmd(h[f], cmdtable)
135 sectionfunc(ui, d['cmd'])
136 sectionfunc(ui, d['cmd'])
136 # synopsis
137 # synopsis
137 ui.write("::\n\n")
138 ui.write("::\n\n")
138 synopsislines = d['synopsis'].splitlines()
139 synopsislines = d['synopsis'].splitlines()
139 for line in synopsislines:
140 for line in synopsislines:
140 # some commands (such as rebase) have a multi-line
141 # some commands (such as rebase) have a multi-line
141 # synopsis
142 # synopsis
142 ui.write(" %s\n" % line)
143 ui.write(" %s\n" % line)
143 ui.write('\n')
144 ui.write('\n')
144 # description
145 # description
145 ui.write("%s\n\n" % d['desc'][1])
146 ui.write("%s\n\n" % d['desc'][1])
146 # options
147 # options
147 opt_output = list(d['opts'])
148 opt_output = list(d['opts'])
148 if opt_output:
149 if opt_output:
149 opts_len = max([len(line[0]) for line in opt_output])
150 opts_len = max([len(line[0]) for line in opt_output])
150 ui.write(_("Options:\n\n"))
151 ui.write(_("Options:\n\n"))
151 for optstr, desc in opt_output:
152 for optstr, desc in opt_output:
152 if desc:
153 if desc:
153 s = "%-*s %s" % (opts_len, optstr, desc)
154 s = "%-*s %s" % (opts_len, optstr, desc)
154 else:
155 else:
155 s = optstr
156 s = optstr
156 ui.write("%s\n" % s)
157 ui.write("%s\n" % s)
157 ui.write("\n")
158 ui.write("\n")
158 # aliases
159 # aliases
159 if d['aliases']:
160 if d['aliases']:
160 ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases']))
161 ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases']))
161
162
162
163
163 def allextensionnames():
164 def allextensionnames():
164 return extensions.enabled().keys() + extensions.disabled().keys()
165 return extensions.enabled().keys() + extensions.disabled().keys()
165
166
166 if __name__ == "__main__":
167 if __name__ == "__main__":
167 show_doc(sys.stdout)
168 show_doc(sys.stdout)
@@ -1,250 +1,252
1 # acl.py - changeset access control for mercurial
1 # acl.py - changeset access control for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''hooks for controlling repository access
8 '''hooks for controlling repository access
9
9
10 This hook makes it possible to allow or deny write access to given
10 This hook makes it possible to allow or deny write access to given
11 branches and paths of a repository when receiving incoming changesets
11 branches and paths of a repository when receiving incoming changesets
12 via pretxnchangegroup and pretxncommit.
12 via pretxnchangegroup and pretxncommit.
13
13
14 The authorization is matched based on the local user name on the
14 The authorization is matched based on the local user name on the
15 system where the hook runs, and not the committer of the original
15 system where the hook runs, and not the committer of the original
16 changeset (since the latter is merely informative).
16 changeset (since the latter is merely informative).
17
17
18 The acl hook is best used along with a restricted shell like hgsh,
18 The acl hook is best used along with a restricted shell like hgsh,
19 preventing authenticating users from doing anything other than pushing
19 preventing authenticating users from doing anything other than pushing
20 or pulling. The hook is not safe to use if users have interactive
20 or pulling. The hook is not safe to use if users have interactive
21 shell access, as they can then disable the hook. Nor is it safe if
21 shell access, as they can then disable the hook. Nor is it safe if
22 remote users share an account, because then there is no way to
22 remote users share an account, because then there is no way to
23 distinguish them.
23 distinguish them.
24
24
25 The order in which access checks are performed is:
25 The order in which access checks are performed is:
26
26
27 1) Deny list for branches (section ``acl.deny.branches``)
27 1) Deny list for branches (section ``acl.deny.branches``)
28 2) Allow list for branches (section ``acl.allow.branches``)
28 2) Allow list for branches (section ``acl.allow.branches``)
29 3) Deny list for paths (section ``acl.deny``)
29 3) Deny list for paths (section ``acl.deny``)
30 4) Allow list for paths (section ``acl.allow``)
30 4) Allow list for paths (section ``acl.allow``)
31
31
32 The allow and deny sections take key-value pairs.
32 The allow and deny sections take key-value pairs.
33
33
34 Branch-based Access Control
34 Branch-based Access Control
35 ...........................
35 ...........................
36
36
37 Use the ``acl.deny.branches`` and ``acl.allow.branches`` sections to
37 Use the ``acl.deny.branches`` and ``acl.allow.branches`` sections to
38 have branch-based access control. Keys in these sections can be
38 have branch-based access control. Keys in these sections can be
39 either:
39 either:
40
40
41 - a branch name, or
41 - a branch name, or
42 - an asterisk, to match any branch;
42 - an asterisk, to match any branch;
43
43
44 The corresponding values can be either:
44 The corresponding values can be either:
45
45
46 - a comma-separated list containing users and groups, or
46 - a comma-separated list containing users and groups, or
47 - an asterisk, to match anyone;
47 - an asterisk, to match anyone;
48
48
49 Path-based Access Control
49 Path-based Access Control
50 .........................
50 .........................
51
51
52 Use the ``acl.deny`` and ``acl.allow`` sections to have path-based
52 Use the ``acl.deny`` and ``acl.allow`` sections to have path-based
53 access control. Keys in these sections accept a subtree pattern (with
53 access control. Keys in these sections accept a subtree pattern (with
54 a glob syntax by default). The corresponding values follow the same
54 a glob syntax by default). The corresponding values follow the same
55 syntax as the other sections above.
55 syntax as the other sections above.
56
56
57 Groups
57 Groups
58 ......
58 ......
59
59
60 Group names must be prefixed with an ``@`` symbol. Specifying a group
60 Group names must be prefixed with an ``@`` symbol. Specifying a group
61 name has the same effect as specifying all the users in that group.
61 name has the same effect as specifying all the users in that group.
62
62
63 You can define group members in the ``acl.groups`` section.
63 You can define group members in the ``acl.groups`` section.
64 If a group name is not defined there, and Mercurial is running under
64 If a group name is not defined there, and Mercurial is running under
65 a Unix-like system, the list of users will be taken from the OS.
65 a Unix-like system, the list of users will be taken from the OS.
66 Otherwise, an exception will be raised.
66 Otherwise, an exception will be raised.
67
67
68 Example Configuration
68 Example Configuration
69 .....................
69 .....................
70
70
71 ::
71 ::
72
72
73 [hooks]
73 [hooks]
74
74
75 # Use this if you want to check access restrictions at commit time
75 # Use this if you want to check access restrictions at commit time
76 pretxncommit.acl = python:hgext.acl.hook
76 pretxncommit.acl = python:hgext.acl.hook
77
77
78 # Use this if you want to check access restrictions for pull, push,
78 # Use this if you want to check access restrictions for pull, push,
79 # bundle and serve.
79 # bundle and serve.
80 pretxnchangegroup.acl = python:hgext.acl.hook
80 pretxnchangegroup.acl = python:hgext.acl.hook
81
81
82 [acl]
82 [acl]
83 # Allow or deny access for incoming changes only if their source is
83 # Allow or deny access for incoming changes only if their source is
84 # listed here, let them pass otherwise. Source is "serve" for all
84 # listed here, let them pass otherwise. Source is "serve" for all
85 # remote access (http or ssh), "push", "pull" or "bundle" when the
85 # remote access (http or ssh), "push", "pull" or "bundle" when the
86 # related commands are run locally.
86 # related commands are run locally.
87 # Default: serve
87 # Default: serve
88 sources = serve
88 sources = serve
89
89
90 [acl.deny.branches]
90 [acl.deny.branches]
91
91
92 # Everyone is denied to the frozen branch:
92 # Everyone is denied to the frozen branch:
93 frozen-branch = *
93 frozen-branch = *
94
94
95 # A bad user is denied on all branches:
95 # A bad user is denied on all branches:
96 * = bad-user
96 * = bad-user
97
97
98 [acl.allow.branches]
98 [acl.allow.branches]
99
99
100 # A few users are allowed on branch-a:
100 # A few users are allowed on branch-a:
101 branch-a = user-1, user-2, user-3
101 branch-a = user-1, user-2, user-3
102
102
103 # Only one user is allowed on branch-b:
103 # Only one user is allowed on branch-b:
104 branch-b = user-1
104 branch-b = user-1
105
105
106 # The super user is allowed on any branch:
106 # The super user is allowed on any branch:
107 * = super-user
107 * = super-user
108
108
109 # Everyone is allowed on branch-for-tests:
109 # Everyone is allowed on branch-for-tests:
110 branch-for-tests = *
110 branch-for-tests = *
111
111
112 [acl.deny]
112 [acl.deny]
113 # This list is checked first. If a match is found, acl.allow is not
113 # This list is checked first. If a match is found, acl.allow is not
114 # checked. All users are granted access if acl.deny is not present.
114 # checked. All users are granted access if acl.deny is not present.
115 # Format for both lists: glob pattern = user, ..., @group, ...
115 # Format for both lists: glob pattern = user, ..., @group, ...
116
116
117 # To match everyone, use an asterisk for the user:
117 # To match everyone, use an asterisk for the user:
118 # my/glob/pattern = *
118 # my/glob/pattern = *
119
119
120 # user6 will not have write access to any file:
120 # user6 will not have write access to any file:
121 ** = user6
121 ** = user6
122
122
123 # Group "hg-denied" will not have write access to any file:
123 # Group "hg-denied" will not have write access to any file:
124 ** = @hg-denied
124 ** = @hg-denied
125
125
126 # Nobody will be able to change "DONT-TOUCH-THIS.txt", despite
126 # Nobody will be able to change "DONT-TOUCH-THIS.txt", despite
127 # everyone being able to change all other files. See below.
127 # everyone being able to change all other files. See below.
128 src/main/resources/DONT-TOUCH-THIS.txt = *
128 src/main/resources/DONT-TOUCH-THIS.txt = *
129
129
130 [acl.allow]
130 [acl.allow]
131 # if acl.allow is not present, all users are allowed by default
131 # if acl.allow is not present, all users are allowed by default
132 # empty acl.allow = no users allowed
132 # empty acl.allow = no users allowed
133
133
134 # User "doc_writer" has write access to any file under the "docs"
134 # User "doc_writer" has write access to any file under the "docs"
135 # folder:
135 # folder:
136 docs/** = doc_writer
136 docs/** = doc_writer
137
137
138 # User "jack" and group "designers" have write access to any file
138 # User "jack" and group "designers" have write access to any file
139 # under the "images" folder:
139 # under the "images" folder:
140 images/** = jack, @designers
140 images/** = jack, @designers
141
141
142 # Everyone (except for "user6" - see acl.deny above) will have write
142 # Everyone (except for "user6" - see acl.deny above) will have write
143 # access to any file under the "resources" folder (except for 1
143 # access to any file under the "resources" folder (except for 1
144 # file. See acl.deny):
144 # file. See acl.deny):
145 src/main/resources/** = *
145 src/main/resources/** = *
146
146
147 .hgtags = release_engineer
147 .hgtags = release_engineer
148
148
149 '''
149 '''
150
150
151 from mercurial.i18n import _
151 from mercurial.i18n import _
152 from mercurial import util, match
152 from mercurial import util, match
153 import getpass, urllib
153 import getpass, urllib
154
154
155 def _getusers(ui, group):
155 def _getusers(ui, group):
156
156
157 # First, try to use group definition from section [acl.groups]
157 # First, try to use group definition from section [acl.groups]
158 hgrcusers = ui.configlist('acl.groups', group)
158 hgrcusers = ui.configlist('acl.groups', group)
159 if hgrcusers:
159 if hgrcusers:
160 return hgrcusers
160 return hgrcusers
161
161
162 ui.debug('acl: "%s" not defined in [acl.groups]\n' % group)
162 ui.debug('acl: "%s" not defined in [acl.groups]\n' % group)
163 # If no users found in group definition, get users from OS-level group
163 # If no users found in group definition, get users from OS-level group
164 try:
164 try:
165 return util.groupmembers(group)
165 return util.groupmembers(group)
166 except KeyError:
166 except KeyError:
167 raise util.Abort(_("group '%s' is undefined") % group)
167 raise util.Abort(_("group '%s' is undefined") % group)
168
168
169 def _usermatch(ui, user, usersorgroups):
169 def _usermatch(ui, user, usersorgroups):
170
170
171 if usersorgroups == '*':
171 if usersorgroups == '*':
172 return True
172 return True
173
173
174 for ug in usersorgroups.replace(',', ' ').split():
174 for ug in usersorgroups.replace(',', ' ').split():
175 if user == ug or ug.find('@') == 0 and user in _getusers(ui, ug[1:]):
175 if user == ug or ug.find('@') == 0 and user in _getusers(ui, ug[1:]):
176 return True
176 return True
177
177
178 return False
178 return False
179
179
180 def buildmatch(ui, repo, user, key):
180 def buildmatch(ui, repo, user, key):
181 '''return tuple of (match function, list enabled).'''
181 '''return tuple of (match function, list enabled).'''
182 if not ui.has_section(key):
182 if not ui.has_section(key):
183 ui.debug('acl: %s not enabled\n' % key)
183 ui.debug('acl: %s not enabled\n' % key)
184 return None
184 return None
185
185
186 pats = [pat for pat, users in ui.configitems(key)
186 pats = [pat for pat, users in ui.configitems(key)
187 if _usermatch(ui, user, users)]
187 if _usermatch(ui, user, users)]
188 ui.debug('acl: %s enabled, %d entries for user %s\n' %
188 ui.debug('acl: %s enabled, %d entries for user %s\n' %
189 (key, len(pats), user))
189 (key, len(pats), user))
190
190
191 if not repo:
191 if not repo:
192 if pats:
192 if pats:
193 return lambda b: '*' in pats or b in pats
193 return lambda b: '*' in pats or b in pats
194 return lambda b: False
194 return lambda b: False
195
195
196 if pats:
196 if pats:
197 return match.match(repo.root, '', pats)
197 return match.match(repo.root, '', pats)
198 return match.exact(repo.root, '', [])
198 return match.exact(repo.root, '', [])
199
199
200
200
201 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
201 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
202 if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
202 if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
203 raise util.Abort(_('config error - hook type "%s" cannot stop '
203 raise util.Abort(_('config error - hook type "%s" cannot stop '
204 'incoming changesets nor commits') % hooktype)
204 'incoming changesets nor commits') % hooktype)
205 if (hooktype == 'pretxnchangegroup' and
205 if (hooktype == 'pretxnchangegroup' and
206 source not in ui.config('acl', 'sources', 'serve').split()):
206 source not in ui.config('acl', 'sources', 'serve').split()):
207 ui.debug('acl: changes have source "%s" - skipping\n' % source)
207 ui.debug('acl: changes have source "%s" - skipping\n' % source)
208 return
208 return
209
209
210 user = None
210 user = None
211 if source == 'serve' and 'url' in kwargs:
211 if source == 'serve' and 'url' in kwargs:
212 url = kwargs['url'].split(':')
212 url = kwargs['url'].split(':')
213 if url[0] == 'remote' and url[1].startswith('http'):
213 if url[0] == 'remote' and url[1].startswith('http'):
214 user = urllib.unquote(url[3])
214 user = urllib.unquote(url[3])
215
215
216 if user is None:
216 if user is None:
217 user = getpass.getuser()
217 user = getpass.getuser()
218
218
219 ui.debug('acl: checking access for user "%s"\n' % user)
220
219 cfg = ui.config('acl', 'config')
221 cfg = ui.config('acl', 'config')
220 if cfg:
222 if cfg:
221 ui.readconfig(cfg, sections = ['acl.groups', 'acl.allow.branches',
223 ui.readconfig(cfg, sections = ['acl.groups', 'acl.allow.branches',
222 'acl.deny.branches', 'acl.allow', 'acl.deny'])
224 'acl.deny.branches', 'acl.allow', 'acl.deny'])
223
225
224 allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
226 allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
225 denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
227 denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
226 allow = buildmatch(ui, repo, user, 'acl.allow')
228 allow = buildmatch(ui, repo, user, 'acl.allow')
227 deny = buildmatch(ui, repo, user, 'acl.deny')
229 deny = buildmatch(ui, repo, user, 'acl.deny')
228
230
229 for rev in xrange(repo[node], len(repo)):
231 for rev in xrange(repo[node], len(repo)):
230 ctx = repo[rev]
232 ctx = repo[rev]
231 branch = ctx.branch()
233 branch = ctx.branch()
232 if denybranches and denybranches(branch):
234 if denybranches and denybranches(branch):
233 raise util.Abort(_('acl: user "%s" denied on branch "%s"'
235 raise util.Abort(_('acl: user "%s" denied on branch "%s"'
234 ' (changeset "%s")')
236 ' (changeset "%s")')
235 % (user, branch, ctx))
237 % (user, branch, ctx))
236 if allowbranches and not allowbranches(branch):
238 if allowbranches and not allowbranches(branch):
237 raise util.Abort(_('acl: user "%s" not allowed on branch "%s"'
239 raise util.Abort(_('acl: user "%s" not allowed on branch "%s"'
238 ' (changeset "%s")')
240 ' (changeset "%s")')
239 % (user, branch, ctx))
241 % (user, branch, ctx))
240 ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
242 ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
241 % (ctx, branch))
243 % (ctx, branch))
242
244
243 for f in ctx.files():
245 for f in ctx.files():
244 if deny and deny(f):
246 if deny and deny(f):
245 ui.debug('acl: user %s denied on %s\n' % (user, f))
247 raise util.Abort(_('acl: user "%s" denied on "%s"'
246 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
248 ' (changeset "%s")') % (user, f, ctx))
247 if allow and not allow(f):
249 if allow and not allow(f):
248 ui.debug('acl: user %s not allowed on %s\n' % (user, f))
250 raise util.Abort(_('acl: user "%s" not allowed on "%s"'
249 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
251 ' (changeset "%s")') % (user, f, ctx))
250 ui.debug('acl: allowing changeset %s\n' % ctx)
252 ui.debug('acl: path access granted: "%s"\n' % ctx)
@@ -1,507 +1,512
1 # color.py color output for the status and qseries commands
1 # color.py color output for the status and qseries commands
2 #
2 #
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
4 #
4 #
5 # This program is free software; you can redistribute it and/or modify it
5 # This program is free software; you can redistribute it and/or modify it
6 # under the terms of the GNU General Public License as published by the
6 # under the terms of the GNU General Public License as published by the
7 # Free Software Foundation; either version 2 of the License, or (at your
7 # Free Software Foundation; either version 2 of the License, or (at your
8 # option) any later version.
8 # option) any later version.
9 #
9 #
10 # This program is distributed in the hope that it will be useful, but
10 # This program is distributed in the hope that it will be useful, but
11 # WITHOUT ANY WARRANTY; without even the implied warranty of
11 # WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
13 # Public License for more details.
13 # Public License for more details.
14 #
14 #
15 # You should have received a copy of the GNU General Public License along
15 # You should have received a copy of the GNU General Public License along
16 # with this program; if not, write to the Free Software Foundation, Inc.,
16 # with this program; if not, write to the Free Software Foundation, Inc.,
17 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
18
19 '''colorize output from some commands
19 '''colorize output from some commands
20
20
21 This extension modifies the status and resolve commands to add color
21 This extension modifies the status and resolve commands to add color
22 to their output to reflect file status, the qseries command to add
22 to their output to reflect file status, the qseries command to add
23 color to reflect patch status (applied, unapplied, missing), and to
23 color to reflect patch status (applied, unapplied, missing), and to
24 diff-related commands to highlight additions, removals, diff headers,
24 diff-related commands to highlight additions, removals, diff headers,
25 and trailing whitespace.
25 and trailing whitespace.
26
26
27 Other effects in addition to color, like bold and underlined text, are
27 Other effects in addition to color, like bold and underlined text, are
28 also available. By default, the terminfo database is used to find the
28 also available. By default, the terminfo database is used to find the
29 terminal codes used to change color and effect. If terminfo is not
29 terminal codes used to change color and effect. If terminfo is not
30 available, then effects are rendered with the ECMA-48 SGR control
30 available, then effects are rendered with the ECMA-48 SGR control
31 function (aka ANSI escape codes).
31 function (aka ANSI escape codes).
32
32
33 Default effects may be overridden from your configuration file::
33 Default effects may be overridden from your configuration file::
34
34
35 [color]
35 [color]
36 status.modified = blue bold underline red_background
36 status.modified = blue bold underline red_background
37 status.added = green bold
37 status.added = green bold
38 status.removed = red bold blue_background
38 status.removed = red bold blue_background
39 status.deleted = cyan bold underline
39 status.deleted = cyan bold underline
40 status.unknown = magenta bold underline
40 status.unknown = magenta bold underline
41 status.ignored = black bold
41 status.ignored = black bold
42
42
43 # 'none' turns off all effects
43 # 'none' turns off all effects
44 status.clean = none
44 status.clean = none
45 status.copied = none
45 status.copied = none
46
46
47 qseries.applied = blue bold underline
47 qseries.applied = blue bold underline
48 qseries.unapplied = black bold
48 qseries.unapplied = black bold
49 qseries.missing = red bold
49 qseries.missing = red bold
50
50
51 diff.diffline = bold
51 diff.diffline = bold
52 diff.extended = cyan bold
52 diff.extended = cyan bold
53 diff.file_a = red bold
53 diff.file_a = red bold
54 diff.file_b = green bold
54 diff.file_b = green bold
55 diff.hunk = magenta
55 diff.hunk = magenta
56 diff.deleted = red
56 diff.deleted = red
57 diff.inserted = green
57 diff.inserted = green
58 diff.changed = white
58 diff.changed = white
59 diff.trailingwhitespace = bold red_background
59 diff.trailingwhitespace = bold red_background
60
60
61 resolve.unresolved = red bold
61 resolve.unresolved = red bold
62 resolve.resolved = green bold
62 resolve.resolved = green bold
63
63
64 bookmarks.current = green
64 bookmarks.current = green
65
65
66 branches.active = none
66 branches.active = none
67 branches.closed = black bold
67 branches.closed = black bold
68 branches.current = green
68 branches.current = green
69 branches.inactive = none
69 branches.inactive = none
70
70
71 tags.normal = green
72 tags.local = black bold
73
71 The available effects in terminfo mode are 'blink', 'bold', 'dim',
74 The available effects in terminfo mode are 'blink', 'bold', 'dim',
72 'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
75 'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
73 ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
76 ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
74 'underline'. How each is rendered depends on the terminal emulator.
77 'underline'. How each is rendered depends on the terminal emulator.
75 Some may not be available for a given terminal type, and will be
78 Some may not be available for a given terminal type, and will be
76 silently ignored.
79 silently ignored.
77
80
78 Note that on some systems, terminfo mode may cause problems when using
81 Note that on some systems, terminfo mode may cause problems when using
79 color with the pager extension and less -R. less with the -R option
82 color with the pager extension and less -R. less with the -R option
80 will only display ECMA-48 color codes, and terminfo mode may sometimes
83 will only display ECMA-48 color codes, and terminfo mode may sometimes
81 emit codes that less doesn't understand. You can work around this by
84 emit codes that less doesn't understand. You can work around this by
82 either using ansi mode (or auto mode), or by using less -r (which will
85 either using ansi mode (or auto mode), or by using less -r (which will
83 pass through all terminal control codes, not just color control
86 pass through all terminal control codes, not just color control
84 codes).
87 codes).
85
88
86 Because there are only eight standard colors, this module allows you
89 Because there are only eight standard colors, this module allows you
87 to define color names for other color slots which might be available
90 to define color names for other color slots which might be available
88 for your terminal type, assuming terminfo mode. For instance::
91 for your terminal type, assuming terminfo mode. For instance::
89
92
90 color.brightblue = 12
93 color.brightblue = 12
91 color.pink = 207
94 color.pink = 207
92 color.orange = 202
95 color.orange = 202
93
96
94 to set 'brightblue' to color slot 12 (useful for 16 color terminals
97 to set 'brightblue' to color slot 12 (useful for 16 color terminals
95 that have brighter colors defined in the upper eight) and, 'pink' and
98 that have brighter colors defined in the upper eight) and, 'pink' and
96 'orange' to colors in 256-color xterm's default color cube. These
99 'orange' to colors in 256-color xterm's default color cube. These
97 defined colors may then be used as any of the pre-defined eight,
100 defined colors may then be used as any of the pre-defined eight,
98 including appending '_background' to set the background to that color.
101 including appending '_background' to set the background to that color.
99
102
100 By default, the color extension will use ANSI mode (or win32 mode on
103 By default, the color extension will use ANSI mode (or win32 mode on
101 Windows) if it detects a terminal. To override auto mode (to enable
104 Windows) if it detects a terminal. To override auto mode (to enable
102 terminfo mode, for example), set the following configuration option::
105 terminfo mode, for example), set the following configuration option::
103
106
104 [color]
107 [color]
105 mode = terminfo
108 mode = terminfo
106
109
107 Any value other than 'ansi', 'win32', 'terminfo', or 'auto' will
110 Any value other than 'ansi', 'win32', 'terminfo', or 'auto' will
108 disable color.
111 disable color.
109 '''
112 '''
110
113
111 import os
114 import os
112
115
113 from mercurial import commands, dispatch, extensions, ui as uimod, util
116 from mercurial import commands, dispatch, extensions, ui as uimod, util
114 from mercurial.i18n import _
117 from mercurial.i18n import _
115
118
116 # start and stop parameters for effects
119 # start and stop parameters for effects
117 _effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
120 _effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
118 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
121 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
119 'italic': 3, 'underline': 4, 'inverse': 7,
122 'italic': 3, 'underline': 4, 'inverse': 7,
120 'black_background': 40, 'red_background': 41,
123 'black_background': 40, 'red_background': 41,
121 'green_background': 42, 'yellow_background': 43,
124 'green_background': 42, 'yellow_background': 43,
122 'blue_background': 44, 'purple_background': 45,
125 'blue_background': 44, 'purple_background': 45,
123 'cyan_background': 46, 'white_background': 47}
126 'cyan_background': 46, 'white_background': 47}
124
127
125 def _terminfosetup(ui, mode):
128 def _terminfosetup(ui, mode):
126 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
129 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
127
130
128 global _terminfo_params
131 global _terminfo_params
129 # If we failed to load curses, we go ahead and return.
132 # If we failed to load curses, we go ahead and return.
130 if not _terminfo_params:
133 if not _terminfo_params:
131 return
134 return
132 # Otherwise, see what the config file says.
135 # Otherwise, see what the config file says.
133 if mode not in ('auto', 'terminfo'):
136 if mode not in ('auto', 'terminfo'):
134 return
137 return
135
138
136 _terminfo_params.update((key[6:], (False, int(val)))
139 _terminfo_params.update((key[6:], (False, int(val)))
137 for key, val in ui.configitems('color')
140 for key, val in ui.configitems('color')
138 if key.startswith('color.'))
141 if key.startswith('color.'))
139
142
140 try:
143 try:
141 curses.setupterm()
144 curses.setupterm()
142 except curses.error, e:
145 except curses.error, e:
143 _terminfo_params = {}
146 _terminfo_params = {}
144 return
147 return
145
148
146 for key, (b, e) in _terminfo_params.items():
149 for key, (b, e) in _terminfo_params.items():
147 if not b:
150 if not b:
148 continue
151 continue
149 if not curses.tigetstr(e):
152 if not curses.tigetstr(e):
150 # Most terminals don't support dim, invis, etc, so don't be
153 # Most terminals don't support dim, invis, etc, so don't be
151 # noisy and use ui.debug().
154 # noisy and use ui.debug().
152 ui.debug("no terminfo entry for %s\n" % e)
155 ui.debug("no terminfo entry for %s\n" % e)
153 del _terminfo_params[key]
156 del _terminfo_params[key]
154 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
157 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
155 # Only warn about missing terminfo entries if we explicitly asked for
158 # Only warn about missing terminfo entries if we explicitly asked for
156 # terminfo mode.
159 # terminfo mode.
157 if mode == "terminfo":
160 if mode == "terminfo":
158 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
161 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
159 "ECMA-48 color\n"))
162 "ECMA-48 color\n"))
160 _terminfo_params = {}
163 _terminfo_params = {}
161
164
162 def _modesetup(ui, opts):
165 def _modesetup(ui, opts):
163 global _terminfo_params
166 global _terminfo_params
164
167
165 coloropt = opts['color']
168 coloropt = opts['color']
166 auto = coloropt == 'auto'
169 auto = coloropt == 'auto'
167 always = not auto and util.parsebool(coloropt)
170 always = not auto and util.parsebool(coloropt)
168 if not always and not auto:
171 if not always and not auto:
169 return None
172 return None
170
173
171 formatted = always or (os.environ.get('TERM') != 'dumb' and ui.formatted())
174 formatted = always or (os.environ.get('TERM') != 'dumb' and ui.formatted())
172
175
173 mode = ui.config('color', 'mode', 'auto')
176 mode = ui.config('color', 'mode', 'auto')
174 realmode = mode
177 realmode = mode
175 if mode == 'auto':
178 if mode == 'auto':
176 if os.name == 'nt' and 'TERM' not in os.environ:
179 if os.name == 'nt' and 'TERM' not in os.environ:
177 # looks line a cmd.exe console, use win32 API or nothing
180 # looks line a cmd.exe console, use win32 API or nothing
178 realmode = 'win32'
181 realmode = 'win32'
179 else:
182 else:
180 realmode = 'ansi'
183 realmode = 'ansi'
181
184
182 if realmode == 'win32':
185 if realmode == 'win32':
183 _terminfo_params = {}
186 _terminfo_params = {}
184 if not w32effects:
187 if not w32effects:
185 if mode == 'win32':
188 if mode == 'win32':
186 # only warn if color.mode is explicitly set to win32
189 # only warn if color.mode is explicitly set to win32
187 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
190 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
188 return None
191 return None
189 _effects.update(w32effects)
192 _effects.update(w32effects)
190 elif realmode == 'ansi':
193 elif realmode == 'ansi':
191 _terminfo_params = {}
194 _terminfo_params = {}
192 elif realmode == 'terminfo':
195 elif realmode == 'terminfo':
193 _terminfosetup(ui, mode)
196 _terminfosetup(ui, mode)
194 if not _terminfo_params:
197 if not _terminfo_params:
195 if mode == 'terminfo':
198 if mode == 'terminfo':
196 ## FIXME Shouldn't we return None in this case too?
199 ## FIXME Shouldn't we return None in this case too?
197 # only warn if color.mode is explicitly set to win32
200 # only warn if color.mode is explicitly set to win32
198 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
201 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
199 realmode = 'ansi'
202 realmode = 'ansi'
200 else:
203 else:
201 return None
204 return None
202
205
203 if always or (auto and formatted):
206 if always or (auto and formatted):
204 return realmode
207 return realmode
205 return None
208 return None
206
209
207 try:
210 try:
208 import curses
211 import curses
209 # Mapping from effect name to terminfo attribute name or color number.
212 # Mapping from effect name to terminfo attribute name or color number.
210 # This will also force-load the curses module.
213 # This will also force-load the curses module.
211 _terminfo_params = {'none': (True, 'sgr0'),
214 _terminfo_params = {'none': (True, 'sgr0'),
212 'standout': (True, 'smso'),
215 'standout': (True, 'smso'),
213 'underline': (True, 'smul'),
216 'underline': (True, 'smul'),
214 'reverse': (True, 'rev'),
217 'reverse': (True, 'rev'),
215 'inverse': (True, 'rev'),
218 'inverse': (True, 'rev'),
216 'blink': (True, 'blink'),
219 'blink': (True, 'blink'),
217 'dim': (True, 'dim'),
220 'dim': (True, 'dim'),
218 'bold': (True, 'bold'),
221 'bold': (True, 'bold'),
219 'invisible': (True, 'invis'),
222 'invisible': (True, 'invis'),
220 'italic': (True, 'sitm'),
223 'italic': (True, 'sitm'),
221 'black': (False, curses.COLOR_BLACK),
224 'black': (False, curses.COLOR_BLACK),
222 'red': (False, curses.COLOR_RED),
225 'red': (False, curses.COLOR_RED),
223 'green': (False, curses.COLOR_GREEN),
226 'green': (False, curses.COLOR_GREEN),
224 'yellow': (False, curses.COLOR_YELLOW),
227 'yellow': (False, curses.COLOR_YELLOW),
225 'blue': (False, curses.COLOR_BLUE),
228 'blue': (False, curses.COLOR_BLUE),
226 'magenta': (False, curses.COLOR_MAGENTA),
229 'magenta': (False, curses.COLOR_MAGENTA),
227 'cyan': (False, curses.COLOR_CYAN),
230 'cyan': (False, curses.COLOR_CYAN),
228 'white': (False, curses.COLOR_WHITE)}
231 'white': (False, curses.COLOR_WHITE)}
229 except ImportError:
232 except ImportError:
230 _terminfo_params = False
233 _terminfo_params = False
231
234
232 _styles = {'grep.match': 'red bold',
235 _styles = {'grep.match': 'red bold',
233 'bookmarks.current': 'green',
236 'bookmarks.current': 'green',
234 'branches.active': 'none',
237 'branches.active': 'none',
235 'branches.closed': 'black bold',
238 'branches.closed': 'black bold',
236 'branches.current': 'green',
239 'branches.current': 'green',
237 'branches.inactive': 'none',
240 'branches.inactive': 'none',
238 'diff.changed': 'white',
241 'diff.changed': 'white',
239 'diff.deleted': 'red',
242 'diff.deleted': 'red',
240 'diff.diffline': 'bold',
243 'diff.diffline': 'bold',
241 'diff.extended': 'cyan bold',
244 'diff.extended': 'cyan bold',
242 'diff.file_a': 'red bold',
245 'diff.file_a': 'red bold',
243 'diff.file_b': 'green bold',
246 'diff.file_b': 'green bold',
244 'diff.hunk': 'magenta',
247 'diff.hunk': 'magenta',
245 'diff.inserted': 'green',
248 'diff.inserted': 'green',
246 'diff.trailingwhitespace': 'bold red_background',
249 'diff.trailingwhitespace': 'bold red_background',
247 'diffstat.deleted': 'red',
250 'diffstat.deleted': 'red',
248 'diffstat.inserted': 'green',
251 'diffstat.inserted': 'green',
249 'ui.prompt': 'yellow',
252 'ui.prompt': 'yellow',
250 'log.changeset': 'yellow',
253 'log.changeset': 'yellow',
251 'resolve.resolved': 'green bold',
254 'resolve.resolved': 'green bold',
252 'resolve.unresolved': 'red bold',
255 'resolve.unresolved': 'red bold',
253 'status.added': 'green bold',
256 'status.added': 'green bold',
254 'status.clean': 'none',
257 'status.clean': 'none',
255 'status.copied': 'none',
258 'status.copied': 'none',
256 'status.deleted': 'cyan bold underline',
259 'status.deleted': 'cyan bold underline',
257 'status.ignored': 'black bold',
260 'status.ignored': 'black bold',
258 'status.modified': 'blue bold',
261 'status.modified': 'blue bold',
259 'status.removed': 'red bold',
262 'status.removed': 'red bold',
260 'status.unknown': 'magenta bold underline'}
263 'status.unknown': 'magenta bold underline',
264 'tags.normal': 'green',
265 'tags.local': 'black bold'}
261
266
262
267
263 def _effect_str(effect):
268 def _effect_str(effect):
264 '''Helper function for render_effects().'''
269 '''Helper function for render_effects().'''
265
270
266 bg = False
271 bg = False
267 if effect.endswith('_background'):
272 if effect.endswith('_background'):
268 bg = True
273 bg = True
269 effect = effect[:-11]
274 effect = effect[:-11]
270 attr, val = _terminfo_params[effect]
275 attr, val = _terminfo_params[effect]
271 if attr:
276 if attr:
272 return curses.tigetstr(val)
277 return curses.tigetstr(val)
273 elif bg:
278 elif bg:
274 return curses.tparm(curses.tigetstr('setab'), val)
279 return curses.tparm(curses.tigetstr('setab'), val)
275 else:
280 else:
276 return curses.tparm(curses.tigetstr('setaf'), val)
281 return curses.tparm(curses.tigetstr('setaf'), val)
277
282
278 def render_effects(text, effects):
283 def render_effects(text, effects):
279 'Wrap text in commands to turn on each effect.'
284 'Wrap text in commands to turn on each effect.'
280 if not text:
285 if not text:
281 return text
286 return text
282 if not _terminfo_params:
287 if not _terminfo_params:
283 start = [str(_effects[e]) for e in ['none'] + effects.split()]
288 start = [str(_effects[e]) for e in ['none'] + effects.split()]
284 start = '\033[' + ';'.join(start) + 'm'
289 start = '\033[' + ';'.join(start) + 'm'
285 stop = '\033[' + str(_effects['none']) + 'm'
290 stop = '\033[' + str(_effects['none']) + 'm'
286 else:
291 else:
287 start = ''.join(_effect_str(effect)
292 start = ''.join(_effect_str(effect)
288 for effect in ['none'] + effects.split())
293 for effect in ['none'] + effects.split())
289 stop = _effect_str('none')
294 stop = _effect_str('none')
290 return ''.join([start, text, stop])
295 return ''.join([start, text, stop])
291
296
292 def extstyles():
297 def extstyles():
293 for name, ext in extensions.extensions():
298 for name, ext in extensions.extensions():
294 _styles.update(getattr(ext, 'colortable', {}))
299 _styles.update(getattr(ext, 'colortable', {}))
295
300
296 def configstyles(ui):
301 def configstyles(ui):
297 for status, cfgeffects in ui.configitems('color'):
302 for status, cfgeffects in ui.configitems('color'):
298 if '.' not in status or status.startswith('color.'):
303 if '.' not in status or status.startswith('color.'):
299 continue
304 continue
300 cfgeffects = ui.configlist('color', status)
305 cfgeffects = ui.configlist('color', status)
301 if cfgeffects:
306 if cfgeffects:
302 good = []
307 good = []
303 for e in cfgeffects:
308 for e in cfgeffects:
304 if not _terminfo_params and e in _effects:
309 if not _terminfo_params and e in _effects:
305 good.append(e)
310 good.append(e)
306 elif e in _terminfo_params or e[:-11] in _terminfo_params:
311 elif e in _terminfo_params or e[:-11] in _terminfo_params:
307 good.append(e)
312 good.append(e)
308 else:
313 else:
309 ui.warn(_("ignoring unknown color/effect %r "
314 ui.warn(_("ignoring unknown color/effect %r "
310 "(configured in color.%s)\n")
315 "(configured in color.%s)\n")
311 % (e, status))
316 % (e, status))
312 _styles[status] = ' '.join(good)
317 _styles[status] = ' '.join(good)
313
318
314 class colorui(uimod.ui):
319 class colorui(uimod.ui):
315 def popbuffer(self, labeled=False):
320 def popbuffer(self, labeled=False):
316 if labeled:
321 if labeled:
317 return ''.join(self.label(a, label) for a, label
322 return ''.join(self.label(a, label) for a, label
318 in self._buffers.pop())
323 in self._buffers.pop())
319 return ''.join(a for a, label in self._buffers.pop())
324 return ''.join(a for a, label in self._buffers.pop())
320
325
321 _colormode = 'ansi'
326 _colormode = 'ansi'
322 def write(self, *args, **opts):
327 def write(self, *args, **opts):
323 label = opts.get('label', '')
328 label = opts.get('label', '')
324 if self._buffers:
329 if self._buffers:
325 self._buffers[-1].extend([(str(a), label) for a in args])
330 self._buffers[-1].extend([(str(a), label) for a in args])
326 elif self._colormode == 'win32':
331 elif self._colormode == 'win32':
327 for a in args:
332 for a in args:
328 win32print(a, super(colorui, self).write, **opts)
333 win32print(a, super(colorui, self).write, **opts)
329 else:
334 else:
330 return super(colorui, self).write(
335 return super(colorui, self).write(
331 *[self.label(str(a), label) for a in args], **opts)
336 *[self.label(str(a), label) for a in args], **opts)
332
337
333 def write_err(self, *args, **opts):
338 def write_err(self, *args, **opts):
334 label = opts.get('label', '')
339 label = opts.get('label', '')
335 if self._colormode == 'win32':
340 if self._colormode == 'win32':
336 for a in args:
341 for a in args:
337 win32print(a, super(colorui, self).write_err, **opts)
342 win32print(a, super(colorui, self).write_err, **opts)
338 else:
343 else:
339 return super(colorui, self).write_err(
344 return super(colorui, self).write_err(
340 *[self.label(str(a), label) for a in args], **opts)
345 *[self.label(str(a), label) for a in args], **opts)
341
346
342 def label(self, msg, label):
347 def label(self, msg, label):
343 effects = []
348 effects = []
344 for l in label.split():
349 for l in label.split():
345 s = _styles.get(l, '')
350 s = _styles.get(l, '')
346 if s:
351 if s:
347 effects.append(s)
352 effects.append(s)
348 effects = ' '.join(effects)
353 effects = ' '.join(effects)
349 if effects:
354 if effects:
350 return '\n'.join([render_effects(s, effects)
355 return '\n'.join([render_effects(s, effects)
351 for s in msg.split('\n')])
356 for s in msg.split('\n')])
352 return msg
357 return msg
353
358
354
359
355 def uisetup(ui):
360 def uisetup(ui):
356 global _terminfo_params
361 global _terminfo_params
357 if ui.plain():
362 if ui.plain():
358 return
363 return
359 def colorcmd(orig, ui_, opts, cmd, cmdfunc):
364 def colorcmd(orig, ui_, opts, cmd, cmdfunc):
360 mode = _modesetup(ui_, opts)
365 mode = _modesetup(ui_, opts)
361 if mode:
366 if mode:
362 colorui._colormode = mode
367 colorui._colormode = mode
363 if not issubclass(ui_.__class__, colorui):
368 if not issubclass(ui_.__class__, colorui):
364 colorui.__bases__ = (ui_.__class__,)
369 colorui.__bases__ = (ui_.__class__,)
365 ui_.__class__ = colorui
370 ui_.__class__ = colorui
366 extstyles()
371 extstyles()
367 configstyles(ui_)
372 configstyles(ui_)
368 return orig(ui_, opts, cmd, cmdfunc)
373 return orig(ui_, opts, cmd, cmdfunc)
369 extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
374 extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
370
375
371 def extsetup(ui):
376 def extsetup(ui):
372 commands.globalopts.append(
377 commands.globalopts.append(
373 ('', 'color', 'auto',
378 ('', 'color', 'auto',
374 # i18n: 'always', 'auto', and 'never' are keywords and should
379 # i18n: 'always', 'auto', and 'never' are keywords and should
375 # not be translated
380 # not be translated
376 _("when to colorize (boolean, always, auto, or never)"),
381 _("when to colorize (boolean, always, auto, or never)"),
377 _('TYPE')))
382 _('TYPE')))
378
383
379 if os.name != 'nt':
384 if os.name != 'nt':
380 w32effects = None
385 w32effects = None
381 else:
386 else:
382 import re, ctypes
387 import re, ctypes
383
388
384 _kernel32 = ctypes.windll.kernel32
389 _kernel32 = ctypes.windll.kernel32
385
390
386 _WORD = ctypes.c_ushort
391 _WORD = ctypes.c_ushort
387
392
388 _INVALID_HANDLE_VALUE = -1
393 _INVALID_HANDLE_VALUE = -1
389
394
390 class _COORD(ctypes.Structure):
395 class _COORD(ctypes.Structure):
391 _fields_ = [('X', ctypes.c_short),
396 _fields_ = [('X', ctypes.c_short),
392 ('Y', ctypes.c_short)]
397 ('Y', ctypes.c_short)]
393
398
394 class _SMALL_RECT(ctypes.Structure):
399 class _SMALL_RECT(ctypes.Structure):
395 _fields_ = [('Left', ctypes.c_short),
400 _fields_ = [('Left', ctypes.c_short),
396 ('Top', ctypes.c_short),
401 ('Top', ctypes.c_short),
397 ('Right', ctypes.c_short),
402 ('Right', ctypes.c_short),
398 ('Bottom', ctypes.c_short)]
403 ('Bottom', ctypes.c_short)]
399
404
400 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
405 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
401 _fields_ = [('dwSize', _COORD),
406 _fields_ = [('dwSize', _COORD),
402 ('dwCursorPosition', _COORD),
407 ('dwCursorPosition', _COORD),
403 ('wAttributes', _WORD),
408 ('wAttributes', _WORD),
404 ('srWindow', _SMALL_RECT),
409 ('srWindow', _SMALL_RECT),
405 ('dwMaximumWindowSize', _COORD)]
410 ('dwMaximumWindowSize', _COORD)]
406
411
407 _STD_OUTPUT_HANDLE = 0xfffffff5L # (DWORD)-11
412 _STD_OUTPUT_HANDLE = 0xfffffff5L # (DWORD)-11
408 _STD_ERROR_HANDLE = 0xfffffff4L # (DWORD)-12
413 _STD_ERROR_HANDLE = 0xfffffff4L # (DWORD)-12
409
414
410 _FOREGROUND_BLUE = 0x0001
415 _FOREGROUND_BLUE = 0x0001
411 _FOREGROUND_GREEN = 0x0002
416 _FOREGROUND_GREEN = 0x0002
412 _FOREGROUND_RED = 0x0004
417 _FOREGROUND_RED = 0x0004
413 _FOREGROUND_INTENSITY = 0x0008
418 _FOREGROUND_INTENSITY = 0x0008
414
419
415 _BACKGROUND_BLUE = 0x0010
420 _BACKGROUND_BLUE = 0x0010
416 _BACKGROUND_GREEN = 0x0020
421 _BACKGROUND_GREEN = 0x0020
417 _BACKGROUND_RED = 0x0040
422 _BACKGROUND_RED = 0x0040
418 _BACKGROUND_INTENSITY = 0x0080
423 _BACKGROUND_INTENSITY = 0x0080
419
424
420 _COMMON_LVB_REVERSE_VIDEO = 0x4000
425 _COMMON_LVB_REVERSE_VIDEO = 0x4000
421 _COMMON_LVB_UNDERSCORE = 0x8000
426 _COMMON_LVB_UNDERSCORE = 0x8000
422
427
423 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
428 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
424 w32effects = {
429 w32effects = {
425 'none': -1,
430 'none': -1,
426 'black': 0,
431 'black': 0,
427 'red': _FOREGROUND_RED,
432 'red': _FOREGROUND_RED,
428 'green': _FOREGROUND_GREEN,
433 'green': _FOREGROUND_GREEN,
429 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
434 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
430 'blue': _FOREGROUND_BLUE,
435 'blue': _FOREGROUND_BLUE,
431 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
436 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
432 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
437 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
433 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
438 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
434 'bold': _FOREGROUND_INTENSITY,
439 'bold': _FOREGROUND_INTENSITY,
435 'black_background': 0x100, # unused value > 0x0f
440 'black_background': 0x100, # unused value > 0x0f
436 'red_background': _BACKGROUND_RED,
441 'red_background': _BACKGROUND_RED,
437 'green_background': _BACKGROUND_GREEN,
442 'green_background': _BACKGROUND_GREEN,
438 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
443 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
439 'blue_background': _BACKGROUND_BLUE,
444 'blue_background': _BACKGROUND_BLUE,
440 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
445 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
441 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
446 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
442 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
447 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
443 _BACKGROUND_BLUE),
448 _BACKGROUND_BLUE),
444 'bold_background': _BACKGROUND_INTENSITY,
449 'bold_background': _BACKGROUND_INTENSITY,
445 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
450 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
446 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
451 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
447 }
452 }
448
453
449 passthrough = set([_FOREGROUND_INTENSITY,
454 passthrough = set([_FOREGROUND_INTENSITY,
450 _BACKGROUND_INTENSITY,
455 _BACKGROUND_INTENSITY,
451 _COMMON_LVB_UNDERSCORE,
456 _COMMON_LVB_UNDERSCORE,
452 _COMMON_LVB_REVERSE_VIDEO])
457 _COMMON_LVB_REVERSE_VIDEO])
453
458
454 stdout = _kernel32.GetStdHandle(
459 stdout = _kernel32.GetStdHandle(
455 _STD_OUTPUT_HANDLE) # don't close the handle returned
460 _STD_OUTPUT_HANDLE) # don't close the handle returned
456 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
461 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
457 w32effects = None
462 w32effects = None
458 else:
463 else:
459 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
464 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
460 if not _kernel32.GetConsoleScreenBufferInfo(
465 if not _kernel32.GetConsoleScreenBufferInfo(
461 stdout, ctypes.byref(csbi)):
466 stdout, ctypes.byref(csbi)):
462 # stdout may not support GetConsoleScreenBufferInfo()
467 # stdout may not support GetConsoleScreenBufferInfo()
463 # when called from subprocess or redirected
468 # when called from subprocess or redirected
464 w32effects = None
469 w32effects = None
465 else:
470 else:
466 origattr = csbi.wAttributes
471 origattr = csbi.wAttributes
467 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
472 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
468 re.MULTILINE | re.DOTALL)
473 re.MULTILINE | re.DOTALL)
469
474
470 def win32print(text, orig, **opts):
475 def win32print(text, orig, **opts):
471 label = opts.get('label', '')
476 label = opts.get('label', '')
472 attr = origattr
477 attr = origattr
473
478
474 def mapcolor(val, attr):
479 def mapcolor(val, attr):
475 if val == -1:
480 if val == -1:
476 return origattr
481 return origattr
477 elif val in passthrough:
482 elif val in passthrough:
478 return attr | val
483 return attr | val
479 elif val > 0x0f:
484 elif val > 0x0f:
480 return (val & 0x70) | (attr & 0x8f)
485 return (val & 0x70) | (attr & 0x8f)
481 else:
486 else:
482 return (val & 0x07) | (attr & 0xf8)
487 return (val & 0x07) | (attr & 0xf8)
483
488
484 # determine console attributes based on labels
489 # determine console attributes based on labels
485 for l in label.split():
490 for l in label.split():
486 style = _styles.get(l, '')
491 style = _styles.get(l, '')
487 for effect in style.split():
492 for effect in style.split():
488 attr = mapcolor(w32effects[effect], attr)
493 attr = mapcolor(w32effects[effect], attr)
489
494
490 # hack to ensure regexp finds data
495 # hack to ensure regexp finds data
491 if not text.startswith('\033['):
496 if not text.startswith('\033['):
492 text = '\033[m' + text
497 text = '\033[m' + text
493
498
494 # Look for ANSI-like codes embedded in text
499 # Look for ANSI-like codes embedded in text
495 m = re.match(ansire, text)
500 m = re.match(ansire, text)
496
501
497 try:
502 try:
498 while m:
503 while m:
499 for sattr in m.group(1).split(';'):
504 for sattr in m.group(1).split(';'):
500 if sattr:
505 if sattr:
501 attr = mapcolor(int(sattr), attr)
506 attr = mapcolor(int(sattr), attr)
502 _kernel32.SetConsoleTextAttribute(stdout, attr)
507 _kernel32.SetConsoleTextAttribute(stdout, attr)
503 orig(m.group(2), **opts)
508 orig(m.group(2), **opts)
504 m = re.match(ansire, m.group(3))
509 m = re.match(ansire, m.group(3))
505 finally:
510 finally:
506 # Explicity reset original attributes
511 # Explicity reset original attributes
507 _kernel32.SetConsoleTextAttribute(stdout, origattr)
512 _kernel32.SetConsoleTextAttribute(stdout, origattr)
@@ -1,847 +1,854
1 # Mercurial built-in replacement for cvsps.
1 # Mercurial built-in replacement for cvsps.
2 #
2 #
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 import re
9 import re
10 import cPickle as pickle
10 import cPickle as pickle
11 from mercurial import util
11 from mercurial import util
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial import hook
13 from mercurial import hook
14 from mercurial import util
14
15
15 class logentry(object):
16 class logentry(object):
16 '''Class logentry has the following attributes:
17 '''Class logentry has the following attributes:
17 .author - author name as CVS knows it
18 .author - author name as CVS knows it
18 .branch - name of branch this revision is on
19 .branch - name of branch this revision is on
19 .branches - revision tuple of branches starting at this revision
20 .branches - revision tuple of branches starting at this revision
20 .comment - commit message
21 .comment - commit message
21 .date - the commit date as a (time, tz) tuple
22 .date - the commit date as a (time, tz) tuple
22 .dead - true if file revision is dead
23 .dead - true if file revision is dead
23 .file - Name of file
24 .file - Name of file
24 .lines - a tuple (+lines, -lines) or None
25 .lines - a tuple (+lines, -lines) or None
25 .parent - Previous revision of this entry
26 .parent - Previous revision of this entry
26 .rcs - name of file as returned from CVS
27 .rcs - name of file as returned from CVS
27 .revision - revision number as tuple
28 .revision - revision number as tuple
28 .tags - list of tags on the file
29 .tags - list of tags on the file
29 .synthetic - is this a synthetic "file ... added on ..." revision?
30 .synthetic - is this a synthetic "file ... added on ..." revision?
30 .mergepoint- the branch that has been merged from
31 .mergepoint- the branch that has been merged from
31 (if present in rlog output)
32 (if present in rlog output)
32 .branchpoints- the branches that start at the current entry
33 .branchpoints- the branches that start at the current entry
33 '''
34 '''
34 def __init__(self, **entries):
35 def __init__(self, **entries):
35 self.synthetic = False
36 self.synthetic = False
36 self.__dict__.update(entries)
37 self.__dict__.update(entries)
37
38
38 def __repr__(self):
39 def __repr__(self):
39 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
40 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
40 id(self),
41 id(self),
41 self.file,
42 self.file,
42 ".".join(map(str, self.revision)))
43 ".".join(map(str, self.revision)))
43
44
44 class logerror(Exception):
45 class logerror(Exception):
45 pass
46 pass
46
47
47 def getrepopath(cvspath):
48 def getrepopath(cvspath):
48 """Return the repository path from a CVS path.
49 """Return the repository path from a CVS path.
49
50
50 >>> getrepopath('/foo/bar')
51 >>> getrepopath('/foo/bar')
51 '/foo/bar'
52 '/foo/bar'
52 >>> getrepopath('c:/foo/bar')
53 >>> getrepopath('c:/foo/bar')
53 'c:/foo/bar'
54 'c:/foo/bar'
54 >>> getrepopath(':pserver:10/foo/bar')
55 >>> getrepopath(':pserver:10/foo/bar')
55 '/foo/bar'
56 '/foo/bar'
56 >>> getrepopath(':pserver:10c:/foo/bar')
57 >>> getrepopath(':pserver:10c:/foo/bar')
57 '/foo/bar'
58 '/foo/bar'
58 >>> getrepopath(':pserver:/foo/bar')
59 >>> getrepopath(':pserver:/foo/bar')
59 '/foo/bar'
60 '/foo/bar'
60 >>> getrepopath(':pserver:c:/foo/bar')
61 >>> getrepopath(':pserver:c:/foo/bar')
61 'c:/foo/bar'
62 'c:/foo/bar'
62 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
63 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
63 '/foo/bar'
64 '/foo/bar'
64 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
65 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
65 'c:/foo/bar'
66 'c:/foo/bar'
66 """
67 """
67 # According to CVS manual, CVS paths are expressed like:
68 # According to CVS manual, CVS paths are expressed like:
68 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
69 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
69 #
70 #
70 # Unfortunately, Windows absolute paths start with a drive letter
71 # Unfortunately, Windows absolute paths start with a drive letter
71 # like 'c:' making it harder to parse. Here we assume that drive
72 # like 'c:' making it harder to parse. Here we assume that drive
72 # letters are only one character long and any CVS component before
73 # letters are only one character long and any CVS component before
73 # the repository path is at least 2 characters long, and use this
74 # the repository path is at least 2 characters long, and use this
74 # to disambiguate.
75 # to disambiguate.
75 parts = cvspath.split(':')
76 parts = cvspath.split(':')
76 if len(parts) == 1:
77 if len(parts) == 1:
77 return parts[0]
78 return parts[0]
78 # Here there is an ambiguous case if we have a port number
79 # Here there is an ambiguous case if we have a port number
79 # immediately followed by a Windows driver letter. We assume this
80 # immediately followed by a Windows driver letter. We assume this
80 # never happens and decide it must be CVS path component,
81 # never happens and decide it must be CVS path component,
81 # therefore ignoring it.
82 # therefore ignoring it.
82 if len(parts[-2]) > 1:
83 if len(parts[-2]) > 1:
83 return parts[-1].lstrip('0123456789')
84 return parts[-1].lstrip('0123456789')
84 return parts[-2] + ':' + parts[-1]
85 return parts[-2] + ':' + parts[-1]
85
86
86 def createlog(ui, directory=None, root="", rlog=True, cache=None):
87 def createlog(ui, directory=None, root="", rlog=True, cache=None):
87 '''Collect the CVS rlog'''
88 '''Collect the CVS rlog'''
88
89
89 # Because we store many duplicate commit log messages, reusing strings
90 # Because we store many duplicate commit log messages, reusing strings
90 # saves a lot of memory and pickle storage space.
91 # saves a lot of memory and pickle storage space.
91 _scache = {}
92 _scache = {}
92 def scache(s):
93 def scache(s):
93 "return a shared version of a string"
94 "return a shared version of a string"
94 return _scache.setdefault(s, s)
95 return _scache.setdefault(s, s)
95
96
96 ui.status(_('collecting CVS rlog\n'))
97 ui.status(_('collecting CVS rlog\n'))
97
98
98 log = [] # list of logentry objects containing the CVS state
99 log = [] # list of logentry objects containing the CVS state
99
100
100 # patterns to match in CVS (r)log output, by state of use
101 # patterns to match in CVS (r)log output, by state of use
101 re_00 = re.compile('RCS file: (.+)$')
102 re_00 = re.compile('RCS file: (.+)$')
102 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
103 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
103 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
104 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
104 re_03 = re.compile("(Cannot access.+CVSROOT)|"
105 re_03 = re.compile("(Cannot access.+CVSROOT)|"
105 "(can't create temporary directory.+)$")
106 "(can't create temporary directory.+)$")
106 re_10 = re.compile('Working file: (.+)$')
107 re_10 = re.compile('Working file: (.+)$')
107 re_20 = re.compile('symbolic names:')
108 re_20 = re.compile('symbolic names:')
108 re_30 = re.compile('\t(.+): ([\\d.]+)$')
109 re_30 = re.compile('\t(.+): ([\\d.]+)$')
109 re_31 = re.compile('----------------------------$')
110 re_31 = re.compile('----------------------------$')
110 re_32 = re.compile('======================================='
111 re_32 = re.compile('======================================='
111 '======================================$')
112 '======================================$')
112 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
113 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
113 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
114 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
114 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
115 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
115 r'(.*mergepoint:\s+([^;]+);)?')
116 r'(.*mergepoint:\s+([^;]+);)?')
116 re_70 = re.compile('branches: (.+);$')
117 re_70 = re.compile('branches: (.+);$')
117
118
118 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
119 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
119
120
120 prefix = '' # leading path to strip of what we get from CVS
121 prefix = '' # leading path to strip of what we get from CVS
121
122
122 if directory is None:
123 if directory is None:
123 # Current working directory
124 # Current working directory
124
125
125 # Get the real directory in the repository
126 # Get the real directory in the repository
126 try:
127 try:
127 prefix = open(os.path.join('CVS','Repository')).read().strip()
128 prefix = open(os.path.join('CVS','Repository')).read().strip()
128 directory = prefix
129 directory = prefix
129 if prefix == ".":
130 if prefix == ".":
130 prefix = ""
131 prefix = ""
131 except IOError:
132 except IOError:
132 raise logerror(_('not a CVS sandbox'))
133 raise logerror(_('not a CVS sandbox'))
133
134
134 if prefix and not prefix.endswith(os.sep):
135 if prefix and not prefix.endswith(os.sep):
135 prefix += os.sep
136 prefix += os.sep
136
137
137 # Use the Root file in the sandbox, if it exists
138 # Use the Root file in the sandbox, if it exists
138 try:
139 try:
139 root = open(os.path.join('CVS','Root')).read().strip()
140 root = open(os.path.join('CVS','Root')).read().strip()
140 except IOError:
141 except IOError:
141 pass
142 pass
142
143
143 if not root:
144 if not root:
144 root = os.environ.get('CVSROOT', '')
145 root = os.environ.get('CVSROOT', '')
145
146
146 # read log cache if one exists
147 # read log cache if one exists
147 oldlog = []
148 oldlog = []
148 date = None
149 date = None
149
150
150 if cache:
151 if cache:
151 cachedir = os.path.expanduser('~/.hg.cvsps')
152 cachedir = os.path.expanduser('~/.hg.cvsps')
152 if not os.path.exists(cachedir):
153 if not os.path.exists(cachedir):
153 os.mkdir(cachedir)
154 os.mkdir(cachedir)
154
155
155 # The cvsps cache pickle needs a uniquified name, based on the
156 # The cvsps cache pickle needs a uniquified name, based on the
156 # repository location. The address may have all sort of nasties
157 # repository location. The address may have all sort of nasties
157 # in it, slashes, colons and such. So here we take just the
158 # in it, slashes, colons and such. So here we take just the
158 # alphanumerics, concatenated in a way that does not mix up the
159 # alphanumerics, concatenated in a way that does not mix up the
159 # various components, so that
160 # various components, so that
160 # :pserver:user@server:/path
161 # :pserver:user@server:/path
161 # and
162 # and
162 # /pserver/user/server/path
163 # /pserver/user/server/path
163 # are mapped to different cache file names.
164 # are mapped to different cache file names.
164 cachefile = root.split(":") + [directory, "cache"]
165 cachefile = root.split(":") + [directory, "cache"]
165 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
166 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
166 cachefile = os.path.join(cachedir,
167 cachefile = os.path.join(cachedir,
167 '.'.join([s for s in cachefile if s]))
168 '.'.join([s for s in cachefile if s]))
168
169
169 if cache == 'update':
170 if cache == 'update':
170 try:
171 try:
171 ui.note(_('reading cvs log cache %s\n') % cachefile)
172 ui.note(_('reading cvs log cache %s\n') % cachefile)
172 oldlog = pickle.load(open(cachefile))
173 oldlog = pickle.load(open(cachefile))
173 ui.note(_('cache has %d log entries\n') % len(oldlog))
174 ui.note(_('cache has %d log entries\n') % len(oldlog))
174 except Exception, e:
175 except Exception, e:
175 ui.note(_('error reading cache: %r\n') % e)
176 ui.note(_('error reading cache: %r\n') % e)
176
177
177 if oldlog:
178 if oldlog:
178 date = oldlog[-1].date # last commit date as a (time,tz) tuple
179 date = oldlog[-1].date # last commit date as a (time,tz) tuple
179 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
180 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
180
181
181 # build the CVS commandline
182 # build the CVS commandline
182 cmd = ['cvs', '-q']
183 cmd = ['cvs', '-q']
183 if root:
184 if root:
184 cmd.append('-d%s' % root)
185 cmd.append('-d%s' % root)
185 p = util.normpath(getrepopath(root))
186 p = util.normpath(getrepopath(root))
186 if not p.endswith('/'):
187 if not p.endswith('/'):
187 p += '/'
188 p += '/'
188 if prefix:
189 if prefix:
189 # looks like normpath replaces "" by "."
190 # looks like normpath replaces "" by "."
190 prefix = p + util.normpath(prefix)
191 prefix = p + util.normpath(prefix)
191 else:
192 else:
192 prefix = p
193 prefix = p
193 cmd.append(['log', 'rlog'][rlog])
194 cmd.append(['log', 'rlog'][rlog])
194 if date:
195 if date:
195 # no space between option and date string
196 # no space between option and date string
196 cmd.append('-d>%s' % date)
197 cmd.append('-d>%s' % date)
197 cmd.append(directory)
198 cmd.append(directory)
198
199
199 # state machine begins here
200 # state machine begins here
200 tags = {} # dictionary of revisions on current file with their tags
201 tags = {} # dictionary of revisions on current file with their tags
201 branchmap = {} # mapping between branch names and revision numbers
202 branchmap = {} # mapping between branch names and revision numbers
202 state = 0
203 state = 0
203 store = False # set when a new record can be appended
204 store = False # set when a new record can be appended
204
205
205 cmd = [util.shellquote(arg) for arg in cmd]
206 cmd = [util.shellquote(arg) for arg in cmd]
206 ui.note(_("running %s\n") % (' '.join(cmd)))
207 ui.note(_("running %s\n") % (' '.join(cmd)))
207 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
208 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
208
209
209 pfp = util.popen(' '.join(cmd))
210 pfp = util.popen(' '.join(cmd))
210 peek = pfp.readline()
211 peek = pfp.readline()
211 while True:
212 while True:
212 line = peek
213 line = peek
213 if line == '':
214 if line == '':
214 break
215 break
215 peek = pfp.readline()
216 peek = pfp.readline()
216 if line.endswith('\n'):
217 if line.endswith('\n'):
217 line = line[:-1]
218 line = line[:-1]
218 #ui.debug('state=%d line=%r\n' % (state, line))
219 #ui.debug('state=%d line=%r\n' % (state, line))
219
220
220 if state == 0:
221 if state == 0:
221 # initial state, consume input until we see 'RCS file'
222 # initial state, consume input until we see 'RCS file'
222 match = re_00.match(line)
223 match = re_00.match(line)
223 if match:
224 if match:
224 rcs = match.group(1)
225 rcs = match.group(1)
225 tags = {}
226 tags = {}
226 if rlog:
227 if rlog:
227 filename = util.normpath(rcs[:-2])
228 filename = util.normpath(rcs[:-2])
228 if filename.startswith(prefix):
229 if filename.startswith(prefix):
229 filename = filename[len(prefix):]
230 filename = filename[len(prefix):]
230 if filename.startswith('/'):
231 if filename.startswith('/'):
231 filename = filename[1:]
232 filename = filename[1:]
232 if filename.startswith('Attic/'):
233 if filename.startswith('Attic/'):
233 filename = filename[6:]
234 filename = filename[6:]
234 else:
235 else:
235 filename = filename.replace('/Attic/', '/')
236 filename = filename.replace('/Attic/', '/')
236 state = 2
237 state = 2
237 continue
238 continue
238 state = 1
239 state = 1
239 continue
240 continue
240 match = re_01.match(line)
241 match = re_01.match(line)
241 if match:
242 if match:
242 raise logerror(match.group(1))
243 raise logerror(match.group(1))
243 match = re_02.match(line)
244 match = re_02.match(line)
244 if match:
245 if match:
245 raise logerror(match.group(2))
246 raise logerror(match.group(2))
246 if re_03.match(line):
247 if re_03.match(line):
247 raise logerror(line)
248 raise logerror(line)
248
249
249 elif state == 1:
250 elif state == 1:
250 # expect 'Working file' (only when using log instead of rlog)
251 # expect 'Working file' (only when using log instead of rlog)
251 match = re_10.match(line)
252 match = re_10.match(line)
252 assert match, _('RCS file must be followed by working file')
253 assert match, _('RCS file must be followed by working file')
253 filename = util.normpath(match.group(1))
254 filename = util.normpath(match.group(1))
254 state = 2
255 state = 2
255
256
256 elif state == 2:
257 elif state == 2:
257 # expect 'symbolic names'
258 # expect 'symbolic names'
258 if re_20.match(line):
259 if re_20.match(line):
259 branchmap = {}
260 branchmap = {}
260 state = 3
261 state = 3
261
262
262 elif state == 3:
263 elif state == 3:
263 # read the symbolic names and store as tags
264 # read the symbolic names and store as tags
264 match = re_30.match(line)
265 match = re_30.match(line)
265 if match:
266 if match:
266 rev = [int(x) for x in match.group(2).split('.')]
267 rev = [int(x) for x in match.group(2).split('.')]
267
268
268 # Convert magic branch number to an odd-numbered one
269 # Convert magic branch number to an odd-numbered one
269 revn = len(rev)
270 revn = len(rev)
270 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
271 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
271 rev = rev[:-2] + rev[-1:]
272 rev = rev[:-2] + rev[-1:]
272 rev = tuple(rev)
273 rev = tuple(rev)
273
274
274 if rev not in tags:
275 if rev not in tags:
275 tags[rev] = []
276 tags[rev] = []
276 tags[rev].append(match.group(1))
277 tags[rev].append(match.group(1))
277 branchmap[match.group(1)] = match.group(2)
278 branchmap[match.group(1)] = match.group(2)
278
279
279 elif re_31.match(line):
280 elif re_31.match(line):
280 state = 5
281 state = 5
281 elif re_32.match(line):
282 elif re_32.match(line):
282 state = 0
283 state = 0
283
284
284 elif state == 4:
285 elif state == 4:
285 # expecting '------' separator before first revision
286 # expecting '------' separator before first revision
286 if re_31.match(line):
287 if re_31.match(line):
287 state = 5
288 state = 5
288 else:
289 else:
289 assert not re_32.match(line), _('must have at least '
290 assert not re_32.match(line), _('must have at least '
290 'some revisions')
291 'some revisions')
291
292
292 elif state == 5:
293 elif state == 5:
293 # expecting revision number and possibly (ignored) lock indication
294 # expecting revision number and possibly (ignored) lock indication
294 # we create the logentry here from values stored in states 0 to 4,
295 # we create the logentry here from values stored in states 0 to 4,
295 # as this state is re-entered for subsequent revisions of a file.
296 # as this state is re-entered for subsequent revisions of a file.
296 match = re_50.match(line)
297 match = re_50.match(line)
297 assert match, _('expected revision number')
298 assert match, _('expected revision number')
298 e = logentry(rcs=scache(rcs), file=scache(filename),
299 e = logentry(rcs=scache(rcs), file=scache(filename),
299 revision=tuple([int(x) for x in match.group(1).split('.')]),
300 revision=tuple([int(x) for x in match.group(1).split('.')]),
300 branches=[], parent=None)
301 branches=[], parent=None)
301 state = 6
302 state = 6
302
303
303 elif state == 6:
304 elif state == 6:
304 # expecting date, author, state, lines changed
305 # expecting date, author, state, lines changed
305 match = re_60.match(line)
306 match = re_60.match(line)
306 assert match, _('revision must be followed by date line')
307 assert match, _('revision must be followed by date line')
307 d = match.group(1)
308 d = match.group(1)
308 if d[2] == '/':
309 if d[2] == '/':
309 # Y2K
310 # Y2K
310 d = '19' + d
311 d = '19' + d
311
312
312 if len(d.split()) != 3:
313 if len(d.split()) != 3:
313 # cvs log dates always in GMT
314 # cvs log dates always in GMT
314 d = d + ' UTC'
315 d = d + ' UTC'
315 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
316 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
316 '%Y/%m/%d %H:%M:%S',
317 '%Y/%m/%d %H:%M:%S',
317 '%Y-%m-%d %H:%M:%S'])
318 '%Y-%m-%d %H:%M:%S'])
318 e.author = scache(match.group(2))
319 e.author = scache(match.group(2))
319 e.dead = match.group(3).lower() == 'dead'
320 e.dead = match.group(3).lower() == 'dead'
320
321
321 if match.group(5):
322 if match.group(5):
322 if match.group(6):
323 if match.group(6):
323 e.lines = (int(match.group(5)), int(match.group(6)))
324 e.lines = (int(match.group(5)), int(match.group(6)))
324 else:
325 else:
325 e.lines = (int(match.group(5)), 0)
326 e.lines = (int(match.group(5)), 0)
326 elif match.group(6):
327 elif match.group(6):
327 e.lines = (0, int(match.group(6)))
328 e.lines = (0, int(match.group(6)))
328 else:
329 else:
329 e.lines = None
330 e.lines = None
330
331
331 if match.group(7): # cvsnt mergepoint
332 if match.group(7): # cvsnt mergepoint
332 myrev = match.group(8).split('.')
333 myrev = match.group(8).split('.')
333 if len(myrev) == 2: # head
334 if len(myrev) == 2: # head
334 e.mergepoint = 'HEAD'
335 e.mergepoint = 'HEAD'
335 else:
336 else:
336 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
337 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
337 branches = [b for b in branchmap if branchmap[b] == myrev]
338 branches = [b for b in branchmap if branchmap[b] == myrev]
338 assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
339 assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
339 e.mergepoint = branches[0]
340 e.mergepoint = branches[0]
340 else:
341 else:
341 e.mergepoint = None
342 e.mergepoint = None
342 e.comment = []
343 e.comment = []
343 state = 7
344 state = 7
344
345
345 elif state == 7:
346 elif state == 7:
346 # read the revision numbers of branches that start at this revision
347 # read the revision numbers of branches that start at this revision
347 # or store the commit log message otherwise
348 # or store the commit log message otherwise
348 m = re_70.match(line)
349 m = re_70.match(line)
349 if m:
350 if m:
350 e.branches = [tuple([int(y) for y in x.strip().split('.')])
351 e.branches = [tuple([int(y) for y in x.strip().split('.')])
351 for x in m.group(1).split(';')]
352 for x in m.group(1).split(';')]
352 state = 8
353 state = 8
353 elif re_31.match(line) and re_50.match(peek):
354 elif re_31.match(line) and re_50.match(peek):
354 state = 5
355 state = 5
355 store = True
356 store = True
356 elif re_32.match(line):
357 elif re_32.match(line):
357 state = 0
358 state = 0
358 store = True
359 store = True
359 else:
360 else:
360 e.comment.append(line)
361 e.comment.append(line)
361
362
362 elif state == 8:
363 elif state == 8:
363 # store commit log message
364 # store commit log message
364 if re_31.match(line):
365 if re_31.match(line):
365 state = 5
366 cpeek = peek
366 store = True
367 if cpeek.endswith('\n'):
368 cpeek = cpeek[:-1]
369 if re_50.match(cpeek):
370 state = 5
371 store = True
372 else:
373 e.comment.append(line)
367 elif re_32.match(line):
374 elif re_32.match(line):
368 state = 0
375 state = 0
369 store = True
376 store = True
370 else:
377 else:
371 e.comment.append(line)
378 e.comment.append(line)
372
379
373 # When a file is added on a branch B1, CVS creates a synthetic
380 # When a file is added on a branch B1, CVS creates a synthetic
374 # dead trunk revision 1.1 so that the branch has a root.
381 # dead trunk revision 1.1 so that the branch has a root.
375 # Likewise, if you merge such a file to a later branch B2 (one
382 # Likewise, if you merge such a file to a later branch B2 (one
376 # that already existed when the file was added on B1), CVS
383 # that already existed when the file was added on B1), CVS
377 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
384 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
378 # these revisions now, but mark them synthetic so
385 # these revisions now, but mark them synthetic so
379 # createchangeset() can take care of them.
386 # createchangeset() can take care of them.
380 if (store and
387 if (store and
381 e.dead and
388 e.dead and
382 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
389 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
383 len(e.comment) == 1 and
390 len(e.comment) == 1 and
384 file_added_re.match(e.comment[0])):
391 file_added_re.match(e.comment[0])):
385 ui.debug('found synthetic revision in %s: %r\n'
392 ui.debug('found synthetic revision in %s: %r\n'
386 % (e.rcs, e.comment[0]))
393 % (e.rcs, e.comment[0]))
387 e.synthetic = True
394 e.synthetic = True
388
395
389 if store:
396 if store:
390 # clean up the results and save in the log.
397 # clean up the results and save in the log.
391 store = False
398 store = False
392 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
399 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
393 e.comment = scache('\n'.join(e.comment))
400 e.comment = scache('\n'.join(e.comment))
394
401
395 revn = len(e.revision)
402 revn = len(e.revision)
396 if revn > 3 and (revn % 2) == 0:
403 if revn > 3 and (revn % 2) == 0:
397 e.branch = tags.get(e.revision[:-1], [None])[0]
404 e.branch = tags.get(e.revision[:-1], [None])[0]
398 else:
405 else:
399 e.branch = None
406 e.branch = None
400
407
401 # find the branches starting from this revision
408 # find the branches starting from this revision
402 branchpoints = set()
409 branchpoints = set()
403 for branch, revision in branchmap.iteritems():
410 for branch, revision in branchmap.iteritems():
404 revparts = tuple([int(i) for i in revision.split('.')])
411 revparts = tuple([int(i) for i in revision.split('.')])
405 if len(revparts) < 2: # bad tags
412 if len(revparts) < 2: # bad tags
406 continue
413 continue
407 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
414 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
408 # normal branch
415 # normal branch
409 if revparts[:-2] == e.revision:
416 if revparts[:-2] == e.revision:
410 branchpoints.add(branch)
417 branchpoints.add(branch)
411 elif revparts == (1, 1, 1): # vendor branch
418 elif revparts == (1, 1, 1): # vendor branch
412 if revparts in e.branches:
419 if revparts in e.branches:
413 branchpoints.add(branch)
420 branchpoints.add(branch)
414 e.branchpoints = branchpoints
421 e.branchpoints = branchpoints
415
422
416 log.append(e)
423 log.append(e)
417
424
418 if len(log) % 100 == 0:
425 if len(log) % 100 == 0:
419 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
426 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
420
427
421 log.sort(key=lambda x: (x.rcs, x.revision))
428 log.sort(key=lambda x: (x.rcs, x.revision))
422
429
423 # find parent revisions of individual files
430 # find parent revisions of individual files
424 versions = {}
431 versions = {}
425 for e in log:
432 for e in log:
426 branch = e.revision[:-1]
433 branch = e.revision[:-1]
427 p = versions.get((e.rcs, branch), None)
434 p = versions.get((e.rcs, branch), None)
428 if p is None:
435 if p is None:
429 p = e.revision[:-2]
436 p = e.revision[:-2]
430 e.parent = p
437 e.parent = p
431 versions[(e.rcs, branch)] = e.revision
438 versions[(e.rcs, branch)] = e.revision
432
439
433 # update the log cache
440 # update the log cache
434 if cache:
441 if cache:
435 if log:
442 if log:
436 # join up the old and new logs
443 # join up the old and new logs
437 log.sort(key=lambda x: x.date)
444 log.sort(key=lambda x: x.date)
438
445
439 if oldlog and oldlog[-1].date >= log[0].date:
446 if oldlog and oldlog[-1].date >= log[0].date:
440 raise logerror(_('log cache overlaps with new log entries,'
447 raise logerror(_('log cache overlaps with new log entries,'
441 ' re-run without cache.'))
448 ' re-run without cache.'))
442
449
443 log = oldlog + log
450 log = oldlog + log
444
451
445 # write the new cachefile
452 # write the new cachefile
446 ui.note(_('writing cvs log cache %s\n') % cachefile)
453 ui.note(_('writing cvs log cache %s\n') % cachefile)
447 pickle.dump(log, open(cachefile, 'w'))
454 pickle.dump(log, open(cachefile, 'w'))
448 else:
455 else:
449 log = oldlog
456 log = oldlog
450
457
451 ui.status(_('%d log entries\n') % len(log))
458 ui.status(_('%d log entries\n') % len(log))
452
459
453 hook.hook(ui, None, "cvslog", True, log=log)
460 hook.hook(ui, None, "cvslog", True, log=log)
454
461
455 return log
462 return log
456
463
457
464
458 class changeset(object):
465 class changeset(object):
459 '''Class changeset has the following attributes:
466 '''Class changeset has the following attributes:
460 .id - integer identifying this changeset (list index)
467 .id - integer identifying this changeset (list index)
461 .author - author name as CVS knows it
468 .author - author name as CVS knows it
462 .branch - name of branch this changeset is on, or None
469 .branch - name of branch this changeset is on, or None
463 .comment - commit message
470 .comment - commit message
464 .date - the commit date as a (time,tz) tuple
471 .date - the commit date as a (time,tz) tuple
465 .entries - list of logentry objects in this changeset
472 .entries - list of logentry objects in this changeset
466 .parents - list of one or two parent changesets
473 .parents - list of one or two parent changesets
467 .tags - list of tags on this changeset
474 .tags - list of tags on this changeset
468 .synthetic - from synthetic revision "file ... added on branch ..."
475 .synthetic - from synthetic revision "file ... added on branch ..."
469 .mergepoint- the branch that has been merged from
476 .mergepoint- the branch that has been merged from
470 (if present in rlog output)
477 (if present in rlog output)
471 .branchpoints- the branches that start at the current entry
478 .branchpoints- the branches that start at the current entry
472 '''
479 '''
473 def __init__(self, **entries):
480 def __init__(self, **entries):
474 self.synthetic = False
481 self.synthetic = False
475 self.__dict__.update(entries)
482 self.__dict__.update(entries)
476
483
477 def __repr__(self):
484 def __repr__(self):
478 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
485 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
479 id(self),
486 id(self),
480 getattr(self, 'id', "(no id)"))
487 getattr(self, 'id', "(no id)"))
481
488
482 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
489 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
483 '''Convert log into changesets.'''
490 '''Convert log into changesets.'''
484
491
485 ui.status(_('creating changesets\n'))
492 ui.status(_('creating changesets\n'))
486
493
487 # Merge changesets
494 # Merge changesets
488
495
489 log.sort(key=lambda x: (x.comment, x.author, x.branch, x.date))
496 log.sort(key=lambda x: (x.comment, x.author, x.branch, x.date))
490
497
491 changesets = []
498 changesets = []
492 files = set()
499 files = set()
493 c = None
500 c = None
494 for i, e in enumerate(log):
501 for i, e in enumerate(log):
495
502
496 # Check if log entry belongs to the current changeset or not.
503 # Check if log entry belongs to the current changeset or not.
497
504
498 # Since CVS is file centric, two different file revisions with
505 # Since CVS is file centric, two different file revisions with
499 # different branchpoints should be treated as belonging to two
506 # different branchpoints should be treated as belonging to two
500 # different changesets (and the ordering is important and not
507 # different changesets (and the ordering is important and not
501 # honoured by cvsps at this point).
508 # honoured by cvsps at this point).
502 #
509 #
503 # Consider the following case:
510 # Consider the following case:
504 # foo 1.1 branchpoints: [MYBRANCH]
511 # foo 1.1 branchpoints: [MYBRANCH]
505 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
512 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
506 #
513 #
507 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
514 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
508 # later version of foo may be in MYBRANCH2, so foo should be the
515 # later version of foo may be in MYBRANCH2, so foo should be the
509 # first changeset and bar the next and MYBRANCH and MYBRANCH2
516 # first changeset and bar the next and MYBRANCH and MYBRANCH2
510 # should both start off of the bar changeset. No provisions are
517 # should both start off of the bar changeset. No provisions are
511 # made to ensure that this is, in fact, what happens.
518 # made to ensure that this is, in fact, what happens.
512 if not (c and
519 if not (c and
513 e.comment == c.comment and
520 e.comment == c.comment and
514 e.author == c.author and
521 e.author == c.author and
515 e.branch == c.branch and
522 e.branch == c.branch and
516 (not hasattr(e, 'branchpoints') or
523 (not util.safehasattr(e, 'branchpoints') or
517 not hasattr (c, 'branchpoints') or
524 not util.safehasattr (c, 'branchpoints') or
518 e.branchpoints == c.branchpoints) and
525 e.branchpoints == c.branchpoints) and
519 ((c.date[0] + c.date[1]) <=
526 ((c.date[0] + c.date[1]) <=
520 (e.date[0] + e.date[1]) <=
527 (e.date[0] + e.date[1]) <=
521 (c.date[0] + c.date[1]) + fuzz) and
528 (c.date[0] + c.date[1]) + fuzz) and
522 e.file not in files):
529 e.file not in files):
523 c = changeset(comment=e.comment, author=e.author,
530 c = changeset(comment=e.comment, author=e.author,
524 branch=e.branch, date=e.date, entries=[],
531 branch=e.branch, date=e.date, entries=[],
525 mergepoint=getattr(e, 'mergepoint', None),
532 mergepoint=getattr(e, 'mergepoint', None),
526 branchpoints=getattr(e, 'branchpoints', set()))
533 branchpoints=getattr(e, 'branchpoints', set()))
527 changesets.append(c)
534 changesets.append(c)
528 files = set()
535 files = set()
529 if len(changesets) % 100 == 0:
536 if len(changesets) % 100 == 0:
530 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
537 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
531 ui.status(util.ellipsis(t, 80) + '\n')
538 ui.status(util.ellipsis(t, 80) + '\n')
532
539
533 c.entries.append(e)
540 c.entries.append(e)
534 files.add(e.file)
541 files.add(e.file)
535 c.date = e.date # changeset date is date of latest commit in it
542 c.date = e.date # changeset date is date of latest commit in it
536
543
537 # Mark synthetic changesets
544 # Mark synthetic changesets
538
545
539 for c in changesets:
546 for c in changesets:
540 # Synthetic revisions always get their own changeset, because
547 # Synthetic revisions always get their own changeset, because
541 # the log message includes the filename. E.g. if you add file3
548 # the log message includes the filename. E.g. if you add file3
542 # and file4 on a branch, you get four log entries and three
549 # and file4 on a branch, you get four log entries and three
543 # changesets:
550 # changesets:
544 # "File file3 was added on branch ..." (synthetic, 1 entry)
551 # "File file3 was added on branch ..." (synthetic, 1 entry)
545 # "File file4 was added on branch ..." (synthetic, 1 entry)
552 # "File file4 was added on branch ..." (synthetic, 1 entry)
546 # "Add file3 and file4 to fix ..." (real, 2 entries)
553 # "Add file3 and file4 to fix ..." (real, 2 entries)
547 # Hence the check for 1 entry here.
554 # Hence the check for 1 entry here.
548 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
555 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
549
556
550 # Sort files in each changeset
557 # Sort files in each changeset
551
558
552 for c in changesets:
559 for c in changesets:
553 def pathcompare(l, r):
560 def pathcompare(l, r):
554 'Mimic cvsps sorting order'
561 'Mimic cvsps sorting order'
555 l = l.split('/')
562 l = l.split('/')
556 r = r.split('/')
563 r = r.split('/')
557 nl = len(l)
564 nl = len(l)
558 nr = len(r)
565 nr = len(r)
559 n = min(nl, nr)
566 n = min(nl, nr)
560 for i in range(n):
567 for i in range(n):
561 if i + 1 == nl and nl < nr:
568 if i + 1 == nl and nl < nr:
562 return -1
569 return -1
563 elif i + 1 == nr and nl > nr:
570 elif i + 1 == nr and nl > nr:
564 return +1
571 return +1
565 elif l[i] < r[i]:
572 elif l[i] < r[i]:
566 return -1
573 return -1
567 elif l[i] > r[i]:
574 elif l[i] > r[i]:
568 return +1
575 return +1
569 return 0
576 return 0
570 def entitycompare(l, r):
577 def entitycompare(l, r):
571 return pathcompare(l.file, r.file)
578 return pathcompare(l.file, r.file)
572
579
573 c.entries.sort(entitycompare)
580 c.entries.sort(entitycompare)
574
581
575 # Sort changesets by date
582 # Sort changesets by date
576
583
577 def cscmp(l, r):
584 def cscmp(l, r):
578 d = sum(l.date) - sum(r.date)
585 d = sum(l.date) - sum(r.date)
579 if d:
586 if d:
580 return d
587 return d
581
588
582 # detect vendor branches and initial commits on a branch
589 # detect vendor branches and initial commits on a branch
583 le = {}
590 le = {}
584 for e in l.entries:
591 for e in l.entries:
585 le[e.rcs] = e.revision
592 le[e.rcs] = e.revision
586 re = {}
593 re = {}
587 for e in r.entries:
594 for e in r.entries:
588 re[e.rcs] = e.revision
595 re[e.rcs] = e.revision
589
596
590 d = 0
597 d = 0
591 for e in l.entries:
598 for e in l.entries:
592 if re.get(e.rcs, None) == e.parent:
599 if re.get(e.rcs, None) == e.parent:
593 assert not d
600 assert not d
594 d = 1
601 d = 1
595 break
602 break
596
603
597 for e in r.entries:
604 for e in r.entries:
598 if le.get(e.rcs, None) == e.parent:
605 if le.get(e.rcs, None) == e.parent:
599 assert not d
606 assert not d
600 d = -1
607 d = -1
601 break
608 break
602
609
603 return d
610 return d
604
611
605 changesets.sort(cscmp)
612 changesets.sort(cscmp)
606
613
607 # Collect tags
614 # Collect tags
608
615
609 globaltags = {}
616 globaltags = {}
610 for c in changesets:
617 for c in changesets:
611 for e in c.entries:
618 for e in c.entries:
612 for tag in e.tags:
619 for tag in e.tags:
613 # remember which is the latest changeset to have this tag
620 # remember which is the latest changeset to have this tag
614 globaltags[tag] = c
621 globaltags[tag] = c
615
622
616 for c in changesets:
623 for c in changesets:
617 tags = set()
624 tags = set()
618 for e in c.entries:
625 for e in c.entries:
619 tags.update(e.tags)
626 tags.update(e.tags)
620 # remember tags only if this is the latest changeset to have it
627 # remember tags only if this is the latest changeset to have it
621 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
628 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
622
629
623 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
630 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
624 # by inserting dummy changesets with two parents, and handle
631 # by inserting dummy changesets with two parents, and handle
625 # {{mergefrombranch BRANCHNAME}} by setting two parents.
632 # {{mergefrombranch BRANCHNAME}} by setting two parents.
626
633
627 if mergeto is None:
634 if mergeto is None:
628 mergeto = r'{{mergetobranch ([-\w]+)}}'
635 mergeto = r'{{mergetobranch ([-\w]+)}}'
629 if mergeto:
636 if mergeto:
630 mergeto = re.compile(mergeto)
637 mergeto = re.compile(mergeto)
631
638
632 if mergefrom is None:
639 if mergefrom is None:
633 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
640 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
634 if mergefrom:
641 if mergefrom:
635 mergefrom = re.compile(mergefrom)
642 mergefrom = re.compile(mergefrom)
636
643
637 versions = {} # changeset index where we saw any particular file version
644 versions = {} # changeset index where we saw any particular file version
638 branches = {} # changeset index where we saw a branch
645 branches = {} # changeset index where we saw a branch
639 n = len(changesets)
646 n = len(changesets)
640 i = 0
647 i = 0
641 while i < n:
648 while i < n:
642 c = changesets[i]
649 c = changesets[i]
643
650
644 for f in c.entries:
651 for f in c.entries:
645 versions[(f.rcs, f.revision)] = i
652 versions[(f.rcs, f.revision)] = i
646
653
647 p = None
654 p = None
648 if c.branch in branches:
655 if c.branch in branches:
649 p = branches[c.branch]
656 p = branches[c.branch]
650 else:
657 else:
651 # first changeset on a new branch
658 # first changeset on a new branch
652 # the parent is a changeset with the branch in its
659 # the parent is a changeset with the branch in its
653 # branchpoints such that it is the latest possible
660 # branchpoints such that it is the latest possible
654 # commit without any intervening, unrelated commits.
661 # commit without any intervening, unrelated commits.
655
662
656 for candidate in xrange(i):
663 for candidate in xrange(i):
657 if c.branch not in changesets[candidate].branchpoints:
664 if c.branch not in changesets[candidate].branchpoints:
658 if p is not None:
665 if p is not None:
659 break
666 break
660 continue
667 continue
661 p = candidate
668 p = candidate
662
669
663 c.parents = []
670 c.parents = []
664 if p is not None:
671 if p is not None:
665 p = changesets[p]
672 p = changesets[p]
666
673
667 # Ensure no changeset has a synthetic changeset as a parent.
674 # Ensure no changeset has a synthetic changeset as a parent.
668 while p.synthetic:
675 while p.synthetic:
669 assert len(p.parents) <= 1, \
676 assert len(p.parents) <= 1, \
670 _('synthetic changeset cannot have multiple parents')
677 _('synthetic changeset cannot have multiple parents')
671 if p.parents:
678 if p.parents:
672 p = p.parents[0]
679 p = p.parents[0]
673 else:
680 else:
674 p = None
681 p = None
675 break
682 break
676
683
677 if p is not None:
684 if p is not None:
678 c.parents.append(p)
685 c.parents.append(p)
679
686
680 if c.mergepoint:
687 if c.mergepoint:
681 if c.mergepoint == 'HEAD':
688 if c.mergepoint == 'HEAD':
682 c.mergepoint = None
689 c.mergepoint = None
683 c.parents.append(changesets[branches[c.mergepoint]])
690 c.parents.append(changesets[branches[c.mergepoint]])
684
691
685 if mergefrom:
692 if mergefrom:
686 m = mergefrom.search(c.comment)
693 m = mergefrom.search(c.comment)
687 if m:
694 if m:
688 m = m.group(1)
695 m = m.group(1)
689 if m == 'HEAD':
696 if m == 'HEAD':
690 m = None
697 m = None
691 try:
698 try:
692 candidate = changesets[branches[m]]
699 candidate = changesets[branches[m]]
693 except KeyError:
700 except KeyError:
694 ui.warn(_("warning: CVS commit message references "
701 ui.warn(_("warning: CVS commit message references "
695 "non-existent branch %r:\n%s\n")
702 "non-existent branch %r:\n%s\n")
696 % (m, c.comment))
703 % (m, c.comment))
697 if m in branches and c.branch != m and not candidate.synthetic:
704 if m in branches and c.branch != m and not candidate.synthetic:
698 c.parents.append(candidate)
705 c.parents.append(candidate)
699
706
700 if mergeto:
707 if mergeto:
701 m = mergeto.search(c.comment)
708 m = mergeto.search(c.comment)
702 if m:
709 if m:
703 try:
710 try:
704 m = m.group(1)
711 m = m.group(1)
705 if m == 'HEAD':
712 if m == 'HEAD':
706 m = None
713 m = None
707 except:
714 except:
708 m = None # if no group found then merge to HEAD
715 m = None # if no group found then merge to HEAD
709 if m in branches and c.branch != m:
716 if m in branches and c.branch != m:
710 # insert empty changeset for merge
717 # insert empty changeset for merge
711 cc = changeset(
718 cc = changeset(
712 author=c.author, branch=m, date=c.date,
719 author=c.author, branch=m, date=c.date,
713 comment='convert-repo: CVS merge from branch %s'
720 comment='convert-repo: CVS merge from branch %s'
714 % c.branch,
721 % c.branch,
715 entries=[], tags=[],
722 entries=[], tags=[],
716 parents=[changesets[branches[m]], c])
723 parents=[changesets[branches[m]], c])
717 changesets.insert(i + 1, cc)
724 changesets.insert(i + 1, cc)
718 branches[m] = i + 1
725 branches[m] = i + 1
719
726
720 # adjust our loop counters now we have inserted a new entry
727 # adjust our loop counters now we have inserted a new entry
721 n += 1
728 n += 1
722 i += 2
729 i += 2
723 continue
730 continue
724
731
725 branches[c.branch] = i
732 branches[c.branch] = i
726 i += 1
733 i += 1
727
734
728 # Drop synthetic changesets (safe now that we have ensured no other
735 # Drop synthetic changesets (safe now that we have ensured no other
729 # changesets can have them as parents).
736 # changesets can have them as parents).
730 i = 0
737 i = 0
731 while i < len(changesets):
738 while i < len(changesets):
732 if changesets[i].synthetic:
739 if changesets[i].synthetic:
733 del changesets[i]
740 del changesets[i]
734 else:
741 else:
735 i += 1
742 i += 1
736
743
737 # Number changesets
744 # Number changesets
738
745
739 for i, c in enumerate(changesets):
746 for i, c in enumerate(changesets):
740 c.id = i + 1
747 c.id = i + 1
741
748
742 ui.status(_('%d changeset entries\n') % len(changesets))
749 ui.status(_('%d changeset entries\n') % len(changesets))
743
750
744 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
751 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
745
752
746 return changesets
753 return changesets
747
754
748
755
749 def debugcvsps(ui, *args, **opts):
756 def debugcvsps(ui, *args, **opts):
750 '''Read CVS rlog for current directory or named path in
757 '''Read CVS rlog for current directory or named path in
751 repository, and convert the log to changesets based on matching
758 repository, and convert the log to changesets based on matching
752 commit log entries and dates.
759 commit log entries and dates.
753 '''
760 '''
754 if opts["new_cache"]:
761 if opts["new_cache"]:
755 cache = "write"
762 cache = "write"
756 elif opts["update_cache"]:
763 elif opts["update_cache"]:
757 cache = "update"
764 cache = "update"
758 else:
765 else:
759 cache = None
766 cache = None
760
767
761 revisions = opts["revisions"]
768 revisions = opts["revisions"]
762
769
763 try:
770 try:
764 if args:
771 if args:
765 log = []
772 log = []
766 for d in args:
773 for d in args:
767 log += createlog(ui, d, root=opts["root"], cache=cache)
774 log += createlog(ui, d, root=opts["root"], cache=cache)
768 else:
775 else:
769 log = createlog(ui, root=opts["root"], cache=cache)
776 log = createlog(ui, root=opts["root"], cache=cache)
770 except logerror, e:
777 except logerror, e:
771 ui.write("%r\n"%e)
778 ui.write("%r\n"%e)
772 return
779 return
773
780
774 changesets = createchangeset(ui, log, opts["fuzz"])
781 changesets = createchangeset(ui, log, opts["fuzz"])
775 del log
782 del log
776
783
777 # Print changesets (optionally filtered)
784 # Print changesets (optionally filtered)
778
785
779 off = len(revisions)
786 off = len(revisions)
780 branches = {} # latest version number in each branch
787 branches = {} # latest version number in each branch
781 ancestors = {} # parent branch
788 ancestors = {} # parent branch
782 for cs in changesets:
789 for cs in changesets:
783
790
784 if opts["ancestors"]:
791 if opts["ancestors"]:
785 if cs.branch not in branches and cs.parents and cs.parents[0].id:
792 if cs.branch not in branches and cs.parents and cs.parents[0].id:
786 ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
793 ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
787 cs.parents[0].id)
794 cs.parents[0].id)
788 branches[cs.branch] = cs.id
795 branches[cs.branch] = cs.id
789
796
790 # limit by branches
797 # limit by branches
791 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
798 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
792 continue
799 continue
793
800
794 if not off:
801 if not off:
795 # Note: trailing spaces on several lines here are needed to have
802 # Note: trailing spaces on several lines here are needed to have
796 # bug-for-bug compatibility with cvsps.
803 # bug-for-bug compatibility with cvsps.
797 ui.write('---------------------\n')
804 ui.write('---------------------\n')
798 ui.write('PatchSet %d \n' % cs.id)
805 ui.write('PatchSet %d \n' % cs.id)
799 ui.write('Date: %s\n' % util.datestr(cs.date,
806 ui.write('Date: %s\n' % util.datestr(cs.date,
800 '%Y/%m/%d %H:%M:%S %1%2'))
807 '%Y/%m/%d %H:%M:%S %1%2'))
801 ui.write('Author: %s\n' % cs.author)
808 ui.write('Author: %s\n' % cs.author)
802 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
809 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
803 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
810 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
804 ','.join(cs.tags) or '(none)'))
811 ','.join(cs.tags) or '(none)'))
805 branchpoints = getattr(cs, 'branchpoints', None)
812 branchpoints = getattr(cs, 'branchpoints', None)
806 if branchpoints:
813 if branchpoints:
807 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
814 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
808 if opts["parents"] and cs.parents:
815 if opts["parents"] and cs.parents:
809 if len(cs.parents) > 1:
816 if len(cs.parents) > 1:
810 ui.write('Parents: %s\n' %
817 ui.write('Parents: %s\n' %
811 (','.join([str(p.id) for p in cs.parents])))
818 (','.join([str(p.id) for p in cs.parents])))
812 else:
819 else:
813 ui.write('Parent: %d\n' % cs.parents[0].id)
820 ui.write('Parent: %d\n' % cs.parents[0].id)
814
821
815 if opts["ancestors"]:
822 if opts["ancestors"]:
816 b = cs.branch
823 b = cs.branch
817 r = []
824 r = []
818 while b:
825 while b:
819 b, c = ancestors[b]
826 b, c = ancestors[b]
820 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
827 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
821 if r:
828 if r:
822 ui.write('Ancestors: %s\n' % (','.join(r)))
829 ui.write('Ancestors: %s\n' % (','.join(r)))
823
830
824 ui.write('Log:\n')
831 ui.write('Log:\n')
825 ui.write('%s\n\n' % cs.comment)
832 ui.write('%s\n\n' % cs.comment)
826 ui.write('Members: \n')
833 ui.write('Members: \n')
827 for f in cs.entries:
834 for f in cs.entries:
828 fn = f.file
835 fn = f.file
829 if fn.startswith(opts["prefix"]):
836 if fn.startswith(opts["prefix"]):
830 fn = fn[len(opts["prefix"]):]
837 fn = fn[len(opts["prefix"]):]
831 ui.write('\t%s:%s->%s%s \n' % (
838 ui.write('\t%s:%s->%s%s \n' % (
832 fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
839 fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
833 '.'.join([str(x) for x in f.revision]),
840 '.'.join([str(x) for x in f.revision]),
834 ['', '(DEAD)'][f.dead]))
841 ['', '(DEAD)'][f.dead]))
835 ui.write('\n')
842 ui.write('\n')
836
843
837 # have we seen the start tag?
844 # have we seen the start tag?
838 if revisions and off:
845 if revisions and off:
839 if revisions[0] == str(cs.id) or \
846 if revisions[0] == str(cs.id) or \
840 revisions[0] in cs.tags:
847 revisions[0] in cs.tags:
841 off = False
848 off = False
842
849
843 # see if we reached the end tag
850 # see if we reached the end tag
844 if len(revisions) > 1 and not off:
851 if len(revisions) > 1 and not off:
845 if revisions[1] == str(cs.id) or \
852 if revisions[1] == str(cs.id) or \
846 revisions[1] in cs.tags:
853 revisions[1] in cs.tags:
847 break
854 break
@@ -1,377 +1,380
1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
3 #
3 #
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7 import shlex
7 import shlex
8 from mercurial.i18n import _
8 from mercurial.i18n import _
9 from mercurial import util
9 from mercurial import util
10 from common import SKIPREV, converter_source
10 from common import SKIPREV, converter_source
11
11
12 def rpairs(name):
12 def rpairs(name):
13 e = len(name)
13 e = len(name)
14 while e != -1:
14 while e != -1:
15 yield name[:e], name[e + 1:]
15 yield name[:e], name[e + 1:]
16 e = name.rfind('/', 0, e)
16 e = name.rfind('/', 0, e)
17 yield '.', name
17 yield '.', name
18
18
19 class filemapper(object):
19 class filemapper(object):
20 '''Map and filter filenames when importing.
20 '''Map and filter filenames when importing.
21 A name can be mapped to itself, a new name, or None (omit from new
21 A name can be mapped to itself, a new name, or None (omit from new
22 repository).'''
22 repository).'''
23
23
24 def __init__(self, ui, path=None):
24 def __init__(self, ui, path=None):
25 self.ui = ui
25 self.ui = ui
26 self.include = {}
26 self.include = {}
27 self.exclude = {}
27 self.exclude = {}
28 self.rename = {}
28 self.rename = {}
29 if path:
29 if path:
30 if self.parse(path):
30 if self.parse(path):
31 raise util.Abort(_('errors in filemap'))
31 raise util.Abort(_('errors in filemap'))
32
32
33 def parse(self, path):
33 def parse(self, path):
34 errs = 0
34 errs = 0
35 def check(name, mapping, listname):
35 def check(name, mapping, listname):
36 if not name:
36 if not name:
37 self.ui.warn(_('%s:%d: path to %s is missing\n') %
37 self.ui.warn(_('%s:%d: path to %s is missing\n') %
38 (lex.infile, lex.lineno, listname))
38 (lex.infile, lex.lineno, listname))
39 return 1
39 return 1
40 if name in mapping:
40 if name in mapping:
41 self.ui.warn(_('%s:%d: %r already in %s list\n') %
41 self.ui.warn(_('%s:%d: %r already in %s list\n') %
42 (lex.infile, lex.lineno, name, listname))
42 (lex.infile, lex.lineno, name, listname))
43 return 1
43 return 1
44 if (name.startswith('/') or
44 if (name.startswith('/') or
45 name.endswith('/') or
45 name.endswith('/') or
46 '//' in name):
46 '//' in name):
47 self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
47 self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
48 (lex.infile, lex.lineno, listname, name))
48 (lex.infile, lex.lineno, listname, name))
49 return 1
49 return 1
50 return 0
50 return 0
51 lex = shlex.shlex(open(path), path, True)
51 lex = shlex.shlex(open(path), path, True)
52 lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
52 lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
53 cmd = lex.get_token()
53 cmd = lex.get_token()
54 while cmd:
54 while cmd:
55 if cmd == 'include':
55 if cmd == 'include':
56 name = lex.get_token()
56 name = lex.get_token()
57 errs += check(name, self.exclude, 'exclude')
57 errs += check(name, self.exclude, 'exclude')
58 self.include[name] = name
58 self.include[name] = name
59 elif cmd == 'exclude':
59 elif cmd == 'exclude':
60 name = lex.get_token()
60 name = lex.get_token()
61 errs += check(name, self.include, 'include')
61 errs += check(name, self.include, 'include')
62 errs += check(name, self.rename, 'rename')
62 errs += check(name, self.rename, 'rename')
63 self.exclude[name] = name
63 self.exclude[name] = name
64 elif cmd == 'rename':
64 elif cmd == 'rename':
65 src = lex.get_token()
65 src = lex.get_token()
66 dest = lex.get_token()
66 dest = lex.get_token()
67 errs += check(src, self.exclude, 'exclude')
67 errs += check(src, self.exclude, 'exclude')
68 self.rename[src] = dest
68 self.rename[src] = dest
69 elif cmd == 'source':
69 elif cmd == 'source':
70 errs += self.parse(lex.get_token())
70 errs += self.parse(lex.get_token())
71 else:
71 else:
72 self.ui.warn(_('%s:%d: unknown directive %r\n') %
72 self.ui.warn(_('%s:%d: unknown directive %r\n') %
73 (lex.infile, lex.lineno, cmd))
73 (lex.infile, lex.lineno, cmd))
74 errs += 1
74 errs += 1
75 cmd = lex.get_token()
75 cmd = lex.get_token()
76 return errs
76 return errs
77
77
78 def lookup(self, name, mapping):
78 def lookup(self, name, mapping):
79 for pre, suf in rpairs(name):
79 for pre, suf in rpairs(name):
80 try:
80 try:
81 return mapping[pre], pre, suf
81 return mapping[pre], pre, suf
82 except KeyError:
82 except KeyError:
83 pass
83 pass
84 return '', name, ''
84 return '', name, ''
85
85
86 def __call__(self, name):
86 def __call__(self, name):
87 if self.include:
87 if self.include:
88 inc = self.lookup(name, self.include)[0]
88 inc = self.lookup(name, self.include)[0]
89 else:
89 else:
90 inc = name
90 inc = name
91 if self.exclude:
91 if self.exclude:
92 exc = self.lookup(name, self.exclude)[0]
92 exc = self.lookup(name, self.exclude)[0]
93 else:
93 else:
94 exc = ''
94 exc = ''
95 if (not self.include and exc) or (len(inc) <= len(exc)):
95 if (not self.include and exc) or (len(inc) <= len(exc)):
96 return None
96 return None
97 newpre, pre, suf = self.lookup(name, self.rename)
97 newpre, pre, suf = self.lookup(name, self.rename)
98 if newpre:
98 if newpre:
99 if newpre == '.':
99 if newpre == '.':
100 return suf
100 return suf
101 if suf:
101 if suf:
102 return newpre + '/' + suf
102 return newpre + '/' + suf
103 return newpre
103 return newpre
104 return name
104 return name
105
105
106 def active(self):
106 def active(self):
107 return bool(self.include or self.exclude or self.rename)
107 return bool(self.include or self.exclude or self.rename)
108
108
109 # This class does two additional things compared to a regular source:
109 # This class does two additional things compared to a regular source:
110 #
110 #
111 # - Filter and rename files. This is mostly wrapped by the filemapper
111 # - Filter and rename files. This is mostly wrapped by the filemapper
112 # class above. We hide the original filename in the revision that is
112 # class above. We hide the original filename in the revision that is
113 # returned by getchanges to be able to find things later in getfile.
113 # returned by getchanges to be able to find things later in getfile.
114 #
114 #
115 # - Return only revisions that matter for the files we're interested in.
115 # - Return only revisions that matter for the files we're interested in.
116 # This involves rewriting the parents of the original revision to
116 # This involves rewriting the parents of the original revision to
117 # create a graph that is restricted to those revisions.
117 # create a graph that is restricted to those revisions.
118 #
118 #
119 # This set of revisions includes not only revisions that directly
119 # This set of revisions includes not only revisions that directly
120 # touch files we're interested in, but also merges that merge two
120 # touch files we're interested in, but also merges that merge two
121 # or more interesting revisions.
121 # or more interesting revisions.
122
122
123 class filemap_source(converter_source):
123 class filemap_source(converter_source):
124 def __init__(self, ui, baseconverter, filemap):
124 def __init__(self, ui, baseconverter, filemap):
125 super(filemap_source, self).__init__(ui)
125 super(filemap_source, self).__init__(ui)
126 self.base = baseconverter
126 self.base = baseconverter
127 self.filemapper = filemapper(ui, filemap)
127 self.filemapper = filemapper(ui, filemap)
128 self.commits = {}
128 self.commits = {}
129 # if a revision rev has parent p in the original revision graph, then
129 # if a revision rev has parent p in the original revision graph, then
130 # rev will have parent self.parentmap[p] in the restricted graph.
130 # rev will have parent self.parentmap[p] in the restricted graph.
131 self.parentmap = {}
131 self.parentmap = {}
132 # self.wantedancestors[rev] is the set of all ancestors of rev that
132 # self.wantedancestors[rev] is the set of all ancestors of rev that
133 # are in the restricted graph.
133 # are in the restricted graph.
134 self.wantedancestors = {}
134 self.wantedancestors = {}
135 self.convertedorder = None
135 self.convertedorder = None
136 self._rebuilt = False
136 self._rebuilt = False
137 self.origparents = {}
137 self.origparents = {}
138 self.children = {}
138 self.children = {}
139 self.seenchildren = {}
139 self.seenchildren = {}
140
140
141 def before(self):
141 def before(self):
142 self.base.before()
142 self.base.before()
143
143
144 def after(self):
144 def after(self):
145 self.base.after()
145 self.base.after()
146
146
147 def setrevmap(self, revmap):
147 def setrevmap(self, revmap):
148 # rebuild our state to make things restartable
148 # rebuild our state to make things restartable
149 #
149 #
150 # To avoid calling getcommit for every revision that has already
150 # To avoid calling getcommit for every revision that has already
151 # been converted, we rebuild only the parentmap, delaying the
151 # been converted, we rebuild only the parentmap, delaying the
152 # rebuild of wantedancestors until we need it (i.e. until a
152 # rebuild of wantedancestors until we need it (i.e. until a
153 # merge).
153 # merge).
154 #
154 #
155 # We assume the order argument lists the revisions in
155 # We assume the order argument lists the revisions in
156 # topological order, so that we can infer which revisions were
156 # topological order, so that we can infer which revisions were
157 # wanted by previous runs.
157 # wanted by previous runs.
158 self._rebuilt = not revmap
158 self._rebuilt = not revmap
159 seen = {SKIPREV: SKIPREV}
159 seen = {SKIPREV: SKIPREV}
160 dummyset = set()
160 dummyset = set()
161 converted = []
161 converted = []
162 for rev in revmap.order:
162 for rev in revmap.order:
163 mapped = revmap[rev]
163 mapped = revmap[rev]
164 wanted = mapped not in seen
164 wanted = mapped not in seen
165 if wanted:
165 if wanted:
166 seen[mapped] = rev
166 seen[mapped] = rev
167 self.parentmap[rev] = rev
167 self.parentmap[rev] = rev
168 else:
168 else:
169 self.parentmap[rev] = seen[mapped]
169 self.parentmap[rev] = seen[mapped]
170 self.wantedancestors[rev] = dummyset
170 self.wantedancestors[rev] = dummyset
171 arg = seen[mapped]
171 arg = seen[mapped]
172 if arg == SKIPREV:
172 if arg == SKIPREV:
173 arg = None
173 arg = None
174 converted.append((rev, wanted, arg))
174 converted.append((rev, wanted, arg))
175 self.convertedorder = converted
175 self.convertedorder = converted
176 return self.base.setrevmap(revmap)
176 return self.base.setrevmap(revmap)
177
177
178 def rebuild(self):
178 def rebuild(self):
179 if self._rebuilt:
179 if self._rebuilt:
180 return True
180 return True
181 self._rebuilt = True
181 self._rebuilt = True
182 self.parentmap.clear()
182 self.parentmap.clear()
183 self.wantedancestors.clear()
183 self.wantedancestors.clear()
184 self.seenchildren.clear()
184 self.seenchildren.clear()
185 for rev, wanted, arg in self.convertedorder:
185 for rev, wanted, arg in self.convertedorder:
186 if rev not in self.origparents:
186 if rev not in self.origparents:
187 self.origparents[rev] = self.getcommit(rev).parents
187 self.origparents[rev] = self.getcommit(rev).parents
188 if arg is not None:
188 if arg is not None:
189 self.children[arg] = self.children.get(arg, 0) + 1
189 self.children[arg] = self.children.get(arg, 0) + 1
190
190
191 for rev, wanted, arg in self.convertedorder:
191 for rev, wanted, arg in self.convertedorder:
192 parents = self.origparents[rev]
192 parents = self.origparents[rev]
193 if wanted:
193 if wanted:
194 self.mark_wanted(rev, parents)
194 self.mark_wanted(rev, parents)
195 else:
195 else:
196 self.mark_not_wanted(rev, arg)
196 self.mark_not_wanted(rev, arg)
197 self._discard(arg, *parents)
197 self._discard(arg, *parents)
198
198
199 return True
199 return True
200
200
201 def getheads(self):
201 def getheads(self):
202 return self.base.getheads()
202 return self.base.getheads()
203
203
204 def getcommit(self, rev):
204 def getcommit(self, rev):
205 # We want to save a reference to the commit objects to be able
205 # We want to save a reference to the commit objects to be able
206 # to rewrite their parents later on.
206 # to rewrite their parents later on.
207 c = self.commits[rev] = self.base.getcommit(rev)
207 c = self.commits[rev] = self.base.getcommit(rev)
208 for p in c.parents:
208 for p in c.parents:
209 self.children[p] = self.children.get(p, 0) + 1
209 self.children[p] = self.children.get(p, 0) + 1
210 return c
210 return c
211
211
212 def _cachedcommit(self, rev):
212 def _cachedcommit(self, rev):
213 if rev in self.commits:
213 if rev in self.commits:
214 return self.commits[rev]
214 return self.commits[rev]
215 return self.base.getcommit(rev)
215 return self.base.getcommit(rev)
216
216
217 def _discard(self, *revs):
217 def _discard(self, *revs):
218 for r in revs:
218 for r in revs:
219 if r is None:
219 if r is None:
220 continue
220 continue
221 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
221 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
222 if self.seenchildren[r] == self.children[r]:
222 if self.seenchildren[r] == self.children[r]:
223 del self.wantedancestors[r]
223 del self.wantedancestors[r]
224 del self.parentmap[r]
224 del self.parentmap[r]
225 del self.seenchildren[r]
225 del self.seenchildren[r]
226 if self._rebuilt:
226 if self._rebuilt:
227 del self.children[r]
227 del self.children[r]
228
228
229 def wanted(self, rev, i):
229 def wanted(self, rev, i):
230 # Return True if we're directly interested in rev.
230 # Return True if we're directly interested in rev.
231 #
231 #
232 # i is an index selecting one of the parents of rev (if rev
232 # i is an index selecting one of the parents of rev (if rev
233 # has no parents, i is None). getchangedfiles will give us
233 # has no parents, i is None). getchangedfiles will give us
234 # the list of files that are different in rev and in the parent
234 # the list of files that are different in rev and in the parent
235 # indicated by i. If we're interested in any of these files,
235 # indicated by i. If we're interested in any of these files,
236 # we're interested in rev.
236 # we're interested in rev.
237 try:
237 try:
238 files = self.base.getchangedfiles(rev, i)
238 files = self.base.getchangedfiles(rev, i)
239 except NotImplementedError:
239 except NotImplementedError:
240 raise util.Abort(_("source repository doesn't support --filemap"))
240 raise util.Abort(_("source repository doesn't support --filemap"))
241 for f in files:
241 for f in files:
242 if self.filemapper(f):
242 if self.filemapper(f):
243 return True
243 return True
244 return False
244 return False
245
245
246 def mark_not_wanted(self, rev, p):
246 def mark_not_wanted(self, rev, p):
247 # Mark rev as not interesting and update data structures.
247 # Mark rev as not interesting and update data structures.
248
248
249 if p is None:
249 if p is None:
250 # A root revision. Use SKIPREV to indicate that it doesn't
250 # A root revision. Use SKIPREV to indicate that it doesn't
251 # map to any revision in the restricted graph. Put SKIPREV
251 # map to any revision in the restricted graph. Put SKIPREV
252 # in the set of wanted ancestors to simplify code elsewhere
252 # in the set of wanted ancestors to simplify code elsewhere
253 self.parentmap[rev] = SKIPREV
253 self.parentmap[rev] = SKIPREV
254 self.wantedancestors[rev] = set((SKIPREV,))
254 self.wantedancestors[rev] = set((SKIPREV,))
255 return
255 return
256
256
257 # Reuse the data from our parent.
257 # Reuse the data from our parent.
258 self.parentmap[rev] = self.parentmap[p]
258 self.parentmap[rev] = self.parentmap[p]
259 self.wantedancestors[rev] = self.wantedancestors[p]
259 self.wantedancestors[rev] = self.wantedancestors[p]
260
260
261 def mark_wanted(self, rev, parents):
261 def mark_wanted(self, rev, parents):
262 # Mark rev ss wanted and update data structures.
262 # Mark rev ss wanted and update data structures.
263
263
264 # rev will be in the restricted graph, so children of rev in
264 # rev will be in the restricted graph, so children of rev in
265 # the original graph should still have rev as a parent in the
265 # the original graph should still have rev as a parent in the
266 # restricted graph.
266 # restricted graph.
267 self.parentmap[rev] = rev
267 self.parentmap[rev] = rev
268
268
269 # The set of wanted ancestors of rev is the union of the sets
269 # The set of wanted ancestors of rev is the union of the sets
270 # of wanted ancestors of its parents. Plus rev itself.
270 # of wanted ancestors of its parents. Plus rev itself.
271 wrev = set()
271 wrev = set()
272 for p in parents:
272 for p in parents:
273 wrev.update(self.wantedancestors[p])
273 wrev.update(self.wantedancestors[p])
274 wrev.add(rev)
274 wrev.add(rev)
275 self.wantedancestors[rev] = wrev
275 self.wantedancestors[rev] = wrev
276
276
277 def getchanges(self, rev):
277 def getchanges(self, rev):
278 parents = self.commits[rev].parents
278 parents = self.commits[rev].parents
279 if len(parents) > 1:
279 if len(parents) > 1:
280 self.rebuild()
280 self.rebuild()
281
281
282 # To decide whether we're interested in rev we:
282 # To decide whether we're interested in rev we:
283 #
283 #
284 # - calculate what parents rev will have if it turns out we're
284 # - calculate what parents rev will have if it turns out we're
285 # interested in it. If it's going to have more than 1 parent,
285 # interested in it. If it's going to have more than 1 parent,
286 # we're interested in it.
286 # we're interested in it.
287 #
287 #
288 # - otherwise, we'll compare it with the single parent we found.
288 # - otherwise, we'll compare it with the single parent we found.
289 # If any of the files we're interested in is different in the
289 # If any of the files we're interested in is different in the
290 # the two revisions, we're interested in rev.
290 # the two revisions, we're interested in rev.
291
291
292 # A parent p is interesting if its mapped version (self.parentmap[p]):
292 # A parent p is interesting if its mapped version (self.parentmap[p]):
293 # - is not SKIPREV
293 # - is not SKIPREV
294 # - is still not in the list of parents (we don't want duplicates)
294 # - is still not in the list of parents (we don't want duplicates)
295 # - is not an ancestor of the mapped versions of the other parents
295 # - is not an ancestor of the mapped versions of the other parents
296 mparents = []
296 mparents = []
297 wp = None
297 wp = None
298 for i, p1 in enumerate(parents):
298 for i, p1 in enumerate(parents):
299 mp1 = self.parentmap[p1]
299 mp1 = self.parentmap[p1]
300 if mp1 == SKIPREV or mp1 in mparents:
300 if mp1 == SKIPREV or mp1 in mparents:
301 continue
301 continue
302 for p2 in parents:
302 for p2 in parents:
303 if p1 == p2 or mp1 == self.parentmap[p2]:
303 if p1 == p2 or mp1 == self.parentmap[p2]:
304 continue
304 continue
305 if mp1 in self.wantedancestors[p2]:
305 if mp1 in self.wantedancestors[p2]:
306 break
306 break
307 else:
307 else:
308 mparents.append(mp1)
308 mparents.append(mp1)
309 wp = i
309 wp = i
310
310
311 if wp is None and parents:
311 if wp is None and parents:
312 wp = 0
312 wp = 0
313
313
314 self.origparents[rev] = parents
314 self.origparents[rev] = parents
315
315
316 closed = False
316 closed = False
317 if 'close' in self.commits[rev].extra:
317 if 'close' in self.commits[rev].extra:
318 # A branch closing revision is only useful if one of its
318 # A branch closing revision is only useful if one of its
319 # parents belong to the branch being closed
319 # parents belong to the branch being closed
320 branch = self.commits[rev].branch
320 branch = self.commits[rev].branch
321 pbranches = [self._cachedcommit(p).branch for p in mparents]
321 pbranches = [self._cachedcommit(p).branch for p in mparents]
322 if branch in pbranches:
322 if branch in pbranches:
323 closed = True
323 closed = True
324
324
325 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
325 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
326 # We don't want this revision.
326 # We don't want this revision.
327 # Update our state and tell the convert process to map this
327 # Update our state and tell the convert process to map this
328 # revision to the same revision its parent as mapped to.
328 # revision to the same revision its parent as mapped to.
329 p = None
329 p = None
330 if parents:
330 if parents:
331 p = parents[wp]
331 p = parents[wp]
332 self.mark_not_wanted(rev, p)
332 self.mark_not_wanted(rev, p)
333 self.convertedorder.append((rev, False, p))
333 self.convertedorder.append((rev, False, p))
334 self._discard(*parents)
334 self._discard(*parents)
335 return self.parentmap[rev]
335 return self.parentmap[rev]
336
336
337 # We want this revision.
337 # We want this revision.
338 # Rewrite the parents of the commit object
338 # Rewrite the parents of the commit object
339 self.commits[rev].parents = mparents
339 self.commits[rev].parents = mparents
340 self.mark_wanted(rev, parents)
340 self.mark_wanted(rev, parents)
341 self.convertedorder.append((rev, True, None))
341 self.convertedorder.append((rev, True, None))
342 self._discard(*parents)
342 self._discard(*parents)
343
343
344 # Get the real changes and do the filtering/mapping. To be
344 # Get the real changes and do the filtering/mapping. To be
345 # able to get the files later on in getfile, we hide the
345 # able to get the files later on in getfile, we hide the
346 # original filename in the rev part of the return value.
346 # original filename in the rev part of the return value.
347 changes, copies = self.base.getchanges(rev)
347 changes, copies = self.base.getchanges(rev)
348 newnames = {}
348 newnames = {}
349 files = []
349 files = []
350 for f, r in changes:
350 for f, r in changes:
351 newf = self.filemapper(f)
351 newf = self.filemapper(f)
352 if newf:
352 if newf:
353 files.append((newf, (f, r)))
353 files.append((newf, (f, r)))
354 newnames[f] = newf
354 newnames[f] = newf
355
355
356 ncopies = {}
356 ncopies = {}
357 for c in copies:
357 for c in copies:
358 newc = self.filemapper(c)
358 newc = self.filemapper(c)
359 if newc:
359 if newc:
360 newsource = self.filemapper(copies[c])
360 newsource = self.filemapper(copies[c])
361 if newsource:
361 if newsource:
362 ncopies[newc] = newsource
362 ncopies[newc] = newsource
363
363
364 return files, ncopies
364 return files, ncopies
365
365
366 def getfile(self, name, rev):
366 def getfile(self, name, rev):
367 realname, realrev = rev
367 realname, realrev = rev
368 return self.base.getfile(realname, realrev)
368 return self.base.getfile(realname, realrev)
369
369
370 def gettags(self):
370 def gettags(self):
371 return self.base.gettags()
371 return self.base.gettags()
372
372
373 def hasnativeorder(self):
373 def hasnativeorder(self):
374 return self.base.hasnativeorder()
374 return self.base.hasnativeorder()
375
375
376 def lookuprev(self, rev):
376 def lookuprev(self, rev):
377 return self.base.lookuprev(rev)
377 return self.base.lookuprev(rev)
378
379 def getbookmarks(self):
380 return self.base.getbookmarks()
@@ -1,205 +1,205
1 # git.py - git support for the convert extension
1 # git.py - git support for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 from mercurial import util
9 from mercurial import util
10 from mercurial.node import hex, nullid
10 from mercurial.node import hex, nullid
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 from common import NoRepo, commit, converter_source, checktool
13 from common import NoRepo, commit, converter_source, checktool
14
14
15 class convert_git(converter_source):
15 class convert_git(converter_source):
16 # Windows does not support GIT_DIR= construct while other systems
16 # Windows does not support GIT_DIR= construct while other systems
17 # cannot remove environment variable. Just assume none have
17 # cannot remove environment variable. Just assume none have
18 # both issues.
18 # both issues.
19 if hasattr(os, 'unsetenv'):
19 if util.safehasattr(os, 'unsetenv'):
20 def gitopen(self, s, noerr=False):
20 def gitopen(self, s, noerr=False):
21 prevgitdir = os.environ.get('GIT_DIR')
21 prevgitdir = os.environ.get('GIT_DIR')
22 os.environ['GIT_DIR'] = self.path
22 os.environ['GIT_DIR'] = self.path
23 try:
23 try:
24 if noerr:
24 if noerr:
25 (stdin, stdout, stderr) = util.popen3(s)
25 (stdin, stdout, stderr) = util.popen3(s)
26 return stdout
26 return stdout
27 else:
27 else:
28 return util.popen(s, 'rb')
28 return util.popen(s, 'rb')
29 finally:
29 finally:
30 if prevgitdir is None:
30 if prevgitdir is None:
31 del os.environ['GIT_DIR']
31 del os.environ['GIT_DIR']
32 else:
32 else:
33 os.environ['GIT_DIR'] = prevgitdir
33 os.environ['GIT_DIR'] = prevgitdir
34 else:
34 else:
35 def gitopen(self, s, noerr=False):
35 def gitopen(self, s, noerr=False):
36 if noerr:
36 if noerr:
37 (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
37 (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
38 return so
38 return so
39 else:
39 else:
40 return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
40 return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
41
41
42 def gitread(self, s):
42 def gitread(self, s):
43 fh = self.gitopen(s)
43 fh = self.gitopen(s)
44 data = fh.read()
44 data = fh.read()
45 return data, fh.close()
45 return data, fh.close()
46
46
47 def __init__(self, ui, path, rev=None):
47 def __init__(self, ui, path, rev=None):
48 super(convert_git, self).__init__(ui, path, rev=rev)
48 super(convert_git, self).__init__(ui, path, rev=rev)
49
49
50 if os.path.isdir(path + "/.git"):
50 if os.path.isdir(path + "/.git"):
51 path += "/.git"
51 path += "/.git"
52 if not os.path.exists(path + "/objects"):
52 if not os.path.exists(path + "/objects"):
53 raise NoRepo(_("%s does not look like a Git repository") % path)
53 raise NoRepo(_("%s does not look like a Git repository") % path)
54
54
55 checktool('git', 'git')
55 checktool('git', 'git')
56
56
57 self.path = path
57 self.path = path
58
58
59 def getheads(self):
59 def getheads(self):
60 if not self.rev:
60 if not self.rev:
61 heads, ret = self.gitread('git rev-parse --branches --remotes')
61 heads, ret = self.gitread('git rev-parse --branches --remotes')
62 heads = heads.splitlines()
62 heads = heads.splitlines()
63 else:
63 else:
64 heads, ret = self.gitread("git rev-parse --verify %s" % self.rev)
64 heads, ret = self.gitread("git rev-parse --verify %s" % self.rev)
65 heads = [heads[:-1]]
65 heads = [heads[:-1]]
66 if ret:
66 if ret:
67 raise util.Abort(_('cannot retrieve git heads'))
67 raise util.Abort(_('cannot retrieve git heads'))
68 return heads
68 return heads
69
69
70 def catfile(self, rev, type):
70 def catfile(self, rev, type):
71 if rev == hex(nullid):
71 if rev == hex(nullid):
72 raise IOError()
72 raise IOError()
73 data, ret = self.gitread("git cat-file %s %s" % (type, rev))
73 data, ret = self.gitread("git cat-file %s %s" % (type, rev))
74 if ret:
74 if ret:
75 raise util.Abort(_('cannot read %r object at %s') % (type, rev))
75 raise util.Abort(_('cannot read %r object at %s') % (type, rev))
76 return data
76 return data
77
77
78 def getfile(self, name, rev):
78 def getfile(self, name, rev):
79 data = self.catfile(rev, "blob")
79 data = self.catfile(rev, "blob")
80 mode = self.modecache[(name, rev)]
80 mode = self.modecache[(name, rev)]
81 return data, mode
81 return data, mode
82
82
83 def getchanges(self, version):
83 def getchanges(self, version):
84 self.modecache = {}
84 self.modecache = {}
85 fh = self.gitopen("git diff-tree -z --root -m -r %s" % version)
85 fh = self.gitopen("git diff-tree -z --root -m -r %s" % version)
86 changes = []
86 changes = []
87 seen = set()
87 seen = set()
88 entry = None
88 entry = None
89 for l in fh.read().split('\x00'):
89 for l in fh.read().split('\x00'):
90 if not entry:
90 if not entry:
91 if not l.startswith(':'):
91 if not l.startswith(':'):
92 continue
92 continue
93 entry = l
93 entry = l
94 continue
94 continue
95 f = l
95 f = l
96 if f not in seen:
96 if f not in seen:
97 seen.add(f)
97 seen.add(f)
98 entry = entry.split()
98 entry = entry.split()
99 h = entry[3]
99 h = entry[3]
100 p = (entry[1] == "100755")
100 p = (entry[1] == "100755")
101 s = (entry[1] == "120000")
101 s = (entry[1] == "120000")
102 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
102 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
103 changes.append((f, h))
103 changes.append((f, h))
104 entry = None
104 entry = None
105 if fh.close():
105 if fh.close():
106 raise util.Abort(_('cannot read changes in %s') % version)
106 raise util.Abort(_('cannot read changes in %s') % version)
107 return (changes, {})
107 return (changes, {})
108
108
109 def getcommit(self, version):
109 def getcommit(self, version):
110 c = self.catfile(version, "commit") # read the commit hash
110 c = self.catfile(version, "commit") # read the commit hash
111 end = c.find("\n\n")
111 end = c.find("\n\n")
112 message = c[end + 2:]
112 message = c[end + 2:]
113 message = self.recode(message)
113 message = self.recode(message)
114 l = c[:end].splitlines()
114 l = c[:end].splitlines()
115 parents = []
115 parents = []
116 author = committer = None
116 author = committer = None
117 for e in l[1:]:
117 for e in l[1:]:
118 n, v = e.split(" ", 1)
118 n, v = e.split(" ", 1)
119 if n == "author":
119 if n == "author":
120 p = v.split()
120 p = v.split()
121 tm, tz = p[-2:]
121 tm, tz = p[-2:]
122 author = " ".join(p[:-2])
122 author = " ".join(p[:-2])
123 if author[0] == "<": author = author[1:-1]
123 if author[0] == "<": author = author[1:-1]
124 author = self.recode(author)
124 author = self.recode(author)
125 if n == "committer":
125 if n == "committer":
126 p = v.split()
126 p = v.split()
127 tm, tz = p[-2:]
127 tm, tz = p[-2:]
128 committer = " ".join(p[:-2])
128 committer = " ".join(p[:-2])
129 if committer[0] == "<": committer = committer[1:-1]
129 if committer[0] == "<": committer = committer[1:-1]
130 committer = self.recode(committer)
130 committer = self.recode(committer)
131 if n == "parent":
131 if n == "parent":
132 parents.append(v)
132 parents.append(v)
133
133
134 if committer and committer != author:
134 if committer and committer != author:
135 message += "\ncommitter: %s\n" % committer
135 message += "\ncommitter: %s\n" % committer
136 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
136 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
137 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
137 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
138 date = tm + " " + str(tz)
138 date = tm + " " + str(tz)
139
139
140 c = commit(parents=parents, date=date, author=author, desc=message,
140 c = commit(parents=parents, date=date, author=author, desc=message,
141 rev=version)
141 rev=version)
142 return c
142 return c
143
143
144 def gettags(self):
144 def gettags(self):
145 tags = {}
145 tags = {}
146 fh = self.gitopen('git ls-remote --tags "%s"' % self.path)
146 fh = self.gitopen('git ls-remote --tags "%s"' % self.path)
147 prefix = 'refs/tags/'
147 prefix = 'refs/tags/'
148 for line in fh:
148 for line in fh:
149 line = line.strip()
149 line = line.strip()
150 if not line.endswith("^{}"):
150 if not line.endswith("^{}"):
151 continue
151 continue
152 node, tag = line.split(None, 1)
152 node, tag = line.split(None, 1)
153 if not tag.startswith(prefix):
153 if not tag.startswith(prefix):
154 continue
154 continue
155 tag = tag[len(prefix):-3]
155 tag = tag[len(prefix):-3]
156 tags[tag] = node
156 tags[tag] = node
157 if fh.close():
157 if fh.close():
158 raise util.Abort(_('cannot read tags from %s') % self.path)
158 raise util.Abort(_('cannot read tags from %s') % self.path)
159
159
160 return tags
160 return tags
161
161
162 def getchangedfiles(self, version, i):
162 def getchangedfiles(self, version, i):
163 changes = []
163 changes = []
164 if i is None:
164 if i is None:
165 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
165 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
166 for l in fh:
166 for l in fh:
167 if "\t" not in l:
167 if "\t" not in l:
168 continue
168 continue
169 m, f = l[:-1].split("\t")
169 m, f = l[:-1].split("\t")
170 changes.append(f)
170 changes.append(f)
171 else:
171 else:
172 fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
172 fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
173 % (version, version, i + 1))
173 % (version, version, i + 1))
174 changes = [f.rstrip('\n') for f in fh]
174 changes = [f.rstrip('\n') for f in fh]
175 if fh.close():
175 if fh.close():
176 raise util.Abort(_('cannot read changes in %s') % version)
176 raise util.Abort(_('cannot read changes in %s') % version)
177
177
178 return changes
178 return changes
179
179
180 def getbookmarks(self):
180 def getbookmarks(self):
181 bookmarks = {}
181 bookmarks = {}
182
182
183 # Interesting references in git are prefixed
183 # Interesting references in git are prefixed
184 prefix = 'refs/heads/'
184 prefix = 'refs/heads/'
185 prefixlen = len(prefix)
185 prefixlen = len(prefix)
186
186
187 # factor two commands
187 # factor two commands
188 gitcmd = { 'remote/': 'git ls-remote --heads origin',
188 gitcmd = { 'remote/': 'git ls-remote --heads origin',
189 '': 'git show-ref'}
189 '': 'git show-ref'}
190
190
191 # Origin heads
191 # Origin heads
192 for reftype in gitcmd:
192 for reftype in gitcmd:
193 try:
193 try:
194 fh = self.gitopen(gitcmd[reftype], noerr=True)
194 fh = self.gitopen(gitcmd[reftype], noerr=True)
195 for line in fh:
195 for line in fh:
196 line = line.strip()
196 line = line.strip()
197 rev, name = line.split(None, 1)
197 rev, name = line.split(None, 1)
198 if not name.startswith(prefix):
198 if not name.startswith(prefix):
199 continue
199 continue
200 name = '%s%s' % (reftype, name[prefixlen:])
200 name = '%s%s' % (reftype, name[prefixlen:])
201 bookmarks[name] = rev
201 bookmarks[name] = rev
202 except:
202 except:
203 pass
203 pass
204
204
205 return bookmarks
205 return bookmarks
@@ -1,388 +1,387
1 # hg.py - hg backend for convert extension
1 # hg.py - hg backend for convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # Notes for hg->hg conversion:
8 # Notes for hg->hg conversion:
9 #
9 #
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 # of commit messages, but new versions do. Changesets created by
11 # of commit messages, but new versions do. Changesets created by
12 # those older versions, then converted, may thus have different
12 # those older versions, then converted, may thus have different
13 # hashes for changesets that are otherwise identical.
13 # hashes for changesets that are otherwise identical.
14 #
14 #
15 # * Using "--config convert.hg.saverev=true" will make the source
15 # * Using "--config convert.hg.saverev=true" will make the source
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
19
19
20
20
21 import os, time, cStringIO
21 import os, time, cStringIO
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import bin, hex, nullid
23 from mercurial.node import bin, hex, nullid
24 from mercurial import hg, util, context, bookmarks, error
24 from mercurial import hg, util, context, bookmarks, error
25
25
26 from common import NoRepo, commit, converter_source, converter_sink
26 from common import NoRepo, commit, converter_source, converter_sink
27
27
28 class mercurial_sink(converter_sink):
28 class mercurial_sink(converter_sink):
    def __init__(self, ui, path):
        """Open (or create) the destination Mercurial repository at path.

        Raises NoRepo when path exists but is not a local hg repository,
        or when a new repository cannot be created there.  Reads the
        convert.hg.usebranchnames, convert.hg.clonebranches and
        convert.hg.tagsbranch configuration options.
        """
        converter_sink.__init__(self, ui, path)
        self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
        self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
        self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
        self.lastbranch = None
        if os.path.isdir(path) and len(os.listdir(path)) > 0:
            # non-empty directory: must already be a local hg repo
            try:
                self.repo = hg.repository(self.ui, path)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
            except error.RepoError, err:
                ui.traceback()
                raise NoRepo(err.args[0])
        else:
            # missing or empty directory: create a fresh repository and
            # remember it in self.created so failed conversions clean up
            try:
                ui.status(_('initializing destination %s repository\n') % path)
                self.repo = hg.repository(self.ui, path, create=True)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
                self.created.append(path)
            except error.RepoError:
                ui.traceback()
                raise NoRepo(_("could not create hg repository %s as sink")
                             % path)
        # locks are taken in before() and released in after()
        self.lock = None
        self.wlock = None
        self.filemapmode = False
59
59
    def before(self):
        """Acquire the sink repository locks before conversion starts.

        The working-directory lock is taken before the store lock;
        after() releases them in the opposite order.
        """
        self.ui.debug('run hg sink pre-conversion action\n')
        self.wlock = self.repo.wlock()
        self.lock = self.repo.lock()
64
64
    def after(self):
        """Release the locks taken in before(), store lock first."""
        self.ui.debug('run hg sink post-conversion action\n')
        if self.lock:
            self.lock.release()
        if self.wlock:
            self.wlock.release()
71
71
    def revmapfile(self):
        # Path of the source->sink revision map, stored inside .hg so it
        # stays with the destination repository.
        return self.repo.join("shamap")
74
74
    def authorfile(self):
        # Path of the author map file inside the destination's .hg.
        return self.repo.join("authormap")
77
77
78 def getheads(self):
78 def getheads(self):
79 h = self.repo.changelog.heads()
79 h = self.repo.changelog.heads()
80 return [hex(x) for x in h]
80 return [hex(x) for x in h]
81
81
82 def setbranch(self, branch, pbranches):
82 def setbranch(self, branch, pbranches):
83 if not self.clonebranches:
83 if not self.clonebranches:
84 return
84 return
85
85
86 setbranch = (branch != self.lastbranch)
86 setbranch = (branch != self.lastbranch)
87 self.lastbranch = branch
87 self.lastbranch = branch
88 if not branch:
88 if not branch:
89 branch = 'default'
89 branch = 'default'
90 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
90 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
91 pbranch = pbranches and pbranches[0][1] or 'default'
91 pbranch = pbranches and pbranches[0][1] or 'default'
92
92
93 branchpath = os.path.join(self.path, branch)
93 branchpath = os.path.join(self.path, branch)
94 if setbranch:
94 if setbranch:
95 self.after()
95 self.after()
96 try:
96 try:
97 self.repo = hg.repository(self.ui, branchpath)
97 self.repo = hg.repository(self.ui, branchpath)
98 except:
98 except:
99 self.repo = hg.repository(self.ui, branchpath, create=True)
99 self.repo = hg.repository(self.ui, branchpath, create=True)
100 self.before()
100 self.before()
101
101
102 # pbranches may bring revisions from other branches (merge parents)
102 # pbranches may bring revisions from other branches (merge parents)
103 # Make sure we have them, or pull them.
103 # Make sure we have them, or pull them.
104 missings = {}
104 missings = {}
105 for b in pbranches:
105 for b in pbranches:
106 try:
106 try:
107 self.repo.lookup(b[0])
107 self.repo.lookup(b[0])
108 except:
108 except:
109 missings.setdefault(b[1], []).append(b[0])
109 missings.setdefault(b[1], []).append(b[0])
110
110
111 if missings:
111 if missings:
112 self.after()
112 self.after()
113 for pbranch, heads in missings.iteritems():
113 for pbranch, heads in missings.iteritems():
114 pbranchpath = os.path.join(self.path, pbranch)
114 pbranchpath = os.path.join(self.path, pbranch)
115 prepo = hg.peer(self.ui, {}, pbranchpath)
115 prepo = hg.peer(self.ui, {}, pbranchpath)
116 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
116 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
117 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
117 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
118 self.before()
118 self.before()
119
119
    def _rewritetags(self, source, revmap, data):
        """Rewrite a .hgtags blob so its revision ids refer to converted
        changesets (looked up via source.lookuprev and revmap).

        Malformed lines and entries whose target revision was not
        converted are dropped.
        """
        fp = cStringIO.StringIO()
        for line in data.splitlines():
            s = line.split(' ', 1)
            if len(s) != 2:
                # not a "<rev> <tag>" pair; skip
                continue
            revid = revmap.get(source.lookuprev(s[0]))
            if not revid:
                # target revision was filtered out of the conversion
                continue
            fp.write('%s %s\n' % (revid, s[1]))
        return fp.getvalue()
131
131
    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Commit 'commit' (a common.commit) to the sink repository.

        Returns the hex id of the resulting changeset, or the unconverted
        parent id when filemap mode detects the revision became empty.
        Merges with more than two parents are flattened into a chain of
        two-parent "(octopus merge fixup)" commits.
        """

        files = dict(files)
        def getfilectx(repo, memctx, f):
            v = files[f]
            data, mode = source.getfile(f, v)
            if f == '.hgtags':
                # remap tag targets to converted revision ids
                data = self._rewritetags(source, revmap, data)
            return context.memfilectx(f, data, 'l' in mode, 'x' in mode,
                                      copies.get(f))

        # deduplicate parents while preserving order
        pl = []
        for p in parents:
            if p not in pl:
                pl.append(p)
        parents = pl
        nparents = len(parents)
        if self.filemapmode and nparents == 1:
            # remember the single parent's manifest node so an empty
            # (manifest-identical) result can be detected below
            m1node = self.repo.changelog.read(bin(parents[0]))[0]
            parent = parents[0]

        # pad to at least two parents so the while loop below always has
        # a (p1, p2) pair to commit with
        if len(parents) < 2:
            parents.append(nullid)
        if len(parents) < 2:
            parents.append(nullid)
        p2 = parents.pop(0)

        text = commit.desc
        extra = commit.extra.copy()
        if self.branchnames and commit.branch:
            extra['branch'] = commit.branch
        if commit.rev:
            extra['convert_revision'] = commit.rev

        # one commit per remaining parent; after the first, each commit
        # merges the previous tip with the next parent
        while parents:
            p1 = p2
            p2 = parents.pop(0)
            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
                                 getfilectx, commit.author, commit.date, extra)
            self.repo.commitctx(ctx)
            text = "(octopus merge fixup)\n"
            p2 = hex(self.repo.changelog.tip())

        if self.filemapmode and nparents == 1:
            man = self.repo.manifest
            mnode = self.repo.changelog.read(bin(p2))[0]
            closed = 'close' in commit.extra
            if not closed and not man.cmp(m1node, man.revision(mnode)):
                # manifest unchanged by the filemap-filtered commit:
                # drop it and report the parent instead
                self.ui.status(_("filtering out empty revision\n"))
                self.repo.rollback(force=True)
                return parent
        return p2
184
184
185 def puttags(self, tags):
185 def puttags(self, tags):
186 try:
186 try:
187 parentctx = self.repo[self.tagsbranch]
187 parentctx = self.repo[self.tagsbranch]
188 tagparent = parentctx.node()
188 tagparent = parentctx.node()
189 except error.RepoError:
189 except error.RepoError:
190 parentctx = None
190 parentctx = None
191 tagparent = nullid
191 tagparent = nullid
192
192
193 try:
193 try:
194 oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
194 oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
195 except:
195 except:
196 oldlines = []
196 oldlines = []
197
197
198 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
198 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
199 if newlines == oldlines:
199 if newlines == oldlines:
200 return None, None
200 return None, None
201 data = "".join(newlines)
201 data = "".join(newlines)
202 def getfilectx(repo, memctx, f):
202 def getfilectx(repo, memctx, f):
203 return context.memfilectx(f, data, False, False, None)
203 return context.memfilectx(f, data, False, False, None)
204
204
205 self.ui.status(_("updating tags\n"))
205 self.ui.status(_("updating tags\n"))
206 date = "%s 0" % int(time.mktime(time.gmtime()))
206 date = "%s 0" % int(time.mktime(time.gmtime()))
207 extra = {'branch': self.tagsbranch}
207 extra = {'branch': self.tagsbranch}
208 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
208 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
209 [".hgtags"], getfilectx, "convert-repo", date,
209 [".hgtags"], getfilectx, "convert-repo", date,
210 extra)
210 extra)
211 self.repo.commitctx(ctx)
211 self.repo.commitctx(ctx)
212 return hex(self.repo.changelog.tip()), hex(tagparent)
212 return hex(self.repo.changelog.tip()), hex(tagparent)
213
213
    def setfilemapmode(self, active):
        # Record whether a filemap is in use; putcommit() consults this
        # flag to detect and roll back revisions that become empty.
        self.filemapmode = active
216
216
217 def putbookmarks(self, updatedbookmark):
217 def putbookmarks(self, updatedbookmark):
218 if not len(updatedbookmark):
218 if not len(updatedbookmark):
219 return
219 return
220
220
221 self.ui.status(_("updating bookmarks\n"))
221 self.ui.status(_("updating bookmarks\n"))
222 for bookmark in updatedbookmark:
222 for bookmark in updatedbookmark:
223 self.repo._bookmarks[bookmark] = bin(updatedbookmark[bookmark])
223 self.repo._bookmarks[bookmark] = bin(updatedbookmark[bookmark])
224 bookmarks.write(self.repo)
224 bookmarks.write(self.repo)
225
225
226
226
227 class mercurial_source(converter_source):
227 class mercurial_source(converter_source):
    def __init__(self, ui, path, rev=None):
        """Open the source Mercurial repository at path.

        Raises NoRepo when path is not a local hg repository.  Reads the
        convert.hg.ignoreerrors, convert.hg.saverev and convert.hg.startrev
        configuration options; when startrev is set, only that revision
        and its descendants are converted (see self.keep).
        """
        converter_source.__init__(self, ui, path, rev)
        self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
        self.ignored = set()    # files skipped due to ignored errors
        self.saverev = ui.configbool('convert', 'hg.saverev', False)
        try:
            self.repo = hg.repository(self.ui, path)
            # try to provoke an exception if this isn't really a hg
            # repo, but some other bogus compatible-looking url
            if not self.repo.local():
                raise error.RepoError()
        except error.RepoError:
            ui.traceback()
            raise NoRepo(_("%s is not a local Mercurial repository") % path)
        # single-entry changectx cache, see changectx()
        self.lastrev = None
        self.lastctx = None
        # (rev, status) cache shared between getchangedfiles()/getchanges()
        self._changescache = None
        # lazily opened shamap file, see converted()
        self.convertfp = None
        # Restrict converted revisions to startrev descendants
        startnode = ui.config('convert', 'hg.startrev')
        if startnode is not None:
            try:
                startnode = self.repo.lookup(startnode)
            except error.RepoError:
                raise util.Abort(_('%s is not a valid start revision')
                                 % startnode)
            startrev = self.repo.changelog.rev(startnode)
            children = {startnode: 1}
            for rev in self.repo.changelog.descendants(startrev):
                children[self.repo.changelog.node(rev)] = 1
            self.keep = children.__contains__
        else:
            self.keep = util.always
261
261
262 def changectx(self, rev):
262 def changectx(self, rev):
263 if self.lastrev != rev:
263 if self.lastrev != rev:
264 self.lastctx = self.repo[rev]
264 self.lastctx = self.repo[rev]
265 self.lastrev = rev
265 self.lastrev = rev
266 return self.lastctx
266 return self.lastctx
267
267
268 def parents(self, ctx):
268 def parents(self, ctx):
269 return [p for p in ctx.parents() if p and self.keep(p.node())]
269 return [p for p in ctx.parents() if p and self.keep(p.node())]
270
270
271 def getheads(self):
271 def getheads(self):
272 if self.rev:
272 if self.rev:
273 heads = [self.repo[self.rev].node()]
273 heads = [self.repo[self.rev].node()]
274 else:
274 else:
275 heads = self.repo.heads()
275 heads = self.repo.heads()
276 return [hex(h) for h in heads if self.keep(h)]
276 return [hex(h) for h in heads if self.keep(h)]
277
277
    def getfile(self, name, rev):
        """Return (data, flags) for file 'name' in revision 'rev'.

        Raises IOError when the file is absent from that revision, which
        is how converter sources report deleted files.
        """
        try:
            fctx = self.changectx(rev)[name]
            return fctx.data(), fctx.flags()
        except error.LookupError, err:
            raise IOError(err)
284
284
    def getchanges(self, rev):
        """Return ([(file, rev)], {dest: copysource}) for revision rev,
        excluding files recorded in self.ignored."""
        ctx = self.changectx(rev)
        parents = self.parents(ctx)
        if not parents:
            # root revision: every manifest entry counts as a change
            files = sorted(ctx.manifest())
            # getcopies() is not needed for roots, but it is a simple way to
            # detect missing revlogs and abort on errors or populate self.ignored
            self.getcopies(ctx, parents, files)
            return [(f, rev) for f in files if f not in self.ignored], {}
        if self._changescache and self._changescache[0] == rev:
            # reuse the status computed by getchangedfiles()
            m, a, r = self._changescache[1]
        else:
            m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
        # getcopies() detects missing revlogs early, run it before
        # filtering the changes.
        copies = self.getcopies(ctx, parents, m + a)
        changes = [(name, rev) for name in m + a + r
                   if name not in self.ignored]
        return sorted(changes), copies
304
304
    def getcopies(self, ctx, parents, files):
        """Return {dest: copysource} for 'files' in ctx.

        Copies whose source is ignored, filtered out by self.keep, or not
        present in any of 'parents' are skipped.  Lookup errors either
        abort or (with convert.hg.ignoreerrors) add the file to
        self.ignored.
        """
        copies = {}
        for name in files:
            if name in self.ignored:
                continue
            try:
                copysource, copynode = ctx.filectx(name).renamed()
                if copysource in self.ignored or not self.keep(copynode):
                    continue
                # Ignore copy sources not in parent revisions
                found = False
                for p in parents:
                    if copysource in p:
                        found = True
                        break
                if not found:
                    continue
                copies[name] = copysource
            except TypeError:
                # renamed() returned None: the file is not a copy
                pass
            except error.LookupError, e:
                if not self.ignoreerrors:
                    raise
                self.ignored.add(name)
                self.ui.warn(_('ignoring: %s\n') % e)
        return copies
331
331
    def getcommit(self, rev):
        """Build a common.commit object describing revision rev."""
        ctx = self.changectx(rev)
        parents = [p.hex() for p in self.parents(ctx)]
        if self.saverev:
            # convert.hg.saverev: embed the source revision id so it is
            # recorded in the converted changeset
            crev = rev
        else:
            crev = None
        return commit(author=ctx.user(), date=util.datestr(ctx.date()),
                      desc=ctx.description(), rev=crev, parents=parents,
                      branch=ctx.branch(), extra=ctx.extra(),
                      sortkey=ctx.rev())
343
343
344 def gettags(self):
344 def gettags(self):
345 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
345 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
346 return dict([(name, hex(node)) for name, node in tags
346 return dict([(name, hex(node)) for name, node in tags
347 if self.keep(node)])
347 if self.keep(node)])
348
348
    def getchangedfiles(self, rev, i):
        """Return the files changed by rev relative to its i-th parent
        (for a parentless root, every manifest file), minus ignored ones."""
        ctx = self.changectx(rev)
        parents = self.parents(ctx)
        if not parents and i is None:
            i = 0
            # root revision: ([], added, []) status shape
            changes = [], ctx.manifest().keys(), []
        else:
            i = i or 0
            changes = self.repo.status(parents[i].node(), ctx.node())[:3]
        changes = [[f for f in l if f not in self.ignored] for l in changes]

        if i == 0:
            # cache (modified, added, removed) for getchanges()
            self._changescache = (rev, changes)

        return changes[0] + changes[1] + changes[2]
364
364
    def converted(self, rev, destrev):
        # Append the sink->source revision pair to the source repository's
        # shamap file, opening it lazily and flushing after each write so
        # an interrupted conversion loses nothing.
        if self.convertfp is None:
            self.convertfp = open(self.repo.join('shamap'), 'a')
        self.convertfp.write('%s %s\n' % (destrev, rev))
        self.convertfp.flush()
371
370
    def before(self):
        """Pre-conversion hook for the hg source; only logs."""
        self.ui.debug('run hg source pre-conversion action\n')
374
373
    def after(self):
        """Post-conversion hook for the hg source; only logs."""
        self.ui.debug('run hg source post-conversion action\n')
377
376
    def hasnativeorder(self):
        # The source can supply revisions in its own order, so the
        # converter may honor --sourcesort.
        return True
380
379
    def lookuprev(self, rev):
        """Resolve any revision identifier to a full hex node id, or
        return None when the lookup fails."""
        try:
            return hex(self.repo.lookup(rev))
        except error.RepoError:
            return None
386
385
    def getbookmarks(self):
        # Bookmarks map directly: name -> hex node.
        return bookmarks.listbookmarks(self.repo)
@@ -1,1175 +1,1175
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4
4
5 import os
5 import os
6 import re
6 import re
7 import sys
7 import sys
8 import cPickle as pickle
8 import cPickle as pickle
9 import tempfile
9 import tempfile
10 import urllib
10 import urllib
11 import urllib2
11 import urllib2
12
12
13 from mercurial import strutil, scmutil, util, encoding
13 from mercurial import strutil, scmutil, util, encoding
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 # Subversion stuff. Works best with very recent Python SVN bindings
16 # Subversion stuff. Works best with very recent Python SVN bindings
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
18 # these bindings.
18 # these bindings.
19
19
20 from cStringIO import StringIO
20 from cStringIO import StringIO
21
21
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
23 from common import commandline, converter_source, converter_sink, mapfile
23 from common import commandline, converter_source, converter_sink, mapfile
24
24
25 try:
25 try:
26 from svn.core import SubversionException, Pool
26 from svn.core import SubversionException, Pool
27 import svn
27 import svn
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.ra
30 import svn.ra
31 import svn.delta
31 import svn.delta
32 import transport
32 import transport
33 import warnings
33 import warnings
34 warnings.filterwarnings('ignore',
34 warnings.filterwarnings('ignore',
35 module='svn.core',
35 module='svn.core',
36 category=DeprecationWarning)
36 category=DeprecationWarning)
37
37
38 except ImportError:
38 except ImportError:
39 svn = None
39 svn = None
40
40
class SvnPathNotFound(Exception):
    """Raised when a path cannot be located in the Subversion repository."""
    pass
43
43
def revsplit(rev):
    """Parse a revision string and return (uuid, path, revnum).

    The expected form is 'svn:<uuid></module>@<revnum>'; the first four
    characters ('svn:') are stripped from the uuid part.
    """
    url, revnum = rev.rsplit('@', 1)
    parts = url.split('/', 1)
    if len(parts) > 1:
        mod = '/' + parts[1]
    else:
        mod = ''
    return parts[0][4:], mod, int(revnum)
52
52
def geturl(path):
    """Normalize a repository location into something usable as an svn URL.

    Working copies resolve to their repository URL; existing local
    directories become quoted file:// URLs; anything else is returned
    unchanged (it may already be a URL).
    """
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        # not something the svn client can resolve; fall through
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        return 'file://%s' % urllib.quote(path)
    return path
67
67
def optrev(number):
    """Wrap a plain revision number in an svn_opt_revision_t."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
73
73
class changedpath(object):
    """Picklable snapshot of an svn changed-path record.

    The native record cannot be sent through the pickle stream that
    get_log_child() writes, so the interesting fields are copied here.
    """
    def __init__(self, p):
        for attr in ('copyfrom_path', 'copyfrom_rev', 'action'):
            setattr(self, attr, getattr(p, attr))
79
79
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    """Run svn.ra.get_log() and stream pickled entries to fp.

    Runs in a child process (launched via debugsvnlog).  Each log entry
    is dumped as (orig_paths, revnum, author, date, message); a pickled
    error number reports a SubversionException and a pickled None marks
    the end of the stream.
    """
    protocol = -1  # pickle: highest available protocol
    def receiver(orig_paths, revnum, author, date, message, pool):
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                # replace native records with picklable copies
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
110
110
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # stdin/stdout carry pickled binary data; disable any newline
    # translation before reading the encoded arguments
    util.setbinary(sys.stdin)
    util.setbinary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
119
119
class logstream(object):
    """Interruptible revision log iterator.

    Reads pickled entries produced by get_log_child() from the child
    process's stdout; iteration ends at the pickled None sentinel, and a
    pickled error value is re-raised as a SubversionException.
    """
    def __init__(self, stdout):
        self._stdout = stdout

    def __iter__(self):
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            try:
                orig_paths, revnum, author, date, message = entry
            # was a bare 'except:': entry is either a 5-tuple, the None
            # end-of-log sentinel, or a pickled svn error value; only the
            # unpacking failure should be caught here, not ^C
            except (TypeError, ValueError):
                if entry is None:
                    break
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None
144
144
145
145
# Check to see if the given path is a local Subversion repo. Verify this by
# looking for several svn-specific files and directories in the given
# directory.
def filecheck(ui, path, proto):
    """Return True if *path* looks like the root of a local svn repository.

    Heuristic: an svn repository root contains all of these entries.
    *ui* and *proto* are unused but kept for protomap-callback signature
    compatibility.
    """
    for x in ('locks', 'hooks', 'format', 'db'):
        if not os.path.exists(os.path.join(path, x)):
            return False
    return True
154
154
# Check to see if a given path is the root of an svn repo over http. We verify
# this by requesting a version-controlled URL we know can't exist and looking
# for the svn-specific "not found" XML.
158 def httpcheck(ui, path, proto):
158 def httpcheck(ui, path, proto):
159 try:
159 try:
160 opener = urllib2.build_opener()
160 opener = urllib2.build_opener()
161 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
161 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
162 data = rsp.read()
162 data = rsp.read()
163 except urllib2.HTTPError, inst:
163 except urllib2.HTTPError, inst:
164 if inst.code != 404:
164 if inst.code != 404:
165 # Except for 404 we cannot know for sure this is not an svn repo
165 # Except for 404 we cannot know for sure this is not an svn repo
166 ui.warn(_('svn: cannot probe remote repository, assume it could '
166 ui.warn(_('svn: cannot probe remote repository, assume it could '
167 'be a subversion repository. Use --source-type if you '
167 'be a subversion repository. Use --source-type if you '
168 'know better.\n'))
168 'know better.\n'))
169 return True
169 return True
170 data = inst.fp.read()
170 data = inst.fp.read()
171 except:
171 except:
172 # Could be urllib2.URLError if the URL is invalid or anything else.
172 # Could be urllib2.URLError if the URL is invalid or anything else.
173 return False
173 return False
174 return '<m:human-readable errcode="160013">' in data
174 return '<m:human-readable errcode="160013">' in data
175
175
# Map URL scheme to the probe used to decide whether it is an svn repo.
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }

def issvnurl(ui, url):
    """Return True if *url* plausibly points at (or inside) an svn repo.

    Walks up the path one component at a time, probing each prefix with
    the scheme-appropriate check, so URLs deep inside a repository are
    recognized too.  Unknown schemes are rejected.
    """
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            path = urllib.url2pathname(path)
    except ValueError:
        # No scheme at all: treat as a local filesystem path.
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        # Normalize to forward slashes so the prefix-stripping loop works
        # the same on Windows.
        path = path.replace(os.sep, '/')
    check = protomap.get(proto, lambda *args: False)
    while '/' in path:
        if check(ui, path, proto):
            return True
        # Strip the last path component and probe the parent.
        path = path.rsplit('/', 1)[0]
    return False
196
196
# SVN conversion code stolen from bzr-svn and tailor
#
# Subversion looks like a versioned filesystem, but branch structures
# are defined by convention and not enforced by the tool. First,
# we define the potential branches (modules) as the "trunk" and "branches"
# child directories. Revisions are then identified by their
# module and revision number (and a repository identifier).
#
# The revision graph is really a tree (or a forest). By default, a
# revision's parent is the previous revision in the same module. If the
# module directory is copied/moved from another module then the
# revision is the module root and its parent is the source revision in
# the parent module. A revision has at most one parent.
#
class svn_source(converter_source):
    """Convert-source reading history from a Subversion repository."""

    def __init__(self, ui, url, rev=None):
        """Open *url* as an svn source, optionally capped at revision *rev*.

        Raises NoRepo if the URL does not look like svn, MissingTool if
        the svn python bindings are missing/too old, and util.Abort on
        invalid revision settings or an empty module.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)
        if svn is None:
            raise MissingTool(_('Could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            # Very old bindings predate the SVN_VER_* constants entirely.
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % self.url)

        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.trunkname = self.ui.config('convert', 'svn.trunk',
                                        'trunk').strip('/')
        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        try:
            self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        # (rev, changes) pair cached between getchangedfiles/getchanges.
        self._changescache = None

        # Remember the working copy location, if any, so converted() can
        # record a shamap next to it.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
297
297
298 def setrevmap(self, revmap):
298 def setrevmap(self, revmap):
299 lastrevs = {}
299 lastrevs = {}
300 for revid in revmap.iterkeys():
300 for revid in revmap.iterkeys():
301 uuid, module, revnum = revsplit(revid)
301 uuid, module, revnum = revsplit(revid)
302 lastrevnum = lastrevs.setdefault(module, revnum)
302 lastrevnum = lastrevs.setdefault(module, revnum)
303 if revnum > lastrevnum:
303 if revnum > lastrevnum:
304 lastrevs[module] = revnum
304 lastrevs[module] = revnum
305 self.lastrevs = lastrevs
305 self.lastrevs = lastrevs
306
306
307 def exists(self, path, optrev):
307 def exists(self, path, optrev):
308 try:
308 try:
309 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
309 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
310 optrev, False, self.ctx)
310 optrev, False, self.ctx)
311 return True
311 return True
312 except SubversionException:
312 except SubversionException:
313 return False
313 return False
314
314
315 def getheads(self):
315 def getheads(self):
316
316
317 def isdir(path, revnum):
317 def isdir(path, revnum):
318 kind = self._checkpath(path, revnum)
318 kind = self._checkpath(path, revnum)
319 return kind == svn.core.svn_node_dir
319 return kind == svn.core.svn_node_dir
320
320
321 def getcfgpath(name, rev):
321 def getcfgpath(name, rev):
322 cfgpath = self.ui.config('convert', 'svn.' + name)
322 cfgpath = self.ui.config('convert', 'svn.' + name)
323 if cfgpath is not None and cfgpath.strip() == '':
323 if cfgpath is not None and cfgpath.strip() == '':
324 return None
324 return None
325 path = (cfgpath or name).strip('/')
325 path = (cfgpath or name).strip('/')
326 if not self.exists(path, rev):
326 if not self.exists(path, rev):
327 if self.module.endswith(path) and name == 'trunk':
327 if self.module.endswith(path) and name == 'trunk':
328 # we are converting from inside this directory
328 # we are converting from inside this directory
329 return None
329 return None
330 if cfgpath:
330 if cfgpath:
331 raise util.Abort(_('expected %s to be at %r, but not found')
331 raise util.Abort(_('expected %s to be at %r, but not found')
332 % (name, path))
332 % (name, path))
333 return None
333 return None
334 self.ui.note(_('found %s at %r\n') % (name, path))
334 self.ui.note(_('found %s at %r\n') % (name, path))
335 return path
335 return path
336
336
337 rev = optrev(self.last_changed)
337 rev = optrev(self.last_changed)
338 oldmodule = ''
338 oldmodule = ''
339 trunk = getcfgpath('trunk', rev)
339 trunk = getcfgpath('trunk', rev)
340 self.tags = getcfgpath('tags', rev)
340 self.tags = getcfgpath('tags', rev)
341 branches = getcfgpath('branches', rev)
341 branches = getcfgpath('branches', rev)
342
342
343 # If the project has a trunk or branches, we will extract heads
343 # If the project has a trunk or branches, we will extract heads
344 # from them. We keep the project root otherwise.
344 # from them. We keep the project root otherwise.
345 if trunk:
345 if trunk:
346 oldmodule = self.module or ''
346 oldmodule = self.module or ''
347 self.module += '/' + trunk
347 self.module += '/' + trunk
348 self.head = self.latest(self.module, self.last_changed)
348 self.head = self.latest(self.module, self.last_changed)
349 if not self.head:
349 if not self.head:
350 raise util.Abort(_('no revision found in module %s')
350 raise util.Abort(_('no revision found in module %s')
351 % self.module)
351 % self.module)
352
352
353 # First head in the list is the module's head
353 # First head in the list is the module's head
354 self.heads = [self.head]
354 self.heads = [self.head]
355 if self.tags is not None:
355 if self.tags is not None:
356 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
356 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
357
357
358 # Check if branches bring a few more heads to the list
358 # Check if branches bring a few more heads to the list
359 if branches:
359 if branches:
360 rpath = self.url.strip('/')
360 rpath = self.url.strip('/')
361 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
361 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
362 rev, False, self.ctx)
362 rev, False, self.ctx)
363 for branch in branchnames.keys():
363 for branch in branchnames.keys():
364 module = '%s/%s/%s' % (oldmodule, branches, branch)
364 module = '%s/%s/%s' % (oldmodule, branches, branch)
365 if not isdir(module, self.last_changed):
365 if not isdir(module, self.last_changed):
366 continue
366 continue
367 brevid = self.latest(module, self.last_changed)
367 brevid = self.latest(module, self.last_changed)
368 if not brevid:
368 if not brevid:
369 self.ui.note(_('ignoring empty branch %s\n') % branch)
369 self.ui.note(_('ignoring empty branch %s\n') % branch)
370 continue
370 continue
371 self.ui.note(_('found branch %s at %d\n') %
371 self.ui.note(_('found branch %s at %d\n') %
372 (branch, self.revnum(brevid)))
372 (branch, self.revnum(brevid)))
373 self.heads.append(brevid)
373 self.heads.append(brevid)
374
374
375 if self.startrev and self.heads:
375 if self.startrev and self.heads:
376 if len(self.heads) > 1:
376 if len(self.heads) > 1:
377 raise util.Abort(_('svn: start revision is not supported '
377 raise util.Abort(_('svn: start revision is not supported '
378 'with more than one branch'))
378 'with more than one branch'))
379 revnum = self.revnum(self.heads[0])
379 revnum = self.revnum(self.heads[0])
380 if revnum < self.startrev:
380 if revnum < self.startrev:
381 raise util.Abort(
381 raise util.Abort(
382 _('svn: no revision found after start revision %d')
382 _('svn: no revision found after start revision %d')
383 % self.startrev)
383 % self.startrev)
384
384
385 return self.heads
385 return self.heads
386
386
387 def getchanges(self, rev):
387 def getchanges(self, rev):
388 if self._changescache and self._changescache[0] == rev:
388 if self._changescache and self._changescache[0] == rev:
389 return self._changescache[1]
389 return self._changescache[1]
390 self._changescache = None
390 self._changescache = None
391 (paths, parents) = self.paths[rev]
391 (paths, parents) = self.paths[rev]
392 if parents:
392 if parents:
393 files, self.removed, copies = self.expandpaths(rev, paths, parents)
393 files, self.removed, copies = self.expandpaths(rev, paths, parents)
394 else:
394 else:
395 # Perform a full checkout on roots
395 # Perform a full checkout on roots
396 uuid, module, revnum = revsplit(rev)
396 uuid, module, revnum = revsplit(rev)
397 entries = svn.client.ls(self.baseurl + urllib.quote(module),
397 entries = svn.client.ls(self.baseurl + urllib.quote(module),
398 optrev(revnum), True, self.ctx)
398 optrev(revnum), True, self.ctx)
399 files = [n for n, e in entries.iteritems()
399 files = [n for n, e in entries.iteritems()
400 if e.kind == svn.core.svn_node_file]
400 if e.kind == svn.core.svn_node_file]
401 copies = {}
401 copies = {}
402 self.removed = set()
402 self.removed = set()
403
403
404 files.sort()
404 files.sort()
405 files = zip(files, [rev] * len(files))
405 files = zip(files, [rev] * len(files))
406
406
407 # caller caches the result, so free it here to release memory
407 # caller caches the result, so free it here to release memory
408 del self.paths[rev]
408 del self.paths[rev]
409 return (files, copies)
409 return (files, copies)
410
410
411 def getchangedfiles(self, rev, i):
411 def getchangedfiles(self, rev, i):
412 changes = self.getchanges(rev)
412 changes = self.getchanges(rev)
413 self._changescache = (rev, changes)
413 self._changescache = (rev, changes)
414 return [f[0] for f in changes[0]]
414 return [f[0] for f in changes[0]]
415
415
416 def getcommit(self, rev):
416 def getcommit(self, rev):
417 if rev not in self.commits:
417 if rev not in self.commits:
418 uuid, module, revnum = revsplit(rev)
418 uuid, module, revnum = revsplit(rev)
419 self.module = module
419 self.module = module
420 self.reparent(module)
420 self.reparent(module)
421 # We assume that:
421 # We assume that:
422 # - requests for revisions after "stop" come from the
422 # - requests for revisions after "stop" come from the
423 # revision graph backward traversal. Cache all of them
423 # revision graph backward traversal. Cache all of them
424 # down to stop, they will be used eventually.
424 # down to stop, they will be used eventually.
425 # - requests for revisions before "stop" come to get
425 # - requests for revisions before "stop" come to get
426 # isolated branches parents. Just fetch what is needed.
426 # isolated branches parents. Just fetch what is needed.
427 stop = self.lastrevs.get(module, 0)
427 stop = self.lastrevs.get(module, 0)
428 if revnum < stop:
428 if revnum < stop:
429 stop = revnum + 1
429 stop = revnum + 1
430 self._fetch_revisions(revnum, stop)
430 self._fetch_revisions(revnum, stop)
431 commit = self.commits[rev]
431 commit = self.commits[rev]
432 # caller caches the result, so free it here to release memory
432 # caller caches the result, so free it here to release memory
433 del self.commits[rev]
433 del self.commits[rev]
434 return commit
434 return commit
435
435
436 def gettags(self):
436 def gettags(self):
437 tags = {}
437 tags = {}
438 if self.tags is None:
438 if self.tags is None:
439 return tags
439 return tags
440
440
441 # svn tags are just a convention, project branches left in a
441 # svn tags are just a convention, project branches left in a
442 # 'tags' directory. There is no other relationship than
442 # 'tags' directory. There is no other relationship than
443 # ancestry, which is expensive to discover and makes them hard
443 # ancestry, which is expensive to discover and makes them hard
444 # to update incrementally. Worse, past revisions may be
444 # to update incrementally. Worse, past revisions may be
445 # referenced by tags far away in the future, requiring a deep
445 # referenced by tags far away in the future, requiring a deep
446 # history traversal on every calculation. Current code
446 # history traversal on every calculation. Current code
447 # performs a single backward traversal, tracking moves within
447 # performs a single backward traversal, tracking moves within
448 # the tags directory (tag renaming) and recording a new tag
448 # the tags directory (tag renaming) and recording a new tag
449 # everytime a project is copied from outside the tags
449 # everytime a project is copied from outside the tags
450 # directory. It also lists deleted tags, this behaviour may
450 # directory. It also lists deleted tags, this behaviour may
451 # change in the future.
451 # change in the future.
452 pendings = []
452 pendings = []
453 tagspath = self.tags
453 tagspath = self.tags
454 start = svn.ra.get_latest_revnum(self.ra)
454 start = svn.ra.get_latest_revnum(self.ra)
455 stream = self._getlog([self.tags], start, self.startrev)
455 stream = self._getlog([self.tags], start, self.startrev)
456 try:
456 try:
457 for entry in stream:
457 for entry in stream:
458 origpaths, revnum, author, date, message = entry
458 origpaths, revnum, author, date, message = entry
459 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
459 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
460 in origpaths.iteritems() if e.copyfrom_path]
460 in origpaths.iteritems() if e.copyfrom_path]
461 # Apply moves/copies from more specific to general
461 # Apply moves/copies from more specific to general
462 copies.sort(reverse=True)
462 copies.sort(reverse=True)
463
463
464 srctagspath = tagspath
464 srctagspath = tagspath
465 if copies and copies[-1][2] == tagspath:
465 if copies and copies[-1][2] == tagspath:
466 # Track tags directory moves
466 # Track tags directory moves
467 srctagspath = copies.pop()[0]
467 srctagspath = copies.pop()[0]
468
468
469 for source, sourcerev, dest in copies:
469 for source, sourcerev, dest in copies:
470 if not dest.startswith(tagspath + '/'):
470 if not dest.startswith(tagspath + '/'):
471 continue
471 continue
472 for tag in pendings:
472 for tag in pendings:
473 if tag[0].startswith(dest):
473 if tag[0].startswith(dest):
474 tagpath = source + tag[0][len(dest):]
474 tagpath = source + tag[0][len(dest):]
475 tag[:2] = [tagpath, sourcerev]
475 tag[:2] = [tagpath, sourcerev]
476 break
476 break
477 else:
477 else:
478 pendings.append([source, sourcerev, dest])
478 pendings.append([source, sourcerev, dest])
479
479
480 # Filter out tags with children coming from different
480 # Filter out tags with children coming from different
481 # parts of the repository like:
481 # parts of the repository like:
482 # /tags/tag.1 (from /trunk:10)
482 # /tags/tag.1 (from /trunk:10)
483 # /tags/tag.1/foo (from /branches/foo:12)
483 # /tags/tag.1/foo (from /branches/foo:12)
484 # Here/tags/tag.1 discarded as well as its children.
484 # Here/tags/tag.1 discarded as well as its children.
485 # It happens with tools like cvs2svn. Such tags cannot
485 # It happens with tools like cvs2svn. Such tags cannot
486 # be represented in mercurial.
486 # be represented in mercurial.
487 addeds = dict((p, e.copyfrom_path) for p, e
487 addeds = dict((p, e.copyfrom_path) for p, e
488 in origpaths.iteritems()
488 in origpaths.iteritems()
489 if e.action == 'A' and e.copyfrom_path)
489 if e.action == 'A' and e.copyfrom_path)
490 badroots = set()
490 badroots = set()
491 for destroot in addeds:
491 for destroot in addeds:
492 for source, sourcerev, dest in pendings:
492 for source, sourcerev, dest in pendings:
493 if (not dest.startswith(destroot + '/')
493 if (not dest.startswith(destroot + '/')
494 or source.startswith(addeds[destroot] + '/')):
494 or source.startswith(addeds[destroot] + '/')):
495 continue
495 continue
496 badroots.add(destroot)
496 badroots.add(destroot)
497 break
497 break
498
498
499 for badroot in badroots:
499 for badroot in badroots:
500 pendings = [p for p in pendings if p[2] != badroot
500 pendings = [p for p in pendings if p[2] != badroot
501 and not p[2].startswith(badroot + '/')]
501 and not p[2].startswith(badroot + '/')]
502
502
503 # Tell tag renamings from tag creations
503 # Tell tag renamings from tag creations
504 remainings = []
504 renamings = []
505 for source, sourcerev, dest in pendings:
505 for source, sourcerev, dest in pendings:
506 tagname = dest.split('/')[-1]
506 tagname = dest.split('/')[-1]
507 if source.startswith(srctagspath):
507 if source.startswith(srctagspath):
508 remainings.append([source, sourcerev, tagname])
508 renamings.append([source, sourcerev, tagname])
509 continue
509 continue
510 if tagname in tags:
510 if tagname in tags:
511 # Keep the latest tag value
511 # Keep the latest tag value
512 continue
512 continue
513 # From revision may be fake, get one with changes
513 # From revision may be fake, get one with changes
514 try:
514 try:
515 tagid = self.latest(source, sourcerev)
515 tagid = self.latest(source, sourcerev)
516 if tagid and tagname not in tags:
516 if tagid and tagname not in tags:
517 tags[tagname] = tagid
517 tags[tagname] = tagid
518 except SvnPathNotFound:
518 except SvnPathNotFound:
519 # It happens when we are following directories
519 # It happens when we are following directories
520 # we assumed were copied with their parents
520 # we assumed were copied with their parents
521 # but were really created in the tag
521 # but were really created in the tag
522 # directory.
522 # directory.
523 pass
523 pass
524 pendings = remainings
524 pendings = renamings
525 tagspath = srctagspath
525 tagspath = srctagspath
526 finally:
526 finally:
527 stream.close()
527 stream.close()
528 return tags
528 return tags
529
529
530 def converted(self, rev, destrev):
530 def converted(self, rev, destrev):
531 if not self.wc:
531 if not self.wc:
532 return
532 return
533 if self.convertfp is None:
533 if self.convertfp is None:
534 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
534 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
535 'a')
535 'a')
536 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
536 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
537 self.convertfp.flush()
537 self.convertfp.flush()
538
538
539 def revid(self, revnum, module=None):
539 def revid(self, revnum, module=None):
540 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
540 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
541
541
542 def revnum(self, rev):
542 def revnum(self, rev):
543 return int(rev.split('@')[-1])
543 return int(rev.split('@')[-1])
544
544
545 def latest(self, path, stop=0):
545 def latest(self, path, stop=0):
546 """Find the latest revid affecting path, up to stop. It may return
546 """Find the latest revid affecting path, up to stop. It may return
547 a revision in a different module, since a branch may be moved without
547 a revision in a different module, since a branch may be moved without
548 a change being reported. Return None if computed module does not
548 a change being reported. Return None if computed module does not
549 belong to rootmodule subtree.
549 belong to rootmodule subtree.
550 """
550 """
551 if not path.startswith(self.rootmodule):
551 if not path.startswith(self.rootmodule):
552 # Requests on foreign branches may be forbidden at server level
552 # Requests on foreign branches may be forbidden at server level
553 self.ui.debug('ignoring foreign branch %r\n' % path)
553 self.ui.debug('ignoring foreign branch %r\n' % path)
554 return None
554 return None
555
555
556 if not stop:
556 if not stop:
557 stop = svn.ra.get_latest_revnum(self.ra)
557 stop = svn.ra.get_latest_revnum(self.ra)
558 try:
558 try:
559 prevmodule = self.reparent('')
559 prevmodule = self.reparent('')
560 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
560 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
561 self.reparent(prevmodule)
561 self.reparent(prevmodule)
562 except SubversionException:
562 except SubversionException:
563 dirent = None
563 dirent = None
564 if not dirent:
564 if not dirent:
565 raise SvnPathNotFound(_('%s not found up to revision %d')
565 raise SvnPathNotFound(_('%s not found up to revision %d')
566 % (path, stop))
566 % (path, stop))
567
567
568 # stat() gives us the previous revision on this line of
568 # stat() gives us the previous revision on this line of
569 # development, but it might be in *another module*. Fetch the
569 # development, but it might be in *another module*. Fetch the
570 # log and detect renames down to the latest revision.
570 # log and detect renames down to the latest revision.
571 stream = self._getlog([path], stop, dirent.created_rev)
571 stream = self._getlog([path], stop, dirent.created_rev)
572 try:
572 try:
573 for entry in stream:
573 for entry in stream:
574 paths, revnum, author, date, message = entry
574 paths, revnum, author, date, message = entry
575 if revnum <= dirent.created_rev:
575 if revnum <= dirent.created_rev:
576 break
576 break
577
577
578 for p in paths:
578 for p in paths:
579 if not path.startswith(p) or not paths[p].copyfrom_path:
579 if not path.startswith(p) or not paths[p].copyfrom_path:
580 continue
580 continue
581 newpath = paths[p].copyfrom_path + path[len(p):]
581 newpath = paths[p].copyfrom_path + path[len(p):]
582 self.ui.debug("branch renamed from %s to %s at %d\n" %
582 self.ui.debug("branch renamed from %s to %s at %d\n" %
583 (path, newpath, revnum))
583 (path, newpath, revnum))
584 path = newpath
584 path = newpath
585 break
585 break
586 finally:
586 finally:
587 stream.close()
587 stream.close()
588
588
589 if not path.startswith(self.rootmodule):
589 if not path.startswith(self.rootmodule):
590 self.ui.debug('ignoring foreign branch %r\n' % path)
590 self.ui.debug('ignoring foreign branch %r\n' % path)
591 return None
591 return None
592 return self.revid(dirent.created_rev, path)
592 return self.revid(dirent.created_rev, path)
593
593
594 def reparent(self, module):
594 def reparent(self, module):
595 """Reparent the svn transport and return the previous parent."""
595 """Reparent the svn transport and return the previous parent."""
596 if self.prevmodule == module:
596 if self.prevmodule == module:
597 return module
597 return module
598 svnurl = self.baseurl + urllib.quote(module)
598 svnurl = self.baseurl + urllib.quote(module)
599 prevmodule = self.prevmodule
599 prevmodule = self.prevmodule
600 if prevmodule is None:
600 if prevmodule is None:
601 prevmodule = ''
601 prevmodule = ''
602 self.ui.debug("reparent to %s\n" % svnurl)
602 self.ui.debug("reparent to %s\n" % svnurl)
603 svn.ra.reparent(self.ra, svnurl)
603 svn.ra.reparent(self.ra, svnurl)
604 self.prevmodule = module
604 self.prevmodule = module
605 return prevmodule
605 return prevmodule
606
606
def expandpaths(self, rev, paths, parents):
    """Expand the svn log entries for rev into changed/removed/copies.

    paths is a sequence of (svn path, log entry) pairs; parents are the
    already-converted parent revision ids (only parents[0] is used).
    Returns (changed, removed, copies) where changed is a list that also
    includes all removed entries, removed is a set, and copies maps
    destination path -> source path (both recoded).
    """
    changed, removed = set(), set()
    copies = {}

    # Make sure the transport is parented at the module of rev before
    # doing any path checks.
    new_module, revnum = revsplit(rev)[1:]
    if new_module != self.module:
        self.module = new_module
        self.reparent(self.module)

    for i, (path, ent) in enumerate(paths):
        self.ui.progress(_('scanning paths'), i, item=path,
                         total=len(paths))
        entrypath = self.getrelpath(path)

        kind = self._checkpath(entrypath, revnum)
        if kind == svn.core.svn_node_file:
            changed.add(self.recode(entrypath))
            if not ent.copyfrom_path or not parents:
                continue
            # Copy sources not in parent revisions cannot be
            # represented, ignore their origin for now
            pmodule, prevnum = revsplit(parents[0])[1:]
            if ent.copyfrom_rev < prevnum:
                continue
            copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
            if not copyfrom_path:
                continue
            self.ui.debug("copied to %s from %s@%s\n" %
                          (entrypath, copyfrom_path, ent.copyfrom_rev))
            copies[self.recode(entrypath)] = self.recode(copyfrom_path)
        elif kind == 0: # gone, but had better be a deleted *file*
            self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
            pmodule, prevnum = revsplit(parents[0])[1:]
            parentpath = pmodule + "/" + entrypath
            fromkind = self._checkpath(entrypath, prevnum, pmodule)

            if fromkind == svn.core.svn_node_file:
                removed.add(self.recode(entrypath))
            elif fromkind == svn.core.svn_node_dir:
                # A whole directory was removed: enumerate the files it
                # contained in the parent revision and mark each one.
                oroot = parentpath.strip('/')
                nroot = path.strip('/')
                children = self._iterfiles(oroot, prevnum)
                for childpath in children:
                    childpath = childpath.replace(oroot, nroot)
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if childpath:
                        removed.add(self.recode(childpath))
            else:
                self.ui.debug('unknown path in revision %d: %s\n' % \
                              (revnum, path))
        elif kind == svn.core.svn_node_dir:
            if ent.action == 'M':
                # If the directory just had a prop change,
                # then we shouldn't need to look for its children.
                continue
            if ent.action == 'R' and parents:
                # If a directory is replacing a file, mark the previous
                # file as deleted
                pmodule, prevnum = revsplit(parents[0])[1:]
                pkind = self._checkpath(entrypath, prevnum, pmodule)
                if pkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif pkind == svn.core.svn_node_dir:
                    # We do not know what files were kept or removed,
                    # mark them all as changed.
                    for childpath in self._iterfiles(pmodule, prevnum):
                        childpath = self.getrelpath("/" + childpath)
                        if childpath:
                            changed.add(self.recode(childpath))

            # Everything currently under the (added/replaced) directory
            # counts as changed.
            for childpath in self._iterfiles(path, revnum):
                childpath = self.getrelpath("/" + childpath)
                if childpath:
                    changed.add(self.recode(childpath))

            # Handle directory copies
            if not ent.copyfrom_path or not parents:
                continue
            # Copy sources not in parent revisions cannot be
            # represented, ignore their origin for now
            pmodule, prevnum = revsplit(parents[0])[1:]
            if ent.copyfrom_rev < prevnum:
                continue
            copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
            if not copyfrompath:
                continue
            self.ui.debug("mark %s came from %s:%d\n"
                          % (path, copyfrompath, ent.copyfrom_rev))
            # Record a per-file copy for every file under the copied
            # directory.
            children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
            for childpath in children:
                childpath = self.getrelpath("/" + childpath, pmodule)
                if not childpath:
                    continue
                copytopath = path + childpath[len(copyfrompath):]
                copytopath = self.getrelpath(copytopath)
                copies[self.recode(copytopath)] = self.recode(childpath)

    self.ui.progress(_('scanning paths'), None)
    # changed must contain the removed files as well for the converter.
    changed.update(removed)
    return (list(changed), removed, copies)
707
707
def _fetch_revisions(self, from_revnum, to_revnum):
    """Fetch the svn log for self.module between two revision numbers
    and populate self.commits/self.paths with the parsed changesets.

    The log is always walked from the higher revision number down to
    the lower one (arguments are swapped if needed).
    """
    if from_revnum < to_revnum:
        from_revnum, to_revnum = to_revnum, from_revnum

    # Most recently parsed changeset; its parents list is patched once
    # the next (older) revision is known.
    self.child_cset = None

    def parselogentry(orig_paths, revnum, author, date, message):
        """Return the parsed commit object or None, and True if
        the revision is a branch root.
        """
        self.ui.debug("parsing revision %d (%d changes)\n" %
                      (revnum, len(orig_paths)))

        branched = False
        rev = self.revid(revnum)
        # branch log might return entries for a parent we already have

        if rev in self.commits or revnum < to_revnum:
            return None, branched

        parents = []
        # check whether this revision is the start of a branch or part
        # of a branch renaming
        orig_paths = sorted(orig_paths.iteritems())
        root_paths = [(p, e) for p, e in orig_paths
                      if self.module.startswith(p)]
        if root_paths:
            # Deepest enclosing path wins.
            path, ent = root_paths[-1]
            if ent.copyfrom_path:
                branched = True
                newpath = ent.copyfrom_path + self.module[len(path):]
                # ent.copyfrom_rev may not be the actual last revision
                previd = self.latest(newpath, ent.copyfrom_rev)
                if previd is not None:
                    prevmodule, prevnum = revsplit(previd)[1:]
                    if prevnum >= self.startrev:
                        parents = [previd]
                        self.ui.note(
                            _('found parent of branch %s at %d: %s\n') %
                            (self.module, prevnum, prevmodule))
            else:
                self.ui.debug("no copyfrom path, don't know what to do.\n")

        paths = []
        # filter out unrelated paths
        for path, ent in orig_paths:
            if self.getrelpath(path) is None:
                continue
            paths.append((path, ent))

        # Example SVN datetime. Includes microseconds.
        # ISO-8601 conformant
        # '2007-01-04T17:35:00.902377Z'
        date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

        log = message and self.recode(message) or ''
        author = author and self.recode(author) or ''
        try:
            branch = self.module.split("/")[-1]
            if branch == self.trunkname:
                branch = None
        except IndexError:
            branch = None

        cset = commit(author=author,
                      date=util.datestr(date),
                      desc=log,
                      parents=parents,
                      branch=branch,
                      rev=rev)

        self.commits[rev] = cset
        # The parents list is *shared* among self.paths and the
        # commit object. Both will be updated below.
        self.paths[rev] = (paths, cset.parents)
        if self.child_cset and not self.child_cset.parents:
            # Link the previously parsed (newer) changeset to us.
            self.child_cset.parents[:] = [rev]
        self.child_cset = cset
        return cset, branched

    self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                 (self.module, from_revnum, to_revnum))

    try:
        firstcset = None
        lastonbranch = False
        stream = self._getlog([self.module], from_revnum, to_revnum)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum < self.startrev:
                    # Past the user-requested start revision: stop.
                    lastonbranch = True
                    break
                if not paths:
                    self.ui.debug('revision %d has no entries\n' % revnum)
                    # If we ever leave the loop on an empty
                    # revision, do not try to get a parent branch
                    lastonbranch = lastonbranch or revnum == 0
                    continue
                cset, lastonbranch = parselogentry(paths, revnum, author,
                                                   date, message)
                if cset:
                    firstcset = cset
                if lastonbranch:
                    break
        finally:
            stream.close()

        if not lastonbranch and firstcset and not firstcset.parents:
            # The first revision of the sequence (the last fetched one)
            # has invalid parents if not a branch root. Find the parent
            # revision now, if any.
            try:
                firstrevnum = self.revnum(firstcset.rev)
                if firstrevnum > 1:
                    latest = self.latest(self.module, firstrevnum - 1)
                    if latest:
                        firstcset.parents.append(latest)
            except SvnPathNotFound:
                pass
    except SubversionException, (inst, num):
        if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
            raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
        raise
832
832
def getfile(self, file, rev):
    """Return (data, mode) for file at rev.

    mode is 'x' for executable files, 'l' for symlinks (whose payload
    has the svn "link " prefix stripped), '' otherwise.  Raises IOError
    when the file was removed in rev or does not exist there.
    """
    # TODO: ra.get_file transmits the whole file instead of diffs.
    if file in self.removed:
        raise IOError()
    mode = ''
    try:
        new_module, revnum = revsplit(rev)[1:]
        if self.module != new_module:
            self.module = new_module
            self.reparent(self.module)
        io = StringIO()
        info = svn.ra.get_file(self.ra, file, revnum, io)
        data = io.getvalue()
        # ra.get_files() seems to keep a reference on the input buffer
        # preventing collection. Release it explicitely.
        io.close()
        if isinstance(info, list):
            info = info[-1]
        # Derive the Mercurial mode flag from the svn properties.
        mode = ("svn:executable" in info) and 'x' or ''
        mode = ("svn:special" in info) and 'l' or mode
    except SubversionException, e:
        notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                    svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
        if e.apr_err in notfound: # File not found
            raise IOError()
        raise
    if mode == 'l':
        # svn stores symlink targets as "link TARGET".
        link_prefix = "link "
        if data.startswith(link_prefix):
            data = data[len(link_prefix):]
    return data, mode
864
864
def _iterfiles(self, path, revnum):
    """Enumerate all files in path at revnum, recursively.

    Yields repository-relative names (no leading slash); directory
    entries themselves are filtered out.
    """
    path = path.strip('/')
    pool = Pool()
    rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
    entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
    prefix = path and path + '/' or ''
    return (prefix + name for name, entry in entries.iteritems()
            if entry.kind == svn.core.svn_node_file)
875
875
def getrelpath(self, path, module=None):
    """Strip module from path and return the module-relative path.

    Returns '' when path is the module itself, the relative path when
    path lies inside module, and None (after a debug note) when path
    is outside the tracked tree.  module defaults to self.module.
    """
    if module is None:
        module = self.module
    # Given the repository url of this wc, say
    # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
    # extract the "entry" portion (a relative path) from what
    # svn log --xml says, ie
    # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
    # that is to say "tests/PloneTestCase.py"
    if path.startswith(module):
        rel = path.rstrip('/')[len(module):]
        if rel == '':
            return rel
        if rel.startswith('/'):
            return rel[1:]
    # The path is outside our tracked tree...
    self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
    return None
895
895
def _checkpath(self, path, revnum, module=None):
    """Return the svn node kind of path at revnum.

    When module is given, the transport is temporarily reparented at
    the repository root and path is looked up under module; the
    previous parent is restored afterwards.
    """
    reparented = module is not None
    if reparented:
        prevmodule = self.reparent('')
        path = module + '/' + path
    try:
        # ra.check_path does not like leading slashes very much, it leads
        # to PROPFIND subversion errors
        return svn.ra.check_path(self.ra, path.strip('/'), revnum)
    finally:
        if reparented:
            self.reparent(prevmodule)
907
907
def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
            strict_node_history=False):
    """Run the svn log query in a child 'hg debugsvnlog' process and
    return a stream of parsed log entries read from its stdout."""
    # Normalize path names, svn >= 1.5 only wants paths relative to
    # supplied URL
    def relativize(p):
        if not p.startswith('/'):
            p = self.module + '/' + p
        return p.strip('/')

    relpaths = [relativize(p) for p in paths]
    args = [self.baseurl, relpaths, start, end, limit,
            discover_changed_paths, strict_node_history]
    arg = encodeargs(args)
    hgexe = util.hgexecutable()
    cmd = '%s debugsvnlog' % util.shellquote(hgexe)
    stdin, stdout = util.popen2(util.quotecommand(cmd))
    stdin.write(arg)
    try:
        stdin.close()
    except IOError:
        raise util.Abort(_('Mercurial failed to run itself, check'
                           ' hg executable is in PATH'))
    return logstream(stdout)
930
930
# Shell script installed as the pre-revprop-change hook in repositories
# created by svn_sink.  It accepts only the property changes the
# conversion performs (modifying svn:log, adding hg:convert-branch and
# hg:convert-rev) and rejects everything else.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
945 '''
946
946
class svn_sink(converter_sink, commandline):
    """Converter sink writing revisions into a Subversion repository.

    Changes are applied to a checked-out working copy (self.wc) with
    the 'svn' command line client and committed from there; extra
    conversion state (sha map, child map, author map) is kept inside
    the working copy's .svn administrative directory.
    """

    # Matches the revision number in 'svn commit' output.
    commit_re = re.compile(r'Committed revision (\d+).', re.M)
    # Matches the repository UUID in 'svn info' output.
    uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)

    def prerun(self):
        # Run every svn command from inside the working copy.
        if self.wc:
            os.chdir(self.wc)

    def postrun(self):
        # Restore the directory we started from.
        if self.wc:
            os.chdir(self.cwd)

    def join(self, name):
        """Return the path of name inside the wc .svn admin area."""
        return os.path.join(self.wc, '.svn', name)

    def revmapfile(self):
        return self.join('hg-shamap')

    def authorfile(self):
        return self.join('hg-authormap')

    def __init__(self, ui, path):
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []        # files to 'svn delete' at next commit
        self.setexec = []       # files gaining svn:executable
        self.delexec = []       # files losing svn:executable
        self.copies = []        # [source, dest] pairs to 'svn copy'
        self.wc = None          # working copy root
        self.cwd = os.getcwd()

        path = os.path.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path is already a working copy: update and reuse it.
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    # No repository there yet: create one with svnadmin.
                    ui.status(_('initializing svn repository %r\n') %
                              os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(_('initializing svn working copy %r\n')
                      % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = scmutil.opener(self.wc)
        self.wopener = scmutil.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # is_exec is None on filesystems without an execute bit.
        self.is_exec = util.checkexec(self.wc) and util.isexec or None

        if created:
            # Install the hook that allows the conversion to set
            # revision properties (svn rejects revprop changes unless a
            # pre-revprop-change hook exists).
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.setflags(hook, False, True)

        output = self.run0('info')
        self.uuid = self.uuid_re.search(output).group(1).strip()

    def wjoin(self, *names):
        return os.path.join(self.wc, *names)

    def putfile(self, filename, flags, data):
        """Write data to filename in the working copy and record any
        svn:executable property change for the next commit."""
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                if os.path.islink(self.wjoin(filename)):
                    # Replacing a symlink with a regular file: remove
                    # the link first so the write does not follow it.
                    # NOTE(review): unlink uses the relative name and so
                    # appears to rely on cwd == self.wc (set by prerun)
                    # -- confirm against callers.
                    os.unlink(filename)
            except OSError:
                pass
            self.wopener.write(filename, data)

        if self.is_exec:
            was_exec = self.is_exec(self.wjoin(filename))
        else:
            # On filesystems not supporting execute-bit, there is no way
            # to know if it is set but asking subversion. Setting it
            # systematically is just as expensive and much simpler.
            was_exec = 'x' not in flags

        util.setflags(self.wjoin(filename), False, 'x' in flags)
        if was_exec:
            if 'x' not in flags:
                self.delexec.append(filename)
        else:
            if 'x' in flags:
                self.setexec.append(filename)

    def _copyfile(self, source, dest):
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.lexists(wdest)
        if exists:
            # Move the existing destination content out of the way...
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            if exists:
                # ...and put it back over what 'svn copy' created.
                try:
                    os.unlink(wdest)
                except OSError:
                    pass
                os.rename(tempname, wdest)

    def dirs_of(self, files):
        """Return the set of directories containing the given files,
        including every ancestor directory."""
        dirs = set()
        for f in files:
            if os.path.isdir(self.wjoin(f)):
                dirs.add(f)
            for i in strutil.rfindall(f, '/'):
                dirs.add(f[:i])
        return dirs

    def add_dirs(self, files):
        """'svn add' any not-yet-versioned ancestor directories of
        files; return the directories added."""
        add_dirs = [d for d in sorted(self.dirs_of(files))
                    if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
        if add_dirs:
            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
        return add_dirs

    def add_files(self, files):
        """'svn add' the given files and return them."""
        if files:
            self.xargs(files, 'add', quiet=True)
        return files

    def tidy_dirs(self, names):
        """'svn delete' directories left containing only their .svn
        admin area, deepest first; return the deleted directories."""
        deleted = []
        for d in sorted(self.dirs_of(names), reverse=True):
            wd = self.wjoin(d)
            # os.listdir returns a list, so compare against ['.svn'];
            # the previous comparison with the bare string '.svn' was
            # always false and emptied directories were never pruned.
            if os.listdir(wd) == ['.svn']:
                self.run0('delete', d)
                deleted.append(d)
        return deleted

    def addchild(self, parent, child):
        self.childmap[parent] = child

    def revid(self, rev):
        """Build this sink's revision id for svn revision number rev."""
        return u"svn:%s@%s" % (self.uuid, rev)

    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Apply one converted changeset to the working copy, commit it
        and return the resulting revision id.

        If some parent already has a converted child recorded in
        childmap, that child's revision id is returned instead of
        committing again.
        """
        # Apply changes to working copy
        for f, v in files:
            try:
                data, mode = source.getfile(f, v)
            except IOError:
                # Missing in source: schedule deletion.
                self.delete.append(f)
            else:
                self.putfile(f, mode, data)
                if f in copies:
                    self.copies.append([copies[f], f])
        files = [f[0] for f in files]

        for parent in parents:
            try:
                return self.revid(self.childmap[parent])
            except KeyError:
                pass
        entries = set(self.delete)
        files = frozenset(files)
        entries.update(self.add_dirs(files.difference(entries)))
        if self.copies:
            for s, d in self.copies:
                self._copyfile(s, d)
            self.copies = []
        if self.delete:
            self.xargs(self.delete, 'delete')
            self.delete = []
        entries.update(self.add_files(files.difference(entries)))
        entries.update(self.tidy_dirs(entries))
        if self.delexec:
            self.xargs(self.delexec, 'propdel', 'svn:executable')
            self.delexec = []
        if self.setexec:
            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
            self.setexec = []

        # svn reads the commit message from a file.
        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
        fp = os.fdopen(fd, 'w')
        fp.write(commit.desc)
        fp.close()
        try:
            output = self.run0('commit',
                               username=util.shortuser(commit.author),
                               file=messagefile,
                               encoding='utf-8')
            try:
                rev = self.commit_re.search(output).group(1)
            except AttributeError:
                if not files:
                    # Nothing was committed; reuse the first parent.
                    return parents[0]
                self.ui.warn(_('unexpected svn output:\n'))
                self.ui.warn(output)
                raise util.Abort(_('unable to cope with svn output'))
            if commit.rev:
                # Record the source revision id as a revision property.
                self.run('propset', 'hg:convert-rev', commit.rev,
                         revprop=True, revision=rev)
            if commit.branch and commit.branch != 'default':
                self.run('propset', 'hg:convert-branch', commit.branch,
                         revprop=True, revision=rev)
            for parent in parents:
                self.addchild(parent, rev)
            return self.revid(rev)
        finally:
            os.unlink(messagefile)

    def puttags(self, tags):
        self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
        return None, None
@@ -1,128 +1,128
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
4 # This is a stripped-down version of the original bzr-svn transport.py,
4 # This is a stripped-down version of the original bzr-svn transport.py,
5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
6
6
7 # This program is free software; you can redistribute it and/or modify
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
10 # (at your option) any later version.
11
11
12 # This program is distributed in the hope that it will be useful,
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
15 # GNU General Public License for more details.
16
16
17 # You should have received a copy of the GNU General Public License
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20
20
21 from svn.core import SubversionException, Pool
21 from svn.core import SubversionException, Pool
22 import svn.ra
22 import svn.ra
23 import svn.client
23 import svn.client
24 import svn.core
24 import svn.core
25
25
26 # Some older versions of the Python bindings need to be
26 # Some older versions of the Python bindings need to be
27 # explicitly initialized. But what we want to do probably
27 # explicitly initialized. But what we want to do probably
28 # won't work worth a darn against those libraries anyway!
28 # won't work worth a darn against those libraries anyway!
29 svn.ra.initialize()
29 svn.ra.initialize()
30
30
31 svn_config = svn.core.svn_config_get_config(None)
31 svn_config = svn.core.svn_config_get_config(None)
32
32
33
33
34 def _create_auth_baton(pool):
34 def _create_auth_baton(pool):
35 """Create a Subversion authentication baton. """
35 """Create a Subversion authentication baton. """
36 import svn.client
36 import svn.client
37 # Give the client context baton a suite of authentication
37 # Give the client context baton a suite of authentication
38 # providers.h
38 # providers.h
39 providers = [
39 providers = [
40 svn.client.get_simple_provider(pool),
40 svn.client.get_simple_provider(pool),
41 svn.client.get_username_provider(pool),
41 svn.client.get_username_provider(pool),
42 svn.client.get_ssl_client_cert_file_provider(pool),
42 svn.client.get_ssl_client_cert_file_provider(pool),
43 svn.client.get_ssl_client_cert_pw_file_provider(pool),
43 svn.client.get_ssl_client_cert_pw_file_provider(pool),
44 svn.client.get_ssl_server_trust_file_provider(pool),
44 svn.client.get_ssl_server_trust_file_provider(pool),
45 ]
45 ]
46 # Platform-dependant authentication methods
46 # Platform-dependant authentication methods
47 getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider',
47 getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider',
48 None)
48 None)
49 if getprovider:
49 if getprovider:
50 # Available in svn >= 1.6
50 # Available in svn >= 1.6
51 for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'):
51 for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'):
52 for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'):
52 for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'):
53 p = getprovider(name, type, pool)
53 p = getprovider(name, type, pool)
54 if p:
54 if p:
55 providers.append(p)
55 providers.append(p)
56 else:
56 else:
57 if hasattr(svn.client, 'get_windows_simple_provider'):
57 if util.safehasattr(svn.client, 'get_windows_simple_provider'):
58 providers.append(svn.client.get_windows_simple_provider(pool))
58 providers.append(svn.client.get_windows_simple_provider(pool))
59
59
60 return svn.core.svn_auth_open(providers, pool)
60 return svn.core.svn_auth_open(providers, pool)
61
61
62 class NotBranchError(SubversionException):
62 class NotBranchError(SubversionException):
63 pass
63 pass
64
64
65 class SvnRaTransport(object):
65 class SvnRaTransport(object):
66 """
66 """
67 Open an ra connection to a Subversion repository.
67 Open an ra connection to a Subversion repository.
68 """
68 """
69 def __init__(self, url="", ra=None):
69 def __init__(self, url="", ra=None):
70 self.pool = Pool()
70 self.pool = Pool()
71 self.svn_url = url
71 self.svn_url = url
72 self.username = ''
72 self.username = ''
73 self.password = ''
73 self.password = ''
74
74
75 # Only Subversion 1.4 has reparent()
75 # Only Subversion 1.4 has reparent()
76 if ra is None or not hasattr(svn.ra, 'reparent'):
76 if ra is None or not util.safehasattr(svn.ra, 'reparent'):
77 self.client = svn.client.create_context(self.pool)
77 self.client = svn.client.create_context(self.pool)
78 ab = _create_auth_baton(self.pool)
78 ab = _create_auth_baton(self.pool)
79 if False:
79 if False:
80 svn.core.svn_auth_set_parameter(
80 svn.core.svn_auth_set_parameter(
81 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
81 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
82 svn.core.svn_auth_set_parameter(
82 svn.core.svn_auth_set_parameter(
83 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
83 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
84 self.client.auth_baton = ab
84 self.client.auth_baton = ab
85 self.client.config = svn_config
85 self.client.config = svn_config
86 try:
86 try:
87 self.ra = svn.client.open_ra_session(
87 self.ra = svn.client.open_ra_session(
88 self.svn_url.encode('utf8'),
88 self.svn_url.encode('utf8'),
89 self.client, self.pool)
89 self.client, self.pool)
90 except SubversionException, (inst, num):
90 except SubversionException, (inst, num):
91 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
91 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
92 svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
92 svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
93 svn.core.SVN_ERR_BAD_URL):
93 svn.core.SVN_ERR_BAD_URL):
94 raise NotBranchError(url)
94 raise NotBranchError(url)
95 raise
95 raise
96 else:
96 else:
97 self.ra = ra
97 self.ra = ra
98 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
98 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
99
99
100 class Reporter(object):
100 class Reporter(object):
101 def __init__(self, reporter_data):
101 def __init__(self, reporter_data):
102 self._reporter, self._baton = reporter_data
102 self._reporter, self._baton = reporter_data
103
103
104 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
104 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
105 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
105 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
106 path, revnum, start_empty, lock_token, pool)
106 path, revnum, start_empty, lock_token, pool)
107
107
108 def delete_path(self, path, pool=None):
108 def delete_path(self, path, pool=None):
109 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
109 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
110 path, pool)
110 path, pool)
111
111
112 def link_path(self, path, url, revision, start_empty, lock_token,
112 def link_path(self, path, url, revision, start_empty, lock_token,
113 pool=None):
113 pool=None):
114 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
114 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
115 path, url, revision, start_empty, lock_token,
115 path, url, revision, start_empty, lock_token,
116 pool)
116 pool)
117
117
118 def finish_report(self, pool=None):
118 def finish_report(self, pool=None):
119 svn.ra.reporter2_invoke_finish_report(self._reporter,
119 svn.ra.reporter2_invoke_finish_report(self._reporter,
120 self._baton, pool)
120 self._baton, pool)
121
121
122 def abort_report(self, pool=None):
122 def abort_report(self, pool=None):
123 svn.ra.reporter2_invoke_abort_report(self._reporter,
123 svn.ra.reporter2_invoke_abort_report(self._reporter,
124 self._baton, pool)
124 self._baton, pool)
125
125
126 def do_update(self, revnum, path, *args, **kwargs):
126 def do_update(self, revnum, path, *args, **kwargs):
127 return self.Reporter(svn.ra.do_update(self.ra, revnum, path,
127 return self.Reporter(svn.ra.do_update(self.ra, revnum, path,
128 *args, **kwargs))
128 *args, **kwargs))
@@ -1,335 +1,347
1 """automatically manage newlines in repository files
1 """automatically manage newlines in repository files
2
2
3 This extension allows you to manage the type of line endings (CRLF or
3 This extension allows you to manage the type of line endings (CRLF or
4 LF) that are used in the repository and in the local working
4 LF) that are used in the repository and in the local working
5 directory. That way you can get CRLF line endings on Windows and LF on
5 directory. That way you can get CRLF line endings on Windows and LF on
6 Unix/Mac, thereby letting everybody use their OS native line endings.
6 Unix/Mac, thereby letting everybody use their OS native line endings.
7
7
8 The extension reads its configuration from a versioned ``.hgeol``
8 The extension reads its configuration from a versioned ``.hgeol``
9 configuration file found in the root of the working copy. The
9 configuration file found in the root of the working copy. The
10 ``.hgeol`` file use the same syntax as all other Mercurial
10 ``.hgeol`` file use the same syntax as all other Mercurial
11 configuration files. It uses two sections, ``[patterns]`` and
11 configuration files. It uses two sections, ``[patterns]`` and
12 ``[repository]``.
12 ``[repository]``.
13
13
14 The ``[patterns]`` section specifies how line endings should be
14 The ``[patterns]`` section specifies how line endings should be
15 converted between the working copy and the repository. The format is
15 converted between the working copy and the repository. The format is
16 specified by a file pattern. The first match is used, so put more
16 specified by a file pattern. The first match is used, so put more
17 specific patterns first. The available line endings are ``LF``,
17 specific patterns first. The available line endings are ``LF``,
18 ``CRLF``, and ``BIN``.
18 ``CRLF``, and ``BIN``.
19
19
20 Files with the declared format of ``CRLF`` or ``LF`` are always
20 Files with the declared format of ``CRLF`` or ``LF`` are always
21 checked out and stored in the repository in that format and files
21 checked out and stored in the repository in that format and files
22 declared to be binary (``BIN``) are left unchanged. Additionally,
22 declared to be binary (``BIN``) are left unchanged. Additionally,
23 ``native`` is an alias for checking out in the platform's default line
23 ``native`` is an alias for checking out in the platform's default line
24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
26 default behaviour; it is only needed if you need to override a later,
26 default behaviour; it is only needed if you need to override a later,
27 more general pattern.
27 more general pattern.
28
28
29 The optional ``[repository]`` section specifies the line endings to
29 The optional ``[repository]`` section specifies the line endings to
30 use for files stored in the repository. It has a single setting,
30 use for files stored in the repository. It has a single setting,
31 ``native``, which determines the storage line endings for files
31 ``native``, which determines the storage line endings for files
32 declared as ``native`` in the ``[patterns]`` section. It can be set to
32 declared as ``native`` in the ``[patterns]`` section. It can be set to
33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
34 that on Windows, files configured as ``native`` (``CRLF`` by default)
34 that on Windows, files configured as ``native`` (``CRLF`` by default)
35 will be converted to ``LF`` when stored in the repository. Files
35 will be converted to ``LF`` when stored in the repository. Files
36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
37 are always stored as-is in the repository.
37 are always stored as-is in the repository.
38
38
39 Example versioned ``.hgeol`` file::
39 Example versioned ``.hgeol`` file::
40
40
41 [patterns]
41 [patterns]
42 **.py = native
42 **.py = native
43 **.vcproj = CRLF
43 **.vcproj = CRLF
44 **.txt = native
44 **.txt = native
45 Makefile = LF
45 Makefile = LF
46 **.jpg = BIN
46 **.jpg = BIN
47
47
48 [repository]
48 [repository]
49 native = LF
49 native = LF
50
50
51 .. note::
51 .. note::
52 The rules will first apply when files are touched in the working
52 The rules will first apply when files are touched in the working
53 copy, e.g. by updating to null and back to tip to touch all files.
53 copy, e.g. by updating to null and back to tip to touch all files.
54
54
55 The extension uses an optional ``[eol]`` section in your hgrc file
55 The extension uses an optional ``[eol]`` section read from both the
56 (not the ``.hgeol`` file) for settings that control the overall
56 normal Mercurial configuration files and the ``.hgeol`` file, with the
57 behavior. There are two settings:
57 latter overriding the former. You can use that section to control the
58 overall behavior. There are three settings:
58
59
59 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
60 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
60 ``CRLF`` to override the default interpretation of ``native`` for
61 ``CRLF`` to override the default interpretation of ``native`` for
61 checkout. This can be used with :hg:`archive` on Unix, say, to
62 checkout. This can be used with :hg:`archive` on Unix, say, to
62 generate an archive where files have line endings for Windows.
63 generate an archive where files have line endings for Windows.
63
64
64 - ``eol.only-consistent`` (default True) can be set to False to make
65 - ``eol.only-consistent`` (default True) can be set to False to make
65 the extension convert files with inconsistent EOLs. Inconsistent
66 the extension convert files with inconsistent EOLs. Inconsistent
66 means that there is both ``CRLF`` and ``LF`` present in the file.
67 means that there is both ``CRLF`` and ``LF`` present in the file.
67 Such files are normally not touched under the assumption that they
68 Such files are normally not touched under the assumption that they
68 have mixed EOLs on purpose.
69 have mixed EOLs on purpose.
69
70
71 - ``eol.fix-trailing-newline`` (default False) can be set to True to
72 ensure that converted files end with a EOL character (either ``\\n``
73 or ``\\r\\n`` as per the configured patterns).
74
70 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
75 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
71 like the deprecated win32text extension does. This means that you can
76 like the deprecated win32text extension does. This means that you can
72 disable win32text and enable eol and your filters will still work. You
77 disable win32text and enable eol and your filters will still work. You
73 only need to these filters until you have prepared a ``.hgeol`` file.
78 only need to these filters until you have prepared a ``.hgeol`` file.
74
79
75 The ``win32text.forbid*`` hooks provided by the win32text extension
80 The ``win32text.forbid*`` hooks provided by the win32text extension
76 have been unified into a single hook named ``eol.checkheadshook``. The
81 have been unified into a single hook named ``eol.checkheadshook``. The
77 hook will lookup the expected line endings from the ``.hgeol`` file,
82 hook will lookup the expected line endings from the ``.hgeol`` file,
78 which means you must migrate to a ``.hgeol`` file first before using
83 which means you must migrate to a ``.hgeol`` file first before using
79 the hook. ``eol.checkheadshook`` only checks heads, intermediate
84 the hook. ``eol.checkheadshook`` only checks heads, intermediate
80 invalid revisions will be pushed. To forbid them completely, use the
85 invalid revisions will be pushed. To forbid them completely, use the
81 ``eol.checkallhook`` hook. These hooks are best used as
86 ``eol.checkallhook`` hook. These hooks are best used as
82 ``pretxnchangegroup`` hooks.
87 ``pretxnchangegroup`` hooks.
83
88
84 See :hg:`help patterns` for more information about the glob patterns
89 See :hg:`help patterns` for more information about the glob patterns
85 used.
90 used.
86 """
91 """
87
92
88 from mercurial.i18n import _
93 from mercurial.i18n import _
89 from mercurial import util, config, extensions, match, error
94 from mercurial import util, config, extensions, match, error
90 import re, os
95 import re, os
91
96
92 # Matches a lone LF, i.e., one that is not part of CRLF.
97 # Matches a lone LF, i.e., one that is not part of CRLF.
93 singlelf = re.compile('(^|[^\r])\n')
98 singlelf = re.compile('(^|[^\r])\n')
94 # Matches a single EOL which can either be a CRLF where repeated CR
99 # Matches a single EOL which can either be a CRLF where repeated CR
95 # are removed or a LF. We do not care about old Machintosh files, so a
100 # are removed or a LF. We do not care about old Machintosh files, so a
96 # stray CR is an error.
101 # stray CR is an error.
97 eolre = re.compile('\r*\n')
102 eolre = re.compile('\r*\n')
98
103
99
104
100 def inconsistenteol(data):
105 def inconsistenteol(data):
101 return '\r\n' in data and singlelf.search(data)
106 return '\r\n' in data and singlelf.search(data)
102
107
103 def tolf(s, params, ui, **kwargs):
108 def tolf(s, params, ui, **kwargs):
104 """Filter to convert to LF EOLs."""
109 """Filter to convert to LF EOLs."""
105 if util.binary(s):
110 if util.binary(s):
106 return s
111 return s
107 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
112 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
108 return s
113 return s
114 if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
115 s = s + '\n'
109 return eolre.sub('\n', s)
116 return eolre.sub('\n', s)
110
117
111 def tocrlf(s, params, ui, **kwargs):
118 def tocrlf(s, params, ui, **kwargs):
112 """Filter to convert to CRLF EOLs."""
119 """Filter to convert to CRLF EOLs."""
113 if util.binary(s):
120 if util.binary(s):
114 return s
121 return s
115 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
122 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
116 return s
123 return s
124 if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
125 s = s + '\n'
117 return eolre.sub('\r\n', s)
126 return eolre.sub('\r\n', s)
118
127
119 def isbinary(s, params):
128 def isbinary(s, params):
120 """Filter to do nothing with the file."""
129 """Filter to do nothing with the file."""
121 return s
130 return s
122
131
123 filters = {
132 filters = {
124 'to-lf': tolf,
133 'to-lf': tolf,
125 'to-crlf': tocrlf,
134 'to-crlf': tocrlf,
126 'is-binary': isbinary,
135 'is-binary': isbinary,
127 # The following provide backwards compatibility with win32text
136 # The following provide backwards compatibility with win32text
128 'cleverencode:': tolf,
137 'cleverencode:': tolf,
129 'cleverdecode:': tocrlf
138 'cleverdecode:': tocrlf
130 }
139 }
131
140
132 class eolfile(object):
141 class eolfile(object):
133 def __init__(self, ui, root, data):
142 def __init__(self, ui, root, data):
134 self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
143 self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
135 self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
144 self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
136
145
137 self.cfg = config.config()
146 self.cfg = config.config()
138 # Our files should not be touched. The pattern must be
147 # Our files should not be touched. The pattern must be
139 # inserted first override a '** = native' pattern.
148 # inserted first override a '** = native' pattern.
140 self.cfg.set('patterns', '.hg*', 'BIN')
149 self.cfg.set('patterns', '.hg*', 'BIN')
141 # We can then parse the user's patterns.
150 # We can then parse the user's patterns.
142 self.cfg.parse('.hgeol', data)
151 self.cfg.parse('.hgeol', data)
143
152
144 isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
153 isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
145 self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
154 self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
146 iswdlf = ui.config('eol', 'native', os.linesep) in ('LF', '\n')
155 iswdlf = ui.config('eol', 'native', os.linesep) in ('LF', '\n')
147 self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'
156 self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'
148
157
149 include = []
158 include = []
150 exclude = []
159 exclude = []
151 for pattern, style in self.cfg.items('patterns'):
160 for pattern, style in self.cfg.items('patterns'):
152 key = style.upper()
161 key = style.upper()
153 if key == 'BIN':
162 if key == 'BIN':
154 exclude.append(pattern)
163 exclude.append(pattern)
155 else:
164 else:
156 include.append(pattern)
165 include.append(pattern)
157 # This will match the files for which we need to care
166 # This will match the files for which we need to care
158 # about inconsistent newlines.
167 # about inconsistent newlines.
159 self.match = match.match(root, '', [], include, exclude)
168 self.match = match.match(root, '', [], include, exclude)
160
169
161 def setfilters(self, ui):
170 def copytoui(self, ui):
162 for pattern, style in self.cfg.items('patterns'):
171 for pattern, style in self.cfg.items('patterns'):
163 key = style.upper()
172 key = style.upper()
164 try:
173 try:
165 ui.setconfig('decode', pattern, self._decode[key])
174 ui.setconfig('decode', pattern, self._decode[key])
166 ui.setconfig('encode', pattern, self._encode[key])
175 ui.setconfig('encode', pattern, self._encode[key])
167 except KeyError:
176 except KeyError:
168 ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
177 ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
169 % (style, self.cfg.source('patterns', pattern)))
178 % (style, self.cfg.source('patterns', pattern)))
179 # eol.only-consistent can be specified in ~/.hgrc or .hgeol
180 for k, v in self.cfg.items('eol'):
181 ui.setconfig('eol', k, v)
170
182
171 def checkrev(self, repo, ctx, files):
183 def checkrev(self, repo, ctx, files):
172 failed = []
184 failed = []
173 for f in (files or ctx.files()):
185 for f in (files or ctx.files()):
174 if f not in ctx:
186 if f not in ctx:
175 continue
187 continue
176 for pattern, style in self.cfg.items('patterns'):
188 for pattern, style in self.cfg.items('patterns'):
177 if not match.match(repo.root, '', [pattern])(f):
189 if not match.match(repo.root, '', [pattern])(f):
178 continue
190 continue
179 target = self._encode[style.upper()]
191 target = self._encode[style.upper()]
180 data = ctx[f].data()
192 data = ctx[f].data()
181 if (target == "to-lf" and "\r\n" in data
193 if (target == "to-lf" and "\r\n" in data
182 or target == "to-crlf" and singlelf.search(data)):
194 or target == "to-crlf" and singlelf.search(data)):
183 failed.append((str(ctx), target, f))
195 failed.append((str(ctx), target, f))
184 break
196 break
185 return failed
197 return failed
186
198
187 def parseeol(ui, repo, nodes):
199 def parseeol(ui, repo, nodes):
188 try:
200 try:
189 for node in nodes:
201 for node in nodes:
190 try:
202 try:
191 if node is None:
203 if node is None:
192 # Cannot use workingctx.data() since it would load
204 # Cannot use workingctx.data() since it would load
193 # and cache the filters before we configure them.
205 # and cache the filters before we configure them.
194 data = repo.wfile('.hgeol').read()
206 data = repo.wfile('.hgeol').read()
195 else:
207 else:
196 data = repo[node]['.hgeol'].data()
208 data = repo[node]['.hgeol'].data()
197 return eolfile(ui, repo.root, data)
209 return eolfile(ui, repo.root, data)
198 except (IOError, LookupError):
210 except (IOError, LookupError):
199 pass
211 pass
200 except error.ParseError, inst:
212 except error.ParseError, inst:
201 ui.warn(_("warning: ignoring .hgeol file due to parse error "
213 ui.warn(_("warning: ignoring .hgeol file due to parse error "
202 "at %s: %s\n") % (inst.args[1], inst.args[0]))
214 "at %s: %s\n") % (inst.args[1], inst.args[0]))
203 return None
215 return None
204
216
205 def _checkhook(ui, repo, node, headsonly):
217 def _checkhook(ui, repo, node, headsonly):
206 # Get revisions to check and touched files at the same time
218 # Get revisions to check and touched files at the same time
207 files = set()
219 files = set()
208 revs = set()
220 revs = set()
209 for rev in xrange(repo[node].rev(), len(repo)):
221 for rev in xrange(repo[node].rev(), len(repo)):
210 revs.add(rev)
222 revs.add(rev)
211 if headsonly:
223 if headsonly:
212 ctx = repo[rev]
224 ctx = repo[rev]
213 files.update(ctx.files())
225 files.update(ctx.files())
214 for pctx in ctx.parents():
226 for pctx in ctx.parents():
215 revs.discard(pctx.rev())
227 revs.discard(pctx.rev())
216 failed = []
228 failed = []
217 for rev in revs:
229 for rev in revs:
218 ctx = repo[rev]
230 ctx = repo[rev]
219 eol = parseeol(ui, repo, [ctx.node()])
231 eol = parseeol(ui, repo, [ctx.node()])
220 if eol:
232 if eol:
221 failed.extend(eol.checkrev(repo, ctx, files))
233 failed.extend(eol.checkrev(repo, ctx, files))
222
234
223 if failed:
235 if failed:
224 eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
236 eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
225 msgs = []
237 msgs = []
226 for node, target, f in failed:
238 for node, target, f in failed:
227 msgs.append(_(" %s in %s should not have %s line endings") %
239 msgs.append(_(" %s in %s should not have %s line endings") %
228 (f, node, eols[target]))
240 (f, node, eols[target]))
229 raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
241 raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
230
242
231 def checkallhook(ui, repo, node, hooktype, **kwargs):
243 def checkallhook(ui, repo, node, hooktype, **kwargs):
232 """verify that files have expected EOLs"""
244 """verify that files have expected EOLs"""
233 _checkhook(ui, repo, node, False)
245 _checkhook(ui, repo, node, False)
234
246
235 def checkheadshook(ui, repo, node, hooktype, **kwargs):
247 def checkheadshook(ui, repo, node, hooktype, **kwargs):
236 """verify that files have expected EOLs"""
248 """verify that files have expected EOLs"""
237 _checkhook(ui, repo, node, True)
249 _checkhook(ui, repo, node, True)
238
250
239 # "checkheadshook" used to be called "hook"
251 # "checkheadshook" used to be called "hook"
240 hook = checkheadshook
252 hook = checkheadshook
241
253
242 def preupdate(ui, repo, hooktype, parent1, parent2):
254 def preupdate(ui, repo, hooktype, parent1, parent2):
243 #print "preupdate for %s: %s -> %s" % (repo.root, parent1, parent2)
255 #print "preupdate for %s: %s -> %s" % (repo.root, parent1, parent2)
244 repo.loadeol([parent1])
256 repo.loadeol([parent1])
245 return False
257 return False
246
258
247 def uisetup(ui):
259 def uisetup(ui):
248 ui.setconfig('hooks', 'preupdate.eol', preupdate)
260 ui.setconfig('hooks', 'preupdate.eol', preupdate)
249
261
250 def extsetup(ui):
262 def extsetup(ui):
251 try:
263 try:
252 extensions.find('win32text')
264 extensions.find('win32text')
253 ui.warn(_("the eol extension is incompatible with the "
265 ui.warn(_("the eol extension is incompatible with the "
254 "win32text extension\n"))
266 "win32text extension\n"))
255 except KeyError:
267 except KeyError:
256 pass
268 pass
257
269
258
270
259 def reposetup(ui, repo):
271 def reposetup(ui, repo):
260 uisetup(repo.ui)
272 uisetup(repo.ui)
261 #print "reposetup for", repo.root
273 #print "reposetup for", repo.root
262
274
263 if not repo.local():
275 if not repo.local():
264 return
276 return
265 for name, fn in filters.iteritems():
277 for name, fn in filters.iteritems():
266 repo.adddatafilter(name, fn)
278 repo.adddatafilter(name, fn)
267
279
268 ui.setconfig('patch', 'eol', 'auto')
280 ui.setconfig('patch', 'eol', 'auto')
269
281
270 class eolrepo(repo.__class__):
282 class eolrepo(repo.__class__):
271
283
272 def loadeol(self, nodes):
284 def loadeol(self, nodes):
273 eol = parseeol(self.ui, self, nodes)
285 eol = parseeol(self.ui, self, nodes)
274 if eol is None:
286 if eol is None:
275 return None
287 return None
276 eol.setfilters(self.ui)
288 eol.copytoui(self.ui)
277 return eol.match
289 return eol.match
278
290
279 def _hgcleardirstate(self):
291 def _hgcleardirstate(self):
280 self._eolfile = self.loadeol([None, 'tip'])
292 self._eolfile = self.loadeol([None, 'tip'])
281 if not self._eolfile:
293 if not self._eolfile:
282 self._eolfile = util.never
294 self._eolfile = util.never
283 return
295 return
284
296
285 try:
297 try:
286 cachemtime = os.path.getmtime(self.join("eol.cache"))
298 cachemtime = os.path.getmtime(self.join("eol.cache"))
287 except OSError:
299 except OSError:
288 cachemtime = 0
300 cachemtime = 0
289
301
290 try:
302 try:
291 eolmtime = os.path.getmtime(self.wjoin(".hgeol"))
303 eolmtime = os.path.getmtime(self.wjoin(".hgeol"))
292 except OSError:
304 except OSError:
293 eolmtime = 0
305 eolmtime = 0
294
306
295 if eolmtime > cachemtime:
307 if eolmtime > cachemtime:
296 ui.debug("eol: detected change in .hgeol\n")
308 ui.debug("eol: detected change in .hgeol\n")
297 wlock = None
309 wlock = None
298 try:
310 try:
299 wlock = self.wlock()
311 wlock = self.wlock()
300 for f in self.dirstate:
312 for f in self.dirstate:
301 if self.dirstate[f] == 'n':
313 if self.dirstate[f] == 'n':
302 # all normal files need to be looked at
314 # all normal files need to be looked at
303 # again since the new .hgeol file might no
315 # again since the new .hgeol file might no
304 # longer match a file it matched before
316 # longer match a file it matched before
305 self.dirstate.normallookup(f)
317 self.dirstate.normallookup(f)
306 # Touch the cache to update mtime.
318 # Touch the cache to update mtime.
307 self.opener("eol.cache", "w").close()
319 self.opener("eol.cache", "w").close()
308 wlock.release()
320 wlock.release()
309 except error.LockUnavailable:
321 except error.LockUnavailable:
310 # If we cannot lock the repository and clear the
322 # If we cannot lock the repository and clear the
311 # dirstate, then a commit might not see all files
323 # dirstate, then a commit might not see all files
312 # as modified. But if we cannot lock the
324 # as modified. But if we cannot lock the
313 # repository, then we can also not make a commit,
325 # repository, then we can also not make a commit,
314 # so ignore the error.
326 # so ignore the error.
315 pass
327 pass
316
328
317 def commitctx(self, ctx, error=False):
329 def commitctx(self, ctx, error=False):
318 for f in sorted(ctx.added() + ctx.modified()):
330 for f in sorted(ctx.added() + ctx.modified()):
319 if not self._eolfile(f):
331 if not self._eolfile(f):
320 continue
332 continue
321 try:
333 try:
322 data = ctx[f].data()
334 data = ctx[f].data()
323 except IOError:
335 except IOError:
324 continue
336 continue
325 if util.binary(data):
337 if util.binary(data):
326 # We should not abort here, since the user should
338 # We should not abort here, since the user should
327 # be able to say "** = native" to automatically
339 # be able to say "** = native" to automatically
328 # have all non-binary files taken care of.
340 # have all non-binary files taken care of.
329 continue
341 continue
330 if inconsistenteol(data):
342 if inconsistenteol(data):
331 raise util.Abort(_("inconsistent newline style "
343 raise util.Abort(_("inconsistent newline style "
332 "in %s\n" % f))
344 "in %s\n" % f))
333 return super(eolrepo, self).commitctx(ctx, error)
345 return super(eolrepo, self).commitctx(ctx, error)
334 repo.__class__ = eolrepo
346 repo.__class__ = eolrepo
335 repo._hgcleardirstate()
347 repo._hgcleardirstate()
@@ -1,89 +1,90
1 # __init__.py - inotify-based status acceleration for Linux
1 # __init__.py - inotify-based status acceleration for Linux
2 #
2 #
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''accelerate status report using Linux's inotify service'''
9 '''accelerate status report using Linux's inotify service'''
10
10
11 # todo: socket permissions
11 # todo: socket permissions
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial import util
14 import server
15 import server
15 from client import client, QueryFailed
16 from client import client, QueryFailed
16
17
17 def serve(ui, repo, **opts):
18 def serve(ui, repo, **opts):
18 '''start an inotify server for this repository'''
19 '''start an inotify server for this repository'''
19 server.start(ui, repo.dirstate, repo.root, opts)
20 server.start(ui, repo.dirstate, repo.root, opts)
20
21
21 def debuginotify(ui, repo, **opts):
22 def debuginotify(ui, repo, **opts):
22 '''debugging information for inotify extension
23 '''debugging information for inotify extension
23
24
24 Prints the list of directories being watched by the inotify server.
25 Prints the list of directories being watched by the inotify server.
25 '''
26 '''
26 cli = client(ui, repo)
27 cli = client(ui, repo)
27 response = cli.debugquery()
28 response = cli.debugquery()
28
29
29 ui.write(_('directories being watched:\n'))
30 ui.write(_('directories being watched:\n'))
30 for path in response:
31 for path in response:
31 ui.write((' %s/\n') % path)
32 ui.write((' %s/\n') % path)
32
33
33 def reposetup(ui, repo):
34 def reposetup(ui, repo):
34 if not hasattr(repo, 'dirstate'):
35 if not util.safehasattr(repo, 'dirstate'):
35 return
36 return
36
37
37 class inotifydirstate(repo.dirstate.__class__):
38 class inotifydirstate(repo.dirstate.__class__):
38
39
39 # We'll set this to false after an unsuccessful attempt so that
40 # We'll set this to false after an unsuccessful attempt so that
40 # next calls of status() within the same instance don't try again
41 # next calls of status() within the same instance don't try again
41 # to start an inotify server if it won't start.
42 # to start an inotify server if it won't start.
42 _inotifyon = True
43 _inotifyon = True
43
44
44 def status(self, match, subrepos, ignored, clean, unknown):
45 def status(self, match, subrepos, ignored, clean, unknown):
45 files = match.files()
46 files = match.files()
46 if '.' in files:
47 if '.' in files:
47 files = []
48 files = []
48 if self._inotifyon and not ignored and not subrepos and not self._dirty:
49 if self._inotifyon and not ignored and not subrepos and not self._dirty:
49 cli = client(ui, repo)
50 cli = client(ui, repo)
50 try:
51 try:
51 result = cli.statusquery(files, match, False,
52 result = cli.statusquery(files, match, False,
52 clean, unknown)
53 clean, unknown)
53 except QueryFailed, instr:
54 except QueryFailed, instr:
54 ui.debug(str(instr))
55 ui.debug(str(instr))
55 # don't retry within the same hg instance
56 # don't retry within the same hg instance
56 inotifydirstate._inotifyon = False
57 inotifydirstate._inotifyon = False
57 pass
58 pass
58 else:
59 else:
59 if ui.config('inotify', 'debug'):
60 if ui.config('inotify', 'debug'):
60 r2 = super(inotifydirstate, self).status(
61 r2 = super(inotifydirstate, self).status(
61 match, [], False, clean, unknown)
62 match, [], False, clean, unknown)
62 for c, a, b in zip('LMARDUIC', result, r2):
63 for c, a, b in zip('LMARDUIC', result, r2):
63 for f in a:
64 for f in a:
64 if f not in b:
65 if f not in b:
65 ui.warn('*** inotify: %s +%s\n' % (c, f))
66 ui.warn('*** inotify: %s +%s\n' % (c, f))
66 for f in b:
67 for f in b:
67 if f not in a:
68 if f not in a:
68 ui.warn('*** inotify: %s -%s\n' % (c, f))
69 ui.warn('*** inotify: %s -%s\n' % (c, f))
69 result = r2
70 result = r2
70 return result
71 return result
71 return super(inotifydirstate, self).status(
72 return super(inotifydirstate, self).status(
72 match, subrepos, ignored, clean, unknown)
73 match, subrepos, ignored, clean, unknown)
73
74
74 repo.dirstate.__class__ = inotifydirstate
75 repo.dirstate.__class__ = inotifydirstate
75
76
76 cmdtable = {
77 cmdtable = {
77 'debuginotify':
78 'debuginotify':
78 (debuginotify, [], ('hg debuginotify')),
79 (debuginotify, [], ('hg debuginotify')),
79 '^inserve':
80 '^inserve':
80 (serve,
81 (serve,
81 [('d', 'daemon', None, _('run server in background')),
82 [('d', 'daemon', None, _('run server in background')),
82 ('', 'daemon-pipefds', '',
83 ('', 'daemon-pipefds', '',
83 _('used internally by daemon mode'), _('NUM')),
84 _('used internally by daemon mode'), _('NUM')),
84 ('t', 'idle-timeout', '',
85 ('t', 'idle-timeout', '',
85 _('minutes to sit idle before exiting'), _('NUM')),
86 _('minutes to sit idle before exiting'), _('NUM')),
86 ('', 'pid-file', '',
87 ('', 'pid-file', '',
87 _('name of file to write process ID to'), _('FILE'))],
88 _('name of file to write process ID to'), _('FILE'))],
88 _('hg inserve [OPTION]...')),
89 _('hg inserve [OPTION]...')),
89 }
90 }
@@ -1,693 +1,701
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007-2010 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007-2010 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a DSCM
10 # Keyword expansion hack against the grain of a DSCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <http://mercurial.selenic.com/wiki/KeywordPlan>.
18 # <http://mercurial.selenic.com/wiki/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
28
28
29 '''expand keywords in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Keywords expand to the changeset data pertaining to the latest change
38 Keywords expand to the changeset data pertaining to the latest change
39 relative to the working directory parent of each file.
39 relative to the working directory parent of each file.
40
40
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 sections of hgrc files.
42 sections of hgrc files.
43
43
44 Example::
44 Example::
45
45
46 [keyword]
46 [keyword]
47 # expand keywords in every python file except those matching "x*"
47 # expand keywords in every python file except those matching "x*"
48 **.py =
48 **.py =
49 x* = ignore
49 x* = ignore
50
50
51 [keywordset]
51 [keywordset]
52 # prefer svn- over cvs-like default keywordmaps
52 # prefer svn- over cvs-like default keywordmaps
53 svn = True
53 svn = True
54
54
55 .. note::
55 .. note::
56 The more specific you are in your filename patterns the less you
56 The more specific you are in your filename patterns the less you
57 lose speed in huge repositories.
57 lose speed in huge repositories.
58
58
59 For [keywordmaps] template mapping and expansion demonstration and
59 For [keywordmaps] template mapping and expansion demonstration and
60 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
60 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 available templates and filters.
61 available templates and filters.
62
62
63 Three additional date template filters are provided:
63 Three additional date template filters are provided:
64
64
65 :``utcdate``: "2006/09/18 15:13:13"
65 :``utcdate``: "2006/09/18 15:13:13"
66 :``svnutcdate``: "2006-09-18 15:13:13Z"
66 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
67 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
68
68
69 The default template mappings (view with :hg:`kwdemo -d`) can be
69 The default template mappings (view with :hg:`kwdemo -d`) can be
70 replaced with customized keywords and templates. Again, run
70 replaced with customized keywords and templates. Again, run
71 :hg:`kwdemo` to control the results of your configuration changes.
71 :hg:`kwdemo` to control the results of your configuration changes.
72
72
73 Before changing/disabling active keywords, you must run :hg:`kwshrink`
73 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 to avoid storing expanded keywords in the change history.
74 to avoid storing expanded keywords in the change history.
75
75
76 To force expansion after enabling it, or a configuration change, run
76 To force expansion after enabling it, or a configuration change, run
77 :hg:`kwexpand`.
77 :hg:`kwexpand`.
78
78
79 Expansions spanning more than one line and incremental expansions,
79 Expansions spanning more than one line and incremental expansions,
80 like CVS' $Log$, are not supported. A keyword template map "Log =
80 like CVS' $Log$, are not supported. A keyword template map "Log =
81 {desc}" expands to the first line of the changeset description.
81 {desc}" expands to the first line of the changeset description.
82 '''
82 '''
83
83
84 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
84 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
85 from mercurial import localrepo, match, patch, templatefilters, templater, util
85 from mercurial import localrepo, match, patch, templatefilters, templater, util
86 from mercurial import scmutil
86 from mercurial import scmutil
87 from mercurial.hgweb import webcommands
87 from mercurial.hgweb import webcommands
88 from mercurial.i18n import _
88 from mercurial.i18n import _
89 import os, re, shutil, tempfile
89 import os, re, shutil, tempfile
90
90
91 commands.optionalrepo += ' kwdemo'
91 commands.optionalrepo += ' kwdemo'
92
92
93 cmdtable = {}
93 cmdtable = {}
94 command = cmdutil.command(cmdtable)
94 command = cmdutil.command(cmdtable)
95
95
96 # hg commands that do not act on keywords
96 # hg commands that do not act on keywords
97 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
97 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
98 ' outgoing push tip verify convert email glog')
98 ' outgoing push tip verify convert email glog')
99
99
100 # hg commands that trigger expansion only when writing to working dir,
100 # hg commands that trigger expansion only when writing to working dir,
101 # not when reading filelog, and unexpand when reading from working dir
101 # not when reading filelog, and unexpand when reading from working dir
102 restricted = 'merge kwexpand kwshrink record qrecord resolve transplant'
102 restricted = 'merge kwexpand kwshrink record qrecord resolve transplant'
103
103
104 # names of extensions using dorecord
104 # names of extensions using dorecord
105 recordextensions = 'record'
105 recordextensions = 'record'
106
106
107 colortable = {
107 colortable = {
108 'kwfiles.enabled': 'green bold',
108 'kwfiles.enabled': 'green bold',
109 'kwfiles.deleted': 'cyan bold underline',
109 'kwfiles.deleted': 'cyan bold underline',
110 'kwfiles.enabledunknown': 'green',
110 'kwfiles.enabledunknown': 'green',
111 'kwfiles.ignored': 'bold',
111 'kwfiles.ignored': 'bold',
112 'kwfiles.ignoredunknown': 'none'
112 'kwfiles.ignoredunknown': 'none'
113 }
113 }
114
114
115 # date like in cvs' $Date
115 # date like in cvs' $Date
116 def utcdate(text):
116 def utcdate(text):
117 ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
117 ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
118 '''
118 '''
119 return util.datestr((text[0], 0), '%Y/%m/%d %H:%M:%S')
119 return util.datestr((text[0], 0), '%Y/%m/%d %H:%M:%S')
120 # date like in svn's $Date
120 # date like in svn's $Date
121 def svnisodate(text):
121 def svnisodate(text):
122 ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
122 ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
123 +0200 (Tue, 18 Aug 2009)".
123 +0200 (Tue, 18 Aug 2009)".
124 '''
124 '''
125 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
125 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
126 # date like in svn's $Id
126 # date like in svn's $Id
127 def svnutcdate(text):
127 def svnutcdate(text):
128 ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
128 ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
129 11:00:13Z".
129 11:00:13Z".
130 '''
130 '''
131 return util.datestr((text[0], 0), '%Y-%m-%d %H:%M:%SZ')
131 return util.datestr((text[0], 0), '%Y-%m-%d %H:%M:%SZ')
132
132
133 templatefilters.filters.update({'utcdate': utcdate,
133 templatefilters.filters.update({'utcdate': utcdate,
134 'svnisodate': svnisodate,
134 'svnisodate': svnisodate,
135 'svnutcdate': svnutcdate})
135 'svnutcdate': svnutcdate})
136
136
137 # make keyword tools accessible
137 # make keyword tools accessible
138 kwtools = {'templater': None, 'hgcmd': ''}
138 kwtools = {'templater': None, 'hgcmd': ''}
139
139
140 def _defaultkwmaps(ui):
140 def _defaultkwmaps(ui):
141 '''Returns default keywordmaps according to keywordset configuration.'''
141 '''Returns default keywordmaps according to keywordset configuration.'''
142 templates = {
142 templates = {
143 'Revision': '{node|short}',
143 'Revision': '{node|short}',
144 'Author': '{author|user}',
144 'Author': '{author|user}',
145 }
145 }
146 kwsets = ({
146 kwsets = ({
147 'Date': '{date|utcdate}',
147 'Date': '{date|utcdate}',
148 'RCSfile': '{file|basename},v',
148 'RCSfile': '{file|basename},v',
149 'RCSFile': '{file|basename},v', # kept for backwards compatibility
149 'RCSFile': '{file|basename},v', # kept for backwards compatibility
150 # with hg-keyword
150 # with hg-keyword
151 'Source': '{root}/{file},v',
151 'Source': '{root}/{file},v',
152 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
152 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
153 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
153 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
154 }, {
154 }, {
155 'Date': '{date|svnisodate}',
155 'Date': '{date|svnisodate}',
156 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
156 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
157 'LastChangedRevision': '{node|short}',
157 'LastChangedRevision': '{node|short}',
158 'LastChangedBy': '{author|user}',
158 'LastChangedBy': '{author|user}',
159 'LastChangedDate': '{date|svnisodate}',
159 'LastChangedDate': '{date|svnisodate}',
160 })
160 })
161 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
161 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
162 return templates
162 return templates
163
163
164 def _shrinktext(text, subfunc):
164 def _shrinktext(text, subfunc):
165 '''Helper for keyword expansion removal in text.
165 '''Helper for keyword expansion removal in text.
166 Depending on subfunc also returns number of substitutions.'''
166 Depending on subfunc also returns number of substitutions.'''
167 return subfunc(r'$\1$', text)
167 return subfunc(r'$\1$', text)
168
168
169 def _preselect(wstatus, changed):
169 def _preselect(wstatus, changed):
170 '''Retrieves modfied and added files from a working directory state
170 '''Retrieves modfied and added files from a working directory state
171 and returns the subset of each contained in given changed files
171 and returns the subset of each contained in given changed files
172 retrieved from a change context.'''
172 retrieved from a change context.'''
173 modified, added = wstatus[:2]
173 modified, added = wstatus[:2]
174 modified = [f for f in modified if f in changed]
174 modified = [f for f in modified if f in changed]
175 added = [f for f in added if f in changed]
175 added = [f for f in added if f in changed]
176 return modified, added
176 return modified, added
177
177
178
178
179 class kwtemplater(object):
179 class kwtemplater(object):
180 '''
180 '''
181 Sets up keyword templates, corresponding keyword regex, and
181 Sets up keyword templates, corresponding keyword regex, and
182 provides keyword substitution functions.
182 provides keyword substitution functions.
183 '''
183 '''
184
184
185 def __init__(self, ui, repo, inc, exc):
185 def __init__(self, ui, repo, inc, exc):
186 self.ui = ui
186 self.ui = ui
187 self.repo = repo
187 self.repo = repo
188 self.match = match.match(repo.root, '', [], inc, exc)
188 self.match = match.match(repo.root, '', [], inc, exc)
189 self.restrict = kwtools['hgcmd'] in restricted.split()
189 self.restrict = kwtools['hgcmd'] in restricted.split()
190 self.record = False
190 self.record = False
191
191
192 kwmaps = self.ui.configitems('keywordmaps')
192 kwmaps = self.ui.configitems('keywordmaps')
193 if kwmaps: # override default templates
193 if kwmaps: # override default templates
194 self.templates = dict((k, templater.parsestring(v, False))
194 self.templates = dict((k, templater.parsestring(v, False))
195 for k, v in kwmaps)
195 for k, v in kwmaps)
196 else:
196 else:
197 self.templates = _defaultkwmaps(self.ui)
197 self.templates = _defaultkwmaps(self.ui)
198
198
199 @util.propertycache
199 @util.propertycache
200 def escape(self):
200 def escape(self):
201 '''Returns bar-separated and escaped keywords.'''
201 '''Returns bar-separated and escaped keywords.'''
202 return '|'.join(map(re.escape, self.templates.keys()))
202 return '|'.join(map(re.escape, self.templates.keys()))
203
203
204 @util.propertycache
204 @util.propertycache
205 def rekw(self):
205 def rekw(self):
206 '''Returns regex for unexpanded keywords.'''
206 '''Returns regex for unexpanded keywords.'''
207 return re.compile(r'\$(%s)\$' % self.escape)
207 return re.compile(r'\$(%s)\$' % self.escape)
208
208
209 @util.propertycache
209 @util.propertycache
210 def rekwexp(self):
210 def rekwexp(self):
211 '''Returns regex for expanded keywords.'''
211 '''Returns regex for expanded keywords.'''
212 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
212 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
213
213
214 def substitute(self, data, path, ctx, subfunc):
214 def substitute(self, data, path, ctx, subfunc):
215 '''Replaces keywords in data with expanded template.'''
215 '''Replaces keywords in data with expanded template.'''
216 def kwsub(mobj):
216 def kwsub(mobj):
217 kw = mobj.group(1)
217 kw = mobj.group(1)
218 ct = cmdutil.changeset_templater(self.ui, self.repo,
218 ct = cmdutil.changeset_templater(self.ui, self.repo,
219 False, None, '', False)
219 False, None, '', False)
220 ct.use_template(self.templates[kw])
220 ct.use_template(self.templates[kw])
221 self.ui.pushbuffer()
221 self.ui.pushbuffer()
222 ct.show(ctx, root=self.repo.root, file=path)
222 ct.show(ctx, root=self.repo.root, file=path)
223 ekw = templatefilters.firstline(self.ui.popbuffer())
223 ekw = templatefilters.firstline(self.ui.popbuffer())
224 return '$%s: %s $' % (kw, ekw)
224 return '$%s: %s $' % (kw, ekw)
225 return subfunc(kwsub, data)
225 return subfunc(kwsub, data)
226
226
227 def linkctx(self, path, fileid):
227 def linkctx(self, path, fileid):
228 '''Similar to filelog.linkrev, but returns a changectx.'''
228 '''Similar to filelog.linkrev, but returns a changectx.'''
229 return self.repo.filectx(path, fileid=fileid).changectx()
229 return self.repo.filectx(path, fileid=fileid).changectx()
230
230
231 def expand(self, path, node, data):
231 def expand(self, path, node, data):
232 '''Returns data with keywords expanded.'''
232 '''Returns data with keywords expanded.'''
233 if not self.restrict and self.match(path) and not util.binary(data):
233 if not self.restrict and self.match(path) and not util.binary(data):
234 ctx = self.linkctx(path, node)
234 ctx = self.linkctx(path, node)
235 return self.substitute(data, path, ctx, self.rekw.sub)
235 return self.substitute(data, path, ctx, self.rekw.sub)
236 return data
236 return data
237
237
238 def iskwfile(self, cand, ctx):
238 def iskwfile(self, cand, ctx):
239 '''Returns subset of candidates which are configured for keyword
239 '''Returns subset of candidates which are configured for keyword
240 expansion are not symbolic links.'''
240 expansion are not symbolic links.'''
241 return [f for f in cand if self.match(f) and not 'l' in ctx.flags(f)]
241 return [f for f in cand if self.match(f) and not 'l' in ctx.flags(f)]
242
242
243 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
243 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
244 '''Overwrites selected files expanding/shrinking keywords.'''
244 '''Overwrites selected files expanding/shrinking keywords.'''
245 if self.restrict or lookup or self.record: # exclude kw_copy
245 if self.restrict or lookup or self.record: # exclude kw_copy
246 candidates = self.iskwfile(candidates, ctx)
246 candidates = self.iskwfile(candidates, ctx)
247 if not candidates:
247 if not candidates:
248 return
248 return
249 kwcmd = self.restrict and lookup # kwexpand/kwshrink
249 kwcmd = self.restrict and lookup # kwexpand/kwshrink
250 if self.restrict or expand and lookup:
250 if self.restrict or expand and lookup:
251 mf = ctx.manifest()
251 mf = ctx.manifest()
252 lctx = ctx
252 if self.restrict or rekw:
253 re_kw = (self.restrict or rekw) and self.rekw or self.rekwexp
253 re_kw = self.rekw
254 msg = (expand and _('overwriting %s expanding keywords\n')
254 else:
255 or _('overwriting %s shrinking keywords\n'))
255 re_kw = self.rekwexp
256 if expand:
257 msg = _('overwriting %s expanding keywords\n')
258 else:
259 msg = _('overwriting %s shrinking keywords\n')
256 for f in candidates:
260 for f in candidates:
257 if self.restrict:
261 if self.restrict:
258 data = self.repo.file(f).read(mf[f])
262 data = self.repo.file(f).read(mf[f])
259 else:
263 else:
260 data = self.repo.wread(f)
264 data = self.repo.wread(f)
261 if util.binary(data):
265 if util.binary(data):
262 continue
266 continue
263 if expand:
267 if expand:
264 if lookup:
268 if lookup:
265 lctx = self.linkctx(f, mf[f])
269 ctx = self.linkctx(f, mf[f])
266 data, found = self.substitute(data, f, lctx, re_kw.subn)
270 data, found = self.substitute(data, f, ctx, re_kw.subn)
267 elif self.restrict:
271 elif self.restrict:
268 found = re_kw.search(data)
272 found = re_kw.search(data)
269 else:
273 else:
270 data, found = _shrinktext(data, re_kw.subn)
274 data, found = _shrinktext(data, re_kw.subn)
271 if found:
275 if found:
272 self.ui.note(msg % f)
276 self.ui.note(msg % f)
273 fpath = self.repo.wjoin(f)
277 fp = self.repo.wopener(f, "wb", atomictemp=True)
274 mode = os.lstat(fpath).st_mode
278 fp.write(data)
275 self.repo.wwrite(f, data, ctx.flags(f))
279 fp.close()
276 os.chmod(fpath, mode)
277 if kwcmd:
280 if kwcmd:
278 self.repo.dirstate.normal(f)
281 self.repo.dirstate.normal(f)
279 elif self.record:
282 elif self.record:
280 self.repo.dirstate.normallookup(f)
283 self.repo.dirstate.normallookup(f)
281
284
282 def shrink(self, fname, text):
285 def shrink(self, fname, text):
283 '''Returns text with all keyword substitutions removed.'''
286 '''Returns text with all keyword substitutions removed.'''
284 if self.match(fname) and not util.binary(text):
287 if self.match(fname) and not util.binary(text):
285 return _shrinktext(text, self.rekwexp.sub)
288 return _shrinktext(text, self.rekwexp.sub)
286 return text
289 return text
287
290
288 def shrinklines(self, fname, lines):
291 def shrinklines(self, fname, lines):
289 '''Returns lines with keyword substitutions removed.'''
292 '''Returns lines with keyword substitutions removed.'''
290 if self.match(fname):
293 if self.match(fname):
291 text = ''.join(lines)
294 text = ''.join(lines)
292 if not util.binary(text):
295 if not util.binary(text):
293 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
296 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
294 return lines
297 return lines
295
298
296 def wread(self, fname, data):
299 def wread(self, fname, data):
297 '''If in restricted mode returns data read from wdir with
300 '''If in restricted mode returns data read from wdir with
298 keyword substitutions removed.'''
301 keyword substitutions removed.'''
299 return self.restrict and self.shrink(fname, data) or data
302 if self.restrict:
303 return self.shrink(fname, data)
304 return data
300
305
301 class kwfilelog(filelog.filelog):
306 class kwfilelog(filelog.filelog):
302 '''
307 '''
303 Subclass of filelog to hook into its read, add, cmp methods.
308 Subclass of filelog to hook into its read, add, cmp methods.
304 Keywords are "stored" unexpanded, and processed on reading.
309 Keywords are "stored" unexpanded, and processed on reading.
305 '''
310 '''
306 def __init__(self, opener, kwt, path):
311 def __init__(self, opener, kwt, path):
307 super(kwfilelog, self).__init__(opener, path)
312 super(kwfilelog, self).__init__(opener, path)
308 self.kwt = kwt
313 self.kwt = kwt
309 self.path = path
314 self.path = path
310
315
311 def read(self, node):
316 def read(self, node):
312 '''Expands keywords when reading filelog.'''
317 '''Expands keywords when reading filelog.'''
313 data = super(kwfilelog, self).read(node)
318 data = super(kwfilelog, self).read(node)
314 if self.renamed(node):
319 if self.renamed(node):
315 return data
320 return data
316 return self.kwt.expand(self.path, node, data)
321 return self.kwt.expand(self.path, node, data)
317
322
318 def add(self, text, meta, tr, link, p1=None, p2=None):
323 def add(self, text, meta, tr, link, p1=None, p2=None):
319 '''Removes keyword substitutions when adding to filelog.'''
324 '''Removes keyword substitutions when adding to filelog.'''
320 text = self.kwt.shrink(self.path, text)
325 text = self.kwt.shrink(self.path, text)
321 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
326 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
322
327
323 def cmp(self, node, text):
328 def cmp(self, node, text):
324 '''Removes keyword substitutions for comparison.'''
329 '''Removes keyword substitutions for comparison.'''
325 text = self.kwt.shrink(self.path, text)
330 text = self.kwt.shrink(self.path, text)
326 return super(kwfilelog, self).cmp(node, text)
331 return super(kwfilelog, self).cmp(node, text)
327
332
328 def _status(ui, repo, kwt, *pats, **opts):
333 def _status(ui, repo, wctx, kwt, *pats, **opts):
329 '''Bails out if [keyword] configuration is not active.
334 '''Bails out if [keyword] configuration is not active.
330 Returns status of working directory.'''
335 Returns status of working directory.'''
331 if kwt:
336 if kwt:
332 return repo.status(match=scmutil.match(repo[None], pats, opts), clean=True,
337 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
333 unknown=opts.get('unknown') or opts.get('all'))
338 unknown=opts.get('unknown') or opts.get('all'))
334 if ui.configitems('keyword'):
339 if ui.configitems('keyword'):
335 raise util.Abort(_('[keyword] patterns cannot match'))
340 raise util.Abort(_('[keyword] patterns cannot match'))
336 raise util.Abort(_('no [keyword] patterns configured'))
341 raise util.Abort(_('no [keyword] patterns configured'))
337
342
338 def _kwfwrite(ui, repo, expand, *pats, **opts):
343 def _kwfwrite(ui, repo, expand, *pats, **opts):
339 '''Selects files and passes them to kwtemplater.overwrite.'''
344 '''Selects files and passes them to kwtemplater.overwrite.'''
340 wctx = repo[None]
345 wctx = repo[None]
341 if len(wctx.parents()) > 1:
346 if len(wctx.parents()) > 1:
342 raise util.Abort(_('outstanding uncommitted merge'))
347 raise util.Abort(_('outstanding uncommitted merge'))
343 kwt = kwtools['templater']
348 kwt = kwtools['templater']
344 wlock = repo.wlock()
349 wlock = repo.wlock()
345 try:
350 try:
346 status = _status(ui, repo, kwt, *pats, **opts)
351 status = _status(ui, repo, wctx, kwt, *pats, **opts)
347 modified, added, removed, deleted, unknown, ignored, clean = status
352 modified, added, removed, deleted, unknown, ignored, clean = status
348 if modified or added or removed or deleted:
353 if modified or added or removed or deleted:
349 raise util.Abort(_('outstanding uncommitted changes'))
354 raise util.Abort(_('outstanding uncommitted changes'))
350 kwt.overwrite(wctx, clean, True, expand)
355 kwt.overwrite(wctx, clean, True, expand)
351 finally:
356 finally:
352 wlock.release()
357 wlock.release()
353
358
@command('kwdemo',
         [('d', 'default', None, _('show default keyword template maps')),
          ('f', 'rcfile', '',
           _('read maps from rcfile'), _('FILE'))],
         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'))
def demo(ui, repo, *args, **opts):
    '''print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps and their
    expansions.

    Extend the current configuration by specifying maps as arguments
    and using -f/--rcfile to source an external hgrc file.

    Use -d/--default to disable current configuration.

    See :hg:`help templates` for information on templates and filters.
    '''
    def demoitems(section, items):
        # pretty-print one [section] of configuration items, sorted by key
        ui.write('[%s]\n' % section)
        for k, v in sorted(items):
            ui.write('%s = %s\n' % (k, v))

    fn = 'demo.txt'
    tmpdir = tempfile.mkdtemp('', 'kwdemo.')
    ui.note(_('creating temporary repository at %s\n') % tmpdir)
    # NOTE(review): third argument to localrepository presumably requests
    # creation of a fresh repository in tmpdir -- confirm against the
    # localrepo API for this Mercurial version
    repo = localrepo.localrepository(ui, tmpdir, True)
    ui.setconfig('keyword', fn, '')
    svn = ui.configbool('keywordset', 'svn')
    # explicitly set keywordset for demo output
    ui.setconfig('keywordset', 'svn', svn)

    uikwmaps = ui.configitems('keywordmaps')
    if args or opts.get('rcfile'):
        # custom maps: merge an rcfile and/or command-line TEMPLATEMAP args
        # on top of (or instead of) the current configuration
        ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
        if uikwmaps:
            ui.status(_('\textending current template maps\n'))
        if opts.get('default') or not uikwmaps:
            if svn:
                ui.status(_('\toverriding default svn keywordset\n'))
            else:
                ui.status(_('\toverriding default cvs keywordset\n'))
        if opts.get('rcfile'):
            ui.readconfig(opts.get('rcfile'))
        if args:
            # simulate hgrc parsing
            rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
            fp = repo.opener('hgrc', 'w')
            fp.writelines(rcmaps)
            fp.close()
            ui.readconfig(repo.join('hgrc'))
        kwmaps = dict(ui.configitems('keywordmaps'))
    elif opts.get('default'):
        # -d/--default: ignore the current configuration entirely and use
        # the built-in svn or cvs keyword set
        if svn:
            ui.status(_('\n\tconfiguration using default svn keywordset\n'))
        else:
            ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
        kwmaps = _defaultkwmaps(ui)
        if uikwmaps:
            ui.status(_('\tdisabling current template maps\n'))
            for k, v in kwmaps.iteritems():
                ui.setconfig('keywordmaps', k, v)
    else:
        # no args/options: show the current maps, falling back to defaults
        # when nothing is configured
        ui.status(_('\n\tconfiguration using current keyword template maps\n'))
        if uikwmaps:
            kwmaps = dict(uikwmaps)
        else:
            kwmaps = _defaultkwmaps(ui)

    uisetup(ui)
    reposetup(ui, repo)
    ui.write('[extensions]\nkeyword =\n')
    demoitems('keyword', ui.configitems('keyword'))
    demoitems('keywordset', ui.configitems('keywordset'))
    demoitems('keywordmaps', kwmaps.iteritems())
    keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
    repo.wopener.write(fn, keywords)
    repo[None].add([fn])
    ui.note(_('\nkeywords written to %s:\n') % fn)
    ui.note(keywords)
    repo.dirstate.setbranch('demobranch')
    # blank out any configured commit-related hooks, presumably so they do
    # not interfere with the demo commit below
    for name, cmd in ui.configitems('hooks'):
        if name.split('.', 1)[0].find('commit') > -1:
            repo.ui.setconfig('hooks', name, '')
    msg = _('hg keyword configuration and expansion example')
    ui.note("hg ci -m '%s'\n" % msg)
    repo.commit(text=msg)
    ui.status(_('\n\tkeywords expanded\n'))
    ui.write(repo.wread(fn))
    # the demo repository is throwaway; remove it, ignoring cleanup errors
    shutil.rmtree(tmpdir, ignore_errors=True)
441
449
@command('kwexpand', commands.walkopts, _('hg kwexpand [OPTION]... [FILE]...'))
def expand(ui, repo, *pats, **opts):
    '''expand keywords in the working directory

    Run after (re)enabling keyword expansion.

    kwexpand refuses to run if given files contain local changes.
    '''
    # delegate to _kwfwrite with expand=True (3rd positional argument);
    # _kwfwrite aborts on an uncommitted merge or outstanding changes
    _kwfwrite(ui, repo, True, *pats, **opts)
452
460
@command('kwfiles',
         [('A', 'all', None, _('show keyword status flags of all files')),
          ('i', 'ignore', None, _('show files excluded from expansion')),
          ('u', 'unknown', None, _('only show unknown (not tracked) files')),
         ] + commands.walkopts,
         _('hg kwfiles [OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
    '''show files configured for keyword expansion

    List which files in the working directory are matched by the
    [keyword] configuration patterns.

    Useful to prevent inadvertent keyword expansion and to speed up
    execution by including only files that are actual candidates for
    expansion.

    See :hg:`help keyword` on how to construct patterns both for
    inclusion and exclusion of files.

    With -A/--all and -v/--verbose the codes used to show the status
    of files are::

      K = keyword expansion candidate
      k = keyword expansion candidate (not tracked)
      I = ignored
      i = ignored (not tracked)
    '''
    kwt = kwtools['templater']
    wctx = repo[None]
    status = _status(ui, repo, wctx, kwt, *pats, **opts)
    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''
    modified, added, removed, deleted, unknown, ignored, clean = status
    # tracked candidates, unless only unknown files were requested
    if opts.get('unknown') and not opts.get('all'):
        tracked = []
    else:
        tracked = sorted(modified + added + clean)
    kwfiles = kwt.iskwfile(tracked, wctx)
    kwdeleted = kwt.iskwfile(deleted, wctx)
    kwunknown = kwt.iskwfile(unknown, wctx)
    # with -i/--ignore alone, suppress the expansion candidates
    if opts.get('ignore') and not opts.get('all'):
        showfiles = [], [], []
    else:
        showfiles = kwfiles, kwdeleted, kwunknown
    if opts.get('all') or opts.get('ignore'):
        # append the files NOT matched for expansion (I/i states)
        showfiles += ([f for f in tracked if f not in kwfiles],
                      [f for f in unknown if f not in kwunknown])
    kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
    for char, filenames, kwstate in zip('K!kIi', showfiles, kwlabels):
        # status-flag prefix only with -A/--all or -v/--verbose
        if opts.get('all') or ui.verbose:
            fmt = '%s %%s\n' % char
        else:
            fmt = '%s\n'
        for f in filenames:
            ui.write(fmt % repo.pathto(f, cwd), label='kwfiles.' + kwstate)
504
512
@command('kwshrink', commands.walkopts, _('hg kwshrink [OPTION]... [FILE]...'))
def shrink(ui, repo, *pats, **opts):
    '''revert expanded keywords in the working directory

    Must be run before changing/disabling active keywords.

    kwshrink refuses to run if given files contain local changes.
    '''
    # delegate to _kwfwrite with expand=False (3rd positional argument);
    # _kwfwrite aborts on an uncommitted merge or outstanding changes
    _kwfwrite(ui, repo, False, *pats, **opts)
515
523
516
524
def uisetup(ui):
    '''Monkeypatches dispatch._parse to retrieve user command.'''

    def remember_hgcmd(orig, ui, args):
        '''Wrapper for dispatch._parse that stashes the name of the
        running hg command in kwtools before returning the result.'''
        parsed = orig(ui, args)
        # parsed is (cmd, func, args, options, cmdoptions)
        kwtools['hgcmd'] = parsed[0]
        return parsed

    extensions.wrapfunction(dispatch, '_parse', remember_hgcmd)
527
535
def reposetup(ui, repo):
    '''Sets up repo as kwrepo for keyword substitution.
    Overrides file method to return kwfilelog instead of filelog
    if file matches user configuration.
    Wraps commit to overwrite configured files with updated
    keyword substitutions.
    Monkeypatches patch and webcommands.'''

    # skip repos that must never expand keywords: non-local repos, commands
    # listed in nokwcommands, repos nested under a .hg directory, and
    # bundle repos
    try:
        if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
            or '.hg' in util.splitpath(repo.root)
            or repo._url.startswith('bundle:')):
            return
    except AttributeError:
        # repo object may lack one of the probed attributes; fall through
        # and continue setup
        pass

    # split [keyword] config into inclusion/exclusion patterns;
    # '.hg*' files are always excluded from expansion
    inc, exc = [], ['.hg*']
    for pat, opt in ui.configitems('keyword'):
        if opt != 'ignore':
            inc.append(pat)
        else:
            exc.append(pat)
    if not inc:
        # nothing configured for expansion -> leave the repo untouched
        return

    kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)

    # repo.__class__ is replaced with this subclass at the end of reposetup
    class kwrepo(repo.__class__):
        def file(self, f):
            if f[0] == '/':
                f = f[1:]
            return kwfilelog(self.sopener, kwt, f)

        def wread(self, filename):
            # shrink keywords in data read from the working directory
            data = super(kwrepo, self).wread(filename)
            return kwt.wread(filename, data)

        def commit(self, *args, **opts):
            # use custom commitctx for user commands
            # other extensions can still wrap repo.commitctx directly
            self.commitctx = self.kwcommitctx
            try:
                return super(kwrepo, self).commit(*args, **opts)
            finally:
                del self.commitctx

        def kwcommitctx(self, ctx, error=False):
            n = super(kwrepo, self).commitctx(ctx, error)
            # no lock needed, only called from repo.commit() which already locks
            if not kwt.record:
                # re-expand keywords in the files the commit touched,
                # restoring kwt.restrict afterwards
                restrict = kwt.restrict
                kwt.restrict = True
                kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
                              False, True)
                kwt.restrict = restrict
            return n

        def rollback(self, dryrun=False, force=False):
            wlock = self.wlock()
            try:
                if not dryrun:
                    # remember the files the to-be-rolled-back commit changed
                    changed = self['.'].files()
                ret = super(kwrepo, self).rollback(dryrun, force)
                if not dryrun:
                    # rewrite keywords in those files against the new parent
                    ctx = self['.']
                    modified, added = _preselect(self[None].status(), changed)
                    kwt.overwrite(ctx, modified, True, True)
                    kwt.overwrite(ctx, added, True, False)
                return ret
            finally:
                wlock.release()

    # monkeypatches
    def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
        '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
        rejects or conflicts due to expanded keywords in working dir.'''
        orig(self, ui, gp, backend, store, eolmode)
        # shrink keywords read from working dir
        self.lines = kwt.shrinklines(self.fname, self.lines)

    def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
                opts=None, prefix=''):
        '''Monkeypatch patch.diff to avoid expansion.'''
        kwt.restrict = True
        return orig(repo, node1, node2, match, changes, opts, prefix)

    def kwweb_skip(orig, web, req, tmpl):
        '''Wraps webcommands.x turning off keyword expansion.'''
        kwt.match = util.never
        return orig(web, req, tmpl)

    def kw_copy(orig, ui, repo, pats, opts, rename=False):
        '''Wraps cmdutil.copy so that copy/rename destinations do not
        contain expanded keywords.
        Note that the source of a regular file destination may also be a
        symlink:
        hg cp sym x -> x is symlink
        cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
        For the latter we have to follow the symlink to find out whether its
        target is configured for expansion and we therefore must unexpand the
        keywords in the destination.'''
        orig(ui, repo, pats, opts, rename)
        if opts.get('dry_run'):
            return
        wctx = repo[None]
        cwd = repo.getcwd()

        def haskwsource(dest):
            '''Returns true if dest is a regular file and configured for
            expansion or a symlink which points to a file configured for
            expansion. '''
            source = repo.dirstate.copied(dest)
            if 'l' in wctx.flags(source):
                source = scmutil.canonpath(repo.root, cwd,
                                           os.path.realpath(source))
            return kwt.match(source)

        # regular-file copy destinations whose source is keyword-enabled
        candidates = [f for f in repo.dirstate.copies() if
                      not 'l' in wctx.flags(f) and haskwsource(f)]
        kwt.overwrite(wctx, candidates, False, False)

    def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
        '''Wraps record.dorecord expanding keywords after recording.'''
        wlock = repo.wlock()
        try:
            # record returns 0 even when nothing has changed
            # therefore compare nodes before and after
            kwt.record = True
            ctx = repo['.']
            wstatus = repo[None].status()
            ret = orig(ui, repo, commitfunc, *pats, **opts)
            recctx = repo['.']
            if ctx != recctx:
                # something was committed: expand keywords in its files
                modified, added = _preselect(wstatus, recctx.files())
                kwt.restrict = False
                kwt.overwrite(recctx, modified, False, True)
                kwt.overwrite(recctx, added, False, True, True)
                kwt.restrict = True
            return ret
        finally:
            wlock.release()

    def kwfilectx_cmp(orig, self, fctx):
        # keyword affects data size, comparing wdir and filelog size does
        # not make sense
        if (fctx._filerev is None and
            (self._repo._encodefilterpats or
             kwt.match(fctx.path()) and not 'l' in fctx.flags()) or
            self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())
        return True

    extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
    extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
    extensions.wrapfunction(patch, 'diff', kw_diff)
    extensions.wrapfunction(cmdutil, 'copy', kw_copy)
    # hgweb views must never expand keywords
    for c in 'annotate changeset rev filediff diff'.split():
        extensions.wrapfunction(webcommands, c, kwweb_skip)
    # hook into any loaded record-style extensions
    for name in recordextensions.split():
        try:
            record = extensions.find(name)
            extensions.wrapfunction(record, 'dorecord', kw_dorecord)
        except KeyError:
            # extension not loaded; nothing to wrap
            pass

    repo.__class__ = kwrepo
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from mercurial/templates/map-cmdline.default to mercurial/templates/map-cmdline.bisect
NO CONTENT: file copied from mercurial/templates/map-cmdline.default to mercurial/templates/map-cmdline.bisect
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now