merge default into stable for 2.0 code freeze
Matt Mackall
r15273:38408275 merge 2.0-rc stable

The requested changes are too big and content was truncated.

@@ -0,0 +1,47
1 #!/usr/bin/env python
2 #
3 # Dumps output generated by Mercurial's command server in a formatted style to a
4 # given file or stderr if '-' is specified. Output is also written in its raw
5 # format to stdout.
6 #
7 # $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py -
8 # o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8'
9
10 import sys, struct
11
12 if len(sys.argv) != 2:
13 print 'usage: debugcmdserver.py FILE'
14 sys.exit(1)
15
16 outputfmt = '>cI'
17 outputfmtsize = struct.calcsize(outputfmt)
18
19 if sys.argv[1] == '-':
20 log = sys.stderr
21 else:
22 log = open(sys.argv[1], 'a')
23
24 def read(size):
25 data = sys.stdin.read(size)
26 if not data:
27 raise EOFError()
28 sys.stdout.write(data)
29 sys.stdout.flush()
30 return data
31
32 try:
33 while True:
34 header = read(outputfmtsize)
35 channel, length = struct.unpack(outputfmt, header)
36 log.write('%s, %-4d' % (channel, length))
37 if channel in 'IL':
38 log.write(' -> waiting for input\n')
39 else:
40 data = read(length)
41 log.write(' -> %r\n' % data)
42 log.flush()
43 except EOFError:
44 pass
45 finally:
46 if log != sys.stderr:
47 log.close()
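
As a quick illustration of the '>cI' framing decoded above (one channel byte followed by a big-endian unsigned 32-bit payload length), here is a minimal sketch, separate from the script itself:

    import struct

    # Build and decode a sample header: channel 'o', 13-byte payload.
    header = struct.pack('>cI', 'o', 13)
    channel, length = struct.unpack('>cI', header)
    assert (channel, length) == ('o', 13)
    # Per the script above, channels 'I' and 'L' carry no payload;
    # they signal that the server is waiting for input.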
@@ -0,0 +1,4
1 Greg Ward, author of the original bfiles extension
2 Na'Tosha Bard of Unity Technologies
3 Fog Creek Software
4 Special thanks to the University of Toronto and the UCOSP program
@@ -0,0 +1,94
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''track large binary files
10
11 Large binary files tend to be not very compressible, not very
12 diffable, and not at all mergeable. Such files are not handled
13 efficiently by Mercurial's storage format (revlog), which is based on
14 compressed binary deltas; storing large binary files as regular
15 Mercurial files wastes bandwidth and disk space and increases
16 Mercurial's memory usage. The largefiles extension addresses these
17 problems by adding a centralized client-server layer on top of
18 Mercurial: largefiles live in a *central store* out on the network
19 somewhere, and you only fetch the revisions that you need when you
20 need them.
21
22 largefiles works by maintaining a "standin file" in .hglf/ for each
23 largefile. The standins are small (41 bytes: an SHA-1 hash plus
24 newline) and are tracked by Mercurial. Largefile revisions are
25 identified by the SHA-1 hash of their contents, which is written to
26 the standin. largefiles uses that revision ID to get/put largefile
27 revisions from/to the central store. This saves both disk space and
28 bandwidth, since you don't need to retrieve all historical revisions
29 of large files when you clone or pull.
30
31 To start a new repository or add new large binary files, just add
32 --large to your ``hg add`` command. For example::
33
34 $ dd if=/dev/urandom of=randomdata count=2000
35 $ hg add --large randomdata
36 $ hg commit -m 'add randomdata as a largefile'
37
38 When you push a changeset that adds/modifies largefiles to a remote
39 repository, its largefile revisions will be uploaded along with it.
40 Note that the remote Mercurial must also have the largefiles extension
41 enabled for this to work.
42
43 When you pull a changeset that affects largefiles from a remote
44 repository, Mercurial behaves as normal. However, when you update to
45 such a revision, any largefiles needed by that revision are downloaded
46 and cached (if they have never been downloaded before). This means
47 that network access may be required to update to changesets you have
48 not previously updated to.
49
50 If you already have large files tracked by Mercurial without the
51 largefiles extension, you will need to convert your repository in
52 order to benefit from largefiles. This is done with the 'hg lfconvert'
53 command::
54
55 $ hg lfconvert --size 10 oldrepo newrepo
56
57 In repositories that already have largefiles in them, any new file
58 over 10MB will automatically be added as a largefile. To change this
59 threshold, set ``largefiles.size`` in your Mercurial config file to
60 the minimum size in megabytes to track as a largefile, or use the
61 --lfsize option to the add command (also in megabytes)::
62
63 [largefiles]
64 size = 2
65
66 $ hg add --lfsize 2
67
68 The ``largefiles.patterns`` config option allows you to specify a list
69 of filename patterns (see ``hg help patterns``) that should always be
70 tracked as largefiles::
71
72 [largefiles]
73 patterns =
74 *.jpg
75 re:.*\.(png|bmp)$
76 library.zip
77 content/audio/*
78
79 Files that match one of these patterns will be added as largefiles
80 regardless of their size.
81 '''
82
83 from mercurial import commands
84
85 import lfcommands
86 import reposetup
87 import uisetup
88
89 reposetup = reposetup.reposetup
90 uisetup = uisetup.uisetup
91
92 commands.norepo += " lfconvert"
93
94 cmdtable = lfcommands.cmdtable
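
As the docstring notes, a standin is 41 bytes: the 40 hex characters of the largefile's SHA-1 plus a newline. A minimal sketch of that format (standincontent is a hypothetical helper, not part of the extension):

    import hashlib

    def standincontent(data):
        # 40 hex chars of SHA-1 plus a trailing newline: 41 bytes total
        return hashlib.sha1(data).hexdigest() + '\n'

    assert len(standincontent('some large binary blob')) == 41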
@@ -0,0 +1,202
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''base class for store implementations and store-related utility code'''
10
11 import os
12 import tempfile
13 import binascii
14 import re
15
16 from mercurial import util, node, hg
17 from mercurial.i18n import _
18
19 import lfutil
20
21 class StoreError(Exception):
22 '''Raised when there is a problem getting files from or putting
23 files to a central store.'''
24 def __init__(self, filename, hash, url, detail):
25 self.filename = filename
26 self.hash = hash
27 self.url = url
28 self.detail = detail
29
30 def longmessage(self):
31 if self.url:
32 return ('%s: %s\n'
33 '(failed URL: %s)\n'
34 % (self.filename, self.detail, self.url))
35 else:
36 return ('%s: %s\n'
37 '(no default or default-push path set in hgrc)\n'
38 % (self.filename, self.detail))
39
40 def __str__(self):
41 return "%s: %s" % (self.url, self.detail)
42
43 class basestore(object):
44 def __init__(self, ui, repo, url):
45 self.ui = ui
46 self.repo = repo
47 self.url = url
48
49 def put(self, source, hash):
50 '''Put source file into the store under <filename>/<hash>.'''
51 raise NotImplementedError('abstract method')
52
53 def exists(self, hash):
54 '''Check to see if the store contains the given hash.'''
55 raise NotImplementedError('abstract method')
56
57 def get(self, files):
58 '''Get the specified largefiles from the store and write to local
59 files under repo.root. files is a list of (filename, hash)
60 tuples. Return (success, missing), lists of files successfully
61 downloaded and those not found in the store. success is a list
62 of (filename, hash) tuples; missing is a list of filenames that
63 we could not get. (The detailed error message will already have
64 been presented to the user, so missing is just supplied as a
65 summary.)'''
66 success = []
67 missing = []
68 ui = self.ui
69
70 at = 0
71 for filename, hash in files:
72 ui.progress(_('getting largefiles'), at, unit='lfile',
73 total=len(files))
74 at += 1
75 ui.note(_('getting %s:%s\n') % (filename, hash))
76
77 cachefilename = lfutil.cachepath(self.repo, hash)
78 cachedir = os.path.dirname(cachefilename)
79
80 # No need to pass mode='wb' to fdopen(), since mkstemp() already
81 # opened the file in binary mode.
82 (tmpfd, tmpfilename) = tempfile.mkstemp(
83 dir=cachedir, prefix=os.path.basename(filename))
84 tmpfile = os.fdopen(tmpfd, 'w')
85
86 try:
87 hhash = binascii.hexlify(self._getfile(tmpfile, filename, hash))
88 except StoreError, err:
89 ui.warn(err.longmessage())
90 hhash = ""
91
92 if hhash != hash:
93 if hhash != "":
94 ui.warn(_('%s: data corruption (expected %s, got %s)\n')
95 % (filename, hash, hhash))
96 tmpfile.close() # no-op if it's already closed
97 os.remove(tmpfilename)
98 missing.append(filename)
99 continue
100
101 if os.path.exists(cachefilename): # Windows
102 os.remove(cachefilename)
103 os.rename(tmpfilename, cachefilename)
104 lfutil.linktosystemcache(self.repo, hash)
105 success.append((filename, hhash))
106
107 ui.progress(_('getting largefiles'), None)
108 return (success, missing)
109
110 def verify(self, revs, contents=False):
111 '''Verify the existence (and, optionally, contents) of every big
112 file revision referenced by every changeset in revs.
113 Return 0 if all is well, non-zero on any errors.'''
114 write = self.ui.write
115 failed = False
116
117 write(_('searching %d changesets for largefiles\n') % len(revs))
118 verified = set() # set of (filename, filenode) tuples
119
120 for rev in revs:
121 cctx = self.repo[rev]
122 cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))
123
124 failed = lfutil.any_(self._verifyfile(
125 cctx, cset, contents, standin, verified) for standin in cctx) or failed
126
127 num_revs = len(verified)
128 num_lfiles = len(set([fname for (fname, fnode) in verified]))
129 if contents:
130 write(_('verified contents of %d revisions of %d largefiles\n')
131 % (num_revs, num_lfiles))
132 else:
133 write(_('verified existence of %d revisions of %d largefiles\n')
134 % (num_revs, num_lfiles))
135
136 return int(failed)
137
138 def _getfile(self, tmpfile, filename, hash):
139 '''Fetch one revision of one file from the store and write it
140 to tmpfile. Compute the hash of the file on-the-fly as it
141 downloads and return the binary hash. Close tmpfile. Raise
142 StoreError if unable to download the file (e.g. it does not
143 exist in the store).'''
144 raise NotImplementedError('abstract method')
145
146 def _verifyfile(self, cctx, cset, contents, standin, verified):
147 '''Perform the actual verification of a file in the store.
148 '''
149 raise NotImplementedError('abstract method')
150
151 import localstore, wirestore
152
153 _storeprovider = {
154 'file': [localstore.localstore],
155 'http': [wirestore.wirestore],
156 'https': [wirestore.wirestore],
157 'ssh': [wirestore.wirestore],
158 }
159
160 _scheme_re = re.compile(r'^([a-zA-Z0-9+.-]+)://')
161
162 # During clone this function is passed the src's ui object
163 # but it needs the dest's ui object so it can read out of
164 # the config file. Use repo.ui instead.
165 def _openstore(repo, remote=None, put=False):
166 ui = repo.ui
167
168 if not remote:
169 path = (getattr(repo, 'lfpullsource', None) or
170 ui.expandpath('default-push', 'default'))
171
172 # ui.expandpath() leaves 'default-push' and 'default' alone if
173 # they cannot be expanded: fallback to the empty string,
174 # meaning the current directory.
175 if path == 'default-push' or path == 'default':
176 path = ''
177 remote = repo
178 else:
179 remote = hg.peer(repo, {}, path)
180
181 # The path could be a scheme so use Mercurial's normal functionality
182 # to resolve the scheme to a repository and use its path
183 path = util.safehasattr(remote, 'url') and remote.url() or remote.path
184
185 match = _scheme_re.match(path)
186 if not match: # regular filesystem path
187 scheme = 'file'
188 else:
189 scheme = match.group(1)
190
191 try:
192 storeproviders = _storeprovider[scheme]
193 except KeyError:
194 raise util.Abort(_('unsupported URL scheme %r') % scheme)
195
196 for class_obj in storeproviders:
197 try:
198 return class_obj(ui, repo, remote)
199 except lfutil.storeprotonotcapable:
200 pass
201
202 raise util.Abort(_('%s does not appear to be a largefile store') % path)
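
_openstore() above dispatches on the URL scheme, treating a path without a scheme as a 'file' store. A hedged sketch of that dispatch using the same regular expression (schemeof is a hypothetical helper, and the URLs are made up):

    import re

    _scheme_re = re.compile(r'^([a-zA-Z0-9+.-]+)://')

    def schemeof(path):
        m = _scheme_re.match(path)
        return m.group(1) if m else 'file'  # no scheme: regular filesystem path

    assert schemeof('/srv/lfstore') == 'file'
    assert schemeof('https://hg.example.com/repo') == 'https'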
@@ -0,0 +1,49
1 = largefiles - manage large binary files =
2 This extension is based on Greg Ward's bfiles extension, which can be found
3 at http://mercurial.selenic.com/wiki/BfilesExtension.
4
5 == The largefile store ==
6
7 Largefile stores are, in the typical use case, centralized servers that
8 hold every past revision of a given binary file. Each largefile is
9 identified by its SHA-1 hash, and all interactions with the store take
10 one of the following forms:
11
12 - Download a largefile with this hash
13 - Upload a largefile with this hash
14 - Check if the store has a largefile with this hash
15
16 Largefile stores can take one of two forms:
17
18 - Directories on a network file share
19 - Mercurial wireproto servers, either via ssh or http (hgweb)
20
21 == The Local Repository ==
22
23 The local repository has a largefile cache in .hg/largefiles which holds a
24 subset of the largefiles needed. On clone, only the largefiles at tip are
25 downloaded. When largefiles are downloaded from the central store, a copy is
26 saved in this cache.
27
28 == The Global Cache ==
29
30 largefiles in a local repository cache are hardlinked to files in the global
31 cache. Before a file is downloaded we check if it is in the global cache.
32
33 == Implementation Details ==
34
35 Each largefile has a standin which is in .hglf. The standin is tracked by
36 Mercurial. The standin contains the SHA-1 hash of the largefile. When a
37 largefile is added/removed/copied/renamed, the same operation is applied to
38 the standin. Thus the history of the standin is the history of the largefile.
39
40 For performance reasons, the contents of a standin are only updated before a
41 commit. The add/remove/copy/rename Mercurial commands add, remove, copy, or
42 rename standins, but do not update their contents. The contents of a
43 standin will always be the hash of the largefile as of the last commit. To
44 support some commands (e.g. revert), some standins are temporarily updated but
45 are changed back after the command finishes.
46
47 A Mercurial dirstate object tracks the state of the largefiles. The dirstate
48 uses the last modified time and current size to detect if a file has changed
49 (without reading the entire contents of the file).
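
The last paragraph describes how the largefiles dirstate detects changes from size and modification time alone. A minimal sketch of that kind of check (probablychanged is a hypothetical helper, not the extension's actual code):

    import os

    def probablychanged(path, knownsize, knownmtime):
        # Cheap test: compare size and mtime before resorting to hashing
        # the entire file contents.
        st = os.stat(path)
        return st.st_size != knownsize or int(st.st_mtime) != knownmtime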
@@ -0,0 +1,481
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
11 import os
12 import shutil
13
14 from mercurial import util, match as match_, hg, node, context, error
15 from mercurial.i18n import _
16
17 import lfutil
18 import basestore
19
20 # -- Commands ----------------------------------------------------------
21
22 def lfconvert(ui, src, dest, *pats, **opts):
23 '''convert a normal repository to a largefiles repository
24
25 Convert repository SOURCE to a new repository DEST, identical to
26 SOURCE except that certain files will be converted as largefiles:
27 specifically, any file that matches any PATTERN *or* whose size is
28 above the minimum size threshold is converted as a largefile. The
29 size used to determine whether or not to track a file as a
30 largefile is the size of the first version of the file. The
31 minimum size can be specified either with --size or in
32 configuration as ``largefiles.size``.
33
34 After running this command you will need to make sure that
35 largefiles is enabled anywhere you intend to push the new
36 repository.
37
38 Use --tonormal to convert largefiles back to normal files; after
39 this, the DEST repository can be used without largefiles at all.'''
40
41 if opts['tonormal']:
42 tolfile = False
43 else:
44 tolfile = True
45 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
46 try:
47 rsrc = hg.repository(ui, src)
48 if not rsrc.local():
49 raise util.Abort(_('%s is not a local Mercurial repo') % src)
50 except error.RepoError, err:
51 ui.traceback()
52 raise util.Abort(err.args[0])
53 if os.path.exists(dest):
54 if not os.path.isdir(dest):
55 raise util.Abort(_('destination %s already exists') % dest)
56 elif os.listdir(dest):
57 raise util.Abort(_('destination %s is not empty') % dest)
58 try:
59 ui.status(_('initializing destination %s\n') % dest)
60 rdst = hg.repository(ui, dest, create=True)
61 if not rdst.local():
62 raise util.Abort(_('%s is not a local Mercurial repo') % dest)
63 except error.RepoError:
64 ui.traceback()
65 raise util.Abort(_('%s is not a repo') % dest)
66
67 success = False
68 try:
69 # Lock destination to prevent modification while it is converted to.
70 # Don't need to lock src because we are just reading from its history
71 # which can't change.
72 dst_lock = rdst.lock()
73
74 # Get a list of all changesets in the source. The easy way to do this
75 # is to simply walk the changelog, using changelog.nodesbetween().
76 # Take a look at mercurial/revlog.py:639 for more details.
77 # Use a generator instead of a list to decrease memory usage
78 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
79 rsrc.heads())[0])
80 revmap = {node.nullid: node.nullid}
81 if tolfile:
82 lfiles = set()
83 normalfiles = set()
84 if not pats:
85 pats = ui.config(lfutil.longname, 'patterns', default=())
86 if pats:
87 pats = pats.split(' ')
88 if pats:
89 matcher = match_.match(rsrc.root, '', list(pats))
90 else:
91 matcher = None
92
93 lfiletohash = {}
94 for ctx in ctxs:
95 ui.progress(_('converting revisions'), ctx.rev(),
96 unit=_('revision'), total=rsrc['tip'].rev())
97 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
98 lfiles, normalfiles, matcher, size, lfiletohash)
99 ui.progress(_('converting revisions'), None)
100
101 if os.path.exists(rdst.wjoin(lfutil.shortname)):
102 shutil.rmtree(rdst.wjoin(lfutil.shortname))
103
104 for f in lfiletohash.keys():
105 if os.path.isfile(rdst.wjoin(f)):
106 os.unlink(rdst.wjoin(f))
107 try:
108 os.removedirs(os.path.dirname(rdst.wjoin(f)))
109 except OSError:
110 pass
111
112 else:
113 for ctx in ctxs:
114 ui.progress(_('converting revisions'), ctx.rev(),
115 unit=_('revision'), total=rsrc['tip'].rev())
116 _addchangeset(ui, rsrc, rdst, ctx, revmap)
117
118 ui.progress(_('converting revisions'), None)
119 success = True
120 finally:
121 if not success:
122 # we failed, remove the new directory
123 shutil.rmtree(rdst.root)
124 dst_lock.release()
125
126 def _addchangeset(ui, rsrc, rdst, ctx, revmap):
127 # Convert src parents to dst parents
128 parents = []
129 for p in ctx.parents():
130 parents.append(revmap[p.node()])
131 while len(parents) < 2:
132 parents.append(node.nullid)
133
134 # Generate list of changed files
135 files = set(ctx.files())
136 if node.nullid not in parents:
137 mc = ctx.manifest()
138 mp1 = ctx.parents()[0].manifest()
139 mp2 = ctx.parents()[1].manifest()
140 files |= (set(mp1) | set(mp2)) - set(mc)
141 for f in mc:
142 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
143 files.add(f)
144
145 def getfilectx(repo, memctx, f):
146 if lfutil.standin(f) in files:
147 # if the file isn't in the manifest then it was removed
148 # or renamed, raise IOError to indicate this
149 try:
150 fctx = ctx.filectx(lfutil.standin(f))
151 except error.LookupError:
152 raise IOError()
153 renamed = fctx.renamed()
154 if renamed:
155 renamed = lfutil.splitstandin(renamed[0])
156
157 hash = fctx.data().strip()
158 path = lfutil.findfile(rsrc, hash)
159 ### TODO: What if the file is not cached?
160 data = ''
161 fd = None
162 try:
163 fd = open(path, 'rb')
164 data = fd.read()
165 finally:
166 if fd:
167 fd.close()
168 return context.memfilectx(f, data, 'l' in fctx.flags(),
169 'x' in fctx.flags(), renamed)
170 else:
171 try:
172 fctx = ctx.filectx(f)
173 except error.LookupError:
174 raise IOError()
175 renamed = fctx.renamed()
176 if renamed:
177 renamed = renamed[0]
178 data = fctx.data()
179 if f == '.hgtags':
180 newdata = []
181 for line in data.splitlines():
182 id, name = line.split(' ', 1)
183 newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
184 name))
185 data = ''.join(newdata)
186 return context.memfilectx(f, data, 'l' in fctx.flags(),
187 'x' in fctx.flags(), renamed)
188
189 dstfiles = []
190 for file in files:
191 if lfutil.isstandin(file):
192 dstfiles.append(lfutil.splitstandin(file))
193 else:
194 dstfiles.append(file)
195 # Commit
196 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
197 getfilectx, ctx.user(), ctx.date(), ctx.extra())
198 ret = rdst.commitctx(mctx)
199 rdst.dirstate.setparents(ret)
200 revmap[ctx.node()] = rdst.changelog.tip()
201
202 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
203 matcher, size, lfiletohash):
204 # Convert src parents to dst parents
205 parents = []
206 for p in ctx.parents():
207 parents.append(revmap[p.node()])
208 while len(parents) < 2:
209 parents.append(node.nullid)
210
211 # Generate list of changed files
212 files = set(ctx.files())
213 if node.nullid not in parents:
214 mc = ctx.manifest()
215 mp1 = ctx.parents()[0].manifest()
216 mp2 = ctx.parents()[1].manifest()
217 files |= (set(mp1) | set(mp2)) - set(mc)
218 for f in mc:
219 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
220 files.add(f)
221
222 dstfiles = []
223 for f in files:
224 if f not in lfiles and f not in normalfiles:
225 islfile = _islfile(f, ctx, matcher, size)
226 # If this file was renamed or copied then copy
227 # the lfileness of its predecessor
228 if f in ctx.manifest():
229 fctx = ctx.filectx(f)
230 renamed = fctx.renamed()
231 renamedlfile = renamed and renamed[0] in lfiles
232 islfile |= renamedlfile
233 if 'l' in fctx.flags():
234 if renamedlfile:
235 raise util.Abort(
236 _('Renamed/copied largefile %s becomes symlink')
237 % f)
238 islfile = False
239 if islfile:
240 lfiles.add(f)
241 else:
242 normalfiles.add(f)
243
244 if f in lfiles:
245 dstfiles.append(lfutil.standin(f))
246 # largefile in manifest if it has not been removed/renamed
247 if f in ctx.manifest():
248 if 'l' in ctx.filectx(f).flags():
249 if renamed and renamed[0] in lfiles:
250 raise util.Abort(_('largefile %s becomes symlink') % f)
251
252 # largefile was modified, update standins
253 fullpath = rdst.wjoin(f)
254 lfutil.createdir(os.path.dirname(fullpath))
255 m = util.sha1('')
256 m.update(ctx[f].data())
257 hash = m.hexdigest()
258 if f not in lfiletohash or lfiletohash[f] != hash:
259 fd = open(fullpath, 'wb')
260 try:
261 fd.write(ctx[f].data())
262 finally:
263 # open() failures propagate before fd exists, so fd is always valid here
264 fd.close()
265 executable = 'x' in ctx[f].flags()
266 os.chmod(fullpath, lfutil.getmode(executable))
267 lfutil.writestandin(rdst, lfutil.standin(f), hash,
268 executable)
269 lfiletohash[f] = hash
270 else:
271 # normal file
272 dstfiles.append(f)
273
274 def getfilectx(repo, memctx, f):
275 if lfutil.isstandin(f):
276 # if the file isn't in the manifest then it was removed
277 # or renamed, raise IOError to indicate this
278 srcfname = lfutil.splitstandin(f)
279 try:
280 fctx = ctx.filectx(srcfname)
281 except error.LookupError:
282 raise IOError()
283 renamed = fctx.renamed()
284 if renamed:
285 # standin is always a largefile because largefile-ness
286 # doesn't change after rename or copy
287 renamed = lfutil.standin(renamed[0])
288
289 return context.memfilectx(f, lfiletohash[srcfname], 'l' in
290 fctx.flags(), 'x' in fctx.flags(), renamed)
291 else:
292 try:
293 fctx = ctx.filectx(f)
294 except error.LookupError:
295 raise IOError()
296 renamed = fctx.renamed()
297 if renamed:
298 renamed = renamed[0]
299
300 data = fctx.data()
301 if f == '.hgtags':
302 newdata = []
303 for line in data.splitlines():
304 id, name = line.split(' ', 1)
305 newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
306 name))
307 data = ''.join(newdata)
308 return context.memfilectx(f, data, 'l' in fctx.flags(),
309 'x' in fctx.flags(), renamed)
310
311 # Commit
312 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
313 getfilectx, ctx.user(), ctx.date(), ctx.extra())
314 ret = rdst.commitctx(mctx)
315 rdst.dirstate.setparents(ret)
316 revmap[ctx.node()] = rdst.changelog.tip()
317
318 def _islfile(file, ctx, matcher, size):
319 '''Return true if file should be considered a largefile, i.e.
320 matcher matches it or it is larger than size.'''
321 # never store special .hg* files as largefiles
322 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
323 return False
324 if matcher and matcher(file):
325 return True
326 try:
327 return ctx.filectx(file).size() >= size * 1024 * 1024
328 except error.LookupError:
329 return False
330
331 def uploadlfiles(ui, rsrc, rdst, files):
332 '''upload largefiles to the central store'''
333
334 # Don't upload locally. All largefiles are in the system wide cache
335 # so the other repo can just get them from there.
336 if not files or rdst.local():
337 return
338
339 store = basestore._openstore(rsrc, rdst, put=True)
340
341 at = 0
342 files = filter(lambda h: not store.exists(h), files)
343 for hash in files:
344 ui.progress(_('uploading largefiles'), at, unit='largefile',
345 total=len(files))
346 source = lfutil.findfile(rsrc, hash)
347 if not source:
348 raise util.Abort(_('largefile %s missing from store'
349 ' (needs to be uploaded)') % hash)
350 # XXX check for errors here
351 store.put(source, hash)
352 at += 1
353 ui.progress(_('uploading largefiles'), None)
354
355 def verifylfiles(ui, repo, all=False, contents=False):
356 '''Verify that every big file revision in the current changeset
357 exists in the central store. With --contents, also verify that
358 the contents of each big file revision are correct (SHA-1 hash
359 matches the revision ID). With --all, check every changeset in
360 this repository.'''
361 if all:
362 # Pass a list to the function rather than an iterator because we know a
363 # list will work.
364 revs = range(len(repo))
365 else:
366 revs = ['.']
367
368 store = basestore._openstore(repo)
369 return store.verify(revs, contents=contents)
370
371 def cachelfiles(ui, repo, node):
372 '''cachelfiles ensures that all largefiles needed by the specified revision
373 are present in the repository's largefile cache.
374
375 returns a tuple (cached, missing). cached is the list of files downloaded
376 by this operation; missing is the list of files that were needed but could
377 not be found.'''
378 lfiles = lfutil.listlfiles(repo, node)
379 toget = []
380
381 for lfile in lfiles:
382 expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
383 # if it exists and its hash matches, it might have been locally
384 # modified before updating and the user chose 'local'. in this case,
385 # it will not be in any store, so don't look for it.
386 if ((not os.path.exists(repo.wjoin(lfile)) or
387 expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
388 not lfutil.findfile(repo, expectedhash)):
389 toget.append((lfile, expectedhash))
390
391 if toget:
392 store = basestore._openstore(repo)
393 ret = store.get(toget)
394 return ret
395
396 return ([], [])
397
398 def updatelfiles(ui, repo, filelist=None, printmessage=True):
399 wlock = repo.wlock()
400 try:
401 lfdirstate = lfutil.openlfdirstate(ui, repo)
402 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
403
404 if filelist is not None:
405 lfiles = [f for f in lfiles if f in filelist]
406
407 printed = False
408 if printmessage and lfiles:
409 ui.status(_('getting changed largefiles\n'))
410 printed = True
411 cachelfiles(ui, repo, '.')
412
413 updated, removed = 0, 0
414 for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
415 # increment the appropriate counter according to _updatelfile's
416 # return value
417 updated += i > 0 and i or 0
418 removed -= i < 0 and i or 0
419 if printmessage and (removed or updated) and not printed:
420 ui.status(_('getting changed largefiles\n'))
421 printed = True
422
423 lfdirstate.write()
424 if printed and printmessage:
425 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
426 removed))
427 finally:
428 wlock.release()
429
430 def _updatelfile(repo, lfdirstate, lfile):
431 '''updates a single largefile and copies the state of its standin from
432 the repository's dirstate to its state in the lfdirstate.
433
434 returns 1 if the file was modified, -1 if the file was removed, 0 if the
435 file was unchanged, and None if the needed largefile was missing from the
436 cache.'''
437 ret = 0
438 abslfile = repo.wjoin(lfile)
439 absstandin = repo.wjoin(lfutil.standin(lfile))
440 if os.path.exists(absstandin):
441 if os.path.exists(absstandin+'.orig'):
442 shutil.copyfile(abslfile, abslfile+'.orig')
443 expecthash = lfutil.readstandin(repo, lfile)
444 if (expecthash != '' and
445 (not os.path.exists(abslfile) or
446 expecthash != lfutil.hashfile(abslfile))):
447 if not lfutil.copyfromcache(repo, expecthash, lfile):
448 return None # don't try to set the mode or update the dirstate
449 ret = 1
450 mode = os.stat(absstandin).st_mode
451 if mode != os.stat(abslfile).st_mode:
452 os.chmod(abslfile, mode)
453 ret = 1
454 else:
455 if os.path.exists(abslfile):
456 os.unlink(abslfile)
457 ret = -1
458 state = repo.dirstate[lfutil.standin(lfile)]
459 if state == 'n':
460 lfdirstate.normal(lfile)
461 elif state == 'r':
462 lfdirstate.remove(lfile)
463 elif state == 'a':
464 lfdirstate.add(lfile)
465 elif state == '?':
466 lfdirstate.drop(lfile)
467 return ret
468
469 # -- hg commands declarations ------------------------------------------------
470
471 cmdtable = {
472 'lfconvert': (lfconvert,
473 [('s', 'size', '',
474 _('minimum size (MB) for files to be converted '
475 'as largefiles'),
476 'SIZE'),
477 ('', 'tonormal', False,
478 _('convert from a largefiles repo to a normal repo')),
479 ],
480 _('hg lfconvert SOURCE DEST [FILE ...]')),
481 }
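
updatelfiles() above folds _updatelfile()'s return codes (1 modified, -1 removed, 0 unchanged, None missing) into two counters via a Python 2 and/or idiom. The same bookkeeping spelled out on sample data:

    results = [1, 0, -1, 1, None]  # None: needed largefile missing from cache
    updated = sum(1 for i in results if i is not None and i > 0)
    removed = sum(1 for i in results if i is not None and i < 0)
    assert (updated, removed) == (2, 1)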
@@ -0,0 +1,448
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''largefiles utility code: must not import other modules in this package.'''
10
11 import os
12 import errno
13 import shutil
14 import stat
15 import hashlib
16
17 from mercurial import dirstate, httpconnection, match as match_, util, scmutil
18 from mercurial.i18n import _
19
20 shortname = '.hglf'
21 longname = 'largefiles'
22
23
24 # -- Portability wrappers ----------------------------------------------
25
26 def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
27 return dirstate.walk(matcher, [], unknown, ignored)
28
29 def repo_add(repo, list):
30 add = repo[None].add
31 return add(list)
32
33 def repo_remove(repo, list, unlink=False):
34 def remove(list, unlink):
35 wlock = repo.wlock()
36 try:
37 if unlink:
38 for f in list:
39 try:
40 util.unlinkpath(repo.wjoin(f))
41 except OSError, inst:
42 if inst.errno != errno.ENOENT:
43 raise
44 repo[None].forget(list)
45 finally:
46 wlock.release()
47 return remove(list, unlink=unlink)
48
49 def repo_forget(repo, list):
50 forget = repo[None].forget
51 return forget(list)
52
53 def findoutgoing(repo, remote, force):
54 from mercurial import discovery
55 common, _anyinc, _heads = discovery.findcommonincoming(repo,
56 remote, force=force)
57 return repo.changelog.findmissing(common)
58
59 # -- Private worker functions ------------------------------------------
60
61 def getminsize(ui, assumelfiles, opt, default=10):
62 lfsize = opt
63 if not lfsize and assumelfiles:
64 lfsize = ui.config(longname, 'size', default=default)
65 if lfsize:
66 try:
67 lfsize = float(lfsize)
68 except ValueError:
69 raise util.Abort(_('largefiles: size must be a number (not %s)\n')
70 % lfsize)
71 if lfsize is None:
72 raise util.Abort(_('minimum size for largefiles must be specified'))
73 return lfsize
74
75 def link(src, dest):
76 try:
77 util.oslink(src, dest)
78 except OSError:
79 # if hardlinks fail, fallback on copy
80 shutil.copyfile(src, dest)
81 os.chmod(dest, os.stat(src).st_mode)
82
83 def systemcachepath(ui, hash):
84 path = ui.config(longname, 'systemcache', None)
85 if path:
86 path = os.path.join(path, hash)
87 else:
88 if os.name == 'nt':
89 appdata = os.getenv('LOCALAPPDATA', os.getenv('APPDATA'))
90 path = os.path.join(appdata, longname, hash)
91 elif os.name == 'posix':
92 path = os.path.join(os.getenv('HOME'), '.' + longname, hash)
93 else:
94 raise util.Abort(_('unknown operating system: %s\n') % os.name)
95 return path
96
97 def insystemcache(ui, hash):
98 return os.path.exists(systemcachepath(ui, hash))
99
100 def findfile(repo, hash):
101 if incache(repo, hash):
102 repo.ui.note(_('Found %s in cache\n') % hash)
103 return cachepath(repo, hash)
104 if insystemcache(repo.ui, hash):
105 repo.ui.note(_('Found %s in system cache\n') % hash)
106 return systemcachepath(repo.ui, hash)
107 return None
108
109 class largefiles_dirstate(dirstate.dirstate):
110 def __getitem__(self, key):
111 return super(largefiles_dirstate, self).__getitem__(unixpath(key))
112 def normal(self, f):
113 return super(largefiles_dirstate, self).normal(unixpath(f))
114 def remove(self, f):
115 return super(largefiles_dirstate, self).remove(unixpath(f))
116 def add(self, f):
117 return super(largefiles_dirstate, self).add(unixpath(f))
118 def drop(self, f):
119 return super(largefiles_dirstate, self).drop(unixpath(f))
120 def forget(self, f):
121 return super(largefiles_dirstate, self).forget(unixpath(f))
122
123 def openlfdirstate(ui, repo):
124 '''
125 Return a dirstate object that tracks largefiles: i.e. its root is
126 the repo root, but it is saved in .hg/largefiles/dirstate.
127 '''
128 admin = repo.join(longname)
129 opener = scmutil.opener(admin)
130 if util.safehasattr(repo.dirstate, '_validate'):
131 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
132 repo.dirstate._validate)
133 else:
134 lfdirstate = largefiles_dirstate(opener, ui, repo.root)
135
136 # If the largefiles dirstate does not exist, populate and create
137 # it. This ensures that we create it on the first meaningful
138 # largefiles operation in a new clone. It also gives us an easy
139 # way to forcibly rebuild largefiles state:
140 # rm .hg/largefiles/dirstate && hg status
141 # Or even, if things are really messed up:
142 # rm -rf .hg/largefiles && hg status
143 if not os.path.exists(os.path.join(admin, 'dirstate')):
144 util.makedirs(admin)
145 matcher = getstandinmatcher(repo)
146 for standin in dirstate_walk(repo.dirstate, matcher):
147 lfile = splitstandin(standin)
148 hash = readstandin(repo, lfile)
149 lfdirstate.normallookup(lfile)
150 try:
151 if hash == hashfile(repo.wjoin(lfile)):
152 lfdirstate.normal(lfile)
153 except IOError, err:
154 if err.errno != errno.ENOENT:
155 raise
156
157 lfdirstate.write()
158
159 return lfdirstate
160
161 def lfdirstate_status(lfdirstate, repo, rev):
162 wlock = repo.wlock()
163 try:
164 match = match_.always(repo.root, repo.getcwd())
165 s = lfdirstate.status(match, [], False, False, False)
166 unsure, modified, added, removed, missing, unknown, ignored, clean = s
167 for lfile in unsure:
168 if repo[rev][standin(lfile)].data().strip() != \
169 hashfile(repo.wjoin(lfile)):
170 modified.append(lfile)
171 else:
172 clean.append(lfile)
173 lfdirstate.normal(lfile)
174 lfdirstate.write()
175 finally:
176 wlock.release()
177 return (modified, added, removed, missing, unknown, ignored, clean)
178
179 def listlfiles(repo, rev=None, matcher=None):
180 '''return a list of largefiles in the working copy or the
181 specified changeset'''
182
183 if matcher is None:
184 matcher = getstandinmatcher(repo)
185
186 # ignore unknown files in working directory
187 return [splitstandin(f)
188 for f in repo[rev].walk(matcher)
189 if rev is not None or repo.dirstate[f] != '?']
190
191 def incache(repo, hash):
192 return os.path.exists(cachepath(repo, hash))
193
194 def createdir(dir):
195 if not os.path.exists(dir):
196 os.makedirs(dir)
197
198 def cachepath(repo, hash):
199 return repo.join(os.path.join(longname, hash))
200
201 def copyfromcache(repo, hash, filename):
202 '''Copy the specified largefile from the repo or system cache to
203 filename in the repository. Return true on success or false if the
204 file was not found in either cache (which should not happen:
205 this is meant to be called only after ensuring that the needed
206 largefile exists in the cache).'''
207 path = findfile(repo, hash)
208 if path is None:
209 return False
210 util.makedirs(os.path.dirname(repo.wjoin(filename)))
211 shutil.copy(path, repo.wjoin(filename))
212 return True
213
214 def copytocache(repo, rev, file, uploaded=False):
215 hash = readstandin(repo, file)
216 if incache(repo, hash):
217 return
218 copytocacheabsolute(repo, repo.wjoin(file), hash)
219
220 def copytocacheabsolute(repo, file, hash):
221 createdir(os.path.dirname(cachepath(repo, hash)))
222 if insystemcache(repo.ui, hash):
223 link(systemcachepath(repo.ui, hash), cachepath(repo, hash))
224 else:
225 shutil.copyfile(file, cachepath(repo, hash))
226 os.chmod(cachepath(repo, hash), os.stat(file).st_mode)
227 linktosystemcache(repo, hash)
228
229 def linktosystemcache(repo, hash):
230 createdir(os.path.dirname(systemcachepath(repo.ui, hash)))
231 link(cachepath(repo, hash), systemcachepath(repo.ui, hash))
232
233 def getstandinmatcher(repo, pats=[], opts={}):
234 '''Return a match object that applies pats to the standin directory'''
235 standindir = repo.pathto(shortname)
236 if pats:
237 # patterns supplied: search standin directory relative to current dir
238 cwd = repo.getcwd()
239 if os.path.isabs(cwd):
240 # cwd is an absolute path for hg -R <reponame>
241 # work relative to the repository root in this case
242 cwd = ''
243 pats = [os.path.join(standindir, cwd, pat) for pat in pats]
244 elif os.path.isdir(standindir):
245 # no patterns: relative to repo root
246 pats = [standindir]
247 else:
248 # no patterns and no standin dir: return matcher that matches nothing
249 match = match_.match(repo.root, None, [], exact=True)
250 match.matchfn = lambda f: False
251 return match
252 return getmatcher(repo, pats, opts, showbad=False)
253
254 def getmatcher(repo, pats=[], opts={}, showbad=True):
255 '''Wrapper around scmutil.match() that adds showbad: if false,
256 neuter the match object's bad() method so it does not print any
257 warnings about missing files or directories.'''
258 match = scmutil.match(repo[None], pats, opts)
259
260 if not showbad:
261 match.bad = lambda f, msg: None
262 return match
263
264 def composestandinmatcher(repo, rmatcher):
265 '''Return a matcher that accepts standins corresponding to the
266 files accepted by rmatcher. Pass the list of files in the matcher
267 as the paths specified by the user.'''
268 smatcher = getstandinmatcher(repo, rmatcher.files())
269 isstandin = smatcher.matchfn
270 def composed_matchfn(f):
271 return isstandin(f) and rmatcher.matchfn(splitstandin(f))
272 smatcher.matchfn = composed_matchfn
273
274 return smatcher
275
276 def standin(filename):
277 '''Return the repo-relative path to the standin for the specified big
278 file.'''
279 # Notes:
280 # 1) Most callers want an absolute path, but _create_standin() needs
281 # it repo-relative so lfadd() can pass it to repo_add(). So leave
282 # it up to the caller to use repo.wjoin() to get an absolute path.
283 # 2) Join with '/' because that's what dirstate always uses, even on
284 # Windows. Change existing separator to '/' first in case we are
285 # passed filenames from an external source (like the command line).
286 return shortname + '/' + filename.replace(os.sep, '/')
287
288 def isstandin(filename):
289 '''Return true if filename is a big file standin. filename must be
290 in Mercurial's internal form (slash-separated).'''
291 return filename.startswith(shortname + '/')
292
293 def splitstandin(filename):
294 # Split on / because that's what dirstate always uses, even on Windows.
295 # Change local separator to / first just in case we are passed filenames
296 # from an external source (like the command line).
297 bits = filename.replace(os.sep, '/').split('/', 1)
298 if len(bits) == 2 and bits[0] == shortname:
299 return bits[1]
300 else:
301 return None
302
303 def updatestandin(repo, standin):
304 file = repo.wjoin(splitstandin(standin))
305 if os.path.exists(file):
306 hash = hashfile(file)
307 executable = getexecutable(file)
308 writestandin(repo, standin, hash, executable)
309
310 def readstandin(repo, filename, node=None):
311 '''read hex hash from standin for filename at given node, or working
312 directory if no node is given'''
313 return repo[node][standin(filename)].data().strip()
314
315 def writestandin(repo, standin, hash, executable):
316 '''write hash to <repo.root>/<standin>'''
317 writehash(hash, repo.wjoin(standin), executable)
318
319 def copyandhash(instream, outfile):
320 '''Read bytes from instream (iterable) and write them to outfile,
321 computing the SHA-1 hash of the data along the way. Close outfile
322 when done and return the binary hash.'''
323 hasher = util.sha1('')
324 for data in instream:
325 hasher.update(data)
326 outfile.write(data)
327
328 # Blecch: closing a file that somebody else opened is rude and
329 # wrong. But it's so darn convenient and practical! After all,
330 # outfile was opened just to copy and hash.
331 outfile.close()
332
333 return hasher.digest()
334
335 def hashrepofile(repo, file):
336 return hashfile(repo.wjoin(file))
337
338 def hashfile(file):
339 if not os.path.exists(file):
340 return ''
341 hasher = util.sha1('')
342 fd = open(file, 'rb')
343 for data in blockstream(fd):
344 hasher.update(data)
345 fd.close()
346 return hasher.hexdigest()
347
348 class limitreader(object):
349 def __init__(self, f, limit):
350 self.f = f
351 self.limit = limit
352
353 def read(self, length):
354 if self.limit == 0:
355 return ''
356 length = length > self.limit and self.limit or length
357 self.limit -= length
358 return self.f.read(length)
359
360 def close(self):
361 pass
362
363 def blockstream(infile, blocksize=128 * 1024):
364 """Generator that yields blocks of data from infile and closes infile."""
365 while True:
366 data = infile.read(blocksize)
367 if not data:
368 break
369 yield data
370 # same blecch as copyandhash() above
371 infile.close()
372
373 def readhash(filename):
374 rfile = open(filename, 'rb')
375 hash = rfile.read(40)
376 rfile.close()
377 if len(hash) < 40:
378 raise util.Abort(_('bad hash in \'%s\' (only %d bytes long)')
379 % (filename, len(hash)))
380 return hash
381
382 def writehash(hash, filename, executable):
383 util.makedirs(os.path.dirname(filename))
384 if os.path.exists(filename):
385 os.unlink(filename)
386 wfile = open(filename, 'wb')
387
388 try:
389 wfile.write(hash)
390 wfile.write('\n')
391 finally:
392 wfile.close()
393 if os.path.exists(filename):
394 os.chmod(filename, getmode(executable))
395
396 def getexecutable(filename):
397 mode = os.stat(filename).st_mode
398 return ((mode & stat.S_IXUSR) and
399 (mode & stat.S_IXGRP) and
400 (mode & stat.S_IXOTH))
401
402 def getmode(executable):
403 if executable:
404 return 0755
405 else:
406 return 0644
407
408 def urljoin(first, second, *arg):
409 def join(left, right):
410 if not left.endswith('/'):
411 left += '/'
412 if right.startswith('/'):
413 right = right[1:]
414 return left + right
415
416 url = join(first, second)
417 for a in arg:
418 url = join(url, a)
419 return url
420
421 def hexsha1(data):
422 """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
423 object data"""
424 h = hashlib.sha1()
425 for chunk in util.filechunkiter(data):
426 h.update(chunk)
427 return h.hexdigest()
428
429 def httpsendfile(ui, filename):
430 return httpconnection.httpsendfile(ui, filename, 'rb')
431
432 def unixpath(path):
433 '''Return a version of path normalized for use with the lfdirstate.'''
434 return os.path.normpath(path).replace(os.sep, '/')
435
436 def islfilesrepo(repo):
437 return ('largefiles' in repo.requirements and
438 any_(shortname + '/' in f[0] for f in repo.store.datafiles()))
439
440 def any_(gen):
441 for x in gen:
442 if x:
443 return True
444 return False
445
446 class storeprotonotcapable(BaseException):
447 def __init__(self, storetypes):
448 self.storetypes = storetypes
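
standin() and splitstandin() above define the path convention the rest of the extension relies on: prefix with '.hglf/' going in, strip it coming out. A self-contained sketch of the round trip (the file names are made up):

    shortname = '.hglf'

    def standin(filename):
        return shortname + '/' + filename

    def splitstandin(filename):
        bits = filename.split('/', 1)
        if len(bits) == 2 and bits[0] == shortname:
            return bits[1]
        return None

    assert splitstandin(standin('content/audio/drums.wav')) == 'content/audio/drums.wav'
    assert splitstandin('normal/file.txt') is None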
@@ -0,0 +1,71
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''store class for local filesystem'''
10
11 import os
12
13 from mercurial import util
14 from mercurial.i18n import _
15
16 import lfutil
17 import basestore
18
19 class localstore(basestore.basestore):
20 '''Because there is a system-wide cache, the local store always
21 uses that cache. Since the cache is updated elsewhere, we can
22 just read from it here as if it were the store.'''
23
24 def __init__(self, ui, repo, remote):
25 url = os.path.join(remote.path, '.hg', lfutil.longname)
26 super(localstore, self).__init__(ui, repo, util.expandpath(url))
27
28 def put(self, source, hash):
29 '''Any file that is put must already be in the system-wide
30 cache so do nothing.'''
31 return
32
33 def exists(self, hash):
34 return lfutil.insystemcache(self.repo.ui, hash)
35
36 def _getfile(self, tmpfile, filename, hash):
37 if lfutil.insystemcache(self.ui, hash):
38 return lfutil.systemcachepath(self.ui, hash)
39 raise basestore.StoreError(filename, hash, '',
40 _("Can't get file locally"))
41
42 def _verifyfile(self, cctx, cset, contents, standin, verified):
43 filename = lfutil.splitstandin(standin)
44 if not filename:
45 return False
46 fctx = cctx[standin]
47 key = (filename, fctx.filenode())
48 if key in verified:
49 return False
50
51 expecthash = fctx.data()[0:40]
52 verified.add(key)
53 if not lfutil.insystemcache(self.ui, expecthash):
54 self.ui.warn(
55 _('changeset %s: %s missing\n'
56 ' (looked for hash %s)\n')
57 % (cset, filename, expecthash))
58 return True # failed
59
60 if contents:
61 storepath = lfutil.systemcachepath(self.ui, expecthash)
62 actualhash = lfutil.hashfile(storepath)
63 if actualhash != expecthash:
64 self.ui.warn(
65 _('changeset %s: %s: contents differ\n'
66 ' (%s:\n'
67 ' expected hash %s,\n'
68 ' but got %s)\n')
69 % (cset, filename, storepath, expecthash, actualhash))
70 return True # failed
71 return False
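
The contents check in _verifyfile() above boils down to comparing the 40-character hex hash stored in the standin with the SHA-1 of the cached file. A hedged sketch of that comparison (contentsok is a hypothetical helper):

    import hashlib

    def contentsok(standindata, cachedbytes):
        expecthash = standindata[0:40]  # the standin holds the hex hash
        return hashlib.sha1(cachedbytes).hexdigest() == expecthash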
@@ -0,0 +1,830
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10
11 import os
12 import copy
13
14 from mercurial import hg, commands, util, cmdutil, match as match_, node, \
15 archival, error, merge
16 from mercurial.i18n import _
17 from mercurial.node import hex
18 from hgext import rebase
19 import lfutil
20
21 try:
22 from mercurial import scmutil
23 except ImportError:
24 pass
25
26 import lfutil
27 import lfcommands
28
29 def installnormalfilesmatchfn(manifest):
30 '''overrides scmutil.match so that the matcher it returns will ignore all
31 largefiles'''
32 oldmatch = None # for the closure
33 def override_match(repo, pats=[], opts={}, globbed=False,
34 default='relpath'):
35 match = oldmatch(repo, pats, opts, globbed, default)
36 m = copy.copy(match)
37 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
38 manifest)
39 m._files = filter(notlfile, m._files)
40 m._fmap = set(m._files)
41 orig_matchfn = m.matchfn
42 m.matchfn = lambda f: notlfile(f) and orig_matchfn(f) or None
43 return m
44 oldmatch = installmatchfn(override_match)
45
46 def installmatchfn(f):
47 oldmatch = scmutil.match
48 setattr(f, 'oldmatch', oldmatch)
49 scmutil.match = f
50 return oldmatch
51
52 def restorematchfn():
53 '''restores scmutil.match to what it was before installnormalfilesmatchfn
54 was called. no-op if scmutil.match is its original function.
55
56 Note that n calls to installnormalfilesmatchfn will require n calls to
57 restorematchfn to reverse.'''
58 scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)
59
60 # -- Wrappers: modify existing commands --------------------------------
61
62 # Add works by going through the files that the user wanted to add and
63 # checking if they should be added as largefiles. Then it makes a new
64 # matcher which matches only the normal files and runs the original
65 # version of add.
66 def override_add(orig, ui, repo, *pats, **opts):
67 large = opts.pop('large', None)
68 lfsize = lfutil.getminsize(
69 ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
70
71 lfmatcher = None
72 if os.path.exists(repo.wjoin(lfutil.shortname)):
73 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
74 if lfpats:
75 lfmatcher = match_.match(repo.root, '', list(lfpats))
76
77 lfnames = []
78 m = scmutil.match(repo[None], pats, opts)
79 m.bad = lambda x, y: None
80 wctx = repo[None]
81 for f in repo.walk(m):
82 exact = m.exact(f)
83 lfile = lfutil.standin(f) in wctx
84 nfile = f in wctx
85 exists = lfile or nfile
86
87 # Don't warn the user when they attempt to add a normal tracked file.
88 # The normal add code will do that for us.
89 if exact and exists:
90 if lfile:
91 ui.warn(_('%s already a largefile\n') % f)
92 continue
93
94 if exact or not exists:
95 abovemin = (lfsize and
96 os.path.getsize(repo.wjoin(f)) >= lfsize * 1024 * 1024)
97 if large or abovemin or (lfmatcher and lfmatcher(f)):
98 lfnames.append(f)
99 if ui.verbose or not exact:
100 ui.status(_('adding %s as a largefile\n') % m.rel(f))
101
102 bad = []
103 standins = []
104
105 # Need to lock, otherwise there could be a race condition between
106 # when standins are created and added to the repo.
107 wlock = repo.wlock()
108 try:
109 if not opts.get('dry_run'):
110 lfdirstate = lfutil.openlfdirstate(ui, repo)
111 for f in lfnames:
112 standinname = lfutil.standin(f)
113 lfutil.writestandin(repo, standinname, hash='',
114 executable=lfutil.getexecutable(repo.wjoin(f)))
115 standins.append(standinname)
116 if lfdirstate[f] == 'r':
117 lfdirstate.normallookup(f)
118 else:
119 lfdirstate.add(f)
120 lfdirstate.write()
121 bad += [lfutil.splitstandin(f)
122 for f in lfutil.repo_add(repo, standins)
123 if f in m.files()]
124 finally:
125 wlock.release()
126
127 installnormalfilesmatchfn(repo[None].manifest())
128 result = orig(ui, repo, *pats, **opts)
129 restorematchfn()
130
131 return (result == 1 or bad) and 1 or 0
132
133 def override_remove(orig, ui, repo, *pats, **opts):
134 manifest = repo[None].manifest()
135 installnormalfilesmatchfn(manifest)
136 orig(ui, repo, *pats, **opts)
137 restorematchfn()
138
139 after, force = opts.get('after'), opts.get('force')
140 if not pats and not after:
141 raise util.Abort(_('no files specified'))
142 m = scmutil.match(repo[None], pats, opts)
143 try:
144 repo.lfstatus = True
145 s = repo.status(match=m, clean=True)
146 finally:
147 repo.lfstatus = False
148 modified, added, deleted, clean = [[f for f in list
149 if lfutil.standin(f) in manifest]
150 for list in [s[0], s[1], s[3], s[6]]]
151
152 def warn(files, reason):
153 for f in files:
154 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
155 % (m.rel(f), reason))
156
157 if force:
158 remove, forget = modified + deleted + clean, added
159 elif after:
160 remove, forget = deleted, []
161 warn(modified + added + clean, _('still exists'))
162 else:
163 remove, forget = deleted + clean, []
164 warn(modified, _('is modified'))
165 warn(added, _('has been marked for add'))
166
167 for f in sorted(remove + forget):
168 if ui.verbose or not m.exact(f):
169 ui.status(_('removing %s\n') % m.rel(f))
170
171 # Need to lock because standin files are deleted then removed from the
172 # repository and we could race in between.
173 wlock = repo.wlock()
174 try:
175 lfdirstate = lfutil.openlfdirstate(ui, repo)
176 for f in remove:
177 if not after:
178 os.unlink(repo.wjoin(f))
179 currentdir = os.path.split(f)[0]
180 while currentdir and not os.listdir(repo.wjoin(currentdir)):
181 os.rmdir(repo.wjoin(currentdir))
182 currentdir = os.path.split(currentdir)[0]
183 lfdirstate.remove(f)
184 lfdirstate.write()
185
186 forget = [lfutil.standin(f) for f in forget]
187 remove = [lfutil.standin(f) for f in remove]
188 lfutil.repo_forget(repo, forget)
189 lfutil.repo_remove(repo, remove, unlink=True)
190 finally:
191 wlock.release()
192
193 def override_status(orig, ui, repo, *pats, **opts):
194 try:
195 repo.lfstatus = True
196 return orig(ui, repo, *pats, **opts)
197 finally:
198 repo.lfstatus = False
199
200 def override_log(orig, ui, repo, *pats, **opts):
201 try:
202 repo.lfstatus = True
203 orig(ui, repo, *pats, **opts)
204 finally:
205 repo.lfstatus = False
206
207 def override_verify(orig, ui, repo, *pats, **opts):
208 large = opts.pop('large', False)
209 all = opts.pop('lfa', False)
210 contents = opts.pop('lfc', False)
211
212 result = orig(ui, repo, *pats, **opts)
213 if large:
214 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
215 return result
216
217 # Override needs to refresh standins so that update's normal merge
218 # will go through properly. Then the other update hook (overriding repo.update)
219 # will get the new files. Filemerge is also overridden so that the merge
220 # will merge standins correctly.
221 def override_update(orig, ui, repo, *pats, **opts):
222 lfdirstate = lfutil.openlfdirstate(ui, repo)
223 s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
224 False, False)
225 (unsure, modified, added, removed, missing, unknown, ignored, clean) = s
226
227 # Need to lock between the standins getting updated and their
228 # largefiles getting updated
229 wlock = repo.wlock()
230 try:
231 if opts['check']:
232 mod = len(modified) > 0
233 for lfile in unsure:
234 standin = lfutil.standin(lfile)
235 if repo['.'][standin].data().strip() != \
236 lfutil.hashfile(repo.wjoin(lfile)):
237 mod = True
238 else:
239 lfdirstate.normal(lfile)
240 lfdirstate.write()
241 if mod:
242 raise util.Abort(_('uncommitted local changes'))
243 # XXX handle removed differently
244 if not opts['clean']:
245 for lfile in unsure + modified + added:
246 lfutil.updatestandin(repo, lfutil.standin(lfile))
247 finally:
248 wlock.release()
249 return orig(ui, repo, *pats, **opts)
250
251 # Override filemerge to prompt the user about how they wish to merge
252 # largefiles. This will handle identical edits, and copy/rename +
253 # edit without prompting the user.
254 def override_filemerge(origfn, repo, mynode, orig, fcd, fco, fca):
255 # Use better variable names here. Because this is a wrapper we cannot
256 # change the variable names in the function declaration.
257 fcdest, fcother, fcancestor = fcd, fco, fca
258 if not lfutil.isstandin(orig):
259 return origfn(repo, mynode, orig, fcdest, fcother, fcancestor)
260 else:
261 if not fcother.cmp(fcdest): # files identical?
262 return None
263
264 # backwards, use working dir parent as ancestor
265 if fcancestor == fcother:
266 fcancestor = fcdest.parents()[0]
267
268 if orig != fcother.path():
269 repo.ui.status(_('merging %s and %s to %s\n')
270 % (lfutil.splitstandin(orig),
271 lfutil.splitstandin(fcother.path()),
272 lfutil.splitstandin(fcdest.path())))
273 else:
274 repo.ui.status(_('merging %s\n')
275 % lfutil.splitstandin(fcdest.path()))
276
277 if fcancestor.path() != fcother.path() and fcother.data() == \
278 fcancestor.data():
279 return 0
280 if fcancestor.path() != fcdest.path() and fcdest.data() == \
281 fcancestor.data():
282 repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
283 return 0
284
285 if repo.ui.promptchoice(_('largefile %s has a merge conflict\n'
286 'keep (l)ocal or take (o)ther?') %
287 lfutil.splitstandin(orig),
288 (_('&Local'), _('&Other')), 0) == 0:
289 return 0
290 else:
291 repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
292 return 0
293
294 # Copy first changes the matchers to match standins instead of
295 # largefiles. Then it overrides util.copyfile; in that function it
296 # checks if the destination largefile already exists. It also keeps a
297 # list of copied files so that the largefiles can be copied and the
298 # dirstate updated.
299 def override_copy(orig, ui, repo, pats, opts, rename=False):
300 # doesn't remove largefile on rename
301 if len(pats) < 2:
302 # this isn't legal, let the original function deal with it
303 return orig(ui, repo, pats, opts, rename)
304
305 def makestandin(relpath):
306 path = scmutil.canonpath(repo.root, repo.getcwd(), relpath)
307 return os.path.join(os.path.relpath('.', repo.getcwd()),
308 lfutil.standin(path))
309
310 fullpats = scmutil.expandpats(pats)
311 dest = fullpats[-1]
312
313 if os.path.isdir(dest):
314 if not os.path.isdir(makestandin(dest)):
315 os.makedirs(makestandin(dest))
316 # This could copy both lfiles and normal files in one command,
317 # but we don't want to do that. First replace their matcher to
318 # only match normal files and run it, then replace it to just
319 # match largefiles and run it again.
320 nonormalfiles = False
321 nolfiles = False
322 try:
323 installnormalfilesmatchfn(repo[None].manifest())
324 result = orig(ui, repo, pats, opts, rename)
325 except util.Abort, e:
326 if str(e) != 'no files to copy':
327 raise e
328 else:
329 nonormalfiles = True
330 result = 0
331 finally:
332 restorematchfn()
333
334 # The first rename can cause our current working directory to be removed.
335 # In that case there is nothing left to copy/rename so just quit.
336 try:
337 repo.getcwd()
338 except OSError:
339 return result
340
341 try:
342 # When we call orig below it creates the standins but we don't add them
343 # to the dir state until later so lock during that time.
344 wlock = repo.wlock()
345
346 manifest = repo[None].manifest()
347 oldmatch = None # for the closure
348 def override_match(repo, pats=[], opts={}, globbed=False,
349 default='relpath'):
350 newpats = []
351 # The patterns were previously mangled to add the standin
352 # directory; we need to remove that now
353 for pat in pats:
354 if match_.patkind(pat) is None and lfutil.shortname in pat:
355 newpats.append(pat.replace(lfutil.shortname, ''))
356 else:
357 newpats.append(pat)
358 match = oldmatch(repo, newpats, opts, globbed, default)
359 m = copy.copy(match)
360 lfile = lambda f: lfutil.standin(f) in manifest
361 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
362 m._fmap = set(m._files)
363 orig_matchfn = m.matchfn
364 m.matchfn = lambda f: (lfutil.isstandin(f) and
365 lfile(lfutil.splitstandin(f)) and
366 orig_matchfn(lfutil.splitstandin(f)) or
367 None)
368 return m
369 oldmatch = installmatchfn(override_match)
370 listpats = []
371 for pat in pats:
372 if match_.patkind(pat) is not None:
373 listpats.append(pat)
374 else:
375 listpats.append(makestandin(pat))
376
377 try:
378 origcopyfile = util.copyfile
379 copiedfiles = []
380 def override_copyfile(src, dest):
381 if lfutil.shortname in src and lfutil.shortname in dest:
382 destlfile = dest.replace(lfutil.shortname, '')
383 if not opts['force'] and os.path.exists(destlfile):
384 raise IOError('',
385 _('destination largefile already exists'))
386 copiedfiles.append((src, dest))
387 origcopyfile(src, dest)
388
389 util.copyfile = override_copyfile
390 result += orig(ui, repo, listpats, opts, rename)
391 finally:
392 util.copyfile = origcopyfile
393
394 lfdirstate = lfutil.openlfdirstate(ui, repo)
395 for (src, dest) in copiedfiles:
396 if lfutil.shortname in src and lfutil.shortname in dest:
397 srclfile = src.replace(lfutil.shortname, '')
398 destlfile = dest.replace(lfutil.shortname, '')
399 destlfiledir = os.path.dirname(destlfile) or '.'
400 if not os.path.isdir(destlfiledir):
401 os.makedirs(destlfiledir)
402 if rename:
403 os.rename(srclfile, destlfile)
404 lfdirstate.remove(os.path.relpath(srclfile,
405 repo.root))
406 else:
407 util.copyfile(srclfile, destlfile)
408 lfdirstate.add(os.path.relpath(destlfile,
409 repo.root))
410 lfdirstate.write()
411 except util.Abort, e:
412 if str(e) != 'no files to copy':
413 raise e
414 else:
415 nolfiles = True
416 finally:
417 restorematchfn()
418 wlock.release()
419
420 if nolfiles and nonormalfiles:
421 raise util.Abort(_('no files to copy'))
422
423 return result
424
425 # When the user calls revert, we have to be careful to not revert any
426 # changes to other largefiles accidentally. This means we have to keep
427 # track of the largefiles that are being reverted so we only pull down
428 # the necessary largefiles.
429 #
430 # Standins are only updated (to match the hash of largefiles) before
431 # commits. Update the standins then run the original revert, changing
432 # the matcher to hit standins instead of largefiles. Based on the
433 # resulting standins update the largefiles. Then return the standins
434 # to their proper state
435 def override_revert(orig, ui, repo, *pats, **opts):
436 # Because we put the standins in a bad state (by updating them)
437 # and then return them to a correct state we need to lock to
438 # prevent others from changing them in their incorrect state.
439 wlock = repo.wlock()
440 try:
441 lfdirstate = lfutil.openlfdirstate(ui, repo)
442 (modified, added, removed, missing, unknown, ignored, clean) = \
443 lfutil.lfdirstate_status(lfdirstate, repo, repo['.'].rev())
444 for lfile in modified:
445 lfutil.updatestandin(repo, lfutil.standin(lfile))
446
447 try:
448 ctx = repo[opts.get('rev')]
449 oldmatch = None # for the closure
450 def override_match(ctxorrepo, pats=[], opts={}, globbed=False,
451 default='relpath'):
452 if util.safehasattr(ctxorrepo, 'match'):
453 ctx0 = ctxorrepo
454 else:
455 ctx0 = ctxorrepo[None]
456 match = oldmatch(ctxorrepo, pats, opts, globbed, default)
457 m = copy.copy(match)
458 def tostandin(f):
459 if lfutil.standin(f) in ctx0 or lfutil.standin(f) in ctx:
460 return lfutil.standin(f)
461 elif lfutil.standin(f) in repo[None]:
462 return None
463 return f
464 m._files = [tostandin(f) for f in m._files]
465 m._files = [f for f in m._files if f is not None]
466 m._fmap = set(m._files)
467 orig_matchfn = m.matchfn
468 def matchfn(f):
469 if lfutil.isstandin(f):
470 # We need to keep track of what largefiles are being
471 # matched so we know which ones to update later --
472 # otherwise we accidentally revert changes to other
473 # largefiles. This is repo-specific, so duckpunch the
474 # repo object to keep the list of largefiles for us
475 # later.
476 if orig_matchfn(lfutil.splitstandin(f)) and \
477 (f in repo[None] or f in ctx):
478 lfileslist = getattr(repo, '_lfilestoupdate', [])
479 lfileslist.append(lfutil.splitstandin(f))
480 repo._lfilestoupdate = lfileslist
481 return True
482 else:
483 return False
484 return orig_matchfn(f)
485 m.matchfn = matchfn
486 return m
487 oldmatch = installmatchfn(override_match)
489 matches = override_match(repo[None], pats, opts)
490 orig(ui, repo, *pats, **opts)
491 finally:
492 restorematchfn()
493 lfileslist = getattr(repo, '_lfilestoupdate', [])
494 lfcommands.updatelfiles(ui, repo, filelist=lfileslist,
495 printmessage=False)
496
497 # empty out the largefiles list so we start fresh next time
498 repo._lfilestoupdate = []
499 for lfile in modified:
500 if lfile in lfileslist:
501 if os.path.exists(repo.wjoin(lfutil.standin(lfile))) and lfile\
502 in repo['.']:
503 lfutil.writestandin(repo, lfutil.standin(lfile),
504 repo['.'][lfile].data().strip(),
505 'x' in repo['.'][lfile].flags())
506 lfdirstate = lfutil.openlfdirstate(ui, repo)
507 for lfile in added:
508 standin = lfutil.standin(lfile)
509 if standin not in ctx and (standin in matches or opts.get('all')):
510 if lfile in lfdirstate:
511 lfdirstate.drop(lfile)
512 util.unlinkpath(repo.wjoin(standin))
513 lfdirstate.write()
514 finally:
515 wlock.release()
516
517 def hg_update(orig, repo, node):
518 result = orig(repo, node)
519 # XXX check if it worked first
520 lfcommands.updatelfiles(repo.ui, repo)
521 return result
522
523 def hg_clean(orig, repo, node, show_stats=True):
524 result = orig(repo, node, show_stats)
525 lfcommands.updatelfiles(repo.ui, repo)
526 return result
527
528 def hg_merge(orig, repo, node, force=None, remind=True):
529 result = orig(repo, node, force, remind)
530 lfcommands.updatelfiles(repo.ui, repo)
531 return result
532
533 # When we rebase a repository with remotely changed largefiles, we need to
534 # take some extra care so that the largefiles are correctly updated in the
535 # working copy
536 def override_pull(orig, ui, repo, source=None, **opts):
537 if opts.get('rebase', False):
538 repo._isrebasing = True
539 try:
540 if opts.get('update'):
541 del opts['update']
542 ui.debug('--update and --rebase are not compatible, ignoring '
543 'the update flag\n')
544 del opts['rebase']
545 cmdutil.bailifchanged(repo)
546 revsprepull = len(repo)
547 origpostincoming = commands.postincoming
548 def _dummy(*args, **kwargs):
549 pass
550 commands.postincoming = _dummy
551 repo.lfpullsource = source
552 if not source:
553 source = 'default'
554 try:
555 result = commands.pull(ui, repo, source, **opts)
556 finally:
557 commands.postincoming = origpostincoming
558 revspostpull = len(repo)
559 if revspostpull > revsprepull:
560 result = result or rebase.rebase(ui, repo)
561 finally:
562 repo._isrebasing = False
563 else:
564 repo.lfpullsource = source
565 if not source:
566 source = 'default'
567 result = orig(ui, repo, source, **opts)
568 return result
569
570 def override_rebase(orig, ui, repo, **opts):
571 repo._isrebasing = True
572 try:
573 orig(ui, repo, **opts)
574 finally:
575 repo._isrebasing = False
576
577 def override_archive(orig, repo, dest, node, kind, decode=True, matchfn=None,
578 prefix=None, mtime=None, subrepos=None):
579 # No need to lock because we are only reading history and
580 # largefile caches, neither of which are modified.
581 lfcommands.cachelfiles(repo.ui, repo, node)
582
583 if kind not in archival.archivers:
584 raise util.Abort(_("unknown archive type '%s'") % kind)
585
586 ctx = repo[node]
587
588 if kind == 'files':
589 if prefix:
590 raise util.Abort(
591 _('cannot give prefix when archiving to files'))
592 else:
593 prefix = archival.tidyprefix(dest, kind, prefix)
594
595 def write(name, mode, islink, getdata):
596 if matchfn and not matchfn(name):
597 return
598 data = getdata()
599 if decode:
600 data = repo.wwritedata(name, data)
601 archiver.addfile(prefix + name, mode, islink, data)
602
603 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
604
605 if repo.ui.configbool("ui", "archivemeta", True):
606 def metadata():
607 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
608 hex(repo.changelog.node(0)), hex(node), ctx.branch())
609
610 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
611 if repo.tagtype(t) == 'global')
612 if not tags:
613 repo.ui.pushbuffer()
614 opts = {'template': '{latesttag}\n{latesttagdistance}',
615 'style': '', 'patch': None, 'git': None}
616 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
617 ltags, dist = repo.ui.popbuffer().split('\n')
618 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
619 tags += 'latesttagdistance: %s\n' % dist
620
621 return base + tags
622
623 write('.hg_archival.txt', 0644, False, metadata)
624
625 for f in ctx:
626 ff = ctx.flags(f)
627 getdata = ctx[f].data
628 if lfutil.isstandin(f):
629 path = lfutil.findfile(repo, getdata().strip())
630 f = lfutil.splitstandin(f)
631
632 def getdatafn():
633 fd = open(path, 'rb')
634 try:
635 return fd.read()
636 finally:
637 fd.close()
638
639 getdata = getdatafn
640 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
641
642 if subrepos:
643 for subpath in ctx.substate:
644 sub = ctx.sub(subpath)
645 try:
646 sub.archive(repo.ui, archiver, prefix)
647 except TypeError:
648 sub.archive(archiver, prefix)
649
650 archiver.done()
651
652 # If a largefile is modified, the change is not reflected in its
653 # standin until a commit. cmdutil.bailifchanged() raises an exception
654 # if the repo has uncommitted changes. Wrap it to also check if
655 # largefiles were changed. This is used by bisect and backout.
656 def override_bailifchanged(orig, repo):
657 orig(repo)
658 repo.lfstatus = True
659 modified, added, removed, deleted = repo.status()[:4]
660 repo.lfstatus = False
661 if modified or added or removed or deleted:
662 raise util.Abort(_('outstanding uncommitted changes'))
663
664 # Fetch doesn't use cmdutil.bail_if_changed so override it to add the check
665 def override_fetch(orig, ui, repo, *pats, **opts):
666 repo.lfstatus = True
667 modified, added, removed, deleted = repo.status()[:4]
668 repo.lfstatus = False
669 if modified or added or removed or deleted:
670 raise util.Abort(_('outstanding uncommitted changes'))
671 return orig(ui, repo, *pats, **opts)
672
673 def override_forget(orig, ui, repo, *pats, **opts):
674 installnormalfilesmatchfn(repo[None].manifest())
675 orig(ui, repo, *pats, **opts)
676 restorematchfn()
677 m = scmutil.match(repo[None], pats, opts)
678
679 try:
680 repo.lfstatus = True
681 s = repo.status(match=m, clean=True)
682 finally:
683 repo.lfstatus = False
684 forget = sorted(s[0] + s[1] + s[3] + s[6])
685 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
686
687 for f in forget:
688 if lfutil.standin(f) not in repo.dirstate and not \
689 os.path.isdir(m.rel(lfutil.standin(f))):
690 ui.warn(_('not removing %s: file is already untracked\n')
691 % m.rel(f))
692
693 for f in forget:
694 if ui.verbose or not m.exact(f):
695 ui.status(_('removing %s\n') % m.rel(f))
696
697 # Need to lock because standin files are deleted then removed from the
698 # repository and we could race in between.
699 wlock = repo.wlock()
700 try:
701 lfdirstate = lfutil.openlfdirstate(ui, repo)
702 for f in forget:
703 if lfdirstate[f] == 'a':
704 lfdirstate.drop(f)
705 else:
706 lfdirstate.remove(f)
707 lfdirstate.write()
708 lfutil.repo_remove(repo, [lfutil.standin(f) for f in forget],
709 unlink=True)
710 finally:
711 wlock.release()
712
713 def getoutgoinglfiles(ui, repo, dest=None, **opts):
714 dest = ui.expandpath(dest or 'default-push', dest or 'default')
715 dest, branches = hg.parseurl(dest, opts.get('branch'))
716 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
717 if revs:
718 revs = [repo.lookup(rev) for rev in revs]
719
720 remoteui = hg.remoteui
721
722 try:
723 remote = hg.repository(remoteui(repo, opts), dest)
724 except error.RepoError:
725 return None
726 o = lfutil.findoutgoing(repo, remote, False)
727 if not o:
728 return None
729 o = repo.changelog.nodesbetween(o, revs)[0]
730 if opts.get('newest_first'):
731 o.reverse()
732
733 toupload = set()
734 for n in o:
735 parents = [p for p in repo.changelog.parents(n) if p != node.nullid]
736 ctx = repo[n]
737 files = set(ctx.files())
738 if len(parents) == 2:
739 mc = ctx.manifest()
740 mp1 = ctx.parents()[0].manifest()
741 mp2 = ctx.parents()[1].manifest()
742 for f in mp1:
743 if f not in mc:
744 files.add(f)
745 for f in mp2:
746 if f not in mc:
747 files.add(f)
748 for f in mc:
749 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
750 files.add(f)
751 toupload = toupload.union(
752 set([f for f in files if lfutil.isstandin(f) and f in ctx]))
753 return toupload
754
755 def override_outgoing(orig, ui, repo, dest=None, **opts):
756 orig(ui, repo, dest, **opts)
757
758 if opts.pop('large', None):
759 toupload = getoutgoinglfiles(ui, repo, dest, **opts)
760 if toupload is None:
761 ui.status(_('largefiles: no remote repo\n'))
762 else:
763 ui.status(_('largefiles to upload:\n'))
764 for file in toupload:
765 ui.status(lfutil.splitstandin(file) + '\n')
766 ui.status('\n')
767
768 def override_summary(orig, ui, repo, *pats, **opts):
769 orig(ui, repo, *pats, **opts)
770
771 if opts.pop('large', None):
772 toupload = getoutgoinglfiles(ui, repo, None, **opts)
773 if toupload is None:
774 ui.status(_('largefiles: no remote repo\n'))
775 else:
776 ui.status(_('largefiles: %d to upload\n') % len(toupload))
777
778 def override_addremove(orig, ui, repo, *pats, **opts):
779 # Check if the parent or child has largefiles; if so, disallow
780 # addremove. If there is a symlink in the manifest then getting
781 # the manifest throws an exception: catch it and let addremove
782 # deal with it.
783 try:
784 manifesttip = set(repo['tip'].manifest())
785 except util.Abort:
786 manifesttip = set()
787 try:
788 manifestworking = set(repo[None].manifest())
789 except util.Abort:
790 manifestworking = set()
791
792 # Manifests are only iterable, so turn them into sets and take the union
793 for file in manifesttip.union(manifestworking):
794 if file.startswith(lfutil.shortname):
795 raise util.Abort(
796 _('addremove cannot be run on a repo with largefiles'))
797
798 return orig(ui, repo, *pats, **opts)
799
800 # Calling purge with --all will cause the largefiles to be deleted.
801 # Override repo.status to prevent this from happening.
802 def override_purge(orig, ui, repo, *dirs, **opts):
803 oldstatus = repo.status
804 def override_status(node1='.', node2=None, match=None, ignored=False,
805 clean=False, unknown=False, listsubrepos=False):
806 r = oldstatus(node1, node2, match, ignored, clean, unknown,
807 listsubrepos)
808 lfdirstate = lfutil.openlfdirstate(ui, repo)
809 modified, added, removed, deleted, unknown, ignored, clean = r
810 unknown = [f for f in unknown if lfdirstate[f] == '?']
811 ignored = [f for f in ignored if lfdirstate[f] == '?']
812 return modified, added, removed, deleted, unknown, ignored, clean
813 repo.status = override_status
814 orig(ui, repo, *dirs, **opts)
815 repo.status = oldstatus
816
817 def override_rollback(orig, ui, repo, **opts):
818 result = orig(ui, repo, **opts)
819 merge.update(repo, node=None, branchmerge=False, force=True,
820 partial=lfutil.isstandin)
821 lfdirstate = lfutil.openlfdirstate(ui, repo)
822 lfiles = lfutil.listlfiles(repo)
823 oldlfiles = lfutil.listlfiles(repo, repo[None].parents()[0].rev())
824 for file in lfiles:
825 if file in oldlfiles:
826 lfdirstate.normallookup(file)
827 else:
828 lfdirstate.add(file)
829 lfdirstate.write()
830 return result
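
All of the overrides above follow the same wrapping convention: the wrapped
function arrives as the first argument, extension-only flags are popped from
opts before delegating, and the wrapped function's result is returned. A
minimal sketch of that convention follows, using a hypothetical 'lfnote' flag
on the stock 'tag' command; neither the flag nor the choice of command is part
of largefiles.

    from mercurial import commands, extensions
    from mercurial.i18n import _

    def override_tag(orig, ui, repo, *args, **opts):
        # strip our extension-only flag before delegating to the
        # original command, then return its result unchanged
        note = opts.pop('lfnote', False)
        result = orig(ui, repo, *args, **opts)
        if note:
            ui.status(_('largefiles: tag command was wrapped\n'))
        return result

    def uisetup(ui):
        entry = extensions.wrapcommand(commands.table, 'tag', override_tag)
        # entry[1] is the command's option list, as above
        entry[1].append(('', 'lfnote', None, _('hypothetical example flag')))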
@@ -0,0 +1,160
1 # Copyright 2011 Fog Creek Software
2 #
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
5
6 import os
7 import tempfile
8 import urllib2
9
10 from mercurial import error, httprepo, util, wireproto
11 from mercurial.i18n import _
12
13 import lfutil
14
15 LARGEFILES_REQUIRED_MSG = ('\nThis repository uses the largefiles extension.'
16 '\n\nPlease enable it in your Mercurial config '
17 'file.\n')
18
19 def putlfile(repo, proto, sha):
20 '''Put a largefile into a repository's local cache and into the
21 system cache.'''
22 f = None
23 proto.redirect()
24 try:
25 try:
26 f = tempfile.NamedTemporaryFile(mode='wb+', prefix='hg-putlfile-')
27 proto.getfile(f)
28 f.seek(0)
29 if sha != lfutil.hexsha1(f):
30 return wireproto.pushres(1)
31 lfutil.copytocacheabsolute(repo, f.name, sha)
32 except IOError:
33 repo.ui.warn(
34 _('error: could not put received data into largefile store\n'))
35 return wireproto.pushres(1)
36 finally:
37 if f:
38 f.close()
39
40 return wireproto.pushres(0)
41
42 def getlfile(repo, proto, sha):
43 '''Retrieve a largefile from the repository-local cache or system
44 cache.'''
45 filename = lfutil.findfile(repo, sha)
46 if not filename:
47 raise util.Abort(_('requested largefile %s not present in cache') % sha)
48 f = open(filename, 'rb')
49 length = os.fstat(f.fileno())[6]
50
51 # Since we can't set an HTTP content-length header here, and
52 # Mercurial core provides no way to give the length of a streamres
53 # (and reading the entire file into RAM would be ill-advised), we
54 # just send the length on the first line of the response, like the
55 # ssh proto does for string responses.
56 def generator():
57 yield '%d\n' % length
58 for chunk in f:
59 yield chunk
60 return wireproto.streamres(generator())
61
62 def statlfile(repo, proto, sha):
63 '''Return '2\n' if the largefile is missing, '1\n' if it has a
64 mismatched checksum, or '0\n' if it is in good condition'''
65 filename = lfutil.findfile(repo, sha)
66 if not filename:
67 return '2\n'
68 fd = None
69 try:
70 fd = open(filename, 'rb')
71 return lfutil.hexsha1(fd) == sha and '0\n' or '1\n'
72 finally:
73 if fd:
74 fd.close()
75
76 def wirereposetup(ui, repo):
77 class lfileswirerepository(repo.__class__):
78 def putlfile(self, sha, fd):
79 # unfortunately, httprepository._callpush tries to convert its
80 # input file-like into a bundle before sending it, so we can't use
81 # it ...
82 if issubclass(self.__class__, httprepo.httprepository):
83 try:
84 return int(self._call('putlfile', data=fd, sha=sha,
85 headers={'content-type':'application/mercurial-0.1'}))
86 except (ValueError, urllib2.HTTPError):
87 return 1
88 # ... but we can't use sshrepository._call because the data=
89 # argument won't get sent, and _callpush does exactly what we want
90 # in this case: send the data straight through
91 else:
92 try:
93 ret, output = self._callpush("putlfile", fd, sha=sha)
94 if ret == "":
95 raise error.ResponseError(_('putlfile failed:'),
96 output)
97 return int(ret)
98 except IOError:
99 return 1
100 except ValueError:
101 raise error.ResponseError(
102 _('putlfile failed (unexpected response):'), ret)
103
104 def getlfile(self, sha):
105 stream = self._callstream("getlfile", sha=sha)
106 length = stream.readline()
107 try:
108 length = int(length)
109 except ValueError:
110 self._abort(error.ResponseError(_("unexpected response:"),
111 length))
112 return (length, stream)
113
114 def statlfile(self, sha):
115 try:
116 return int(self._call("statlfile", sha=sha))
117 except (ValueError, urllib2.HTTPError):
118 # If the server returns anything but an integer followed by a
119 # newline, it's not speaking our language; if we get
120 # an HTTP error, we can't be sure the largefile is present;
121 # either way, consider it missing.
122 return 2
123
124 repo.__class__ = lfileswirerepository
125
126 # advertise the largefiles=serve capability
127 def capabilities(repo, proto):
128 return capabilities_orig(repo, proto) + ' largefiles=serve'
129
130 # duplicate what Mercurial's new out-of-band errors mechanism does, because
131 # clients old and new alike both handle it well
132 def webproto_refuseclient(self, message):
133 self.req.header([('Content-Type', 'application/hg-error')])
134 return message
135
136 def sshproto_refuseclient(self, message):
137 self.ui.write_err('%s\n-\n' % message)
138 self.fout.write('\n')
139 self.fout.flush()
140
141 return ''
142
143 def heads(repo, proto):
144 if lfutil.islfilesrepo(repo):
145 return wireproto.ooberror(LARGEFILES_REQUIRED_MSG)
146 return wireproto.heads(repo, proto)
147
148 def sshrepo_callstream(self, cmd, **args):
149 if cmd == 'heads' and self.capable('largefiles'):
150 cmd = 'lheads'
151 if cmd == 'batch' and self.capable('largefiles'):
152 args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
153 return ssh_oldcallstream(self, cmd, **args)
154
155 def httprepo_callstream(self, cmd, **args):
156 if cmd == 'heads' and self.capable('largefiles'):
157 cmd = 'lheads'
158 if cmd == 'batch' and self.capable('largefiles'):
159 args['cmds'] = args['cmds'].replace('heads ', 'lheads ')
160 return http_oldcallstream(self, cmd, **args)
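
The getlfile handler above frames its response as a decimal byte count
terminated by a newline, followed by exactly that many raw payload bytes. A
minimal standalone reader for that framing might look like the following
sketch; the helper name is illustrative, not part of the extension.

    def readlfileresponse(stream):
        # read the decimal length up to the terminating newline
        header = ''
        while True:
            ch = stream.read(1)
            if not ch or ch == '\n':
                break
            header += ch
        length = int(header)          # ValueError on a garbled header
        # then read exactly that many payload bytes
        payload = stream.read(length)
        if len(payload) != length:
            raise IOError('short getlfile read: expected %d bytes, got %d'
                          % (length, len(payload)))
        return payload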
@@ -0,0 +1,106
1 # Copyright 2010-2011 Fog Creek Software
2 # Copyright 2010-2011 Unity Technologies
3 #
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
6
7 '''remote largefile store; the base class for servestore'''
8
9 import urllib2
10
11 from mercurial import util
12 from mercurial.i18n import _
13
14 import lfutil
15 import basestore
16
17 class remotestore(basestore.basestore):
18 '''a largefile store accessed over a network'''
19 def __init__(self, ui, repo, url):
20 super(remotestore, self).__init__(ui, repo, url)
21
22 def put(self, source, hash):
23 if self._verify(hash):
24 return
25 if self.sendfile(source, hash):
26 raise util.Abort(
27 _('remotestore: could not put %s to remote store %s')
28 % (source, self.url))
29 self.ui.debug(
30 'remotestore: put %s to remote store %s\n' % (source, self.url))
31
32 def exists(self, hash):
33 return self._verify(hash)
34
35 def sendfile(self, filename, hash):
36 self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
37 fd = None
38 try:
39 try:
40 fd = lfutil.httpsendfile(self.ui, filename)
41 except IOError, e:
42 raise util.Abort(
43 _('remotestore: could not open file %s: %s')
44 % (filename, str(e)))
45 return self._put(hash, fd)
46 finally:
47 if fd:
48 fd.close()
49
50 def _getfile(self, tmpfile, filename, hash):
51 # quit if the largefile isn't there
52 stat = self._stat(hash)
53 if stat == 1:
54 raise util.Abort(_('remotestore: largefile %s is invalid') % hash)
55 elif stat == 2:
56 raise util.Abort(_('remotestore: largefile %s is missing') % hash)
57
58 try:
59 length, infile = self._get(hash)
60 except urllib2.HTTPError, e:
61 # 401s get converted to util.Aborts; everything else is fine being
62 # turned into a StoreError
63 raise basestore.StoreError(filename, hash, self.url, str(e))
64 except urllib2.URLError, e:
65 # This usually indicates a connection problem, so don't
66 # keep trying with the other files... they will probably
67 # all fail too.
68 raise util.Abort('%s: %s' % (self.url, e.reason))
69 except IOError, e:
70 raise basestore.StoreError(filename, hash, self.url, str(e))
71
72 # Mercurial does not close its SSH connections after writing a stream
73 if length is not None:
74 infile = lfutil.limitreader(infile, length)
75 return lfutil.copyandhash(lfutil.blockstream(infile), tmpfile)
76
77 def _verify(self, hash):
78 return not self._stat(hash)
79
80 def _verifyfile(self, cctx, cset, contents, standin, verified):
81 filename = lfutil.splitstandin(standin)
82 if not filename:
83 return False
84 fctx = cctx[standin]
85 key = (filename, fctx.filenode())
86 if key in verified:
87 return False
88
89 verified.add(key)
90
91 stat = self._stat(fctx.data()[0:40])
92 if not stat:
93 return False
94 elif stat == 1:
95 self.ui.warn(
96 _('changeset %s: %s: contents differ\n')
97 % (cset, filename))
98 return True # failed
99 elif stat == 2:
100 self.ui.warn(
101 _('changeset %s: %s missing\n')
102 % (cset, filename))
103 return True # failed
104 else:
105 raise RuntimeError('verify failed: unexpected response from '
106 'statlfile (%r)' % stat)
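
The _stat/statlfile codes that put(), _getfile() and _verifyfile() interpret
above are 0 (present with a matching checksum), 1 (present but the checksum
differs) and 2 (missing). A small illustrative helper, with names of our own
choosing, that spells the convention out:

    # statlfile result codes, per the convention used by remotestore
    STAT_OK, STAT_INVALID, STAT_MISSING = 0, 1, 2

    def describestat(stat):
        return {
            STAT_OK: 'largefile present with a matching checksum',
            STAT_INVALID: 'largefile present but its checksum differs',
            STAT_MISSING: 'largefile not found in the store',
        }.get(stat, 'unexpected statlfile response: %r' % stat)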
@@ -0,0 +1,416
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''setup for largefiles repositories: reposetup'''
10 import copy
11 import types
12 import os
13 import re
14
15 from mercurial import context, error, manifest, match as match_, \
16 node, util
17 from mercurial.i18n import _
18
19 import lfcommands
20 import proto
21 import lfutil
22
23 def reposetup(ui, repo):
24 # wire repositories should be given new wireproto functions but not the
25 # other largefiles modifications
26 if not repo.local():
27 return proto.wirereposetup(ui, repo)
28
29 for name in ('status', 'commitctx', 'commit', 'push'):
30 method = getattr(repo, name)
31 #if not (isinstance(method, types.MethodType) and
32 # method.im_func is repo.__class__.commitctx.im_func):
33 if (isinstance(method, types.FunctionType) and
34 method.func_name == 'wrap'):
35 ui.warn(_('largefiles: repo method %r appears to have already been'
36 ' wrapped by another extension: '
37 'largefiles may behave incorrectly\n')
38 % name)
39
40 class lfiles_repo(repo.__class__):
41 lfstatus = False
42 def status_nolfiles(self, *args, **kwargs):
43 return super(lfiles_repo, self).status(*args, **kwargs)
44
45 # When lfstatus is set, return a context that gives the names
46 # of largefiles instead of their corresponding standins and
47 # identifies the largefiles as always binary, regardless of
48 # their actual contents.
49 def __getitem__(self, changeid):
50 ctx = super(lfiles_repo, self).__getitem__(changeid)
51 if self.lfstatus:
52 class lfiles_manifestdict(manifest.manifestdict):
53 def __contains__(self, filename):
54 if super(lfiles_manifestdict,
55 self).__contains__(filename):
56 return True
57 return super(lfiles_manifestdict,
58 self).__contains__(lfutil.shortname+'/' + filename)
59 class lfiles_ctx(ctx.__class__):
60 def files(self):
61 filenames = super(lfiles_ctx, self).files()
62 return [re.sub('^\\'+lfutil.shortname+'/', '',
63 filename) for filename in filenames]
64 def manifest(self):
65 man1 = super(lfiles_ctx, self).manifest()
66 man1.__class__ = lfiles_manifestdict
67 return man1
68 def filectx(self, path, fileid=None, filelog=None):
69 try:
70 result = super(lfiles_ctx, self).filectx(path,
71 fileid, filelog)
72 except error.LookupError:
73 # Adding a null character will cause Mercurial to
74 # identify this as a binary file.
75 result = super(lfiles_ctx, self).filectx(
76 lfutil.shortname + '/' + path, fileid,
77 filelog)
78 olddata = result.data
79 result.data = lambda: olddata() + '\0'
80 return result
81 ctx.__class__ = lfiles_ctx
82 return ctx
83
84 # Figure out the status of big files and insert them into the
85 # appropriate list in the result. Also removes standin files
86 # from the listing. Revert to the original status if
87 # self.lfstatus is False.
88 def status(self, node1='.', node2=None, match=None, ignored=False,
89 clean=False, unknown=False, listsubrepos=False):
90 listignored, listclean, listunknown = ignored, clean, unknown
91 if not self.lfstatus:
92 try:
93 return super(lfiles_repo, self).status(node1, node2, match,
94 listignored, listclean, listunknown, listsubrepos)
95 except TypeError:
96 return super(lfiles_repo, self).status(node1, node2, match,
97 listignored, listclean, listunknown)
98 else:
99 # some calls in this function rely on the old version of status
100 self.lfstatus = False
101 if isinstance(node1, context.changectx):
102 ctx1 = node1
103 else:
104 ctx1 = repo[node1]
105 if isinstance(node2, context.changectx):
106 ctx2 = node2
107 else:
108 ctx2 = repo[node2]
109 working = ctx2.rev() is None
110 parentworking = working and ctx1 == self['.']
111
112 def inctx(file, ctx):
113 try:
114 if ctx.rev() is None:
115 return file in ctx.manifest()
116 ctx[file]
117 return True
118 except KeyError:
119 return False
120
121 if match is None:
122 match = match_.always(self.root, self.getcwd())
123
124 # Create a copy of match that matches standins instead
125 # of largefiles.
126 def tostandin(file):
127 if inctx(lfutil.standin(file), ctx2):
128 return lfutil.standin(file)
129 return file
130
131 m = copy.copy(match)
132 m._files = [tostandin(f) for f in m._files]
133
134 # get ignored, clean, and unknown but remove them
135 # later if they were not asked for
136 try:
137 result = super(lfiles_repo, self).status(node1, node2, m,
138 True, True, True, listsubrepos)
139 except TypeError:
140 result = super(lfiles_repo, self).status(node1, node2, m,
141 True, True, True)
142 if working:
143 # hold the wlock while we read largefiles and
144 # update the lfdirstate
145 wlock = repo.wlock()
146 try:
147 # Any non-largefiles that were explicitly listed must be
148 # taken out or lfdirstate.status will report an error.
149 # The status of these files was already computed using
150 # super's status.
151 lfdirstate = lfutil.openlfdirstate(ui, self)
152 match._files = [f for f in match._files if f in
153 lfdirstate]
154 s = lfdirstate.status(match, [], listignored,
155 listclean, listunknown)
156 (unsure, modified, added, removed, missing, unknown,
157 ignored, clean) = s
158 if parentworking:
159 for lfile in unsure:
160 if ctx1[lfutil.standin(lfile)].data().strip() \
161 != lfutil.hashfile(self.wjoin(lfile)):
162 modified.append(lfile)
163 else:
164 clean.append(lfile)
165 lfdirstate.normal(lfile)
166 lfdirstate.write()
167 else:
168 tocheck = unsure + modified + added + clean
169 modified, added, clean = [], [], []
170
171 for lfile in tocheck:
172 standin = lfutil.standin(lfile)
173 if inctx(standin, ctx1):
174 if ctx1[standin].data().strip() != \
175 lfutil.hashfile(self.wjoin(lfile)):
176 modified.append(lfile)
177 else:
178 clean.append(lfile)
179 else:
180 added.append(lfile)
181 finally:
182 wlock.release()
183
184 for standin in ctx1.manifest():
185 if not lfutil.isstandin(standin):
186 continue
187 lfile = lfutil.splitstandin(standin)
188 if not match(lfile):
189 continue
190 if lfile not in lfdirstate:
191 removed.append(lfile)
192 # Handle unknown and ignored differently
193 lfiles = (modified, added, removed, missing, [], [], clean)
194 result = list(result)
195 # Unknown files
196 result[4] = [f for f in unknown
197 if (repo.dirstate[f] == '?' and
198 not lfutil.isstandin(f))]
199 # Ignored files must be ignored by both the dirstate and
200 # lfdirstate
201 result[5] = set(ignored).intersection(set(result[5]))
202 # combine normal files and largefiles
203 normals = [[fn for fn in filelist
204 if not lfutil.isstandin(fn)]
205 for filelist in result]
206 result = [sorted(list1 + list2)
207 for (list1, list2) in zip(normals, lfiles)]
208 else:
209 def toname(f):
210 if lfutil.isstandin(f):
211 return lfutil.splitstandin(f)
212 return f
213 result = [[toname(f) for f in items] for items in result]
214
215 if not listunknown:
216 result[4] = []
217 if not listignored:
218 result[5] = []
219 if not listclean:
220 result[6] = []
221 self.lfstatus = True
222 return result
223
224 # As part of committing, copy all of the largefiles into the
225 # cache.
226 def commitctx(self, *args, **kwargs):
227 node = super(lfiles_repo, self).commitctx(*args, **kwargs)
228 ctx = self[node]
229 for filename in ctx.files():
230 if lfutil.isstandin(filename) and filename in ctx.manifest():
231 realfile = lfutil.splitstandin(filename)
232 lfutil.copytocache(self, ctx.node(), realfile)
233
234 return node
235
236 # Before commit, largefile standins have not had their
237 # contents updated to reflect the hash of their largefile.
238 # Do that here.
239 def commit(self, text="", user=None, date=None, match=None,
240 force=False, editor=False, extra={}):
241 orig = super(lfiles_repo, self).commit
242
243 wlock = repo.wlock()
244 try:
245 if getattr(repo, "_isrebasing", False):
246 # We have to take the time to pull down the new
247 # largefiles now. Otherwise if we are rebasing,
248 # any largefiles that were modified in the
249 # destination changesets get overwritten, either
250 # by the rebase or in the first commit after the
251 # rebase.
252 lfcommands.updatelfiles(repo.ui, repo)
253 # Case 1: user calls commit with no specific files or
254 # include/exclude patterns: refresh and commit all files that
255 # are "dirty".
256 if ((match is None) or
257 (not match.anypats() and not match.files())):
258 # Spend a bit of time here to get a list of files we know
259 # are modified so we can compare only against those.
260 # Otherwise it can cost a lot of time (several
261 # seconds) to update all the standins if the
262 # largefiles are large.
263 lfdirstate = lfutil.openlfdirstate(ui, self)
264 dirtymatch = match_.always(repo.root, repo.getcwd())
265 s = lfdirstate.status(dirtymatch, [], False, False, False)
266 modifiedfiles = []
267 for i in s:
268 modifiedfiles.extend(i)
269 lfiles = lfutil.listlfiles(self)
270 # this only loops through largefiles that exist (not
271 # removed/renamed)
272 for lfile in lfiles:
273 if lfile in modifiedfiles:
274 if os.path.exists(self.wjoin(lfutil.standin(lfile))):
275 # this handles the case where a rebase is being
276 # performed and the working copy is not updated
277 # yet.
278 if os.path.exists(self.wjoin(lfile)):
279 lfutil.updatestandin(self,
280 lfutil.standin(lfile))
281 lfdirstate.normal(lfile)
282 for lfile in lfdirstate:
283 if lfile in modifiedfiles:
284 if not os.path.exists(
285 repo.wjoin(lfutil.standin(lfile))):
286 lfdirstate.drop(lfile)
287 lfdirstate.write()
288
289 return orig(text=text, user=user, date=date, match=match,
290 force=force, editor=editor, extra=extra)
291
292 for f in match.files():
293 if lfutil.isstandin(f):
294 raise util.Abort(
295 _('file "%s" is a largefile standin') % f,
296 hint=('commit the largefile itself instead'))
297
298 # Case 2: user calls commit with specified patterns: refresh
299 # any matching big files.
300 smatcher = lfutil.composestandinmatcher(self, match)
301 standins = lfutil.dirstate_walk(self.dirstate, smatcher)
302
303 # No matching big files: get out of the way and pass control to
304 # the usual commit() method.
305 if not standins:
306 return orig(text=text, user=user, date=date, match=match,
307 force=force, editor=editor, extra=extra)
308
309 # Refresh all matching big files. It's possible that the
310 # commit will end up failing, in which case the big files will
311 # stay refreshed. No harm done: the user modified them and
312 # asked to commit them, so sooner or later we're going to
313 # refresh the standins. Might as well leave them refreshed.
314 lfdirstate = lfutil.openlfdirstate(ui, self)
315 for standin in standins:
316 lfile = lfutil.splitstandin(standin)
317 if lfdirstate[lfile] != 'r':
318 lfutil.updatestandin(self, standin)
319 lfdirstate.normal(lfile)
320 else:
321 lfdirstate.drop(lfile)
322 lfdirstate.write()
323
324 # Cook up a new matcher that only matches regular files or
325 # standins corresponding to the big files requested by the
326 # user. Have to modify _files to prevent commit() from
327 # complaining "not tracked" for big files.
328 lfiles = lfutil.listlfiles(repo)
329 match = copy.copy(match)
330 orig_matchfn = match.matchfn
331
332 # Check both the list of largefiles and the list of
333 # standins because if a largefile was removed, it
334 # won't be in the list of largefiles at this point
335 match._files += sorted(standins)
336
337 actualfiles = []
338 for f in match._files:
339 fstandin = lfutil.standin(f)
340
341 # ignore known largefiles and standins
342 if f in lfiles or fstandin in standins:
343 continue
344
345 # append directory separator to avoid collisions
346 if not fstandin.endswith(os.sep):
347 fstandin += os.sep
348
349 # prevalidate matching standin directories
350 if lfutil.any_(st for st in match._files
351 if st.startswith(fstandin)):
352 continue
353 actualfiles.append(f)
354 match._files = actualfiles
355
356 def matchfn(f):
357 if orig_matchfn(f):
358 return f not in lfiles
359 else:
360 return f in standins
361
362 match.matchfn = matchfn
363 return orig(text=text, user=user, date=date, match=match,
364 force=force, editor=editor, extra=extra)
365 finally:
366 wlock.release()
367
368 def push(self, remote, force=False, revs=None, newbranch=False):
369 o = lfutil.findoutgoing(repo, remote, force)
370 if o:
371 toupload = set()
372 o = repo.changelog.nodesbetween(o, revs)[0]
373 for n in o:
374 parents = [p for p in repo.changelog.parents(n)
375 if p != node.nullid]
376 ctx = repo[n]
377 files = set(ctx.files())
378 if len(parents) == 2:
379 mc = ctx.manifest()
380 mp1 = ctx.parents()[0].manifest()
381 mp2 = ctx.parents()[1].manifest()
382 for f in mp1:
383 if f not in mc:
384 files.add(f)
385 for f in mp2:
386 if f not in mc:
387 files.add(f)
388 for f in mc:
389 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f,
390 None):
391 files.add(f)
392
393 toupload = toupload.union(
394 set([ctx[f].data().strip()
395 for f in files
396 if lfutil.isstandin(f) and f in ctx]))
397 lfcommands.uploadlfiles(ui, self, remote, toupload)
398 return super(lfiles_repo, self).push(remote, force, revs,
399 newbranch)
400
401 repo.__class__ = lfiles_repo
402
403 def checkrequireslfiles(ui, repo, **kwargs):
404 if 'largefiles' not in repo.requirements and lfutil.any_(
405 lfutil.shortname+'/' in f[0] for f in repo.store.datafiles()):
406 # workaround bug in Mercurial 1.9 whereby requirements is
407 # a list on newly-cloned repos
408 repo.requirements = set(repo.requirements)
409
410 repo.requirements |= set(['largefiles'])
411 repo._writerequirements()
412
413 checkrequireslfiles(ui, repo)
414
415 ui.setconfig('hooks', 'changegroup.lfiles', checkrequireslfiles)
416 ui.setconfig('hooks', 'commit.lfiles', checkrequireslfiles)
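
reposetup() above relies on rebinding the repository instance's class:
lfiles_repo derives from repo.__class__, which other extensions may already
have specialized, so super() calls chain through every extension in load
order. A minimal sketch of the idiom (the debug message is illustrative only):

    def reposetup_sketch(ui, repo):
        # derive from the *instance's* current class, not localrepository,
        # so earlier extensions' wrappers stay in the method resolution order
        class wrapped_repo(repo.__class__):
            def status(self, *args, **kwargs):
                ui.debug('status called on a wrapped repository\n')
                return super(wrapped_repo, self).status(*args, **kwargs)
        repo.__class__ = wrapped_repo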
@@ -0,0 +1,138
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''setup for largefiles extension: uisetup'''
10
11 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
12 httprepo, localrepo, sshrepo, sshserver, util, wireproto
13 from mercurial.i18n import _
14 from mercurial.hgweb import hgweb_mod, protocol
15
16 import overrides
17 import proto
18
19 def uisetup(ui):
20 # Disable auto-status for some commands which assume that all
21 # files in the result are under Mercurial's control
22
23 entry = extensions.wrapcommand(commands.table, 'add',
24 overrides.override_add)
25 addopt = [('', 'large', None, _('add as largefile')),
26 ('', 'lfsize', '', _('add all files above this size (in megabytes) '
27 'as largefiles (default: 10)'))]
28 entry[1].extend(addopt)
29
30 entry = extensions.wrapcommand(commands.table, 'addremove',
31 overrides.override_addremove)
32 entry = extensions.wrapcommand(commands.table, 'remove',
33 overrides.override_remove)
34 entry = extensions.wrapcommand(commands.table, 'forget',
35 overrides.override_forget)
36 entry = extensions.wrapcommand(commands.table, 'status',
37 overrides.override_status)
38 entry = extensions.wrapcommand(commands.table, 'log',
39 overrides.override_log)
40 entry = extensions.wrapcommand(commands.table, 'rollback',
41 overrides.override_rollback)
42 entry = extensions.wrapcommand(commands.table, 'verify',
43 overrides.override_verify)
44
45 verifyopt = [('', 'large', None, _('verify largefiles')),
46 ('', 'lfa', None,
47 _('verify all revisions of largefiles not just current')),
48 ('', 'lfc', None,
49 _('verify largefile contents not just existence'))]
50 entry[1].extend(verifyopt)
51
52 entry = extensions.wrapcommand(commands.table, 'outgoing',
53 overrides.override_outgoing)
54 outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
55 entry[1].extend(outgoingopt)
56 entry = extensions.wrapcommand(commands.table, 'summary',
57 overrides.override_summary)
58 summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
59 entry[1].extend(summaryopt)
60
61 entry = extensions.wrapcommand(commands.table, 'update',
62 overrides.override_update)
63 entry = extensions.wrapcommand(commands.table, 'pull',
64 overrides.override_pull)
65 entry = extensions.wrapfunction(filemerge, 'filemerge',
66 overrides.override_filemerge)
67 entry = extensions.wrapfunction(cmdutil, 'copy',
68 overrides.override_copy)
69
70 # Backout calls revert so we need to override both the command and the
71 # function
72 entry = extensions.wrapcommand(commands.table, 'revert',
73 overrides.override_revert)
74 entry = extensions.wrapfunction(commands, 'revert',
75 overrides.override_revert)
76
77 # clone uses hg._update instead of hg.update even though they are the
78 # same function... so wrap both of them
79 extensions.wrapfunction(hg, 'update', overrides.hg_update)
80 extensions.wrapfunction(hg, '_update', overrides.hg_update)
81 extensions.wrapfunction(hg, 'clean', overrides.hg_clean)
82 extensions.wrapfunction(hg, 'merge', overrides.hg_merge)
83
84 extensions.wrapfunction(archival, 'archive', overrides.override_archive)
85 if util.safehasattr(cmdutil, 'bailifchanged'):
86 extensions.wrapfunction(cmdutil, 'bailifchanged',
87 overrides.override_bailifchanged)
88 else:
89 extensions.wrapfunction(cmdutil, 'bail_if_changed',
90 overrides.override_bailifchanged)
91
92 # create the new wireproto commands ...
93 wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
94 wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
95 wireproto.commands['statlfile'] = (proto.statlfile, 'sha')
96
97 # ... and wrap some existing ones
98 wireproto.commands['capabilities'] = (proto.capabilities, '')
99 wireproto.commands['heads'] = (proto.heads, '')
100 wireproto.commands['lheads'] = (wireproto.heads, '')
101
102 # make putlfile behave the same as push and {get,stat}lfile behave
103 # the same as pull w.r.t. permissions checks
104 hgweb_mod.perms['putlfile'] = 'push'
105 hgweb_mod.perms['getlfile'] = 'pull'
106 hgweb_mod.perms['statlfile'] = 'pull'
107
108 # the hello wireproto command uses wireproto.capabilities, so it won't see
109 # our largefiles capability unless we replace the actual function as well.
110 proto.capabilities_orig = wireproto.capabilities
111 wireproto.capabilities = proto.capabilities
112
113 # these let us reject non-largefiles clients and make them display
114 # our error messages
115 protocol.webproto.refuseclient = proto.webproto_refuseclient
116 sshserver.sshserver.refuseclient = proto.sshproto_refuseclient
117
118 # can't do this in reposetup because it needs to have happened before
119 # wirerepo.__init__ is called
120 proto.ssh_oldcallstream = sshrepo.sshrepository._callstream
121 proto.http_oldcallstream = httprepo.httprepository._callstream
122 sshrepo.sshrepository._callstream = proto.sshrepo_callstream
123 httprepo.httprepository._callstream = proto.httprepo_callstream
124
125 # don't die on seeing a repo with the largefiles requirement
126 localrepo.localrepository.supported |= set(['largefiles'])
127
128 # override some extensions' stuff as well
129 for name, module in extensions.extensions():
130 if name == 'fetch':
131 extensions.wrapcommand(getattr(module, 'cmdtable'), 'fetch',
132 overrides.override_fetch)
133 if name == 'purge':
134 extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
135 overrides.override_purge)
136 if name == 'rebase':
137 extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
138 overrides.override_rebase)
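
Each wireproto.commands entry above pairs a handler with a space-separated
spec naming the protocol arguments to decode and pass to it. A sketch
registering a hypothetical 'pinglfile' command the same way ('pinglfile'
itself is not part of the extension):

    from mercurial import wireproto

    def pinglfile(repo, proto, sha):
        # handlers receive the decoded arguments named in the spec;
        # a plain string is an acceptable response, as statlfile shows
        return 'pong %s\n' % sha

    wireproto.commands['pinglfile'] = (pinglfile, 'sha')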
@@ -0,0 +1,51
1 Largefiles allows for tracking large, incompressible binary files in Mercurial
2 without requiring excessive bandwidth for clones and pulls. Files added as
3 largefiles are not tracked directly by Mercurial; rather, their revisions are
4 identified by a checksum, and Mercurial tracks these checksums. This way, when
5 you clone a repository or pull in changesets, the large files in older
6 revisions of the repository are not needed, and only the ones needed to update
7 to the current version are downloaded. This saves both disk space and
8 bandwidth.
9
10 If you are starting a new repository or adding new large binary files, using
11 largefiles for them is as easy as adding '--large' to your hg add command. For
12 example:
13
14 $ dd if=/dev/urandom of=thisfileislarge count=2000
15 $ hg add --large thisfileislarge
16 $ hg commit -m 'add thisfileislarge, which is large, as a largefile'
17
18 When you push a changeset that affects largefiles to a remote repository, its
19 largefile revisions will be uploaded along with it. Note that the remote
20 Mercurial must also have the largefiles extension enabled for this to work.
21
22 When you pull a changeset that affects largefiles from a remote repository,
23 nothing different from Mercurial's normal behavior happens. However, when you
24 update to such a revision, any largefiles needed by that revision are
25 downloaded and cached if they have never been downloaded before. This means
26 that network access is required to update to a revision that you have not yet
27 updated to.
28
29 If you already have large files tracked by Mercurial without the largefiles
30 extension, you will need to convert your repository in order to benefit from
31 largefiles. This is done with the 'hg lfconvert' command:
32
33 $ hg lfconvert --size 10 oldrepo newrepo
34
35 By default, in repositories that already have largefiles in them, any new file
36 over 10MB will automatically be added as a largefile. To change this
37 threshold, set [largefiles].size in your Mercurial config file to the minimum
38 size in megabytes to track as a largefile, or use the --lfsize option to the
39 add command (also in megabytes):
40
41 [largefiles]
42 size = 2
43
44 $ hg add --lfsize 2
45
46 The [largefiles].patterns config option allows you to specify
47 space-separated filename patterns (in shell glob syntax) that should always be
48 tracked as largefiles:
49
50 [largefiles]
51 patterns = *.jpg *.{png,bmp} library.zip content/audio/*
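
Both options can be combined. For instance, a repository that should treat
every file over 2 megabytes, plus all bitmap images, as largefiles might use
the following illustrative (not default) configuration:

    [largefiles]
    size = 2
    patterns = *.jpg *.{png,bmp}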
@@ -0,0 +1,29
1 # Copyright 2010-2011 Fog Creek Software
2 #
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
5
6 '''largefile store working over Mercurial's wire protocol'''
7
8 import lfutil
9 import remotestore
10
11 class wirestore(remotestore.remotestore):
12 def __init__(self, ui, repo, remote):
13 cap = remote.capable('largefiles')
14 if not cap:
15 raise lfutil.storeprotonotcapable([])
16 storetypes = cap.split(',')
17 if 'serve' not in storetypes:
18 raise lfutil.storeprotonotcapable(storetypes)
19 self.remote = remote
20 super(wirestore, self).__init__(ui, repo, remote.url())
21
22 def _put(self, hash, fd):
23 return self.remote.putlfile(hash, fd)
24
25 def _get(self, hash):
26 return self.remote.getlfile(hash)
27
28 def _stat(self, hash):
29 return self.remote.statlfile(hash)
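
Assuming the largefiles extension is enabled (so that reposetup() has added
the wire methods to the peer), a wirestore might be constructed roughly as in
the sketch below; the URL and the all-zero hash are placeholders, not values
from this patch.

    from mercurial import hg, ui as uimod
    import wirestore

    myui = uimod.ui()
    repo = hg.repository(myui, '.')                          # local repo
    remote = hg.repository(myui, 'http://example.com/repo')  # placeholder URL
    store = wirestore.wirestore(myui, repo, remote)
    if store.exists('0' * 40):        # issues a statlfile round trip
        print 'largefile present on the remote'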
@@ -1,384 +1,386
1 1 #!/usr/bin/env python
2 2 #
3 3 # check-code - a style and portability checker for Mercurial
4 4 #
5 5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 import re, glob, os, sys
11 11 import keyword
12 12 import optparse
13 13
14 14 def repquote(m):
15 15 t = re.sub(r"\w", "x", m.group('text'))
16 16 t = re.sub(r"[^\sx]", "o", t)
17 17 return m.group('quote') + t + m.group('quote')
18 18
19 19 def reppython(m):
20 20 comment = m.group('comment')
21 21 if comment:
22 22 return "#" * len(comment)
23 23 return repquote(m)
24 24
25 25 def repcomment(m):
26 26 return m.group(1) + "#" * len(m.group(2))
27 27
28 28 def repccomment(m):
29 29 t = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
30 30 return m.group(1) + t + "*/"
31 31
32 32 def repcallspaces(m):
33 33 t = re.sub(r"\n\s+", "\n", m.group(2))
34 34 return m.group(1) + t
35 35
36 36 def repinclude(m):
37 37 return m.group(1) + "<foo>"
38 38
39 39 def rephere(m):
40 40 t = re.sub(r"\S", "x", m.group(2))
41 41 return m.group(1) + t
42 42
43 43
44 44 testpats = [
45 45 [
46 46 (r'(pushd|popd)', "don't use 'pushd' or 'popd', use 'cd'"),
47 47 (r'\W\$?\(\([^\)]*\)\)', "don't use (()) or $(()), use 'expr'"),
48 48 (r'^function', "don't use 'function', use old style"),
49 49 (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
50 50 (r'echo.*\\n', "don't use 'echo \\n', use printf"),
51 51 (r'echo -n', "don't use 'echo -n', use printf"),
52 52 (r'^diff.*-\w*N', "don't use 'diff -N'"),
53 53 (r'(^| )wc[^|]*$', "filter wc output"),
54 54 (r'head -c', "don't use 'head -c', use 'dd'"),
55 55 (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
56 56 (r'printf.*\\\d\d\d', "don't use 'printf \NNN', use Python"),
57 57 (r'printf.*\\x', "don't use printf \\x, use Python"),
58 58 (r'\$\(.*\)', "don't use $(expr), use `expr`"),
59 59 (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
60 60 (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
61 61 "use egrep for extended grep syntax"),
62 62 (r'/bin/', "don't use explicit paths for tools"),
63 63 (r'\$PWD', "don't use $PWD, use `pwd`"),
64 64 (r'[^\n]\Z', "no trailing newline"),
65 65 (r'export.*=', "don't export and assign at once"),
66 66 ('^([^"\']|("[^"]*")|(\'[^\']*\'))*\\^', "^ must be quoted"),
67 67 (r'^source\b', "don't use 'source', use '.'"),
68 68 (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
69 69 (r'ls\s+[^|-]+\s+-', "options to 'ls' must come before filenames"),
70 70 (r'[^>]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
71 71 (r'stop\(\)', "don't use 'stop' as a shell function name"),
72 72 ],
73 73 # warnings
74 74 []
75 75 ]
76 76
77 77 testfilters = [
78 78 (r"( *)(#([^\n]*\S)?)", repcomment),
79 79 (r"<<(\S+)((.|\n)*?\n\1)", rephere),
80 80 ]
81 81
82 82 uprefix = r"^ \$ "
83 83 uprefixc = r"^ > "
84 84 utestpats = [
85 85 [
86 86 (r'^(\S| $ ).*(\S\s+|^\s+)\n', "trailing whitespace on non-output"),
87 87 (uprefix + r'.*\|\s*sed', "use regex test output patterns instead of sed"),
88 88 (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
89 89 (uprefix + r'.*\$\?', "explicit exit code checks unnecessary"),
90 90 (uprefix + r'.*\|\| echo.*(fail|error)',
91 91 "explicit exit code checks unnecessary"),
92 92 (uprefix + r'set -e', "don't use set -e"),
93 93 (uprefixc + r'( *)\t', "don't use tabs to indent"),
94 94 ],
95 95 # warnings
96 96 []
97 97 ]
98 98
99 99 for i in [0, 1]:
100 100 for p, m in testpats[i]:
101 101 if p.startswith('^'):
102 102 p = uprefix + p[1:]
103 103 else:
104 104 p = uprefix + p
105 105 utestpats[i].append((p, m))
106 106
107 107 utestfilters = [
108 108 (r"( *)(#([^\n]*\S)?)", repcomment),
109 109 ]
110 110
111 111 pypats = [
112 112 [
113 113 (r'^\s*def\s*\w+\s*\(.*,\s*\(',
114 114 "tuple parameter unpacking not available in Python 3+"),
115 115 (r'lambda\s*\(.*,.*\)',
116 116 "tuple parameter unpacking not available in Python 3+"),
117 117 (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
118 118 (r'\breduce\s*\(.*', "reduce is not available in Python 3+"),
119 119 (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
120 120 (r'^\s*\t', "don't use tabs"),
121 121 (r'\S;\s*\n', "semicolon"),
122 122 (r'\w,\w', "missing whitespace after ,"),
123 123 (r'\w[+/*\-<>]\w', "missing whitespace in expression"),
124 124 (r'^\s+\w+=\w+[^,)]$', "missing whitespace in assignment"),
125 125 (r'.{85}', "line too long"),
126 126 (r'[^\n]\Z', "no trailing newline"),
127 127 (r'(\S\s+|^\s+)\n', "trailing whitespace"),
128 128 # (r'^\s+[^_ ][^_. ]+_[^_]+\s*=', "don't use underbars in identifiers"),
129 129 # (r'\w*[a-z][A-Z]\w*\s*=', "don't use camelcase in identifiers"),
130 130 (r'^\s*(if|while|def|class|except|try)\s[^[]*:\s*[^\]#\s]+',
131 131 "linebreak after :"),
132 132 (r'class\s[^( ]+:', "old-style class, use class foo(object)"),
133 133 (r'class\s[^( ]+\(\):',
134 134 "class foo() not available in Python 2.4, use class foo(object)"),
135 135 (r'\b(%s)\(' % '|'.join(keyword.kwlist),
136 136 "Python keyword is not a function"),
137 137 (r',]', "unneeded trailing ',' in list"),
138 138 # (r'class\s[A-Z][^\(]*\((?!Exception)',
139 139 # "don't capitalize non-exception classes"),
140 140 # (r'in range\(', "use xrange"),
141 141 # (r'^\s*print\s+', "avoid using print in core and extensions"),
142 142 (r'[\x80-\xff]', "non-ASCII character literal"),
143 143 (r'("\')\.format\(', "str.format() not available in Python 2.4"),
144 144 (r'^\s*with\s+', "with not available in Python 2.4"),
145 145 (r'\.isdisjoint\(', "set.isdisjoint not available in Python 2.4"),
146 146 (r'^\s*except.* as .*:', "except as not available in Python 2.4"),
147 147 (r'^\s*os\.path\.relpath', "relpath not available in Python 2.4"),
148 148 (r'(?<!def)\s+(any|all|format)\(',
149 149 "any/all/format not available in Python 2.4"),
150 150 (r'(?<!def)\s+(callable)\(',
151 "callable not available in Python 3, use hasattr(f, '__call__')"),
151 "callable not available in Python 3, use getattr(f, '__call__', None)"),
152 152 (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
153 153 (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
154 154 "gratuitous whitespace after Python keyword"),
155 155 (r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"),
156 156 # (r'\s\s=', "gratuitous whitespace before ="),
157 157 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
158 158 "missing whitespace around operator"),
159 159 (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\s',
160 160 "missing whitespace around operator"),
161 161 (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
162 162 "missing whitespace around operator"),
163 163 (r'[^+=*/!<>&| -](\s=|=\s)[^= ]',
164 164 "wrong whitespace around ="),
165 165 (r'raise Exception', "don't raise generic exceptions"),
166 166 (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
167 167 (r' [=!]=\s+(True|False|None)',
168 168 "comparison with singleton, use 'is' or 'is not' instead"),
169 169 (r'^\s*(while|if) [01]:',
170 170 "use True/False for constant Boolean expression"),
171 (r'(?<!def)\s+hasattr',
172 'hasattr(foo, bar) is broken, use util.safehasattr(foo, bar) instead'),
171 173 (r'opener\([^)]*\).read\(',
172 174 "use opener.read() instead"),
173 175 (r'opener\([^)]*\).write\(',
174 176 "use opener.write() instead"),
175 177 (r'[\s\(](open|file)\([^)]*\)\.read\(',
176 178 "use util.readfile() instead"),
177 179 (r'[\s\(](open|file)\([^)]*\)\.write\(',
178 180 "use util.readfile() instead"),
179 181 (r'^[\s\(]*(open(er)?|file)\([^)]*\)',
180 182 "always assign an opened file to a variable, and close it afterwards"),
181 183 (r'[\s\(](open|file)\([^)]*\)\.',
182 184 "always assign an opened file to a variable, and close it afterwards"),
183 185 (r'(?i)descendent', "the proper spelling is descendAnt"),
184 186 (r'\.debug\(\_', "don't mark debug messages for translation"),
185 187 ],
186 188 # warnings
187 189 [
188 190 (r'.{81}', "warning: line over 80 characters"),
189 191 (r'^\s*except:$', "warning: naked except clause"),
190 192 (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
191 193 "warning: unwrapped ui message"),
192 194 ]
193 195 ]
194 196
195 197 pyfilters = [
196 198 (r"""(?msx)(?P<comment>\#.*?$)|
197 199 ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
198 200 (?P<text>(([^\\]|\\.)*?))
199 201 (?P=quote))""", reppython),
200 202 ]
201 203
202 204 cpats = [
203 205 [
204 206 (r'//', "don't use //-style comments"),
205 207 (r'^ ', "don't use spaces to indent"),
206 208 (r'\S\t', "don't use tabs except for indent"),
207 209 (r'(\S\s+|^\s+)\n', "trailing whitespace"),
208 210 (r'.{85}', "line too long"),
209 211 (r'(while|if|do|for)\(', "use space after while/if/do/for"),
210 212 (r'return\(', "return is not a function"),
211 213 (r' ;', "no space before ;"),
212 214 (r'\w+\* \w+', "use int *foo, not int* foo"),
213 215 (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
214 216 (r'\S+ (\+\+|--)', "use foo++, not foo ++"),
215 217 (r'\w,\w', "missing whitespace after ,"),
216 218 (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
217 219 (r'^#\s+\w', "use #foo, not # foo"),
218 220 (r'[^\n]\Z', "no trailing newline"),
219 221 (r'^\s*#import\b', "use only #include in standard C code"),
220 222 ],
221 223 # warnings
222 224 []
223 225 ]
224 226
225 227 cfilters = [
226 228 (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
227 229 (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
228 230 (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
229 231 (r'(\()([^)]+\))', repcallspaces),
230 232 ]
231 233
232 234 inutilpats = [
233 235 [
234 236 (r'\bui\.', "don't use ui in util"),
235 237 ],
236 238 # warnings
237 239 []
238 240 ]
239 241
240 242 inrevlogpats = [
241 243 [
242 244 (r'\brepo\.', "don't use repo in revlog"),
243 245 ],
244 246 # warnings
245 247 []
246 248 ]
247 249
248 250 checks = [
249 251 ('python', r'.*\.(py|cgi)$', pyfilters, pypats),
250 252 ('test script', r'(.*/)?test-[^.~]*$', testfilters, testpats),
251 253 ('c', r'.*\.c$', cfilters, cpats),
252 254 ('unified test', r'.*\.t$', utestfilters, utestpats),
253 255 ('layering violation repo in revlog', r'mercurial/revlog\.py', pyfilters,
254 256 inrevlogpats),
255 257 ('layering violation ui in util', r'mercurial/util\.py', pyfilters,
256 258 inutilpats),
257 259 ]
258 260
259 261 class norepeatlogger(object):
260 262 def __init__(self):
261 263 self._lastseen = None
262 264
263 265 def log(self, fname, lineno, line, msg, blame):
264 266 """print error related a to given line of a given file.
265 267
266 268 The faulty line will also be printed but only once in the case
267 269 of multiple errors.
268 270
269 271 :fname: filename
270 272 :lineno: line number
271 273 :line: actual content of the line
272 274 :msg: error message
273 275 """
274 276 msgid = fname, lineno, line
275 277 if msgid != self._lastseen:
276 278 if blame:
277 279 print "%s:%d (%s):" % (fname, lineno, blame)
278 280 else:
279 281 print "%s:%d:" % (fname, lineno)
280 282 print " > %s" % line
281 283 self._lastseen = msgid
282 284 print " " + msg
283 285
284 286 _defaultlogger = norepeatlogger()
285 287
286 288 def getblame(f):
287 289 lines = []
288 290 for l in os.popen('hg annotate -un %s' % f):
289 291 start, line = l.split(':', 1)
290 292 user, rev = start.split()
291 293 lines.append((line[1:-1], user, rev))
292 294 return lines
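For reference, 'hg annotate -un' prints one 'user rev: content' line per
source line, which is what the split above unpacks; a hedged illustration
with made-up user and revision values:

# ' mpm 1000: return lines\n'  ->  ('return lines', 'mpm', '1000')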
293 295
294 296 def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
295 297 blame=False, debug=False):
296 298 """checks style and portability of a given file
297 299
298 300 :f: filepath
299 301 :logfunc: function used to report errors
300 302 logfunc(filename, linenumber, linecontent, errormessage, blameinfo)
301 303 :maxerr: number of errors to display before aborting.
302 304 Set to None (default) to report all errors
303 305
304 306 return True if no error is found, False otherwise.
305 307 """
306 308 blamecache = None
307 309 result = True
308 310 for name, match, filters, pats in checks:
309 311 if debug:
310 312 print name, f
311 313 fc = 0
312 314 if not re.match(match, f):
313 315 if debug:
314 316 print "Skipping %s for %s it doesn't match %s" % (
315 317 name, match, f)
316 318 continue
317 319 fp = open(f)
318 320 pre = post = fp.read()
319 321 fp.close()
320 322 if "no-" + "check-code" in pre:
321 323 if debug:
322 324 print "Skipping %s for %s it has no- and check-code" % (
323 325 name, f)
324 326 break
325 327 for p, r in filters:
326 328 post = re.sub(p, r, post)
327 329 if warnings:
328 330 pats = pats[0] + pats[1]
329 331 else:
330 332 pats = pats[0]
331 333 # print post # uncomment to show filtered version
332 334 z = enumerate(zip(pre.splitlines(), post.splitlines(True)))
333 335 if debug:
334 336 print "Checking %s for %s" % (name, f)
335 337 for n, l in z:
336 338 if "check-code" + "-ignore" in l[0]:
337 339 if debug:
338 340 print "Skipping %s for %s:%s (check-code -ignore)" % (
339 341 name, f, n)
340 342 continue
341 343 for p, msg in pats:
342 344 if re.search(p, l[1]):
343 345 bd = ""
344 346 if blame:
345 347 bd = 'working directory'
346 348 if not blamecache:
347 349 blamecache = getblame(f)
348 350 if n < len(blamecache):
349 351 bl, bu, br = blamecache[n]
350 352 if bl == l[0]:
351 353 bd = '%s@%s' % (bu, br)
352 354 logfunc(f, n + 1, l[0], msg, bd)
353 355 fc += 1
354 356 result = False
355 357 if maxerr is not None and fc >= maxerr:
356 358 print " (too many errors, giving up)"
357 359 break
358 360 return result
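A minimal sketch of driving checkfile() directly with a custom logger;
'myfile.py' and terselog are hypothetical, and the logfunc signature
mirrors the call above (filename, line number, line content, message,
blame info):

def terselog(fname, lineno, line, msg, blame):
    # one line per problem; ignore the line content and blame info
    print "%s:%d: %s" % (fname, lineno, msg)

ok = checkfile('myfile.py', logfunc=terselog, warnings=True)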
359 361
360 362 if __name__ == "__main__":
361 363 parser = optparse.OptionParser("%prog [options] [files]")
362 364 parser.add_option("-w", "--warnings", action="store_true",
363 365 help="include warning-level checks")
364 366 parser.add_option("-p", "--per-file", type="int",
365 367 help="max warnings per file")
366 368 parser.add_option("-b", "--blame", action="store_true",
367 369 help="use annotate to generate blame info")
368 370 parser.add_option("", "--debug", action="store_true",
369 371 help="show debug information")
370 372
371 373 parser.set_defaults(per_file=15, warnings=False, blame=False, debug=False)
372 374 (options, args) = parser.parse_args()
373 375
374 376 if len(args) == 0:
375 377 check = glob.glob("*")
376 378 else:
377 379 check = args
378 380
379 381 for f in check:
380 382 ret = 0
381 383 if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
382 384 blame=options.blame, debug=options.debug):
383 385 ret = 1
384 386 sys.exit(ret)
@@ -1,373 +1,373
1 1 #
2 2 # This is an experimental py3k-enabled mercurial setup script.
3 3 #
4 4 # 'python setup.py install', or
5 5 # 'python setup.py --help' for more options
6 6
7 7 from distutils.command.build_py import build_py_2to3
8 8 from lib2to3.refactor import get_fixers_from_package as getfixers
9 9
10 10 import sys
11 if not hasattr(sys, 'version_info') or sys.version_info < (2, 4, 0, 'final'):
11 if getattr(sys, 'version_info', (0, 0, 0)) < (2, 4, 0, 'final'):
12 12 raise SystemExit("Mercurial requires Python 2.4 or later.")
13 13
14 14 if sys.version_info[0] >= 3:
15 15 def b(s):
16 16 '''A helper function to emulate 2.6+ bytes literals using string
17 17 literals.'''
18 18 return s.encode('latin1')
19 19 else:
20 20 def b(s):
21 21 '''A helper function to emulate 2.6+ bytes literals using string
22 22 literals.'''
23 23 return s
24 24
25 25 # Solaris Python packaging brain damage
26 26 try:
27 27 import hashlib
28 28 sha = hashlib.sha1()
29 29 except:
30 30 try:
31 31 import sha
32 32 except:
33 33 raise SystemExit(
34 34 "Couldn't import standard hashlib (incomplete Python install).")
35 35
36 36 try:
37 37 import zlib
38 38 except:
39 39 raise SystemExit(
40 40 "Couldn't import standard zlib (incomplete Python install).")
41 41
42 42 try:
43 43 import bz2
44 44 except:
45 45 raise SystemExit(
46 46 "Couldn't import standard bz2 (incomplete Python install).")
47 47
48 48 import os, subprocess, time
49 49 import shutil
50 50 import tempfile
51 51 from distutils import log
52 52 from distutils.core import setup, Extension
53 53 from distutils.dist import Distribution
54 54 from distutils.command.build import build
55 55 from distutils.command.build_ext import build_ext
56 56 from distutils.command.build_py import build_py
57 57 from distutils.spawn import spawn, find_executable
58 58 from distutils.ccompiler import new_compiler
59 59 from distutils.errors import CCompilerError
60 60
61 61 scripts = ['hg']
62 62 if os.name == 'nt':
63 63 scripts.append('contrib/win32/hg.bat')
64 64
65 65 # simplified version of distutils.ccompiler.CCompiler.has_function
66 66 # that actually removes its temporary files.
67 67 def hasfunction(cc, funcname):
68 68 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
69 69 devnull = oldstderr = None
70 70 try:
71 71 try:
72 72 fname = os.path.join(tmpdir, 'funcname.c')
73 73 f = open(fname, 'w')
74 74 f.write('int main(void) {\n')
75 75 f.write(' %s();\n' % funcname)
76 76 f.write('}\n')
77 77 f.close()
78 78 # Redirect stderr to /dev/null to hide any error messages
79 79 # from the compiler.
80 80 # This will have to be changed if we ever have to check
81 81 # for a function on Windows.
82 82 devnull = open('/dev/null', 'w')
83 83 oldstderr = os.dup(sys.stderr.fileno())
84 84 os.dup2(devnull.fileno(), sys.stderr.fileno())
85 85 objects = cc.compile([fname], output_dir=tmpdir)
86 86 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
87 87 except:
88 88 return False
89 89 return True
90 90 finally:
91 91 if oldstderr is not None:
92 92 os.dup2(oldstderr, sys.stderr.fileno())
93 93 if devnull is not None:
94 94 devnull.close()
95 95 shutil.rmtree(tmpdir)
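A minimal sketch of the intended use; the inotify probe near the end of
this script drives it exactly this way:

cc = new_compiler()
if hasfunction(cc, 'inotify_add_watch'):
    pass # safe to build C code that calls inotify_add_watch()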
96 96
97 97 # py2exe needs to be installed to work
98 98 try:
99 99 import py2exe
100 100 py2exeloaded = True
101 101
102 102 # Help py2exe to find win32com.shell
103 103 try:
104 104 import modulefinder
105 105 import win32com
106 106 for p in win32com.__path__[1:]: # Take the path to win32comext
107 107 modulefinder.AddPackagePath("win32com", p)
108 108 pn = "win32com.shell"
109 109 __import__(pn)
110 110 m = sys.modules[pn]
111 111 for p in m.__path__[1:]:
112 112 modulefinder.AddPackagePath(pn, p)
113 113 except ImportError:
114 114 pass
115 115
116 116 except ImportError:
117 117 py2exeloaded = False
118 118 pass
119 119
120 120 def runcmd(cmd, env):
121 121 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
122 122 stderr=subprocess.PIPE, env=env)
123 123 out, err = p.communicate()
124 124 # If root is executing setup.py, but the repository is owned by
125 125 # another user (as in "sudo python setup.py install") we will get
126 126 # trust warnings since the .hg/hgrc file is untrusted. That is
127 127 # fine, we don't want to load it anyway. Python may warn about
128 128 # a missing __init__.py in mercurial/locale, we also ignore that.
129 129 err = [e for e in err.splitlines()
130 130 if not e.startswith(b('Not trusting file')) \
131 131 and not e.startswith(b('warning: Not importing'))]
132 132 if err:
133 133 return ''
134 134 return out
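For illustration, the kind of stderr noise the filter above drops
(hypothetical path and user):

# Not trusting file /repo/.hg/hgrc from untrusted user root, group wheel

Any stderr line that survives the filter makes runcmd() return '' instead
of the command's output.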
135 135
136 136 version = ''
137 137
138 138 if os.path.isdir('.hg'):
139 139 # Execute hg out of this directory with a custom environment which
140 140 # includes the pure Python modules in mercurial/pure. We also take
141 141 # care to not use any hgrc files and do no localization.
142 142 pypath = ['mercurial', os.path.join('mercurial', 'pure')]
143 143 env = {'PYTHONPATH': os.pathsep.join(pypath),
144 144 'HGRCPATH': '',
145 145 'LANGUAGE': 'C'}
146 146 if 'LD_LIBRARY_PATH' in os.environ:
147 147 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
148 148 if 'SystemRoot' in os.environ:
149 149 # Copy SystemRoot into the custom environment for Python 2.6
150 150 # under Windows. Otherwise, the subprocess will fail with
151 151 # error 0xc0150004. See: http://bugs.python.org/issue3440
152 152 env['SystemRoot'] = os.environ['SystemRoot']
153 153 cmd = [sys.executable, 'hg', 'id', '-i', '-t']
154 154 l = runcmd(cmd, env).split()
155 155 while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
156 156 l.pop()
157 157 if len(l) > 1: # tag found
158 158 version = l[-1]
159 159 if l[0].endswith('+'): # propagate the dirty status to the tag
160 160 version += '+'
161 161 elif len(l) == 1: # no tag found
162 162 cmd = [sys.executable, 'hg', 'parents', '--template',
163 163 '{latesttag}+{latesttagdistance}-']
164 164 version = runcmd(cmd, env) + l[0]
165 165 if version.endswith('+'):
166 166 version += time.strftime('%Y%m%d')
167 167 elif os.path.exists('.hg_archival.txt'):
168 168 kw = dict([[t.strip() for t in l.split(':', 1)]
169 169 for l in open('.hg_archival.txt')])
170 170 if 'tag' in kw:
171 171 version = kw['tag']
172 172 elif 'latesttag' in kw:
173 173 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
174 174 else:
175 175 version = kw.get('node', '')[:12]
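A few hedged examples of what the logic above produces (tag, node hash
and date are made up):

# clean checkout of tag '2.0-rc'     -> version = '2.0-rc'
# dirty checkout of that tag         -> version = '2.0-rc+20111015'
# three commits past it, clean       -> version = '2.0-rc+3-38408275a0c4'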
176 176
177 177 if version:
178 178 f = open("mercurial/__version__.py", "w")
179 179 f.write('# this file is autogenerated by setup.py\n')
180 180 f.write('version = "%s"\n' % version)
181 181 f.close()
182 182
183 183
184 184 try:
185 185 from mercurial import __version__
186 186 version = __version__.version
187 187 except ImportError:
188 188 version = 'unknown'
189 189
190 190 class hgbuildmo(build):
191 191
192 192 description = "build translations (.mo files)"
193 193
194 194 def run(self):
195 195 if not find_executable('msgfmt'):
196 196 self.warn("could not find msgfmt executable, no translations "
197 197 "will be built")
198 198 return
199 199
200 200 podir = 'i18n'
201 201 if not os.path.isdir(podir):
202 202 self.warn("could not find %s/ directory" % podir)
203 203 return
204 204
205 205 join = os.path.join
206 206 for po in os.listdir(podir):
207 207 if not po.endswith('.po'):
208 208 continue
209 209 pofile = join(podir, po)
210 210 modir = join('locale', po[:-3], 'LC_MESSAGES')
211 211 mofile = join(modir, 'hg.mo')
212 212 mobuildfile = join('mercurial', mofile)
213 213 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
214 214 if sys.platform != 'sunos5':
215 215 # msgfmt on Solaris does not know about -c
216 216 cmd.append('-c')
217 217 self.mkpath(join('mercurial', modir))
218 218 self.make_file([pofile], mobuildfile, spawn, (cmd,))
219 219
220 220 # Insert hgbuildmo first so that files in mercurial/locale/ are found
221 221 # when build_py is run next.
222 222 build.sub_commands.insert(0, ('build_mo', None))
223 223 # We also need build_ext before build_py. Otherwise, when 2to3 is called (in
224 224 # build_py), it will not find osutil & friends, thinking that those modules are
225 225 # global and, consequently, making a mess, now that all module imports are
226 226 # global.
227 227 build.sub_commands.insert(1, ('build_ext', None))
228 228
229 229 Distribution.pure = 0
230 230 Distribution.global_options.append(('pure', None, "use pure (slow) Python "
231 231 "code instead of C extensions"))
232 232
233 233 class hgbuildext(build_ext):
234 234
235 235 def build_extension(self, ext):
236 236 try:
237 237 build_ext.build_extension(self, ext)
238 238 except CCompilerError:
239 if not hasattr(ext, 'optional') or not ext.optional:
239 if not getattr(ext, 'optional', False):
240 240 raise
241 241 log.warn("Failed to build optional extension '%s' (skipping)",
242 242 ext.name)
243 243
244 244 class hgbuildpy(build_py_2to3):
245 245 fixer_names = sorted(set(getfixers("lib2to3.fixes") +
246 246 getfixers("hgfixes")))
247 247
248 248 def finalize_options(self):
249 249 build_py.finalize_options(self)
250 250
251 251 if self.distribution.pure:
252 252 if self.py_modules is None:
253 253 self.py_modules = []
254 254 for ext in self.distribution.ext_modules:
255 255 if ext.name.startswith("mercurial."):
256 256 self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
257 257 self.distribution.ext_modules = []
258 258
259 259 def find_modules(self):
260 260 modules = build_py.find_modules(self)
261 261 for module in modules:
262 262 if module[0] == "mercurial.pure":
263 263 if module[1] != "__init__":
264 264 yield ("mercurial", module[1], module[2])
265 265 else:
266 266 yield module
267 267
268 268 def run(self):
269 269 # In the build_py_2to3 class, self.updated_files = [], but I couldn't
270 270 # see when that variable was updated to point to the updated files, as
271 271 # its names suggests. Thus, I decided to just find_all_modules and feed
272 272 # them to 2to3. Unfortunately, subsequent calls to setup3k.py will
273 273 # incur in 2to3 analysis overhead.
274 274 self.updated_files = [i[2] for i in self.find_all_modules()]
275 275
276 276 # Base class code
277 277 if self.py_modules:
278 278 self.build_modules()
279 279 if self.packages:
280 280 self.build_packages()
281 281 self.build_package_data()
282 282
283 283 # 2to3
284 284 self.run_2to3(self.updated_files)
285 285
286 286 # Remaining base class code
287 287 self.byte_compile(self.get_outputs(include_bytecode=0))
288 288
289 289 cmdclass = {'build_mo': hgbuildmo,
290 290 'build_ext': hgbuildext,
291 291 'build_py': hgbuildpy}
292 292
293 293 packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
294 294 'hgext.highlight', 'hgext.zeroconf']
295 295
296 296 pymodules = []
297 297
298 298 extmodules = [
299 299 Extension('mercurial.base85', ['mercurial/base85.c']),
300 300 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
301 301 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
302 302 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
303 303 Extension('mercurial.parsers', ['mercurial/parsers.c']),
304 304 ]
305 305
306 306 # disable osutil.c under windows + python 2.4 (issue1364)
307 307 if sys.platform == 'win32' and sys.version_info < (2, 5, 0, 'final'):
308 308 pymodules.append('mercurial.pure.osutil')
309 309 else:
310 310 extmodules.append(Extension('mercurial.osutil', ['mercurial/osutil.c']))
311 311
312 312 if sys.platform.startswith('linux') and os.uname()[2] > '2.6':
313 313 # The inotify extension is only usable with Linux 2.6 kernels.
314 314 # You also need a reasonably recent C library.
315 315 # In any case, if it fails to build the error will be skipped ('optional').
316 316 cc = new_compiler()
317 317 if hasfunction(cc, 'inotify_add_watch'):
318 318 inotify = Extension('hgext.inotify.linux._inotify',
319 319 ['hgext/inotify/linux/_inotify.c'],
320 320 ['mercurial'])
321 321 inotify.optional = True
322 322 extmodules.append(inotify)
323 323 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
324 324
325 325 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
326 326 'help/*.txt']}
327 327
328 328 def ordinarypath(p):
329 329 return p and p[0] != '.' and p[-1] != '~'
330 330
331 331 for root in ('templates',):
332 332 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
333 333 curdir = curdir.split(os.sep, 1)[1]
334 334 dirs[:] = filter(ordinarypath, dirs)
335 335 for f in filter(ordinarypath, files):
336 336 f = os.path.join(curdir, f)
337 337 packagedata['mercurial'].append(f)
338 338
339 339 datafiles = []
340 340 setupversion = version
341 341 extra = {}
342 342
343 343 if py2exeloaded:
344 344 extra['console'] = [
345 345 {'script':'hg',
346 346 'copyright':'Copyright (C) 2005-2010 Matt Mackall and others',
347 347 'product_version':version}]
348 348
349 349 if os.name == 'nt':
350 350 # Windows binary file versions for exe/dll files must have the
351 351 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
352 352 setupversion = version.split('+', 1)[0]
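# e.g. a hypothetical version of '2.0+20111015' becomes '2.0' here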
353 353
354 354 setup(name='mercurial',
355 355 version=setupversion,
356 356 author='Matt Mackall',
357 357 author_email='mpm@selenic.com',
358 358 url='http://mercurial.selenic.com/',
359 359 description='Scalable distributed SCM',
360 360 license='GNU GPLv2+',
361 361 scripts=scripts,
362 362 packages=packages,
363 363 py_modules=pymodules,
364 364 ext_modules=extmodules,
365 365 data_files=datafiles,
366 366 package_data=packagedata,
367 367 cmdclass=cmdclass,
368 368 options=dict(py2exe=dict(packages=['hgext', 'email']),
369 369 bdist_mpkg=dict(zipdist=True,
370 370 license='COPYING',
371 371 readme='contrib/macosx/Readme.html',
372 372 welcome='contrib/macosx/Welcome.html')),
373 373 **extra)
@@ -1,95 +1,95
1 1 # An example WSGI script for IIS/isapi-wsgi to export multiple hgweb repos
2 2 # Copyright 2010 Sune Foldager <cryo@cyanite.org>
3 3 #
4 4 # This software may be used and distributed according to the terms of the
5 5 # GNU General Public License version 2 or any later version.
6 6 #
7 7 # Requirements:
8 8 # - Python 2.6
9 9 # - PyWin32 build 214 or newer
10 10 # - Mercurial installed from source (python setup.py install)
11 11 # - IIS 7
12 12 #
13 13 # Earlier versions will in general work as well, but the PyWin32 version is
14 14 # necessary for win32traceutil to work correctly.
15 15 #
16 16 #
17 17 # Installation and use:
18 18 #
19 19 # - Download the isapi-wsgi source and run python setup.py install:
20 20 # http://code.google.com/p/isapi-wsgi/
21 21 #
22 22 # - Run this script (i.e. python hgwebdir_wsgi.py) to get a shim dll. The
23 23 # shim is identical for all scripts, so you can just copy and rename one
24 24 # from an earlier run, if you wish.
25 25 #
26 26 # - Setup an IIS application where your hgwebdir is to be served from.
27 27 # On 64-bit systems, make sure it's assigned a 32-bit app pool.
28 28 #
29 29 # - In the application, set up a wildcard script handler mapping of type
30 30 # IsapiModule with the shim dll as its executable. This file MUST reside
31 31 # in the same directory as the shim. Remove all other handlers, if you wish.
32 32 #
33 33 # - Make sure the ISAPI and CGI restrictions (configured globally on the
34 34 # web server) include the shim dll, to allow it to run.
35 35 #
36 36 # - Adjust the configuration variables below to match your needs.
37 37 #
38 38
39 39 # Configuration file location
40 40 hgweb_config = r'c:\src\iis\hg\hgweb.config'
41 41
42 42 # Global settings for IIS path translation
43 43 path_strip = 0 # Strip this many path elements off (when using url rewrite)
44 44 path_prefix = 1 # This many path elements are prefixes (depends on the
45 45 # virtual path of the IIS application).
46 46
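A worked sketch of the translation performed in handler() below, using the
defaults above and a hypothetical request:

# With path_strip = 0 and path_prefix = 1, a request arriving as
#   SCRIPT_NAME = '/hg'  PATH_INFO = '/repo/file/tip/readme'
# is recombined into url = '/hg/repo/file/tip/readme' and split so that
#   SCRIPT_NAME -> '/hg'   (the IIS application's virtual path)
#   PATH_INFO   -> '/repo/file/tip/readme'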
47 47 import sys
48 48
49 49 # Adjust python path if this is not a system-wide install
50 50 #sys.path.insert(0, r'c:\path\to\python\lib')
51 51
52 52 # Enable tracing. Run 'python -m win32traceutil' to debug
53 if hasattr(sys, 'isapidllhandle'):
53 if getattr(sys, 'isapidllhandle', None) is not None:
54 54 import win32traceutil
55 55
56 56 # To serve pages in local charset instead of UTF-8, remove the two lines below
57 57 import os
58 58 os.environ['HGENCODING'] = 'UTF-8'
59 59
60 60
61 61 import isapi_wsgi
62 62 from mercurial import demandimport; demandimport.enable()
63 63 from mercurial.hgweb.hgwebdir_mod import hgwebdir
64 64
65 65 # Example tweak: Replace isapi_wsgi's handler to provide better error message
66 66 # Other stuff could also be done here, like logging errors etc.
67 67 class WsgiHandler(isapi_wsgi.IsapiWsgiHandler):
68 68 error_status = '500 Internal Server Error' # less silly error message
69 69
70 70 isapi_wsgi.IsapiWsgiHandler = WsgiHandler
71 71
72 72 # Only create the hgwebdir instance once
73 73 application = hgwebdir(hgweb_config)
74 74
75 75 def handler(environ, start_response):
76 76
77 77 # Translate IIS's weird URLs
78 78 url = environ['SCRIPT_NAME'] + environ['PATH_INFO']
79 79 paths = url[1:].split('/')[path_strip:]
80 80 script_name = '/' + '/'.join(paths[:path_prefix])
81 81 path_info = '/'.join(paths[path_prefix:])
82 82 if path_info:
83 83 path_info = '/' + path_info
84 84 environ['SCRIPT_NAME'] = script_name
85 85 environ['PATH_INFO'] = path_info
86 86
87 87 return application(environ, start_response)
88 88
89 89 def __ExtensionFactory__():
90 90 return isapi_wsgi.ISAPISimpleHandler(handler)
91 91
92 92 if __name__=='__main__':
93 93 from isapi.install import *
94 94 params = ISAPIParameters()
95 95 HandleCommandLine(params)
@@ -1,1010 +1,1023
1 1 #compdef hg
2 2
3 3 # Zsh completion script for mercurial. Rename this file to _hg and copy
4 4 # it into your zsh function path (/usr/share/zsh/site-functions for
5 5 # instance)
6 6 #
7 7 # If you do not want to install it globally, you can copy it somewhere
8 8 # else and add that directory to $fpath. This must be done before
9 9 # compinit is called. If the file is copied to ~/.zsh.d, your ~/.zshrc
10 10 # file could look like this:
11 11 #
12 12 # fpath=("$HOME/.zsh.d" $fpath)
13 13 # autoload -U compinit
14 14 # compinit
15 15 #
16 16 # Copyright (C) 2005, 2006 Steve Borho <steve@borho.org>
17 17 # Copyright (C) 2006-10 Brendan Cully <brendan@kublai.com>
18 18 #
19 19 # Permission is hereby granted, without written agreement and without
20 20 # licence or royalty fees, to use, copy, modify, and distribute this
21 21 # software and to distribute modified versions of this software for any
22 22 # purpose, provided that the above copyright notice and the following
23 23 # two paragraphs appear in all copies of this software.
24 24 #
25 25 # In no event shall the authors be liable to any party for direct,
26 26 # indirect, special, incidental, or consequential damages arising out of
27 27 # the use of this software and its documentation, even if the authors
28 28 # have been advised of the possibility of such damage.
29 29 #
30 30 # The authors specifically disclaim any warranties, including, but not
31 31 # limited to, the implied warranties of merchantability and fitness for
32 32 # a particular purpose. The software provided hereunder is on an "as
33 33 # is" basis, and the authors have no obligation to provide maintenance,
34 34 # support, updates, enhancements, or modifications.
35 35
36 36 emulate -LR zsh
37 37 setopt extendedglob
38 38
39 39 local curcontext="$curcontext" state line
40 40 typeset -A _hg_cmd_globals
41 41
42 42 _hg() {
43 43 local cmd _hg_root
44 44 integer i=2
45 45 _hg_cmd_globals=()
46 46
47 47 while (( i < $#words ))
48 48 do
49 49 case "$words[$i]" in
50 50 -R|--repository)
51 51 eval _hg_root="$words[$i+1]"
52 52 _hg_cmd_globals+=("$words[$i]" "$_hg_root")
53 53 (( i += 2 ))
54 54 continue
55 55 ;;
56 56 -R*)
57 57 _hg_cmd_globals+="$words[$i]"
58 58 eval _hg_root="${words[$i]#-R}"
59 59 (( i++ ))
60 60 continue
61 61 ;;
62 62 --cwd|--config)
63 63 # pass along arguments to hg completer
64 64 _hg_cmd_globals+=("$words[$i]" "$words[$i+1]")
65 65 (( i += 2 ))
66 66 continue
67 67 ;;
68 68 -*)
69 69 # skip option
70 70 (( i++ ))
71 71 continue
72 72 ;;
73 73 esac
74 74 if [[ -z "$cmd" ]]
75 75 then
76 76 cmd="$words[$i]"
77 77 words[$i]=()
78 78 (( CURRENT-- ))
79 79 fi
80 80 (( i++ ))
81 81 done
82 82
83 83 if [[ -z "$cmd" ]]
84 84 then
85 85 _arguments -s -w : $_hg_global_opts \
86 86 ':mercurial command:_hg_commands'
87 87 return
88 88 fi
89 89
90 90 # resolve abbreviations and aliases
91 91 if ! (( $+functions[_hg_cmd_${cmd}] ))
92 92 then
93 93 local cmdexp
94 94 (( $#_hg_cmd_list )) || _hg_get_commands
95 95
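# the (r)/(R) subscript flags pick the first/last array element matching
# ${cmd}*; if they agree, the abbreviation is unambiguous and is expanded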
96 96 cmdexp=$_hg_cmd_list[(r)${cmd}*]
97 97 if [[ $cmdexp == $_hg_cmd_list[(R)${cmd}*] ]]
98 98 then
99 99 # might be nice to rewrite the command line with the expansion
100 100 cmd="$cmdexp"
101 101 fi
102 102 if [[ -n $_hg_alias_list[$cmd] ]]
103 103 then
104 104 cmd=$_hg_alias_list[$cmd]
105 105 fi
106 106 fi
107 107
108 108 curcontext="${curcontext%:*:*}:hg-${cmd}:"
109 109
110 110 zstyle -s ":completion:$curcontext:" cache-policy update_policy
111 111
112 112 if [[ -z "$update_policy" ]]
113 113 then
114 114 zstyle ":completion:$curcontext:" cache-policy _hg_cache_policy
115 115 fi
116 116
117 117 if (( $+functions[_hg_cmd_${cmd}] ))
118 118 then
119 119 _hg_cmd_${cmd}
120 120 else
121 121 # complete unknown commands normally
122 122 _arguments -s -w : $_hg_global_opts \
123 123 '*:files:_hg_files'
124 124 fi
125 125 }
126 126
127 127 _hg_cache_policy() {
128 128 typeset -a old
129 129
130 130 # cache for ten minutes
131 131 old=( "$1"(mm+10) )
132 132 (( $#old )) && return 0
133 133
134 134 return 1
135 135 }
136 136
137 137 _hg_get_commands() {
138 138 typeset -ga _hg_cmd_list
139 139 typeset -gA _hg_alias_list
140 140 local hline cmd cmdalias
141 141
142 142 _call_program hg hg debugcomplete -v | while read -A hline
143 143 do
144 144 cmd=$hline[1]
145 145 _hg_cmd_list+=($cmd)
146 146
147 147 for cmdalias in $hline[2,-1]
148 148 do
149 149 _hg_cmd_list+=($cmdalias)
150 150 _hg_alias_list+=($cmdalias $cmd)
151 151 done
152 152 done
153 153 }
154 154
155 155 _hg_commands() {
156 156 (( $#_hg_cmd_list )) || _hg_get_commands
157 157 _describe -t commands 'mercurial command' _hg_cmd_list
158 158 }
159 159
160 160 _hg_revrange() {
161 161 compset -P 1 '*:'
162 162 _hg_labels "$@"
163 163 }
164 164
165 165 _hg_labels() {
166 166 _hg_tags "$@"
167 167 _hg_bookmarks "$@"
168 _hg_branches "$@"
168 169 }
169 170
170 171 _hg_tags() {
171 172 typeset -a tags
172 173 local tag rev
173 174
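# each output line looks like 'tip    123:abcdef012345' (hypothetical);
# the substitution below strips the trailing ' rev:node' field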
174 175 _hg_cmd tags | while read tag
175 176 do
176 177 tags+=(${tag/ # [0-9]#:*})
177 178 done
178 179 (( $#tags )) && _describe -t tags 'tags' tags
179 180 }
180 181
181 182 _hg_bookmarks() {
182 183 typeset -a bookmark bookmarks
183 184
184 185 _hg_cmd bookmarks | while read -A bookmark
185 186 do
186 187 if test -z ${bookmark[-1]:#[0-9]*}
187 188 then
188 189 bookmarks+=($bookmark[-2])
189 190 fi
190 191 done
191 192 (( $#bookmarks )) && _describe -t bookmarks 'bookmarks' bookmarks
192 193 }
193 194
195 _hg_branches() {
196 typeset -a branches
197 local branch
198
199 _hg_cmd branches | while read branch
200 do
201 branches+=(${branch/ # [0-9]#:*})
202 done
203 (( $#branches )) && _describe -t branches 'branches' branches
204 }
205
194 206 # likely merge candidates
195 207 _hg_mergerevs() {
196 208 typeset -a heads
197 209 local myrev
198 210
199 211 heads=(${(f)"$(_hg_cmd heads --template '{rev}\\n')"})
200 212 # exclude own revision
201 213 myrev=$(_hg_cmd log -r . --template '{rev}\\n')
202 214 heads=(${heads:#$myrev})
203 215
204 216 (( $#heads )) && _describe -t heads 'heads' heads
205 217 }
206 218
207 219 _hg_files() {
208 220 if [[ -n "$_hg_root" ]]
209 221 then
210 222 [[ -d "$_hg_root/.hg" ]] || return
211 223 case "$_hg_root" in
212 224 /*)
213 225 _files -W $_hg_root
214 226 ;;
215 227 *)
216 228 _files -W $PWD/$_hg_root
217 229 ;;
218 230 esac
219 231 else
220 232 _files
221 233 fi
222 234 }
223 235
224 236 _hg_status() {
225 237 [[ -d $PREFIX ]] || PREFIX=$PREFIX:h
226 238 status_files=(${(ps:\0:)"$(_hg_cmd status -0n$1 ./$PREFIX)"})
227 239 }
228 240
229 241 _hg_unknown() {
230 242 typeset -a status_files
231 243 _hg_status u
232 244 _wanted files expl 'unknown files' _multi_parts / status_files
233 245 }
234 246
235 247 _hg_missing() {
236 248 typeset -a status_files
237 249 _hg_status d
238 250 _wanted files expl 'missing files' _multi_parts / status_files
239 251 }
240 252
241 253 _hg_modified() {
242 254 typeset -a status_files
243 255 _hg_status m
244 256 _wanted files expl 'modified files' _multi_parts / status_files
245 257 }
246 258
247 259 _hg_resolve() {
248 260 local rstate rpath
249 261
250 262 [[ -d $PREFIX ]] || PREFIX=$PREFIX:h
251 263
252 264 _hg_cmd resolve -l ./$PREFIX | while read rstate rpath
253 265 do
254 266 [[ $rstate == 'R' ]] && resolved_files+=($rpath)
255 267 [[ $rstate == 'U' ]] && unresolved_files+=($rpath)
256 268 done
257 269 }
258 270
259 271 _hg_resolved() {
260 272 typeset -a resolved_files unresolved_files
261 273 _hg_resolve
262 274 _wanted files expl 'resolved files' _multi_parts / resolved_files
263 275 }
264 276
265 277 _hg_unresolved() {
266 278 typeset -a resolved_files unresolved_files
267 279 _hg_resolve
268 280 _wanted files expl 'unresolved files' _multi_parts / unresolved_files
269 281 }
270 282
271 283 _hg_config() {
272 284 typeset -a items
273 285 items=(${${(f)"$(_call_program hg hg showconfig)"}%%\=*})
274 286 (( $#items )) && _describe -t config 'config item' items
275 287 }
276 288
277 289 _hg_addremove() {
278 290 _alternative 'files:unknown files:_hg_unknown' \
279 291 'files:missing files:_hg_missing'
280 292 }
281 293
282 294 _hg_ssh_urls() {
283 295 if [[ -prefix */ ]]
284 296 then
285 297 if zstyle -T ":completion:${curcontext}:files" remote-access
286 298 then
287 299 local host=${PREFIX%%/*}
288 300 typeset -a remdirs
289 301 compset -p $(( $#host + 1 ))
290 302 local rempath=${(M)PREFIX##*/}
291 303 local cacheid="hg:${host}-${rempath//\//_}"
292 304 cacheid=${cacheid%[-_]}
293 305 compset -P '*/'
294 306 if _cache_invalid "$cacheid" || ! _retrieve_cache "$cacheid"
295 307 then
296 308 remdirs=(${${(M)${(f)"$(_call_program files ssh -a -x $host ls -1FL "${(q)rempath}")"}##*/}%/})
297 309 _store_cache "$cacheid" remdirs
298 310 fi
299 311 _describe -t directories 'remote directory' remdirs -S/
300 312 else
301 313 _message 'remote directory'
302 314 fi
303 315 else
304 316 if compset -P '*@'
305 317 then
306 318 _hosts -S/
307 319 else
308 320 _alternative 'hosts:remote host name:_hosts -S/' \
309 321 'users:user:_users -S@'
310 322 fi
311 323 fi
312 324 }
313 325
314 326 _hg_urls() {
315 327 if compset -P bundle://
316 328 then
317 329 _files
318 330 elif compset -P ssh://
319 331 then
320 332 _hg_ssh_urls
321 333 elif [[ -prefix *: ]]
322 334 then
323 335 _urls
324 336 else
325 337 local expl
326 338 compset -S '[^:]*'
327 339 _wanted url-schemas expl 'URL schema' compadd -S '' - \
328 340 http:// https:// ssh:// bundle://
329 341 fi
330 342 }
331 343
332 344 _hg_paths() {
333 345 typeset -a paths pnames
334 346 _hg_cmd paths | while read -A pnames
335 347 do
336 348 paths+=($pnames[1])
337 349 done
338 350 (( $#paths )) && _describe -t path-aliases 'repository alias' paths
339 351 }
340 352
341 353 _hg_remote() {
342 354 _alternative 'path-aliases:repository alias:_hg_paths' \
343 355 'directories:directory:_files -/' \
344 356 'urls:URL:_hg_urls'
345 357 }
346 358
347 359 _hg_clone_dest() {
348 360 _alternative 'directories:directory:_files -/' \
349 361 'urls:URL:_hg_urls'
350 362 }
351 363
352 364 # Common options
353 365 _hg_global_opts=(
354 366 '(--repository -R)'{-R+,--repository}'[repository root directory]:repository:_files -/'
355 367 '--cwd[change working directory]:new working directory:_files -/'
356 368 '(--noninteractive -y)'{-y,--noninteractive}'[do not prompt, assume yes for any required answers]'
357 369 '(--verbose -v)'{-v,--verbose}'[enable additional output]'
358 370 '*--config[set/override config option]:defined config items:_hg_config'
359 371 '(--quiet -q)'{-q,--quiet}'[suppress output]'
360 372 '(--help -h)'{-h,--help}'[display help and exit]'
361 373 '--debug[debug mode]'
362 374 '--debugger[start debugger]'
363 375 '--encoding[set the charset encoding]'
364 376 '--encodingmode[set the charset encoding mode]'
365 377 '--lsprof[print improved command execution profile]'
366 378 '--traceback[print traceback on exception]'
367 379 '--time[time how long the command takes]'
368 380 '--profile[profile]'
369 381 '--version[output version information and exit]'
370 382 )
371 383
372 384 _hg_pat_opts=(
373 385 '*'{-I+,--include}'[include names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/'
374 386 '*'{-X+,--exclude}'[exclude names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/')
375 387
376 388 _hg_diff_opts=(
377 389 '(--text -a)'{-a,--text}'[treat all files as text]'
378 390 '(--git -g)'{-g,--git}'[use git extended diff format]'
379 391 "--nodates[omit dates from diff headers]")
380 392
381 393 _hg_dryrun_opts=(
382 394 '(--dry-run -n)'{-n,--dry-run}'[do not perform actions, just print output]')
383 395
384 396 _hg_style_opts=(
385 397 '--style[display using template map file]:'
386 398 '--template[display with template]:')
387 399
388 400 _hg_commit_opts=(
389 401 '(-m --message -l --logfile --edit -e)'{-e,--edit}'[edit commit message]'
390 402 '(-e --edit -l --logfile --message -m)'{-m+,--message}'[use <text> as commit message]:message:'
391 403 '(-e --edit -m --message --logfile -l)'{-l+,--logfile}'[read the commit message from <file>]:log file:_files')
392 404
393 405 _hg_remote_opts=(
394 406 '(--ssh -e)'{-e+,--ssh}'[specify ssh command to use]:'
395 407 '--remotecmd[specify hg command to run on the remote side]:')
396 408
397 409 _hg_cmd() {
398 410 _call_program hg HGPLAIN=1 hg "$_hg_cmd_globals[@]" "$@" 2> /dev/null
399 411 }
400 412
401 413 _hg_cmd_add() {
402 414 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
403 415 '*:unknown files:_hg_unknown'
404 416 }
405 417
406 418 _hg_cmd_addremove() {
407 419 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
408 420 '(--similarity -s)'{-s+,--similarity}'[guess renamed files by similarity (0<=s<=100)]:' \
409 421 '*:unknown or missing files:_hg_addremove'
410 422 }
411 423
412 424 _hg_cmd_annotate() {
413 425 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
414 426 '(--rev -r)'{-r+,--rev}'[annotate the specified revision]:revision:_hg_labels' \
415 427 '(--follow -f)'{-f,--follow}'[follow file copies and renames]' \
416 428 '(--text -a)'{-a,--text}'[treat all files as text]' \
417 429 '(--user -u)'{-u,--user}'[list the author]' \
418 430 '(--date -d)'{-d,--date}'[list the date]' \
419 431 '(--number -n)'{-n,--number}'[list the revision number (default)]' \
420 432 '(--changeset -c)'{-c,--changeset}'[list the changeset]' \
421 433 '*:files:_hg_files'
422 434 }
423 435
424 436 _hg_cmd_archive() {
425 437 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
426 438 '--no-decode[do not pass files through decoders]' \
427 439 '(--prefix -p)'{-p+,--prefix}'[directory prefix for files in archive]:' \
428 440 '(--rev -r)'{-r+,--rev}'[revision to distribute]:revision:_hg_labels' \
429 441 '(--type -t)'{-t+,--type}'[type of distribution to create]:archive type:(files tar tbz2 tgz uzip zip)' \
430 442 '*:destination:_files'
431 443 }
432 444
433 445 _hg_cmd_backout() {
434 446 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
435 447 '--merge[merge with old dirstate parent after backout]' \
436 448 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
437 449 '--parent[parent to choose when backing out merge]' \
438 450 '(--user -u)'{-u+,--user}'[record user as committer]:user:' \
439 451 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
440 452 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
441 453 '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt'
442 454 }
443 455
444 456 _hg_cmd_bisect() {
445 457 _arguments -s -w : $_hg_global_opts \
446 458 '(-)'{-r,--reset}'[reset bisect state]' \
447 459 '(--good -g --bad -b --skip -s --reset -r)'{-g,--good}'[mark changeset good]'::revision:_hg_labels \
448 460 '(--good -g --bad -b --skip -s --reset -r)'{-b,--bad}'[mark changeset bad]'::revision:_hg_labels \
449 461 '(--good -g --bad -b --skip -s --reset -r)'{-s,--skip}'[skip testing changeset]' \
450 462 '(--command -c --noupdate -U)'{-c+,--command}'[use command to check changeset state]':commands:_command_names \
451 463 '(--command -c --noupdate -U)'{-U,--noupdate}'[do not update to target]'
452 464 }
453 465
454 466 _hg_cmd_bookmarks() {
455 467 _arguments -s -w : $_hg_global_opts \
456 468 '(--force -f)'{-f,--force}'[force]' \
457 469 '(--rev -r --delete -d --rename -m)'{-r+,--rev}'[revision]:revision:_hg_labels' \
458 470 '(--rev -r --delete -d --rename -m)'{-d,--delete}'[delete a given bookmark]' \
459 471 '(--rev -r --delete -d --rename -m)'{-m+,--rename}'[rename a given bookmark]:bookmark:_hg_bookmarks' \
460 472 ':bookmark:_hg_bookmarks'
461 473 }
462 474
463 475 _hg_cmd_branch() {
464 476 _arguments -s -w : $_hg_global_opts \
465 477 '(--force -f)'{-f,--force}'[set branch name even if it shadows an existing branch]' \
466 478 '(--clean -C)'{-C,--clean}'[reset branch name to parent branch name]'
467 479 }
468 480
469 481 _hg_cmd_branches() {
470 482 _arguments -s -w : $_hg_global_opts \
471 483 '(--active -a)'{-a,--active}'[show only branches that have unmerged heads]'
472 484 }
473 485
474 486 _hg_cmd_bundle() {
475 487 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
476 488 '(--force -f)'{-f,--force}'[run even when remote repository is unrelated]' \
477 489 '(2)*--base[a base changeset to specify instead of a destination]:revision:_hg_labels' \
478 490 ':output file:_files' \
479 491 ':destination repository:_files -/'
480 492 }
481 493
482 494 _hg_cmd_cat() {
483 495 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
484 496 '(--output -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \
485 497 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
486 498 '*:file:_hg_files'
487 499 }
488 500
489 501 _hg_cmd_clone() {
490 502 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
491 503 '(--noupdate -U)'{-U,--noupdate}'[do not update the new working directory]' \
492 504 '(--rev -r)'{-r+,--rev}'[a changeset you would like to have after cloning]:' \
493 505 '--uncompressed[use uncompressed transfer (fast over LAN)]' \
494 506 ':source repository:_hg_remote' \
495 507 ':destination:_hg_clone_dest'
496 508 }
497 509
498 510 _hg_cmd_commit() {
499 511 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
500 512 '(--addremove -A)'{-A,--addremove}'[mark new/missing files as added/removed before committing]' \
501 513 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
502 514 '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt' \
503 515 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
504 516 '(--user -u)'{-u+,--user}'[record user as committer]:user:' \
505 517 '*:file:_hg_files'
506 518 }
507 519
508 520 _hg_cmd_copy() {
509 521 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
510 522 '(--after -A)'{-A,--after}'[record a copy that has already occurred]' \
511 523 '(--force -f)'{-f,--force}'[forcibly copy over an existing managed file]' \
512 524 '*:file:_hg_files'
513 525 }
514 526
515 527 _hg_cmd_diff() {
516 528 typeset -A opt_args
517 529 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_diff_opts \
518 530 '*'{-r,--rev}'+[revision]:revision:_hg_revrange' \
519 531 '(--show-function -p)'{-p,--show-function}'[show which function each change is in]' \
520 532 '(--ignore-all-space -w)'{-w,--ignore-all-space}'[ignore white space when comparing lines]' \
521 533 '(--ignore-space-change -b)'{-b,--ignore-space-change}'[ignore changes in the amount of white space]' \
522 534 '(--ignore-blank-lines -B)'{-B,--ignore-blank-lines}'[ignore changes whose lines are all blank]' \
523 535 '*:file:->diff_files'
524 536
525 537 if [[ $state == 'diff_files' ]]
526 538 then
527 539 if [[ -n $opt_args[-r] ]]
528 540 then
529 541 _hg_files
530 542 else
531 543 _hg_modified
532 544 fi
533 545 fi
534 546 }
535 547
536 548 _hg_cmd_export() {
537 549 _arguments -s -w : $_hg_global_opts $_hg_diff_opts \
538 550 '(--output -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \
539 551 '--switch-parent[diff against the second parent]' \
540 552 '*:revision:_hg_labels'
541 553 }
542 554
543 555 _hg_cmd_grep() {
544 556 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
545 557 '(--print0 -0)'{-0,--print0}'[end filenames with NUL]' \
546 558 '--all[print all revisions with matches]' \
547 559 '(--follow -f)'{-f,--follow}'[follow changeset or file history]' \
548 560 '(--ignore-case -i)'{-i,--ignore-case}'[ignore case when matching]' \
549 561 '(--files-with-matches -l)'{-l,--files-with-matches}'[print only filenames and revs that match]' \
550 562 '(--line-number -n)'{-n,--line-number}'[print matching line numbers]' \
551 563 '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
552 564 '(--user -u)'{-u,--user}'[print user who committed change]' \
553 565 '1:search pattern:' \
554 566 '*:files:_hg_files'
555 567 }
556 568
557 569 _hg_cmd_heads() {
558 570 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
559 571 '(--rev -r)'{-r+,--rev}'[show only heads which are descendants of rev]:revision:_hg_labels'
560 572 }
561 573
562 574 _hg_cmd_help() {
563 575 _arguments -s -w : $_hg_global_opts \
564 576 '*:mercurial command:_hg_commands'
565 577 }
566 578
567 579 _hg_cmd_identify() {
568 580 _arguments -s -w : $_hg_global_opts \
569 581 '(--rev -r)'{-r+,--rev}'[identify the specified rev]:revision:_hg_labels' \
570 582 '(--num -n)'{-n+,--num}'[show local revision number]' \
571 583 '(--id -i)'{-i+,--id}'[show global revision id]' \
572 584 '(--branch -b)'{-b+,--branch}'[show branch]' \
573 585 '(--tags -t)'{-t+,--tags}'[show tags]'
574 586 }
575 587
576 588 _hg_cmd_import() {
577 589 _arguments -s -w : $_hg_global_opts \
578 590 '(--strip -p)'{-p+,--strip}'[directory strip option for patch (default: 1)]:count:' \
579 591 '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
580 592 '(--force -f)'{-f,--force}'[skip check for outstanding uncommitted changes]' \
581 593 '*:patch:_files'
582 594 }
583 595
584 596 _hg_cmd_incoming() {
585 597 _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \
586 598 '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
587 599 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
588 600 '(--patch -p)'{-p,--patch}'[show patch]' \
589 601 '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_tags' \
590 602 '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
591 603 '--bundle[file to store the bundles into]:bundle file:_files' \
592 604 ':source:_hg_remote'
593 605 }
594 606
595 607 _hg_cmd_init() {
596 608 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
597 609 ':dir:_files -/'
598 610 }
599 611
600 612 _hg_cmd_locate() {
601 613 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
602 614 '(--rev -r)'{-r+,--rev}'[search repository as it stood at revision]:revision:_hg_labels' \
603 615 '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs]' \
604 616 '(--fullpath -f)'{-f,--fullpath}'[print complete paths]' \
605 617 '*:search pattern:_hg_files'
606 618 }
607 619
608 620 _hg_cmd_log() {
609 621 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_style_opts \
610 622 '(--follow --follow-first -f)'{-f,--follow}'[follow changeset or file history]' \
611 623 '(-f --follow)--follow-first[only follow the first parent of merge changesets]' \
612 624 '(--copies -C)'{-C,--copies}'[show copied files]' \
613 625 '(--keyword -k)'{-k+,--keyword}'[search for a keyword]:' \
614 626 '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' \
615 627 '*'{-r,--rev}'[show the specified revision or range]:revision:_hg_revrange' \
616 628 '(--no-merges -M)'{-M,--no-merges}'[do not show merges]' \
617 629 '(--only-merges -m)'{-m,--only-merges}'[show only merges]' \
618 630 '(--patch -p)'{-p,--patch}'[show patch]' \
619 631 '(--prune -P)'{-P+,--prune}'[do not display revision or any of its ancestors]:revision:_hg_labels' \
632 '(--branch -b)'{-b+,--branch}'[show changesets within the given named branch]:branch:_hg_branches' \
620 633 '*:files:_hg_files'
621 634 }
622 635
623 636 _hg_cmd_manifest() {
624 637 _arguments -s -w : $_hg_global_opts \
625 638 ':revision:_hg_labels'
626 639 }
627 640
628 641 _hg_cmd_merge() {
629 642 _arguments -s -w : $_hg_global_opts \
630 643 '(--force -f)'{-f,--force}'[force a merge with outstanding changes]' \
631 644 '(--rev -r 1)'{-r,--rev}'[revision to merge]:revision:_hg_mergerevs' \
632 645 '(--preview -P)'{-P,--preview}'[review revisions to merge (no merge is performed)]' \
633 646 ':revision:_hg_mergerevs'
634 647 }
635 648
636 649 _hg_cmd_outgoing() {
637 650 _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \
638 651 '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
639 652 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
640 653 '(--patch -p)'{-p,--patch}'[show patch]' \
641 654 '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]' \
642 655 '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
643 656 ':destination:_hg_remote'
644 657 }
645 658
646 659 _hg_cmd_parents() {
647 660 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
648 661 '(--rev -r)'{-r+,--rev}'[show parents of the specified rev]:revision:_hg_labels' \
649 662 ':last modified file:_hg_files'
650 663 }
651 664
652 665 _hg_cmd_paths() {
653 666 _arguments -s -w : $_hg_global_opts \
654 667 ':path:_hg_paths'
655 668 }
656 669
657 670 _hg_cmd_pull() {
658 671 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
659 672 '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
660 673 '(--update -u)'{-u,--update}'[update to new tip if changesets were pulled]' \
661 674 '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:' \
662 675 ':source:_hg_remote'
663 676 }
664 677
665 678 _hg_cmd_push() {
666 679 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
667 680 '(--force -f)'{-f,--force}'[force push]' \
668 681 '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]:revision:_hg_labels' \
669 682 ':destination:_hg_remote'
670 683 }
671 684
672 685 _hg_cmd_remove() {
673 686 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
674 687 '(--after -A)'{-A,--after}'[record remove that has already occurred]' \
675 688 '(--force -f)'{-f,--force}'[remove file even if modified]' \
676 689 '*:file:_hg_files'
677 690 }
678 691
679 692 _hg_cmd_rename() {
680 693 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
681 694 '(--after -A)'{-A,--after}'[record a rename that has already occurred]' \
682 695 '(--force -f)'{-f,--force}'[forcibly copy over an existing managed file]' \
683 696 '*:file:_hg_files'
684 697 }
685 698
686 699 _hg_cmd_resolve() {
687 700 local context state line
688 701 typeset -A opt_args
689 702
690 703 _arguments -s -w : $_hg_global_opts \
691 704 '(--list -l --mark -m --unmark -u)'{-l,--list}'[list state of files needing merge]:*:merged files:->resolve_files' \
692 705 '(--mark -m --list -l --unmark -u)'{-m,--mark}'[mark files as resolved]:*:unresolved files:_hg_unresolved' \
693 706 '(--unmark -u --list -l --mark -m)'{-u,--unmark}'[unmark files as resolved]:*:resolved files:_hg_resolved' \
694 707 '*:file:_hg_unresolved'
695 708
696 709 if [[ $state == 'resolve_files' ]]
697 710 then
698 711 _alternative 'files:resolved files:_hg_resolved' \
699 712 'files:unresolved files:_hg_unresolved'
700 713 fi
701 714 }
702 715
703 716 _hg_cmd_revert() {
704 717 local context state line
705 718 typeset -A opt_args
706 719
707 720 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
708 721 '(--all -a :)'{-a,--all}'[revert all changes when no arguments given]' \
709 722 '(--rev -r)'{-r+,--rev}'[revision to revert to]:revision:_hg_labels' \
710 723 '--no-backup[do not save backup copies of files]' \
711 724 '*:file:->diff_files'
712 725
713 726 if [[ $state == 'diff_files' ]]
714 727 then
715 728 if [[ -n $opt_args[-r] ]]
716 729 then
717 730 _hg_files
718 731 else
719 732 typeset -a status_files
720 733 _hg_status mard
721 734 _wanted files expl 'modified, added, removed or deleted file' _multi_parts / status_files
722 735 fi
723 736 fi
724 737 }
725 738
726 739 _hg_cmd_serve() {
727 740 _arguments -s -w : $_hg_global_opts \
728 741 '(--accesslog -A)'{-A+,--accesslog}'[name of access log file]:log file:_files' \
729 742 '(--errorlog -E)'{-E+,--errorlog}'[name of error log file]:log file:_files' \
730 743 '(--daemon -d)'{-d,--daemon}'[run server in background]' \
731 744 '(--port -p)'{-p+,--port}'[listen port]:listen port:' \
732 745 '(--address -a)'{-a+,--address}'[interface address]:interface address:' \
733 746 '(--name -n)'{-n+,--name}'[name to show in web pages]:repository name:' \
734 747 '(--templates -t)'{-t,--templates}'[web template directory]:template dir:_files -/' \
735 748 '--style[web template style]:style' \
736 749 '--stdio[for remote clients]' \
737 750 '(--ipv6 -6)'{-6,--ipv6}'[use IPv6 in addition to IPv4]'
738 751 }
739 752
740 753 _hg_cmd_showconfig() {
741 754 _arguments -s -w : $_hg_global_opts \
742 755 '(--untrusted -u)'{-u+,--untrusted}'[show untrusted configuration options]' \
743 756 ':config item:_hg_config'
744 757 }
745 758
746 759 _hg_cmd_status() {
747 760 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
748 761 '(--all -A)'{-A,--all}'[show status of all files]' \
749 762 '(--modified -m)'{-m,--modified}'[show only modified files]' \
750 763 '(--added -a)'{-a,--added}'[show only added files]' \
751 764 '(--removed -r)'{-r,--removed}'[show only removed files]' \
752 765 '(--deleted -d)'{-d,--deleted}'[show only deleted (but tracked) files]' \
753 766 '(--clean -c)'{-c,--clean}'[show only files without changes]' \
754 767 '(--unknown -u)'{-u,--unknown}'[show only unknown files]' \
755 768 '(--ignored -i)'{-i,--ignored}'[show ignored files]' \
756 769 '(--no-status -n)'{-n,--no-status}'[hide status prefix]' \
757 770 '(--copies -C)'{-C,--copies}'[show source of copied files]' \
758 771 '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs]' \
759 772 '--rev[show difference from revision]:revision:_hg_labels' \
760 773 '*:files:_files'
761 774 }
762 775
763 776 _hg_cmd_summary() {
764 777 _arguments -s -w : $_hg_global_opts \
765 778 '--remote[check for push and pull]'
766 779 }
767 780
768 781 _hg_cmd_tag() {
769 782 _arguments -s -w : $_hg_global_opts \
770 783 '(--local -l)'{-l,--local}'[make the tag local]' \
771 784 '(--message -m)'{-m+,--message}'[message for tag commit log entry]:message:' \
772 785 '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
773 786 '(--user -u)'{-u+,--user}'[record user as committer]:user:' \
774 787 '(--rev -r)'{-r+,--rev}'[revision to tag]:revision:_hg_labels' \
775 788 ':tag name:'
776 789 }
777 790
778 791 _hg_cmd_tip() {
779 792 _arguments -s -w : $_hg_global_opts $_hg_style_opts \
780 793 '(--patch -p)'{-p,--patch}'[show patch]'
781 794 }
782 795
783 796 _hg_cmd_unbundle() {
784 797 _arguments -s -w : $_hg_global_opts \
785 798 '(--update -u)'{-u,--update}'[update to new tip if changesets were unbundled]' \
786 799 ':files:_files'
787 800 }
788 801
789 802 _hg_cmd_update() {
790 803 _arguments -s -w : $_hg_global_opts \
791 804 '(--clean -C)'{-C,--clean}'[overwrite locally modified files]' \
792 805 '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
793 806 ':revision:_hg_labels'
794 807 }
795 808
796 809 ## extensions ##
797 810
798 811 # HGK
799 812 _hg_cmd_view() {
800 813 _arguments -s -w : $_hg_global_opts \
801 814 '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' \
802 815 ':revision range:_hg_tags'
803 816 }
804 817
805 818 # MQ
806 819 _hg_qseries() {
807 820 typeset -a patches
808 821 patches=(${(f)"$(_hg_cmd qseries)"})
809 822 (( $#patches )) && _describe -t hg-patches 'patches' patches
810 823 }
811 824
812 825 _hg_qapplied() {
813 826 typeset -a patches
814 827 patches=(${(f)"$(_hg_cmd qapplied)"})
815 828 if (( $#patches ))
816 829 then
817 830 patches+=(qbase qtip)
818 831 _describe -t hg-applied-patches 'applied patches' patches
819 832 fi
820 833 }
821 834
822 835 _hg_qunapplied() {
823 836 typeset -a patches
824 837 patches=(${(f)"$(_hg_cmd qunapplied)"})
825 838 (( $#patches )) && _describe -t hg-unapplied-patches 'unapplied patches' patches
826 839 }
827 840
828 841 # unapplied, including guarded patches
829 842 _hg_qdeletable() {
830 843 typeset -a unapplied
831 844 unapplied=(${(f)"$(_hg_cmd qseries)"})
832 845 for p in $(_hg_cmd qapplied)
833 846 do
834 847 unapplied=(${unapplied:#$p})
835 848 done
836 849
837 850 (( $#unapplied )) && _describe -t hg-allunapplied-patches 'all unapplied patches' unapplied
838 851 }
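# A hedged illustration with hypothetical patch names: given a series of
# (p1 p2 p3) with only p1 applied, the loop above filters p1 out of the
# array, leaving (p2 p3) -- every unapplied patch, guarded or not.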
839 852
840 853 _hg_qguards() {
841 854 typeset -a guards
842 855 local guard
843 856 compset -P "+|-"
844 857 _hg_cmd qselect -s | while read guard
845 858 do
846 859 guards+=(${guard#(+|-)})
847 860 done
848 861 (( $#guards )) && _describe -t hg-guards 'guards' guards
849 862 }
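# Note on the compset call above: it strips a leading '+' or '-' from the
# word being completed, so guard names still complete after the user has
# typed the sign that qguard arguments carry.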
850 863
851 864 _hg_qseries_opts=(
852 865 '(--summary -s)'{-s,--summary}'[print first line of patch header]')
853 866
854 867 _hg_cmd_qapplied() {
855 868 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
856 869 }
857 870
858 871 _hg_cmd_qdelete() {
859 872 _arguments -s -w : $_hg_global_opts \
860 873 '(--keep -k)'{-k,--keep}'[keep patch file]' \
861 874 '*'{-r+,--rev}'[stop managing a revision]:applied patch:_hg_revrange' \
862 875 '*:unapplied patch:_hg_qdeletable'
863 876 }
864 877
865 878 _hg_cmd_qdiff() {
866 879 _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
867 880 '*:pattern:_hg_files'
868 881 }
869 882
870 883 _hg_cmd_qfold() {
871 884 _arguments -s -w : $_hg_global_opts $_hg_commit_opts \
872 885 '(--keep -k)'{-k,--keep}'[keep folded patch files]' \
873 886 '*:unapplied patch:_hg_qunapplied'
874 887 }
875 888
876 889 _hg_cmd_qgoto() {
877 890 _arguments -s -w : $_hg_global_opts \
878 891 '(--force -f)'{-f,--force}'[overwrite any local changes]' \
879 892 ':patch:_hg_qseries'
880 893 }
881 894
882 895 _hg_cmd_qguard() {
883 896 _arguments -s -w : $_hg_global_opts \
884 897 '(--list -l)'{-l,--list}'[list all patches and guards]' \
885 898 '(--none -n)'{-n,--none}'[drop all guards]' \
886 899 ':patch:_hg_qseries' \
887 900 '*:guards:_hg_qguards'
888 901 }
889 902
890 903 _hg_cmd_qheader() {
891 904 _arguments -s -w : $_hg_global_opts \
892 905 ':patch:_hg_qseries'
893 906 }
894 907
895 908 _hg_cmd_qimport() {
896 909 _arguments -s -w : $_hg_global_opts \
897 910 '(--existing -e)'{-e,--existing}'[import file in patch dir]' \
898 911 '(--name -n 2)'{-n+,--name}'[patch file name]:name:' \
899 912 '(--force -f)'{-f,--force}'[overwrite existing files]' \
900 913 '*'{-r+,--rev}'[place existing revisions under mq control]:revision:_hg_revrange' \
901 914 '*:patch:_files'
902 915 }
903 916
904 917 _hg_cmd_qnew() {
905 918 _arguments -s -w : $_hg_global_opts $_hg_commit_opts \
906 919 '(--force -f)'{-f,--force}'[import uncommitted changes into patch]' \
907 920 ':patch:'
908 921 }
909 922
910 923 _hg_cmd_qnext() {
911 924 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
912 925 }
913 926
914 927 _hg_cmd_qpop() {
915 928 _arguments -s -w : $_hg_global_opts \
916 929 '(--all -a :)'{-a,--all}'[pop all patches]' \
917 930 '(--name -n)'{-n+,--name}'[queue name to pop]:' \
918 931 '(--force -f)'{-f,--force}'[forget any local changes]' \
919 932 ':patch:_hg_qapplied'
920 933 }
921 934
922 935 _hg_cmd_qprev() {
923 936 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
924 937 }
925 938
926 939 _hg_cmd_qpush() {
927 940 _arguments -s -w : $_hg_global_opts \
928 941 '(--all -a :)'{-a,--all}'[apply all patches]' \
929 942 '(--list -l)'{-l,--list}'[list patch name in commit text]' \
930 943 '(--merge -m)'{-m+,--merge}'[merge from another queue]:' \
931 944 '(--name -n)'{-n+,--name}'[merge queue name]:' \
932 945 '(--force -f)'{-f,--force}'[apply if the patch has rejects]' \
933 946 '--move[reorder patch series and apply only the patch]' \
934 947 ':patch:_hg_qunapplied'
935 948 }
936 949
937 950 _hg_cmd_qrefresh() {
938 951 _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_commit_opts \
939 952 '(--git -g)'{-g,--git}'[use git extended diff format]' \
940 953 '(--short -s)'{-s,--short}'[short refresh]' \
941 954 '*:files:_hg_files'
942 955 }
943 956
944 957 _hg_cmd_qrename() {
945 958 _arguments -s -w : $_hg_global_opts \
946 959 ':patch:_hg_qseries' \
947 960 ':destination:'
948 961 }
949 962
950 963 _hg_cmd_qselect() {
951 964 _arguments -s -w : $_hg_global_opts \
952 965 '(--none -n :)'{-n,--none}'[disable all guards]' \
953 966 '(--series -s :)'{-s,--series}'[list all guards in series file]' \
954 967 '--pop[pop to before first guarded applied patch]' \
955 968 '--reapply[pop and reapply patches]' \
956 969 '*:guards:_hg_qguards'
957 970 }
958 971
959 972 _hg_cmd_qseries() {
960 973 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts \
961 974 '(--missing -m)'{-m,--missing}'[print patches not in series]'
962 975 }
963 976
964 977 _hg_cmd_qunapplied() {
965 978 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
966 979 }
967 980
968 981 _hg_cmd_qtop() {
969 982 _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
970 983 }
971 984
972 985 _hg_cmd_strip() {
973 986 _arguments -s -w : $_hg_global_opts \
974 987 '(--force -f)'{-f,--force}'[force multi-head removal]' \
975 988 '(--backup -b)'{-b,--backup}'[bundle unrelated changesets]' \
976 989 '(--nobackup -n)'{-n,--nobackup}'[no backups]' \
977 990 ':revision:_hg_labels'
978 991 }
979 992
980 993 # Patchbomb
981 994 _hg_cmd_email() {
982 995 _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
983 996 '(--git -g)'{-g,--git}'[use git extended diff format]' \
984 997 '--plain[omit hg patch header]' \
985 998 '(--outgoing -o)'{-o,--outgoing}'[send changes not found in the target repository]' \
986 999 '(--bundle -b)'{-b,--bundle}'[send changes not in target as a binary bundle]' \
987 1000 '--bundlename[name of the bundle attachment file (default: bundle)]:' \
988 1001 '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
989 1002 '--force[run even when remote repository is unrelated (with -b/--bundle)]' \
990 1003 '*--base[a base changeset to specify instead of a destination (with -b/--bundle)]:revision:_hg_labels' \
991 1004 '--intro[send an introduction email for a single patch]' \
992 1005 '(--inline -i --attach -a)'{-a,--attach}'[send patches as attachments]' \
993 1006 '(--attach -a --inline -i)'{-i,--inline}'[send patches as inline attachments]' \
994 1007 '*--bcc[email addresses of blind carbon copy recipients]:email:' \
995 1008 '*'{-c+,--cc}'[email addresses of copy recipients]:email:' \
996 1009 '(--diffstat -d)'{-d,--diffstat}'[add diffstat output to messages]' \
997 1010 '--date[use the given date as the sending date]:date:' \
998 1011 '--desc[use the given file as the series description]:files:_files' \
999 1012 '(--from -f)'{-f,--from}'[email address of sender]:email:' \
1000 1013 '(--test -n)'{-n,--test}'[print messages that would be sent]' \
1001 1014 '(--mbox -m)'{-m,--mbox}'[write messages to mbox file instead of sending them]:file:' \
1002 1015 '*--reply-to[email addresses replies should be sent to]:email:' \
1003 1016 '(--subject -s)'{-s,--subject}'[subject of first message (intro or single patch)]:subject:' \
1004 1017 '--in-reply-to[message identifier to reply to]:msgid:' \
1005 1018 '*--flag[flags to add in subject prefixes]:flag:' \
1006 1019 '*'{-t+,--to}'[email addresses of recipients]:email:' \
1007 1020 ':revision:_hg_revrange'
1008 1021 }
1009 1022
1010 1023 _hg "$@"
@@ -1,167 +1,168
1 1 import os, sys, textwrap
2 2 # import from the live mercurial repo
3 3 sys.path.insert(0, "..")
4 4 # fall back to pure modules if required C extensions are not available
5 5 sys.path.append(os.path.join('..', 'mercurial', 'pure'))
6 6 from mercurial import demandimport; demandimport.enable()
7 7 from mercurial import encoding
8 8 from mercurial.commands import table, globalopts
9 9 from mercurial.i18n import _
10 10 from mercurial.help import helptable
11 11 from mercurial import extensions
12 from mercurial import util
12 13
13 14 def get_desc(docstr):
14 15 if not docstr:
15 16 return "", ""
16 17 # sanitize
17 18 docstr = docstr.strip("\n")
18 19 docstr = docstr.rstrip()
19 20 shortdesc = docstr.splitlines()[0].strip()
20 21
21 22 i = docstr.find("\n")
22 23 if i != -1:
23 24 desc = docstr[i + 2:]
24 25 else:
25 26 desc = shortdesc
26 27
27 28 desc = textwrap.dedent(desc)
28 29
29 30 return (shortdesc, desc)
30 31
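# A minimal doctest-style sketch of get_desc(); the docstring below is
# illustrative, not taken from a real command:
#
#   >>> get_desc("add files\n\n    Schedule files to be tracked.")
#   ('add files', 'Schedule files to be tracked.')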
31 32 def get_opts(opts):
32 33 for opt in opts:
33 34 if len(opt) == 5:
34 35 shortopt, longopt, default, desc, optlabel = opt
35 36 else:
36 37 shortopt, longopt, default, desc = opt
37 38 allopts = []
38 39 if shortopt:
39 40 allopts.append("-%s" % shortopt)
40 41 if longopt:
41 42 allopts.append("--%s" % longopt)
42 43 desc += default and _(" (default: %s)") % default or ""
43 44 yield (", ".join(allopts), desc)
44 45
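# Hedged example of what get_opts() yields for a plain 4-tuple entry
# (the option itself is made up for illustration):
#
#   >>> list(get_opts([('n', 'dry-run', None, 'do not perform actions')]))
#   [('-n, --dry-run', 'do not perform actions')]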
45 46 def get_cmd(cmd, cmdtable):
46 47 d = {}
47 48 attr = cmdtable[cmd]
48 49 cmds = cmd.lstrip("^").split("|")
49 50
50 51 d['cmd'] = cmds[0]
51 52 d['aliases'] = cmd.split("|")[1:]
52 53 d['desc'] = get_desc(attr[0].__doc__)
53 54 d['opts'] = list(get_opts(attr[1]))
54 55
55 56 s = 'hg ' + cmds[0]
56 57 if len(attr) > 2:
57 58 if not attr[2].startswith('hg'):
58 59 s += ' ' + attr[2]
59 60 else:
60 61 s = attr[2]
61 62 d['synopsis'] = s.strip()
62 63
63 64 return d
64 65
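# Sketch of the dict built above for a command-table key such as
# "^status|st" (an assumed example): d['cmd'] == 'status',
# d['aliases'] == ['st'], and d['synopsis'] is derived from the table's
# third element when one is present, else from 'hg ' plus the name.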
65 66 def section(ui, s):
66 67 ui.write("%s\n%s\n\n" % (s, "-" * encoding.colwidth(s)))
67 68
68 69 def subsection(ui, s):
69 70 ui.write("%s\n%s\n\n" % (s, '"' * encoding.colwidth(s)))
70 71
71 72 def subsubsection(ui, s):
72 73 ui.write("%s\n%s\n\n" % (s, "." * encoding.colwidth(s)))
73 74
74 75 def subsubsubsection(ui, s):
75 76 ui.write("%s\n%s\n\n" % (s, "#" * encoding.colwidth(s)))
76 77
77 78
78 79 def show_doc(ui):
79 80 # print options
80 81 section(ui, _("Options"))
81 82 for optstr, desc in get_opts(globalopts):
82 83 ui.write("%s\n %s\n\n" % (optstr, desc))
83 84
84 85 # print cmds
85 86 section(ui, _("Commands"))
86 87 commandprinter(ui, table, subsection)
87 88
88 89 # print topics
89 90 for names, sec, doc in helptable:
90 91 if names[0] == "config":
91 92 # The config help topic is included in the hgrc.5 man
92 93 # page.
93 94 continue
94 95 for name in names:
95 96 ui.write(".. _%s:\n" % name)
96 97 ui.write("\n")
97 98 section(ui, sec)
98 if hasattr(doc, '__call__'):
99 if util.safehasattr(doc, '__call__'):
99 100 doc = doc()
100 101 ui.write(doc)
101 102 ui.write("\n")
102 103
103 104 section(ui, _("Extensions"))
104 105 ui.write(_("This section contains help for extensions that are distributed "
105 106 "together with Mercurial. Help for other extensions is available "
106 107 "in the help system."))
107 108 ui.write("\n\n"
108 109 ".. contents::\n"
109 110 " :class: htmlonly\n"
110 111 " :local:\n"
111 112 " :depth: 1\n\n")
112 113
113 114 for extensionname in sorted(allextensionnames()):
114 115 mod = extensions.load(None, extensionname, None)
115 116 subsection(ui, extensionname)
116 117 ui.write("%s\n\n" % mod.__doc__)
117 118 cmdtable = getattr(mod, 'cmdtable', None)
118 119 if cmdtable:
119 120 subsubsection(ui, _('Commands'))
120 121 commandprinter(ui, cmdtable, subsubsubsection)
121 122
122 123 def commandprinter(ui, cmdtable, sectionfunc):
123 124 h = {}
124 125 for c, attr in cmdtable.items():
125 126 f = c.split("|")[0]
126 127 f = f.lstrip("^")
127 128 h[f] = c
128 129 cmds = h.keys()
129 130 cmds.sort()
130 131
131 132 for f in cmds:
132 133 if f.startswith("debug"):
133 134 continue
134 135 d = get_cmd(h[f], cmdtable)
135 136 sectionfunc(ui, d['cmd'])
136 137 # synopsis
137 138 ui.write("::\n\n")
138 139 synopsislines = d['synopsis'].splitlines()
139 140 for line in synopsislines:
140 141 # some commands (such as rebase) have a multi-line
141 142 # synopsis
142 143 ui.write(" %s\n" % line)
143 144 ui.write('\n')
144 145 # description
145 146 ui.write("%s\n\n" % d['desc'][1])
146 147 # options
147 148 opt_output = list(d['opts'])
148 149 if opt_output:
149 150 opts_len = max([len(line[0]) for line in opt_output])
150 151 ui.write(_("Options:\n\n"))
151 152 for optstr, desc in opt_output:
152 153 if desc:
153 154 s = "%-*s %s" % (opts_len, optstr, desc)
154 155 else:
155 156 s = optstr
156 157 ui.write("%s\n" % s)
157 158 ui.write("\n")
158 159 # aliases
159 160 if d['aliases']:
160 161 ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases']))
161 162
162 163
163 164 def allextensionnames():
164 165 return extensions.enabled().keys() + extensions.disabled().keys()
165 166
166 167 if __name__ == "__main__":
167 168 show_doc(sys.stdout)
@@ -1,250 +1,252
1 1 # acl.py - changeset access control for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''hooks for controlling repository access
9 9
10 10 This hook makes it possible to allow or deny write access to given
11 11 branches and paths of a repository when receiving incoming changesets
12 12 via pretxnchangegroup and pretxncommit.
13 13
14 14 The authorization is matched based on the local user name on the
15 15 system where the hook runs, and not the committer of the original
16 16 changeset (since the latter is merely informative).
17 17
18 18 The acl hook is best used along with a restricted shell like hgsh,
19 19 preventing authenticating users from doing anything other than pushing
20 20 or pulling. The hook is not safe to use if users have interactive
21 21 shell access, as they can then disable the hook. Nor is it safe if
22 22 remote users share an account, because then there is no way to
23 23 distinguish them.
24 24
25 25 The order in which access checks are performed is:
26 26
27 27 1) Deny list for branches (section ``acl.deny.branches``)
28 28 2) Allow list for branches (section ``acl.allow.branches``)
29 29 3) Deny list for paths (section ``acl.deny``)
30 30 4) Allow list for paths (section ``acl.allow``)
31 31
32 32 The allow and deny sections take key-value pairs.
33 33
34 34 Branch-based Access Control
35 35 ...........................
36 36
37 37 Use the ``acl.deny.branches`` and ``acl.allow.branches`` sections to
38 38 have branch-based access control. Keys in these sections can be
39 39 either:
40 40
41 41 - a branch name, or
42 42 - an asterisk, to match any branch;
43 43
44 44 The corresponding values can be either:
45 45
46 46 - a comma-separated list containing users and groups, or
47 47 - an asterisk, to match anyone;
48 48
49 49 Path-based Access Control
50 50 .........................
51 51
52 52 Use the ``acl.deny`` and ``acl.allow`` sections to have path-based
53 53 access control. Keys in these sections accept a subtree pattern (with
54 54 a glob syntax by default). The corresponding values follow the same
55 55 syntax as the other sections above.
56 56
57 57 Groups
58 58 ......
59 59
60 60 Group names must be prefixed with an ``@`` symbol. Specifying a group
61 61 name has the same effect as specifying all the users in that group.
62 62
63 63 You can define group members in the ``acl.groups`` section.
64 64 If a group name is not defined there, and Mercurial is running under
65 65 a Unix-like system, the list of users will be taken from the OS.
66 66 Otherwise, an exception will be raised.
67 67
68 68 Example Configuration
69 69 .....................
70 70
71 71 ::
72 72
73 73 [hooks]
74 74
75 75 # Use this if you want to check access restrictions at commit time
76 76 pretxncommit.acl = python:hgext.acl.hook
77 77
78 78 # Use this if you want to check access restrictions for pull, push,
79 79 # bundle and serve.
80 80 pretxnchangegroup.acl = python:hgext.acl.hook
81 81
82 82 [acl]
83 83 # Allow or deny access for incoming changes only if their source is
84 84 # listed here, let them pass otherwise. Source is "serve" for all
85 85 # remote access (http or ssh), "push", "pull" or "bundle" when the
86 86 # related commands are run locally.
87 87 # Default: serve
88 88 sources = serve
89 89
90 90 [acl.deny.branches]
91 91
92 92 # Everyone is denied to the frozen branch:
93 93 frozen-branch = *
94 94
95 95 # A bad user is denied on all branches:
96 96 * = bad-user
97 97
98 98 [acl.allow.branches]
99 99
100 100 # A few users are allowed on branch-a:
101 101 branch-a = user-1, user-2, user-3
102 102
103 103 # Only one user is allowed on branch-b:
104 104 branch-b = user-1
105 105
106 106 # The super user is allowed on any branch:
107 107 * = super-user
108 108
109 109 # Everyone is allowed on branch-for-tests:
110 110 branch-for-tests = *
111 111
112 112 [acl.deny]
113 113 # This list is checked first. If a match is found, acl.allow is not
114 114 # checked. All users are granted access if acl.deny is not present.
115 115 # Format for both lists: glob pattern = user, ..., @group, ...
116 116
117 117 # To match everyone, use an asterisk for the user:
118 118 # my/glob/pattern = *
119 119
120 120 # user6 will not have write access to any file:
121 121 ** = user6
122 122
123 123 # Group "hg-denied" will not have write access to any file:
124 124 ** = @hg-denied
125 125
126 126 # Nobody will be able to change "DONT-TOUCH-THIS.txt", despite
127 127 # everyone being able to change all other files. See below.
128 128 src/main/resources/DONT-TOUCH-THIS.txt = *
129 129
130 130 [acl.allow]
131 131 # if acl.allow is not present, all users are allowed by default
132 132 # empty acl.allow = no users allowed
133 133
134 134 # User "doc_writer" has write access to any file under the "docs"
135 135 # folder:
136 136 docs/** = doc_writer
137 137
138 138 # User "jack" and group "designers" have write access to any file
139 139 # under the "images" folder:
140 140 images/** = jack, @designers
141 141
142 142 # Everyone (except for "user6" - see acl.deny above) will have write
143 143 # access to any file under the "resources" folder (except for 1
144 144 # file. See acl.deny):
145 145 src/main/resources/** = *
146 146
147 147 .hgtags = release_engineer
148 148
149 149 '''
150 150
151 151 from mercurial.i18n import _
152 152 from mercurial import util, match
153 153 import getpass, urllib
154 154
155 155 def _getusers(ui, group):
156 156
157 157 # First, try to use group definition from section [acl.groups]
158 158 hgrcusers = ui.configlist('acl.groups', group)
159 159 if hgrcusers:
160 160 return hgrcusers
161 161
162 162 ui.debug('acl: "%s" not defined in [acl.groups]\n' % group)
163 163 # If no users found in group definition, get users from OS-level group
164 164 try:
165 165 return util.groupmembers(group)
166 166 except KeyError:
167 167 raise util.Abort(_("group '%s' is undefined") % group)
168 168
169 169 def _usermatch(ui, user, usersorgroups):
170 170
171 171 if usersorgroups == '*':
172 172 return True
173 173
174 174 for ug in usersorgroups.replace(',', ' ').split():
175 175 if user == ug or ug.find('@') == 0 and user in _getusers(ui, ug[1:]):
176 176 return True
177 177
178 178 return False
179 179
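# Hedged examples of the rules above (user and group names are
# hypothetical): _usermatch(ui, 'alice', '*') is always True, while
# _usermatch(ui, 'alice', 'bob, @devs') is True only when 'alice' is a
# member of the 'devs' group (from [acl.groups] or the OS).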
180 180 def buildmatch(ui, repo, user, key):
181 181 '''return tuple of (match function, list enabled).'''
182 182 if not ui.has_section(key):
183 183 ui.debug('acl: %s not enabled\n' % key)
184 184 return None
185 185
186 186 pats = [pat for pat, users in ui.configitems(key)
187 187 if _usermatch(ui, user, users)]
188 188 ui.debug('acl: %s enabled, %d entries for user %s\n' %
189 189 (key, len(pats), user))
190 190
191 191 if not repo:
192 192 if pats:
193 193 return lambda b: '*' in pats or b in pats
194 194 return lambda b: False
195 195
196 196 if pats:
197 197 return match.match(repo.root, '', pats)
198 198 return match.exact(repo.root, '', [])
199 199
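# Usage sketch: for the branch sections the hook calls this with
# repo=None and gets back a plain predicate, e.g. one testing
# '*' in pats or branch in pats; with a repository it gets a
# match.match() object that tests file paths against the patterns.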
200 200
201 201 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
202 202 if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
203 203 raise util.Abort(_('config error - hook type "%s" cannot stop '
204 204 'incoming changesets nor commits') % hooktype)
205 205 if (hooktype == 'pretxnchangegroup' and
206 206 source not in ui.config('acl', 'sources', 'serve').split()):
207 207 ui.debug('acl: changes have source "%s" - skipping\n' % source)
208 208 return
209 209
210 210 user = None
211 211 if source == 'serve' and 'url' in kwargs:
212 212 url = kwargs['url'].split(':')
213 213 if url[0] == 'remote' and url[1].startswith('http'):
214 214 user = urllib.unquote(url[3])
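# The url value is assumed here to follow the conventional
# 'remote:http(s):<client-ip>:<quoted-user>' shape, so url[3]
# carries the url-quoted name of the authenticated remote user.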
215 215
216 216 if user is None:
217 217 user = getpass.getuser()
218 218
219 ui.debug('acl: checking access for user "%s"\n' % user)
220
219 221 cfg = ui.config('acl', 'config')
220 222 if cfg:
221 223 ui.readconfig(cfg, sections = ['acl.groups', 'acl.allow.branches',
222 224 'acl.deny.branches', 'acl.allow', 'acl.deny'])
223 225
224 226 allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
225 227 denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
226 228 allow = buildmatch(ui, repo, user, 'acl.allow')
227 229 deny = buildmatch(ui, repo, user, 'acl.deny')
228 230
229 231 for rev in xrange(repo[node], len(repo)):
230 232 ctx = repo[rev]
231 233 branch = ctx.branch()
232 234 if denybranches and denybranches(branch):
233 235 raise util.Abort(_('acl: user "%s" denied on branch "%s"'
234 236 ' (changeset "%s")')
235 237 % (user, branch, ctx))
236 238 if allowbranches and not allowbranches(branch):
237 239 raise util.Abort(_('acl: user "%s" not allowed on branch "%s"'
238 240 ' (changeset "%s")')
239 241 % (user, branch, ctx))
240 242 ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
241 243 % (ctx, branch))
242 244
243 245 for f in ctx.files():
244 246 if deny and deny(f):
245 ui.debug('acl: user %s denied on %s\n' % (user, f))
246 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
247 raise util.Abort(_('acl: user "%s" denied on "%s"'
248 ' (changeset "%s")') % (user, f, ctx))
247 249 if allow and not allow(f):
248 ui.debug('acl: user %s not allowed on %s\n' % (user, f))
249 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
250 ui.debug('acl: allowing changeset %s\n' % ctx)
250 raise util.Abort(_('acl: user "%s" not allowed on "%s"'
251 ' (changeset "%s")') % (user, f, ctx))
252 ui.debug('acl: path access granted: "%s"\n' % ctx)
@@ -1,507 +1,512
1 1 # color.py color output for the status and qseries commands
2 2 #
3 3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
4 4 #
5 5 # This program is free software; you can redistribute it and/or modify it
6 6 # under the terms of the GNU General Public License as published by the
7 7 # Free Software Foundation; either version 2 of the License, or (at your
8 8 # option) any later version.
9 9 #
10 10 # This program is distributed in the hope that it will be useful, but
11 11 # WITHOUT ANY WARRANTY; without even the implied warranty of
12 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
13 13 # Public License for more details.
14 14 #
15 15 # You should have received a copy of the GNU General Public License along
16 16 # with this program; if not, write to the Free Software Foundation, Inc.,
17 17 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18 18
19 19 '''colorize output from some commands
20 20
21 21 This extension modifies the status and resolve commands to add color
22 22 to their output to reflect file status, the qseries command to add
23 23 color to reflect patch status (applied, unapplied, missing), and
24 24 diff-related commands to highlight additions, removals, diff headers,
25 25 and trailing whitespace.
26 26
27 27 Other effects in addition to color, like bold and underlined text, are
28 28 also available. By default, the terminfo database is used to find the
29 29 terminal codes used to change color and effect. If terminfo is not
30 30 available, then effects are rendered with the ECMA-48 SGR control
31 31 function (aka ANSI escape codes).
32 32
33 33 Default effects may be overridden from your configuration file::
34 34
35 35 [color]
36 36 status.modified = blue bold underline red_background
37 37 status.added = green bold
38 38 status.removed = red bold blue_background
39 39 status.deleted = cyan bold underline
40 40 status.unknown = magenta bold underline
41 41 status.ignored = black bold
42 42
43 43 # 'none' turns off all effects
44 44 status.clean = none
45 45 status.copied = none
46 46
47 47 qseries.applied = blue bold underline
48 48 qseries.unapplied = black bold
49 49 qseries.missing = red bold
50 50
51 51 diff.diffline = bold
52 52 diff.extended = cyan bold
53 53 diff.file_a = red bold
54 54 diff.file_b = green bold
55 55 diff.hunk = magenta
56 56 diff.deleted = red
57 57 diff.inserted = green
58 58 diff.changed = white
59 59 diff.trailingwhitespace = bold red_background
60 60
61 61 resolve.unresolved = red bold
62 62 resolve.resolved = green bold
63 63
64 64 bookmarks.current = green
65 65
66 66 branches.active = none
67 67 branches.closed = black bold
68 68 branches.current = green
69 69 branches.inactive = none
70 70
71 tags.normal = green
72 tags.local = black bold
73
71 74 The available effects in terminfo mode are 'blink', 'bold', 'dim',
72 75 'inverse', 'invisible', 'italic', 'standout', and 'underline'; in
73 76 ECMA-48 mode, the options are 'bold', 'inverse', 'italic', and
74 77 'underline'. How each is rendered depends on the terminal emulator.
75 78 Some may not be available for a given terminal type, and will be
76 79 silently ignored.
77 80
78 81 Note that on some systems, terminfo mode may cause problems when using
79 82 color with the pager extension and less -R. less with the -R option
80 83 will only display ECMA-48 color codes, and terminfo mode may sometimes
81 84 emit codes that less doesn't understand. You can work around this by
82 85 either using ansi mode (or auto mode), or by using less -r (which will
83 86 pass through all terminal control codes, not just color control
84 87 codes).
85 88
86 89 Because there are only eight standard colors, this module allows you
87 90 to define color names for other color slots which might be available
88 91 for your terminal type, assuming terminfo mode. For instance::
89 92
90 93 color.brightblue = 12
91 94 color.pink = 207
92 95 color.orange = 202
93 96
94 97 to set 'brightblue' to color slot 12 (useful for 16 color terminals
95 98 that have brighter colors defined in the upper eight) and 'pink' and
96 99 'orange' to colors in 256-color xterm's default color cube. These
97 100 defined colors may then be used as any of the pre-defined eight,
98 101 including appending '_background' to set the background to that color.
99 102
100 103 By default, the color extension will use ANSI mode (or win32 mode on
101 104 Windows) if it detects a terminal. To override auto mode (to enable
102 105 terminfo mode, for example), set the following configuration option::
103 106
104 107 [color]
105 108 mode = terminfo
106 109
107 110 Any value other than 'ansi', 'win32', 'terminfo', or 'auto' will
108 111 disable color.
109 112 '''
110 113
111 114 import os
112 115
113 116 from mercurial import commands, dispatch, extensions, ui as uimod, util
114 117 from mercurial.i18n import _
115 118
116 119 # start and stop parameters for effects
117 120 _effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
118 121 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
119 122 'italic': 3, 'underline': 4, 'inverse': 7,
120 123 'black_background': 40, 'red_background': 41,
121 124 'green_background': 42, 'yellow_background': 43,
122 125 'blue_background': 44, 'purple_background': 45,
123 126 'cyan_background': 46, 'white_background': 47}
124 127
125 128 def _terminfosetup(ui, mode):
126 129 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
127 130
128 131 global _terminfo_params
129 132 # If we failed to load curses, we go ahead and return.
130 133 if not _terminfo_params:
131 134 return
132 135 # Otherwise, see what the config file says.
133 136 if mode not in ('auto', 'terminfo'):
134 137 return
135 138
136 139 _terminfo_params.update((key[6:], (False, int(val)))
137 140 for key, val in ui.configitems('color')
138 141 if key.startswith('color.'))
139 142
140 143 try:
141 144 curses.setupterm()
142 145 except curses.error, e:
143 146 _terminfo_params = {}
144 147 return
145 148
146 149 for key, (b, e) in _terminfo_params.items():
147 150 if not b:
148 151 continue
149 152 if not curses.tigetstr(e):
150 153 # Most terminals don't support dim, invis, etc., so don't be
151 154 # noisy and use ui.debug().
152 155 ui.debug("no terminfo entry for %s\n" % e)
153 156 del _terminfo_params[key]
154 157 if not curses.tigetstr('setaf') or not curses.tigetstr('setab'):
155 158 # Only warn about missing terminfo entries if we explicitly asked for
156 159 # terminfo mode.
157 160 if mode == "terminfo":
158 161 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
159 162 "ECMA-48 color\n"))
160 163 _terminfo_params = {}
161 164
162 165 def _modesetup(ui, opts):
163 166 global _terminfo_params
164 167
165 168 coloropt = opts['color']
166 169 auto = coloropt == 'auto'
167 170 always = not auto and util.parsebool(coloropt)
168 171 if not always and not auto:
169 172 return None
170 173
171 174 formatted = always or (os.environ.get('TERM') != 'dumb' and ui.formatted())
172 175
173 176 mode = ui.config('color', 'mode', 'auto')
174 177 realmode = mode
175 178 if mode == 'auto':
176 179 if os.name == 'nt' and 'TERM' not in os.environ:
177 180 # looks like a cmd.exe console, use win32 API or nothing
178 181 realmode = 'win32'
179 182 else:
180 183 realmode = 'ansi'
181 184
182 185 if realmode == 'win32':
183 186 _terminfo_params = {}
184 187 if not w32effects:
185 188 if mode == 'win32':
186 189 # only warn if color.mode is explicitly set to win32
187 190 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
188 191 return None
189 192 _effects.update(w32effects)
190 193 elif realmode == 'ansi':
191 194 _terminfo_params = {}
192 195 elif realmode == 'terminfo':
193 196 _terminfosetup(ui, mode)
194 197 if not _terminfo_params:
195 198 if mode == 'terminfo':
196 199 ## FIXME Shouldn't we return None in this case too?
197 200 # only warn if color.mode is explicitly set to terminfo
198 201 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
199 202 realmode = 'ansi'
200 203 else:
201 204 return None
202 205
203 206 if always or (auto and formatted):
204 207 return realmode
205 208 return None
206 209
207 210 try:
208 211 import curses
209 212 # Mapping from effect name to terminfo attribute name or color number.
210 213 # This will also force-load the curses module.
211 214 _terminfo_params = {'none': (True, 'sgr0'),
212 215 'standout': (True, 'smso'),
213 216 'underline': (True, 'smul'),
214 217 'reverse': (True, 'rev'),
215 218 'inverse': (True, 'rev'),
216 219 'blink': (True, 'blink'),
217 220 'dim': (True, 'dim'),
218 221 'bold': (True, 'bold'),
219 222 'invisible': (True, 'invis'),
220 223 'italic': (True, 'sitm'),
221 224 'black': (False, curses.COLOR_BLACK),
222 225 'red': (False, curses.COLOR_RED),
223 226 'green': (False, curses.COLOR_GREEN),
224 227 'yellow': (False, curses.COLOR_YELLOW),
225 228 'blue': (False, curses.COLOR_BLUE),
226 229 'magenta': (False, curses.COLOR_MAGENTA),
227 230 'cyan': (False, curses.COLOR_CYAN),
228 231 'white': (False, curses.COLOR_WHITE)}
229 232 except ImportError:
230 233 _terminfo_params = False
231 234
232 235 _styles = {'grep.match': 'red bold',
233 236 'bookmarks.current': 'green',
234 237 'branches.active': 'none',
235 238 'branches.closed': 'black bold',
236 239 'branches.current': 'green',
237 240 'branches.inactive': 'none',
238 241 'diff.changed': 'white',
239 242 'diff.deleted': 'red',
240 243 'diff.diffline': 'bold',
241 244 'diff.extended': 'cyan bold',
242 245 'diff.file_a': 'red bold',
243 246 'diff.file_b': 'green bold',
244 247 'diff.hunk': 'magenta',
245 248 'diff.inserted': 'green',
246 249 'diff.trailingwhitespace': 'bold red_background',
247 250 'diffstat.deleted': 'red',
248 251 'diffstat.inserted': 'green',
249 252 'ui.prompt': 'yellow',
250 253 'log.changeset': 'yellow',
251 254 'resolve.resolved': 'green bold',
252 255 'resolve.unresolved': 'red bold',
253 256 'status.added': 'green bold',
254 257 'status.clean': 'none',
255 258 'status.copied': 'none',
256 259 'status.deleted': 'cyan bold underline',
257 260 'status.ignored': 'black bold',
258 261 'status.modified': 'blue bold',
259 262 'status.removed': 'red bold',
260 'status.unknown': 'magenta bold underline'}
263 'status.unknown': 'magenta bold underline',
264 'tags.normal': 'green',
265 'tags.local': 'black bold'}
261 266
262 267
263 268 def _effect_str(effect):
264 269 '''Helper function for render_effects().'''
265 270
266 271 bg = False
267 272 if effect.endswith('_background'):
268 273 bg = True
269 274 effect = effect[:-11]
270 275 attr, val = _terminfo_params[effect]
271 276 if attr:
272 277 return curses.tigetstr(val)
273 278 elif bg:
274 279 return curses.tparm(curses.tigetstr('setab'), val)
275 280 else:
276 281 return curses.tparm(curses.tigetstr('setaf'), val)
277 282
278 283 def render_effects(text, effects):
279 284 'Wrap text in commands to turn on each effect.'
280 285 if not text:
281 286 return text
282 287 if not _terminfo_params:
283 288 start = [str(_effects[e]) for e in ['none'] + effects.split()]
284 289 start = '\033[' + ';'.join(start) + 'm'
285 290 stop = '\033[' + str(_effects['none']) + 'm'
286 291 else:
287 292 start = ''.join(_effect_str(effect)
288 293 for effect in ['none'] + effects.split())
289 294 stop = _effect_str('none')
290 295 return ''.join([start, text, stop])
291 296
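# In ECMA-48 mode (no terminfo data), the wrapping is plain SGR codes;
# for instance render_effects('hi', 'red bold') returns
# '\x1b[0;31;1mhi\x1b[0m' -- reset, red (31), bold (1), text, reset.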
292 297 def extstyles():
293 298 for name, ext in extensions.extensions():
294 299 _styles.update(getattr(ext, 'colortable', {}))
295 300
296 301 def configstyles(ui):
297 302 for status, cfgeffects in ui.configitems('color'):
298 303 if '.' not in status or status.startswith('color.'):
299 304 continue
300 305 cfgeffects = ui.configlist('color', status)
301 306 if cfgeffects:
302 307 good = []
303 308 for e in cfgeffects:
304 309 if not _terminfo_params and e in _effects:
305 310 good.append(e)
306 311 elif e in _terminfo_params or e[:-11] in _terminfo_params:
307 312 good.append(e)
308 313 else:
309 314 ui.warn(_("ignoring unknown color/effect %r "
310 315 "(configured in color.%s)\n")
311 316 % (e, status))
312 317 _styles[status] = ' '.join(good)
313 318
314 319 class colorui(uimod.ui):
315 320 def popbuffer(self, labeled=False):
316 321 if labeled:
317 322 return ''.join(self.label(a, label) for a, label
318 323 in self._buffers.pop())
319 324 return ''.join(a for a, label in self._buffers.pop())
320 325
321 326 _colormode = 'ansi'
322 327 def write(self, *args, **opts):
323 328 label = opts.get('label', '')
324 329 if self._buffers:
325 330 self._buffers[-1].extend([(str(a), label) for a in args])
326 331 elif self._colormode == 'win32':
327 332 for a in args:
328 333 win32print(a, super(colorui, self).write, **opts)
329 334 else:
330 335 return super(colorui, self).write(
331 336 *[self.label(str(a), label) for a in args], **opts)
332 337
333 338 def write_err(self, *args, **opts):
334 339 label = opts.get('label', '')
335 340 if self._colormode == 'win32':
336 341 for a in args:
337 342 win32print(a, super(colorui, self).write_err, **opts)
338 343 else:
339 344 return super(colorui, self).write_err(
340 345 *[self.label(str(a), label) for a in args], **opts)
341 346
342 347 def label(self, msg, label):
343 348 effects = []
344 349 for l in label.split():
345 350 s = _styles.get(l, '')
346 351 if s:
347 352 effects.append(s)
348 353 effects = ' '.join(effects)
349 354 if effects:
350 355 return '\n'.join([render_effects(s, effects)
351 356 for s in msg.split('\n')])
352 357 return msg
353 358
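# Sketch: a call such as ui.write('M foo\n', label='status.modified')
# ends up in label() above, which wraps each line of the message in the
# effects configured for that label ('blue bold' by default, per _styles).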
354 359
355 360 def uisetup(ui):
356 361 global _terminfo_params
357 362 if ui.plain():
358 363 return
359 364 def colorcmd(orig, ui_, opts, cmd, cmdfunc):
360 365 mode = _modesetup(ui_, opts)
361 366 if mode:
362 367 colorui._colormode = mode
363 368 if not issubclass(ui_.__class__, colorui):
364 369 colorui.__bases__ = (ui_.__class__,)
365 370 ui_.__class__ = colorui
366 371 extstyles()
367 372 configstyles(ui_)
368 373 return orig(ui_, opts, cmd, cmdfunc)
369 374 extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
370 375
371 376 def extsetup(ui):
372 377 commands.globalopts.append(
373 378 ('', 'color', 'auto',
374 379 # i18n: 'always', 'auto', and 'never' are keywords and should
375 380 # not be translated
376 381 _("when to colorize (boolean, always, auto, or never)"),
377 382 _('TYPE')))
378 383
379 384 if os.name != 'nt':
380 385 w32effects = None
381 386 else:
382 387 import re, ctypes
383 388
384 389 _kernel32 = ctypes.windll.kernel32
385 390
386 391 _WORD = ctypes.c_ushort
387 392
388 393 _INVALID_HANDLE_VALUE = -1
389 394
390 395 class _COORD(ctypes.Structure):
391 396 _fields_ = [('X', ctypes.c_short),
392 397 ('Y', ctypes.c_short)]
393 398
394 399 class _SMALL_RECT(ctypes.Structure):
395 400 _fields_ = [('Left', ctypes.c_short),
396 401 ('Top', ctypes.c_short),
397 402 ('Right', ctypes.c_short),
398 403 ('Bottom', ctypes.c_short)]
399 404
400 405 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
401 406 _fields_ = [('dwSize', _COORD),
402 407 ('dwCursorPosition', _COORD),
403 408 ('wAttributes', _WORD),
404 409 ('srWindow', _SMALL_RECT),
405 410 ('dwMaximumWindowSize', _COORD)]
406 411
407 412 _STD_OUTPUT_HANDLE = 0xfffffff5L # (DWORD)-11
408 413 _STD_ERROR_HANDLE = 0xfffffff4L # (DWORD)-12
409 414
410 415 _FOREGROUND_BLUE = 0x0001
411 416 _FOREGROUND_GREEN = 0x0002
412 417 _FOREGROUND_RED = 0x0004
413 418 _FOREGROUND_INTENSITY = 0x0008
414 419
415 420 _BACKGROUND_BLUE = 0x0010
416 421 _BACKGROUND_GREEN = 0x0020
417 422 _BACKGROUND_RED = 0x0040
418 423 _BACKGROUND_INTENSITY = 0x0080
419 424
420 425 _COMMON_LVB_REVERSE_VIDEO = 0x4000
421 426 _COMMON_LVB_UNDERSCORE = 0x8000
422 427
423 428 # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
424 429 w32effects = {
425 430 'none': -1,
426 431 'black': 0,
427 432 'red': _FOREGROUND_RED,
428 433 'green': _FOREGROUND_GREEN,
429 434 'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
430 435 'blue': _FOREGROUND_BLUE,
431 436 'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
432 437 'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
433 438 'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
434 439 'bold': _FOREGROUND_INTENSITY,
435 440 'black_background': 0x100, # unused value > 0x0f
436 441 'red_background': _BACKGROUND_RED,
437 442 'green_background': _BACKGROUND_GREEN,
438 443 'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
439 444 'blue_background': _BACKGROUND_BLUE,
440 445 'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
441 446 'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
442 447 'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
443 448 _BACKGROUND_BLUE),
444 449 'bold_background': _BACKGROUND_INTENSITY,
445 450 'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
446 451 'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
447 452 }
448 453
449 454 passthrough = set([_FOREGROUND_INTENSITY,
450 455 _BACKGROUND_INTENSITY,
451 456 _COMMON_LVB_UNDERSCORE,
452 457 _COMMON_LVB_REVERSE_VIDEO])
453 458
454 459 stdout = _kernel32.GetStdHandle(
455 460 _STD_OUTPUT_HANDLE) # don't close the handle returned
456 461 if stdout is None or stdout == _INVALID_HANDLE_VALUE:
457 462 w32effects = None
458 463 else:
459 464 csbi = _CONSOLE_SCREEN_BUFFER_INFO()
460 465 if not _kernel32.GetConsoleScreenBufferInfo(
461 466 stdout, ctypes.byref(csbi)):
462 467 # stdout may not support GetConsoleScreenBufferInfo()
463 468 # when called from subprocess or redirected
464 469 w32effects = None
465 470 else:
466 471 origattr = csbi.wAttributes
467 472 ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)',
468 473 re.MULTILINE | re.DOTALL)
469 474
470 475 def win32print(text, orig, **opts):
471 476 label = opts.get('label', '')
472 477 attr = origattr
473 478
474 479 def mapcolor(val, attr):
475 480 if val == -1:
476 481 return origattr
477 482 elif val in passthrough:
478 483 return attr | val
479 484 elif val > 0x0f:
480 485 return (val & 0x70) | (attr & 0x8f)
481 486 else:
482 487 return (val & 0x07) | (attr & 0xf8)
483 488
484 489 # determine console attributes based on labels
485 490 for l in label.split():
486 491 style = _styles.get(l, '')
487 492 for effect in style.split():
488 493 attr = mapcolor(w32effects[effect], attr)
489 494
490 495 # hack to ensure regexp finds data
491 496 if not text.startswith('\033['):
492 497 text = '\033[m' + text
493 498
494 499 # Look for ANSI-like codes embedded in text
495 500 m = re.match(ansire, text)
496 501
497 502 try:
498 503 while m:
499 504 for sattr in m.group(1).split(';'):
500 505 if sattr:
501 506 attr = mapcolor(int(sattr), attr)
502 507 _kernel32.SetConsoleTextAttribute(stdout, attr)
503 508 orig(m.group(2), **opts)
504 509 m = re.match(ansire, m.group(3))
505 510 finally:
506 511 # Explicitly reset original attributes
507 512 _kernel32.SetConsoleTextAttribute(stdout, origattr)
@@ -1,847 +1,854
1 1 # Mercurial built-in replacement for cvsps.
2 2 #
3 3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import os
9 9 import re
10 10 import cPickle as pickle
11 11 from mercurial import util
12 12 from mercurial.i18n import _
13 13 from mercurial import hook
14 from mercurial import util
14 15
15 16 class logentry(object):
16 17 '''Class logentry has the following attributes:
17 18 .author - author name as CVS knows it
18 19 .branch - name of branch this revision is on
19 20 .branches - revision tuple of branches starting at this revision
20 21 .comment - commit message
21 22 .date - the commit date as a (time, tz) tuple
22 23 .dead - true if file revision is dead
23 24 .file - Name of file
24 25 .lines - a tuple (+lines, -lines) or None
25 26 .parent - Previous revision of this entry
26 27 .rcs - name of file as returned from CVS
27 28 .revision - revision number as tuple
28 29 .tags - list of tags on the file
29 30 .synthetic - is this a synthetic "file ... added on ..." revision?
30 31 .mergepoint - the branch that has been merged from
31 32 (if present in rlog output)
32 33 .branchpoints - the branches that start at the current entry
33 34 '''
34 35 def __init__(self, **entries):
35 36 self.synthetic = False
36 37 self.__dict__.update(entries)
37 38
38 39 def __repr__(self):
39 40 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
40 41 id(self),
41 42 self.file,
42 43 ".".join(map(str, self.revision)))
43 44
44 45 class logerror(Exception):
45 46 pass
46 47
47 48 def getrepopath(cvspath):
48 49 """Return the repository path from a CVS path.
49 50
50 51 >>> getrepopath('/foo/bar')
51 52 '/foo/bar'
52 53 >>> getrepopath('c:/foo/bar')
53 54 'c:/foo/bar'
54 55 >>> getrepopath(':pserver:10/foo/bar')
55 56 '/foo/bar'
56 57 >>> getrepopath(':pserver:10c:/foo/bar')
57 58 '/foo/bar'
58 59 >>> getrepopath(':pserver:/foo/bar')
59 60 '/foo/bar'
60 61 >>> getrepopath(':pserver:c:/foo/bar')
61 62 'c:/foo/bar'
62 63 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
63 64 '/foo/bar'
64 65 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
65 66 'c:/foo/bar'
66 67 """
67 68 # According to the CVS manual, CVS paths are expressed like:
68 69 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
69 70 #
70 71 # Unfortunately, Windows absolute paths start with a drive letter
71 72 # like 'c:' making it harder to parse. Here we assume that drive
72 73 # letters are only one character long and any CVS component before
73 74 # the repository path is at least 2 characters long, and use this
74 75 # to disambiguate.
75 76 parts = cvspath.split(':')
76 77 if len(parts) == 1:
77 78 return parts[0]
78 79 # Here there is an ambiguous case if we have a port number
79 80 # immediately followed by a Windows drive letter. We assume this
80 81 # never happens and decide it must be a CVS path component,
81 82 # therefore ignoring it.
82 83 if len(parts[-2]) > 1:
83 84 return parts[-1].lstrip('0123456789')
84 85 return parts[-2] + ':' + parts[-1]
85 86
86 87 def createlog(ui, directory=None, root="", rlog=True, cache=None):
87 88 '''Collect the CVS rlog'''
88 89
89 90 # Because we store many duplicate commit log messages, reusing strings
90 91 # saves a lot of memory and pickle storage space.
91 92 _scache = {}
92 93 def scache(s):
93 94 "return a shared version of a string"
94 95 return _scache.setdefault(s, s)
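# setdefault() interns each distinct string once, so the many identical
# CVS log messages end up sharing a single string object in memory.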
95 96
96 97 ui.status(_('collecting CVS rlog\n'))
97 98
98 99 log = [] # list of logentry objects containing the CVS state
99 100
100 101 # patterns to match in CVS (r)log output, by state of use
101 102 re_00 = re.compile('RCS file: (.+)$')
102 103 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
103 104 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
104 105 re_03 = re.compile("(Cannot access.+CVSROOT)|"
105 106 "(can't create temporary directory.+)$")
106 107 re_10 = re.compile('Working file: (.+)$')
107 108 re_20 = re.compile('symbolic names:')
108 109 re_30 = re.compile('\t(.+): ([\\d.]+)$')
109 110 re_31 = re.compile('----------------------------$')
110 111 re_32 = re.compile('======================================='
111 112 '======================================$')
112 113 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
113 114 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
114 115 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
115 116 r'(.*mergepoint:\s+([^;]+);)?')
116 117 re_70 = re.compile('branches: (.+);$')
117 118
118 119 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
119 120
120 121 prefix = '' # leading path to strip off what we get from CVS
121 122
122 123 if directory is None:
123 124 # Current working directory
124 125
125 126 # Get the real directory in the repository
126 127 try:
127 128 prefix = open(os.path.join('CVS','Repository')).read().strip()
128 129 directory = prefix
129 130 if prefix == ".":
130 131 prefix = ""
131 132 except IOError:
132 133 raise logerror(_('not a CVS sandbox'))
133 134
134 135 if prefix and not prefix.endswith(os.sep):
135 136 prefix += os.sep
136 137
137 138 # Use the Root file in the sandbox, if it exists
138 139 try:
139 140 root = open(os.path.join('CVS','Root')).read().strip()
140 141 except IOError:
141 142 pass
142 143
143 144 if not root:
144 145 root = os.environ.get('CVSROOT', '')
145 146
146 147 # read log cache if one exists
147 148 oldlog = []
148 149 date = None
149 150
150 151 if cache:
151 152 cachedir = os.path.expanduser('~/.hg.cvsps')
152 153 if not os.path.exists(cachedir):
153 154 os.mkdir(cachedir)
154 155
155 156 # The cvsps cache pickle needs a uniquified name, based on the
156 157 # repository location. The address may have all sorts of nasties
157 158 # in it, slashes, colons and such. So here we take just the
158 159 # alphanumerics, concatenated in a way that does not mix up the
159 160 # various components, so that
160 161 # :pserver:user@server:/path
161 162 # and
162 163 # /pserver/user/server/path
163 164 # are mapped to different cache file names.
164 165 cachefile = root.split(":") + [directory, "cache"]
165 166 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
166 167 cachefile = os.path.join(cachedir,
167 168 '.'.join([s for s in cachefile if s]))
168 169
169 170 if cache == 'update':
170 171 try:
171 172 ui.note(_('reading cvs log cache %s\n') % cachefile)
172 173 oldlog = pickle.load(open(cachefile))
173 174 ui.note(_('cache has %d log entries\n') % len(oldlog))
174 175 except Exception, e:
175 176 ui.note(_('error reading cache: %r\n') % e)
176 177
177 178 if oldlog:
178 179 date = oldlog[-1].date # last commit date as a (time,tz) tuple
179 180 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
180 181
181 182 # build the CVS commandline
182 183 cmd = ['cvs', '-q']
183 184 if root:
184 185 cmd.append('-d%s' % root)
185 186 p = util.normpath(getrepopath(root))
186 187 if not p.endswith('/'):
187 188 p += '/'
188 189 if prefix:
189 190 # looks like normpath replaces "" by "."
190 191 prefix = p + util.normpath(prefix)
191 192 else:
192 193 prefix = p
193 194 cmd.append(['log', 'rlog'][rlog])
194 195 if date:
195 196 # no space between option and date string
196 197 cmd.append('-d>%s' % date)
197 198 cmd.append(directory)
198 199
199 200 # state machine begins here
200 201 tags = {} # dictionary of revisions on current file with their tags
201 202 branchmap = {} # mapping between branch names and revision numbers
202 203 state = 0
203 204 store = False # set when a new record can be appended
204 205
205 206 cmd = [util.shellquote(arg) for arg in cmd]
206 207 ui.note(_("running %s\n") % (' '.join(cmd)))
207 208 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
208 209
209 210 pfp = util.popen(' '.join(cmd))
210 211 peek = pfp.readline()
211 212 while True:
212 213 line = peek
213 214 if line == '':
214 215 break
215 216 peek = pfp.readline()
216 217 if line.endswith('\n'):
217 218 line = line[:-1]
218 219 #ui.debug('state=%d line=%r\n' % (state, line))
219 220
220 221 if state == 0:
221 222 # initial state, consume input until we see 'RCS file'
222 223 match = re_00.match(line)
223 224 if match:
224 225 rcs = match.group(1)
225 226 tags = {}
226 227 if rlog:
227 228 filename = util.normpath(rcs[:-2])
228 229 if filename.startswith(prefix):
229 230 filename = filename[len(prefix):]
230 231 if filename.startswith('/'):
231 232 filename = filename[1:]
232 233 if filename.startswith('Attic/'):
233 234 filename = filename[6:]
234 235 else:
235 236 filename = filename.replace('/Attic/', '/')
236 237 state = 2
237 238 continue
238 239 state = 1
239 240 continue
240 241 match = re_01.match(line)
241 242 if match:
242 243 raise logerror(match.group(1))
243 244 match = re_02.match(line)
244 245 if match:
245 246 raise logerror(match.group(2))
246 247 if re_03.match(line):
247 248 raise logerror(line)
248 249
249 250 elif state == 1:
250 251 # expect 'Working file' (only when using log instead of rlog)
251 252 match = re_10.match(line)
252 253 assert match, _('RCS file must be followed by working file')
253 254 filename = util.normpath(match.group(1))
254 255 state = 2
255 256
256 257 elif state == 2:
257 258 # expect 'symbolic names'
258 259 if re_20.match(line):
259 260 branchmap = {}
260 261 state = 3
261 262
262 263 elif state == 3:
263 264 # read the symbolic names and store as tags
264 265 match = re_30.match(line)
265 266 if match:
266 267 rev = [int(x) for x in match.group(2).split('.')]
267 268
268 269 # Convert magic branch number to an odd-numbered one
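# (for instance, an assumed magic branch revision 1.2.0.4 is
# rewritten below to the real branch number 1.2.4)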
269 270 revn = len(rev)
270 271 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
271 272 rev = rev[:-2] + rev[-1:]
272 273 rev = tuple(rev)
273 274
274 275 if rev not in tags:
275 276 tags[rev] = []
276 277 tags[rev].append(match.group(1))
277 278 branchmap[match.group(1)] = match.group(2)
278 279
279 280 elif re_31.match(line):
280 281 state = 5
281 282 elif re_32.match(line):
282 283 state = 0
283 284
284 285 elif state == 4:
285 286 # expecting '------' separator before first revision
286 287 if re_31.match(line):
287 288 state = 5
288 289 else:
289 290 assert not re_32.match(line), _('must have at least '
290 291 'some revisions')
291 292
292 293 elif state == 5:
293 294 # expecting revision number and possibly (ignored) lock indication
294 295 # we create the logentry here from values stored in states 0 to 4,
295 296 # as this state is re-entered for subsequent revisions of a file.
296 297 match = re_50.match(line)
297 298 assert match, _('expected revision number')
298 299 e = logentry(rcs=scache(rcs), file=scache(filename),
299 300 revision=tuple([int(x) for x in match.group(1).split('.')]),
300 301 branches=[], parent=None)
301 302 state = 6
302 303
303 304 elif state == 6:
304 305 # expecting date, author, state, lines changed
305 306 match = re_60.match(line)
306 307 assert match, _('revision must be followed by date line')
307 308 d = match.group(1)
308 309 if d[2] == '/':
309 310 # Y2K
310 311 d = '19' + d
311 312
312 313 if len(d.split()) != 3:
313 314 # cvs log dates always in GMT
314 315 d = d + ' UTC'
315 316 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
316 317 '%Y/%m/%d %H:%M:%S',
317 318 '%Y-%m-%d %H:%M:%S'])
318 319 e.author = scache(match.group(2))
319 320 e.dead = match.group(3).lower() == 'dead'
320 321
321 322 if match.group(5):
322 323 if match.group(6):
323 324 e.lines = (int(match.group(5)), int(match.group(6)))
324 325 else:
325 326 e.lines = (int(match.group(5)), 0)
326 327 elif match.group(6):
327 328 e.lines = (0, int(match.group(6)))
328 329 else:
329 330 e.lines = None
330 331
331 332 if match.group(7): # cvsnt mergepoint
332 333 myrev = match.group(8).split('.')
333 334 if len(myrev) == 2: # head
334 335 e.mergepoint = 'HEAD'
335 336 else:
336 337 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
337 338 branches = [b for b in branchmap if branchmap[b] == myrev]
338 339 assert len(branches) == 1, 'unknown branch: %s' % myrev
339 340 e.mergepoint = branches[0]
340 341 else:
341 342 e.mergepoint = None
342 343 e.comment = []
343 344 state = 7
344 345
345 346 elif state == 7:
346 347 # read the revision numbers of branches that start at this revision
347 348 # or store the commit log message otherwise
348 349 m = re_70.match(line)
349 350 if m:
350 351 e.branches = [tuple([int(y) for y in x.strip().split('.')])
351 352 for x in m.group(1).split(';')]
352 353 state = 8
353 354 elif re_31.match(line) and re_50.match(peek):
354 355 state = 5
355 356 store = True
356 357 elif re_32.match(line):
357 358 state = 0
358 359 store = True
359 360 else:
360 361 e.comment.append(line)
361 362
362 363 elif state == 8:
363 364 # store commit log message
364 365 if re_31.match(line):
365 state = 5
366 store = True
366 cpeek = peek
367 if cpeek.endswith('\n'):
368 cpeek = cpeek[:-1]
369 if re_50.match(cpeek):
370 state = 5
371 store = True
372 else:
373 e.comment.append(line)
367 374 elif re_32.match(line):
368 375 state = 0
369 376 store = True
370 377 else:
371 378 e.comment.append(line)
372 379
373 380 # When a file is added on a branch B1, CVS creates a synthetic
374 381 # dead trunk revision 1.1 so that the branch has a root.
375 382 # Likewise, if you merge such a file to a later branch B2 (one
376 383 # that already existed when the file was added on B1), CVS
377 384 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
378 385 # these revisions now, but mark them synthetic so
379 386 # createchangeset() can take care of them.
380 387 if (store and
381 388 e.dead and
382 389 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
383 390 len(e.comment) == 1 and
384 391 file_added_re.match(e.comment[0])):
385 392 ui.debug('found synthetic revision in %s: %r\n'
386 393 % (e.rcs, e.comment[0]))
387 394 e.synthetic = True
388 395
389 396 if store:
390 397 # clean up the results and save in the log.
391 398 store = False
392 399 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
393 400 e.comment = scache('\n'.join(e.comment))
394 401
395 402 revn = len(e.revision)
396 403 if revn > 3 and (revn % 2) == 0:
397 404 e.branch = tags.get(e.revision[:-1], [None])[0]
398 405 else:
399 406 e.branch = None
400 407
401 408 # find the branches starting from this revision
402 409 branchpoints = set()
403 410 for branch, revision in branchmap.iteritems():
404 411 revparts = tuple([int(i) for i in revision.split('.')])
405 412 if len(revparts) < 2: # bad tags
406 413 continue
407 414 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
408 415 # normal branch
409 416 if revparts[:-2] == e.revision:
410 417 branchpoints.add(branch)
411 418 elif revparts == (1, 1, 1): # vendor branch
412 419 if revparts in e.branches:
413 420 branchpoints.add(branch)
414 421 e.branchpoints = branchpoints
415 422
416 423 log.append(e)
417 424
418 425 if len(log) % 100 == 0:
419 426 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
420 427
421 428 log.sort(key=lambda x: (x.rcs, x.revision))
422 429
423 430 # find parent revisions of individual files
424 431 versions = {}
425 432 for e in log:
426 433 branch = e.revision[:-1]
427 434 p = versions.get((e.rcs, branch), None)
428 435 if p is None:
429 436 p = e.revision[:-2]
430 437 e.parent = p
431 438 versions[(e.rcs, branch)] = e.revision
432 439
433 440 # update the log cache
434 441 if cache:
435 442 if log:
436 443 # join up the old and new logs
437 444 log.sort(key=lambda x: x.date)
438 445
439 446 if oldlog and oldlog[-1].date >= log[0].date:
440 447 raise logerror(_('log cache overlaps with new log entries,'
441 448 ' re-run without cache.'))
442 449
443 450 log = oldlog + log
444 451
445 452 # write the new cachefile
446 453 ui.note(_('writing cvs log cache %s\n') % cachefile)
447 454 pickle.dump(log, open(cachefile, 'w'))
448 455 else:
449 456 log = oldlog
450 457
451 458 ui.status(_('%d log entries\n') % len(log))
452 459
453 460 hook.hook(ui, None, "cvslog", True, log=log)
454 461
455 462 return log
456 463
457 464
458 465 class changeset(object):
459 466 '''Class changeset has the following attributes:
460 467 .id - integer identifying this changeset (list index)
461 468 .author - author name as CVS knows it
462 469 .branch - name of branch this changeset is on, or None
463 470 .comment - commit message
464 471 .date - the commit date as a (time,tz) tuple
465 472 .entries - list of logentry objects in this changeset
466 473 .parents - list of one or two parent changesets
467 474 .tags - list of tags on this changeset
468 475 .synthetic - from synthetic revision "file ... added on branch ..."
469 476 .mergepoint - the branch that has been merged from
470 477 (if present in rlog output)
471 478 .branchpoints - the branches that start at the current entry
472 479 '''
473 480 def __init__(self, **entries):
474 481 self.synthetic = False
475 482 self.__dict__.update(entries)
476 483
477 484 def __repr__(self):
478 485 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
479 486 id(self),
480 487 getattr(self, 'id', "(no id)"))
481 488
482 489 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
483 490 '''Convert log into changesets.'''
484 491
485 492 ui.status(_('creating changesets\n'))
486 493
487 494 # Merge changesets
488 495
489 496 log.sort(key=lambda x: (x.comment, x.author, x.branch, x.date))
490 497
491 498 changesets = []
492 499 files = set()
493 500 c = None
494 501 for i, e in enumerate(log):
495 502
496 503 # Check if log entry belongs to the current changeset or not.
497 504
498 505 # Since CVS is file centric, two different file revisions with
499 506 # different branchpoints should be treated as belonging to two
500 507 # different changesets (and the ordering is important and not
501 508 # honoured by cvsps at this point).
502 509 #
503 510 # Consider the following case:
504 511 # foo 1.1 branchpoints: [MYBRANCH]
505 512 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
506 513 #
507 514 # Here foo is only part of MYBRANCH, not MYBRANCH2; a
508 515 # later version of foo may be in MYBRANCH2, so foo should be the
509 516 # first changeset and bar the next, and MYBRANCH and MYBRANCH2
510 517 # should both start off the bar changeset. No provisions are
511 518 # made to ensure that this is, in fact, what happens.
512 519 if not (c and
513 520 e.comment == c.comment and
514 521 e.author == c.author and
515 522 e.branch == c.branch and
516 (not hasattr(e, 'branchpoints') or
517 not hasattr (c, 'branchpoints') or
523 (not util.safehasattr(e, 'branchpoints') or
524 not util.safehasattr(c, 'branchpoints') or
518 525 e.branchpoints == c.branchpoints) and
519 526 ((c.date[0] + c.date[1]) <=
520 527 (e.date[0] + e.date[1]) <=
521 528 (c.date[0] + c.date[1]) + fuzz) and
522 529 e.file not in files):
523 530 c = changeset(comment=e.comment, author=e.author,
524 531 branch=e.branch, date=e.date, entries=[],
525 532 mergepoint=getattr(e, 'mergepoint', None),
526 533 branchpoints=getattr(e, 'branchpoints', set()))
527 534 changesets.append(c)
528 535 files = set()
529 536 if len(changesets) % 100 == 0:
530 537 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
531 538 ui.status(util.ellipsis(t, 80) + '\n')
532 539
533 540 c.entries.append(e)
534 541 files.add(e.file)
535 542 c.date = e.date # changeset date is date of latest commit in it
536 543
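    # A worked example of the grouping test above (illustrative numbers):
    # with fuzz=60, two entries sharing comment/author/branch and dated
    # 10:00:00 and 10:00:45 fold into one changeset, while an entry at
    # 10:01:30 starts a new one; a second revision of a file already in
    # the changeset also forces a new changeset, via the
    # 'e.file not in files' guard.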
537 544 # Mark synthetic changesets
538 545
539 546 for c in changesets:
540 547 # Synthetic revisions always get their own changeset, because
541 548 # the log message includes the filename. E.g. if you add file3
542 549 # and file4 on a branch, you get four log entries and three
543 550 # changesets:
544 551 # "File file3 was added on branch ..." (synthetic, 1 entry)
545 552 # "File file4 was added on branch ..." (synthetic, 1 entry)
546 553 # "Add file3 and file4 to fix ..." (real, 2 entries)
547 554 # Hence the check for 1 entry here.
548 555 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
549 556
550 557 # Sort files in each changeset
551 558
552 559 for c in changesets:
553 560 def pathcompare(l, r):
554 561 'Mimic cvsps sorting order'
555 562 l = l.split('/')
556 563 r = r.split('/')
557 564 nl = len(l)
558 565 nr = len(r)
559 566 n = min(nl, nr)
560 567 for i in range(n):
561 568 if i + 1 == nl and nl < nr:
562 569 return -1
563 570 elif i + 1 == nr and nl > nr:
564 571 return +1
565 572 elif l[i] < r[i]:
566 573 return -1
567 574 elif l[i] > r[i]:
568 575 return +1
569 576 return 0
570 577 def entitycompare(l, r):
571 578 return pathcompare(l.file, r.file)
572 579
573 580 c.entries.sort(entitycompare)
574 581
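    # An illustrative consequence of pathcompare above: entries directly
    # in a directory sort before entries in its subdirectories regardless
    # of name, so 'zebra.c' comes before 'contrib/a.c' under this order
    # even though a plain lexicographic sort would order them the other
    # way around.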
575 582 # Sort changesets by date
576 583
577 584 def cscmp(l, r):
578 585 d = sum(l.date) - sum(r.date)
579 586 if d:
580 587 return d
581 588
582 589 # detect vendor branches and initial commits on a branch
583 590 le = {}
584 591 for e in l.entries:
585 592 le[e.rcs] = e.revision
586 593 re = {} # NB: shadows the re module, but only inside cscmp
587 594 for e in r.entries:
588 595 re[e.rcs] = e.revision
589 596
590 597 d = 0
591 598 for e in l.entries:
592 599 if re.get(e.rcs, None) == e.parent:
593 600 assert not d
594 601 d = 1
595 602 break
596 603
597 604 for e in r.entries:
598 605 if le.get(e.rcs, None) == e.parent:
599 606 assert not d
600 607 d = -1
601 608 break
602 609
603 610 return d
604 611
605 612 changesets.sort(cscmp)
606 613
607 614 # Collect tags
608 615
609 616 globaltags = {}
610 617 for c in changesets:
611 618 for e in c.entries:
612 619 for tag in e.tags:
613 620 # remember which is the latest changeset to have this tag
614 621 globaltags[tag] = c
615 622
616 623 for c in changesets:
617 624 tags = set()
618 625 for e in c.entries:
619 626 tags.update(e.tags)
620 627 # remember tags only if this is the latest changeset to have it
621 628 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
622 629
623 630 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
624 631 # by inserting dummy changesets with two parents, and handle
625 632 # {{mergefrombranch BRANCHNAME}} by setting two parents.
626 633
627 634 if mergeto is None:
628 635 mergeto = r'{{mergetobranch ([-\w]+)}}'
629 636 if mergeto:
630 637 mergeto = re.compile(mergeto)
631 638
632 639 if mergefrom is None:
633 640 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
634 641 if mergefrom:
635 642 mergefrom = re.compile(mergefrom)
636 643
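    # An illustrative example of the default markers above: a CVS commit
    # message containing '{{mergefrombranch FOO}}' makes the tip of branch
    # FOO a second parent of this changeset, while '{{mergetobranch BAR}}'
    # makes the loop below insert a dummy merge changeset on branch BAR
    # right after this one.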
637 644 versions = {} # changeset index where we saw any particular file version
638 645 branches = {} # changeset index where we saw a branch
639 646 n = len(changesets)
640 647 i = 0
641 648 while i < n:
642 649 c = changesets[i]
643 650
644 651 for f in c.entries:
645 652 versions[(f.rcs, f.revision)] = i
646 653
647 654 p = None
648 655 if c.branch in branches:
649 656 p = branches[c.branch]
650 657 else:
651 658 # first changeset on a new branch
652 659 # the parent is a changeset with the branch in its
653 660 # branchpoints such that it is the latest possible
654 661 # commit without any intervening, unrelated commits.
655 662
656 663 for candidate in xrange(i):
657 664 if c.branch not in changesets[candidate].branchpoints:
658 665 if p is not None:
659 666 break
660 667 continue
661 668 p = candidate
662 669
663 670 c.parents = []
664 671 if p is not None:
665 672 p = changesets[p]
666 673
667 674 # Ensure no changeset has a synthetic changeset as a parent.
668 675 while p.synthetic:
669 676 assert len(p.parents) <= 1, \
670 677 _('synthetic changeset cannot have multiple parents')
671 678 if p.parents:
672 679 p = p.parents[0]
673 680 else:
674 681 p = None
675 682 break
676 683
677 684 if p is not None:
678 685 c.parents.append(p)
679 686
680 687 if c.mergepoint:
681 688 if c.mergepoint == 'HEAD':
682 689 c.mergepoint = None
683 690 c.parents.append(changesets[branches[c.mergepoint]])
684 691
685 692 if mergefrom:
686 693 m = mergefrom.search(c.comment)
687 694 if m:
688 695 m = m.group(1)
689 696 if m == 'HEAD':
690 697 m = None
691 698 try:
692 699 candidate = changesets[branches[m]]
693 700 except KeyError:
694 701 ui.warn(_("warning: CVS commit message references "
695 702 "non-existent branch %r:\n%s\n")
696 703 % (m, c.comment))
697 704 if m in branches and c.branch != m and not candidate.synthetic:
698 705 c.parents.append(candidate)
699 706
700 707 if mergeto:
701 708 m = mergeto.search(c.comment)
702 709 if m:
703 710 try:
704 711 m = m.group(1)
705 712 if m == 'HEAD':
706 713 m = None
707 714 except:
708 715 m = None # if no group found then merge to HEAD
709 716 if m in branches and c.branch != m:
710 717 # insert empty changeset for merge
711 718 cc = changeset(
712 719 author=c.author, branch=m, date=c.date,
713 720 comment='convert-repo: CVS merge from branch %s'
714 721 % c.branch,
715 722 entries=[], tags=[],
716 723 parents=[changesets[branches[m]], c])
717 724 changesets.insert(i + 1, cc)
718 725 branches[m] = i + 1
719 726
720 727 # adjust our loop counters now we have inserted a new entry
721 728 n += 1
722 729 i += 2
723 730 continue
724 731
725 732 branches[c.branch] = i
726 733 i += 1
727 734
728 735 # Drop synthetic changesets (safe now that we have ensured no other
729 736 # changesets can have them as parents).
730 737 i = 0
731 738 while i < len(changesets):
732 739 if changesets[i].synthetic:
733 740 del changesets[i]
734 741 else:
735 742 i += 1
736 743
737 744 # Number changesets
738 745
739 746 for i, c in enumerate(changesets):
740 747 c.id = i + 1
741 748
742 749 ui.status(_('%d changeset entries\n') % len(changesets))
743 750
744 751 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
745 752
746 753 return changesets
747 754
748 755
749 756 def debugcvsps(ui, *args, **opts):
750 757 '''Read CVS rlog for current directory or named path in
751 758 repository, and convert the log to changesets based on matching
752 759 commit log entries and dates.
753 760 '''
754 761 if opts["new_cache"]:
755 762 cache = "write"
756 763 elif opts["update_cache"]:
757 764 cache = "update"
758 765 else:
759 766 cache = None
760 767
761 768 revisions = opts["revisions"]
762 769
763 770 try:
764 771 if args:
765 772 log = []
766 773 for d in args:
767 774 log += createlog(ui, d, root=opts["root"], cache=cache)
768 775 else:
769 776 log = createlog(ui, root=opts["root"], cache=cache)
770 777 except logerror, e:
771 778 ui.write("%r\n" % e)
772 779 return
773 780
774 781 changesets = createchangeset(ui, log, opts["fuzz"])
775 782 del log
776 783
777 784 # Print changesets (optionally filtered)
778 785
779 786 off = len(revisions)
780 787 branches = {} # latest version number in each branch
781 788 ancestors = {} # parent branch
782 789 for cs in changesets:
783 790
784 791 if opts["ancestors"]:
785 792 if cs.branch not in branches and cs.parents and cs.parents[0].id:
786 793 ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
787 794 cs.parents[0].id)
788 795 branches[cs.branch] = cs.id
789 796
790 797 # limit by branches
791 798 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
792 799 continue
793 800
794 801 if not off:
795 802 # Note: trailing spaces on several lines here are needed to have
796 803 # bug-for-bug compatibility with cvsps.
797 804 ui.write('---------------------\n')
798 805 ui.write('PatchSet %d \n' % cs.id)
799 806 ui.write('Date: %s\n' % util.datestr(cs.date,
800 807 '%Y/%m/%d %H:%M:%S %1%2'))
801 808 ui.write('Author: %s\n' % cs.author)
802 809 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
803 810 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
804 811 ','.join(cs.tags) or '(none)'))
805 812 branchpoints = getattr(cs, 'branchpoints', None)
806 813 if branchpoints:
807 814 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
808 815 if opts["parents"] and cs.parents:
809 816 if len(cs.parents) > 1:
810 817 ui.write('Parents: %s\n' %
811 818 (','.join([str(p.id) for p in cs.parents])))
812 819 else:
813 820 ui.write('Parent: %d\n' % cs.parents[0].id)
814 821
815 822 if opts["ancestors"]:
816 823 b = cs.branch
817 824 r = []
818 825 while b:
819 826 b, c = ancestors[b]
820 827 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
821 828 if r:
822 829 ui.write('Ancestors: %s\n' % (','.join(r)))
823 830
824 831 ui.write('Log:\n')
825 832 ui.write('%s\n\n' % cs.comment)
826 833 ui.write('Members: \n')
827 834 for f in cs.entries:
828 835 fn = f.file
829 836 if fn.startswith(opts["prefix"]):
830 837 fn = fn[len(opts["prefix"]):]
831 838 ui.write('\t%s:%s->%s%s \n' % (
832 839 fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
833 840 '.'.join([str(x) for x in f.revision]),
834 841 ['', '(DEAD)'][f.dead]))
835 842 ui.write('\n')
836 843
837 844 # have we seen the start tag?
838 845 if revisions and off:
839 846 if revisions[0] == str(cs.id) or \
840 847 revisions[0] in cs.tags:
841 848 off = False
842 849
843 850 # see if we reached the end tag
844 851 if len(revisions) > 1 and not off:
845 852 if revisions[1] == str(cs.id) or \
846 853 revisions[1] in cs.tags:
847 854 break
hgext/convert/filemap.py
@@ -1,377 +1,380
1 1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
2 2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
3 3 #
4 4 # This software may be used and distributed according to the terms of the
5 5 # GNU General Public License version 2 or any later version.
6 6
7 7 import shlex
8 8 from mercurial.i18n import _
9 9 from mercurial import util
10 10 from common import SKIPREV, converter_source
11 11
12 12 def rpairs(name):
13 13 e = len(name)
14 14 while e != -1:
15 15 yield name[:e], name[e + 1:]
16 16 e = name.rfind('/', 0, e)
17 17 yield '.', name
18 18
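# A quick sketch of rpairs() output, which lookup() below walks to find
# the longest mapped prefix of a path:
#
#   >>> list(rpairs('foo/bar/baz'))
#   [('foo/bar/baz', ''), ('foo/bar', 'baz'),
#    ('foo', 'bar/baz'), ('.', 'foo/bar/baz')]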
19 19 class filemapper(object):
20 20 '''Map and filter filenames when importing.
21 21 A name can be mapped to itself, a new name, or None (omit from new
22 22 repository).'''
23 23
24 24 def __init__(self, ui, path=None):
25 25 self.ui = ui
26 26 self.include = {}
27 27 self.exclude = {}
28 28 self.rename = {}
29 29 if path:
30 30 if self.parse(path):
31 31 raise util.Abort(_('errors in filemap'))
32 32
33 33 def parse(self, path):
34 34 errs = 0
35 35 def check(name, mapping, listname):
36 36 if not name:
37 37 self.ui.warn(_('%s:%d: path to %s is missing\n') %
38 38 (lex.infile, lex.lineno, listname))
39 39 return 1
40 40 if name in mapping:
41 41 self.ui.warn(_('%s:%d: %r already in %s list\n') %
42 42 (lex.infile, lex.lineno, name, listname))
43 43 return 1
44 44 if (name.startswith('/') or
45 45 name.endswith('/') or
46 46 '//' in name):
47 47 self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
48 48 (lex.infile, lex.lineno, listname, name))
49 49 return 1
50 50 return 0
51 51 lex = shlex.shlex(open(path), path, True)
52 52 lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
53 53 cmd = lex.get_token()
54 54 while cmd:
55 55 if cmd == 'include':
56 56 name = lex.get_token()
57 57 errs += check(name, self.exclude, 'exclude')
58 58 self.include[name] = name
59 59 elif cmd == 'exclude':
60 60 name = lex.get_token()
61 61 errs += check(name, self.include, 'include')
62 62 errs += check(name, self.rename, 'rename')
63 63 self.exclude[name] = name
64 64 elif cmd == 'rename':
65 65 src = lex.get_token()
66 66 dest = lex.get_token()
67 67 errs += check(src, self.exclude, 'exclude')
68 68 self.rename[src] = dest
69 69 elif cmd == 'source':
70 70 errs += self.parse(lex.get_token())
71 71 else:
72 72 self.ui.warn(_('%s:%d: unknown directive %r\n') %
73 73 (lex.infile, lex.lineno, cmd))
74 74 errs += 1
75 75 cmd = lex.get_token()
76 76 return errs
77 77
78 78 def lookup(self, name, mapping):
79 79 for pre, suf in rpairs(name):
80 80 try:
81 81 return mapping[pre], pre, suf
82 82 except KeyError:
83 83 pass
84 84 return '', name, ''
85 85
86 86 def __call__(self, name):
87 87 if self.include:
88 88 inc = self.lookup(name, self.include)[0]
89 89 else:
90 90 inc = name
91 91 if self.exclude:
92 92 exc = self.lookup(name, self.exclude)[0]
93 93 else:
94 94 exc = ''
95 95 if (not self.include and exc) or (len(inc) <= len(exc)):
96 96 return None
97 97 newpre, pre, suf = self.lookup(name, self.rename)
98 98 if newpre:
99 99 if newpre == '.':
100 100 return suf
101 101 if suf:
102 102 return newpre + '/' + suf
103 103 return newpre
104 104 return name
105 105
106 106 def active(self):
107 107 return bool(self.include or self.exclude or self.rename)
108 108
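# A minimal usage sketch of the include/exclude/rename interplay in
# __call__ above (hypothetical paths, and the maps are filled directly
# instead of going through parse()):
def _demofilemap(ui):
    fm = filemapper(ui)
    fm.include['lib'] = 'lib'
    fm.exclude['lib/tests'] = 'lib/tests'
    fm.rename['lib'] = 'src'
    # fm('lib/a.py')       -> 'src/a.py' (included, then renamed)
    # fm('lib/tests/t.py') -> None       (the longer exclude match wins)
    # fm('README')         -> None       (matches no include at all)
    return [fm(f) for f in ('lib/a.py', 'lib/tests/t.py', 'README')]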
109 109 # This class does two additional things compared to a regular source:
110 110 #
111 111 # - Filter and rename files. This is mostly wrapped by the filemapper
112 112 # class above. We hide the original filename in the revision that is
113 113 # returned by getchanges to be able to find things later in getfile.
114 114 #
115 115 # - Return only revisions that matter for the files we're interested in.
116 116 # This involves rewriting the parents of the original revision to
117 117 # create a graph that is restricted to those revisions.
118 118 #
119 119 # This set of revisions includes not only revisions that directly
120 120 # touch files we're interested in, but also merges that merge two
121 121 # or more interesting revisions.
122 122
123 123 class filemap_source(converter_source):
124 124 def __init__(self, ui, baseconverter, filemap):
125 125 super(filemap_source, self).__init__(ui)
126 126 self.base = baseconverter
127 127 self.filemapper = filemapper(ui, filemap)
128 128 self.commits = {}
129 129 # if a revision rev has parent p in the original revision graph, then
130 130 # rev will have parent self.parentmap[p] in the restricted graph.
131 131 self.parentmap = {}
132 132 # self.wantedancestors[rev] is the set of all ancestors of rev that
133 133 # are in the restricted graph.
134 134 self.wantedancestors = {}
135 135 self.convertedorder = None
136 136 self._rebuilt = False
137 137 self.origparents = {}
138 138 self.children = {}
139 139 self.seenchildren = {}
140 140
141 141 def before(self):
142 142 self.base.before()
143 143
144 144 def after(self):
145 145 self.base.after()
146 146
147 147 def setrevmap(self, revmap):
148 148 # rebuild our state to make things restartable
149 149 #
150 150 # To avoid calling getcommit for every revision that has already
151 151 # been converted, we rebuild only the parentmap, delaying the
152 152 # rebuild of wantedancestors until we need it (i.e. until a
153 153 # merge).
154 154 #
155 155 # We assume the order argument lists the revisions in
156 156 # topological order, so that we can infer which revisions were
157 157 # wanted by previous runs.
158 158 self._rebuilt = not revmap
159 159 seen = {SKIPREV: SKIPREV}
160 160 dummyset = set()
161 161 converted = []
162 162 for rev in revmap.order:
163 163 mapped = revmap[rev]
164 164 wanted = mapped not in seen
165 165 if wanted:
166 166 seen[mapped] = rev
167 167 self.parentmap[rev] = rev
168 168 else:
169 169 self.parentmap[rev] = seen[mapped]
170 170 self.wantedancestors[rev] = dummyset
171 171 arg = seen[mapped]
172 172 if arg == SKIPREV:
173 173 arg = None
174 174 converted.append((rev, wanted, arg))
175 175 self.convertedorder = converted
176 176 return self.base.setrevmap(revmap)
177 177
178 178 def rebuild(self):
179 179 if self._rebuilt:
180 180 return True
181 181 self._rebuilt = True
182 182 self.parentmap.clear()
183 183 self.wantedancestors.clear()
184 184 self.seenchildren.clear()
185 185 for rev, wanted, arg in self.convertedorder:
186 186 if rev not in self.origparents:
187 187 self.origparents[rev] = self.getcommit(rev).parents
188 188 if arg is not None:
189 189 self.children[arg] = self.children.get(arg, 0) + 1
190 190
191 191 for rev, wanted, arg in self.convertedorder:
192 192 parents = self.origparents[rev]
193 193 if wanted:
194 194 self.mark_wanted(rev, parents)
195 195 else:
196 196 self.mark_not_wanted(rev, arg)
197 197 self._discard(arg, *parents)
198 198
199 199 return True
200 200
201 201 def getheads(self):
202 202 return self.base.getheads()
203 203
204 204 def getcommit(self, rev):
205 205 # We want to save a reference to the commit objects to be able
206 206 # to rewrite their parents later on.
207 207 c = self.commits[rev] = self.base.getcommit(rev)
208 208 for p in c.parents:
209 209 self.children[p] = self.children.get(p, 0) + 1
210 210 return c
211 211
212 212 def _cachedcommit(self, rev):
213 213 if rev in self.commits:
214 214 return self.commits[rev]
215 215 return self.base.getcommit(rev)
216 216
217 217 def _discard(self, *revs):
218 218 for r in revs:
219 219 if r is None:
220 220 continue
221 221 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
222 222 if self.seenchildren[r] == self.children[r]:
223 223 del self.wantedancestors[r]
224 224 del self.parentmap[r]
225 225 del self.seenchildren[r]
226 226 if self._rebuilt:
227 227 del self.children[r]
228 228
229 229 def wanted(self, rev, i):
230 230 # Return True if we're directly interested in rev.
231 231 #
232 232 # i is an index selecting one of the parents of rev (if rev
233 233 # has no parents, i is None). getchangedfiles will give us
234 234 # the list of files that are different in rev and in the parent
235 235 # indicated by i. If we're interested in any of these files,
236 236 # we're interested in rev.
237 237 try:
238 238 files = self.base.getchangedfiles(rev, i)
239 239 except NotImplementedError:
240 240 raise util.Abort(_("source repository doesn't support --filemap"))
241 241 for f in files:
242 242 if self.filemapper(f):
243 243 return True
244 244 return False
245 245
246 246 def mark_not_wanted(self, rev, p):
247 247 # Mark rev as not interesting and update data structures.
248 248
249 249 if p is None:
250 250 # A root revision. Use SKIPREV to indicate that it doesn't
251 251 # map to any revision in the restricted graph. Put SKIPREV
252 252 # in the set of wanted ancestors to simplify code elsewhere
253 253 self.parentmap[rev] = SKIPREV
254 254 self.wantedancestors[rev] = set((SKIPREV,))
255 255 return
256 256
257 257 # Reuse the data from our parent.
258 258 self.parentmap[rev] = self.parentmap[p]
259 259 self.wantedancestors[rev] = self.wantedancestors[p]
260 260
261 261 def mark_wanted(self, rev, parents):
262 262 # Mark rev as wanted and update data structures.
263 263
264 264 # rev will be in the restricted graph, so children of rev in
265 265 # the original graph should still have rev as a parent in the
266 266 # restricted graph.
267 267 self.parentmap[rev] = rev
268 268
269 269 # The set of wanted ancestors of rev is the union of the sets
270 270 # of wanted ancestors of its parents. Plus rev itself.
271 271 wrev = set()
272 272 for p in parents:
273 273 wrev.update(self.wantedancestors[p])
274 274 wrev.add(rev)
275 275 self.wantedancestors[rev] = wrev
276 276
277 277 def getchanges(self, rev):
278 278 parents = self.commits[rev].parents
279 279 if len(parents) > 1:
280 280 self.rebuild()
281 281
282 282 # To decide whether we're interested in rev we:
283 283 #
284 284 # - calculate what parents rev will have if it turns out we're
285 285 # interested in it. If it's going to have more than 1 parent,
286 286 # we're interested in it.
287 287 #
288 288 # - otherwise, we'll compare it with the single parent we found.
289 289 # If any of the files we're interested in is different in the
290 290 # two revisions, we're interested in rev.
291 291
292 292 # A parent p is interesting if its mapped version (self.parentmap[p]):
293 293 # - is not SKIPREV
294 294 # - is still not in the list of parents (we don't want duplicates)
295 295 # - is not an ancestor of the mapped versions of the other parents
296 296 mparents = []
297 297 wp = None
298 298 for i, p1 in enumerate(parents):
299 299 mp1 = self.parentmap[p1]
300 300 if mp1 == SKIPREV or mp1 in mparents:
301 301 continue
302 302 for p2 in parents:
303 303 if p1 == p2 or mp1 == self.parentmap[p2]:
304 304 continue
305 305 if mp1 in self.wantedancestors[p2]:
306 306 break
307 307 else:
308 308 mparents.append(mp1)
309 309 wp = i
310 310
311 311 if wp is None and parents:
312 312 wp = 0
313 313
314 314 self.origparents[rev] = parents
315 315
316 316 closed = False
317 317 if 'close' in self.commits[rev].extra:
318 318 # A branch closing revision is only useful if one of its
319 319 # parents belongs to the branch being closed
320 320 branch = self.commits[rev].branch
321 321 pbranches = [self._cachedcommit(p).branch for p in mparents]
322 322 if branch in pbranches:
323 323 closed = True
324 324
325 325 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
326 326 # We don't want this revision.
327 327 # Update our state and tell the convert process to map this
328 328 # revision to the same revision its parent was mapped to.
329 329 p = None
330 330 if parents:
331 331 p = parents[wp]
332 332 self.mark_not_wanted(rev, p)
333 333 self.convertedorder.append((rev, False, p))
334 334 self._discard(*parents)
335 335 return self.parentmap[rev]
336 336
337 337 # We want this revision.
338 338 # Rewrite the parents of the commit object
339 339 self.commits[rev].parents = mparents
340 340 self.mark_wanted(rev, parents)
341 341 self.convertedorder.append((rev, True, None))
342 342 self._discard(*parents)
343 343
344 344 # Get the real changes and do the filtering/mapping. To be
345 345 # able to get the files later on in getfile, we hide the
346 346 # original filename in the rev part of the return value.
347 347 changes, copies = self.base.getchanges(rev)
348 348 newnames = {}
349 349 files = []
350 350 for f, r in changes:
351 351 newf = self.filemapper(f)
352 352 if newf:
353 353 files.append((newf, (f, r)))
354 354 newnames[f] = newf
355 355
356 356 ncopies = {}
357 357 for c in copies:
358 358 newc = self.filemapper(c)
359 359 if newc:
360 360 newsource = self.filemapper(copies[c])
361 361 if newsource:
362 362 ncopies[newc] = newsource
363 363
364 364 return files, ncopies
365 365
366 366 def getfile(self, name, rev):
367 367 realname, realrev = rev
368 368 return self.base.getfile(realname, realrev)
369 369
370 370 def gettags(self):
371 371 return self.base.gettags()
372 372
373 373 def hasnativeorder(self):
374 374 return self.base.hasnativeorder()
375 375
376 376 def lookuprev(self, rev):
377 377 return self.base.lookuprev(rev)
378
379 def getbookmarks(self):
380 return self.base.getbookmarks()
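# A sketch of how the maps above collapse uninteresting revisions: given
# a hypothetical linear history a -> b -> c where only a and c touch
# mapped files, mark_wanted()/mark_not_wanted() leave parentmap as
# {'a': 'a', 'b': 'a', 'c': 'c'}, so getchanges() rewrites c's parent
# from b to a and b simply disappears from the converted graph.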
hgext/convert/git.py
@@ -1,205 +1,205
1 1 # git.py - git support for the convert extension
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import os
9 9 from mercurial import util
10 10 from mercurial.node import hex, nullid
11 11 from mercurial.i18n import _
12 12
13 13 from common import NoRepo, commit, converter_source, checktool
14 14
15 15 class convert_git(converter_source):
16 16 # Windows does not support the GIT_DIR= construct, while other systems
17 17 # cannot remove an environment variable. Just assume none have
18 18 # both issues.
19 if hasattr(os, 'unsetenv'):
19 if util.safehasattr(os, 'unsetenv'):
20 20 def gitopen(self, s, noerr=False):
21 21 prevgitdir = os.environ.get('GIT_DIR')
22 22 os.environ['GIT_DIR'] = self.path
23 23 try:
24 24 if noerr:
25 25 (stdin, stdout, stderr) = util.popen3(s)
26 26 return stdout
27 27 else:
28 28 return util.popen(s, 'rb')
29 29 finally:
30 30 if prevgitdir is None:
31 31 del os.environ['GIT_DIR']
32 32 else:
33 33 os.environ['GIT_DIR'] = prevgitdir
34 34 else:
35 35 def gitopen(self, s, noerr=False):
36 36 if noerr:
37 37 (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
38 38 return so
39 39 else:
40 40 return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
41 41
42 42 def gitread(self, s):
43 43 fh = self.gitopen(s)
44 44 data = fh.read()
45 45 return data, fh.close()
46 46
47 47 def __init__(self, ui, path, rev=None):
48 48 super(convert_git, self).__init__(ui, path, rev=rev)
49 49
50 50 if os.path.isdir(path + "/.git"):
51 51 path += "/.git"
52 52 if not os.path.exists(path + "/objects"):
53 53 raise NoRepo(_("%s does not look like a Git repository") % path)
54 54
55 55 checktool('git', 'git')
56 56
57 57 self.path = path
58 58
59 59 def getheads(self):
60 60 if not self.rev:
61 61 heads, ret = self.gitread('git rev-parse --branches --remotes')
62 62 heads = heads.splitlines()
63 63 else:
64 64 heads, ret = self.gitread("git rev-parse --verify %s" % self.rev)
65 65 heads = [heads[:-1]]
66 66 if ret:
67 67 raise util.Abort(_('cannot retrieve git heads'))
68 68 return heads
69 69
70 70 def catfile(self, rev, type):
71 71 if rev == hex(nullid):
72 72 raise IOError()
73 73 data, ret = self.gitread("git cat-file %s %s" % (type, rev))
74 74 if ret:
75 75 raise util.Abort(_('cannot read %r object at %s') % (type, rev))
76 76 return data
77 77
78 78 def getfile(self, name, rev):
79 79 data = self.catfile(rev, "blob")
80 80 mode = self.modecache[(name, rev)]
81 81 return data, mode
82 82
83 83 def getchanges(self, version):
84 84 self.modecache = {}
85 85 fh = self.gitopen("git diff-tree -z --root -m -r %s" % version)
86 86 changes = []
87 87 seen = set()
88 88 entry = None
89 89 for l in fh.read().split('\x00'):
90 90 if not entry:
91 91 if not l.startswith(':'):
92 92 continue
93 93 entry = l
94 94 continue
95 95 f = l
96 96 if f not in seen:
97 97 seen.add(f)
98 98 entry = entry.split()
99 99 h = entry[3]
100 100 p = (entry[1] == "100755")
101 101 s = (entry[1] == "120000")
102 102 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
103 103 changes.append((f, h))
104 104 entry = None
105 105 if fh.close():
106 106 raise util.Abort(_('cannot read changes in %s') % version)
107 107 return (changes, {})
108 108
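    # An illustrative sketch of the stream getchanges() above parses:
    # with -z, 'git diff-tree' alternates NUL-separated records and
    # filenames, e.g.
    #
    #   :100644 100755 <oldsha> <newsha> M\0hgext/convert/git.py\0
    #
    # so entry.split()[1] == '100755' marks the file executable ('x')
    # and entry.split()[3] is the blob hash stored with the filename.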
109 109 def getcommit(self, version):
110 110 c = self.catfile(version, "commit") # read the commit hash
111 111 end = c.find("\n\n")
112 112 message = c[end + 2:]
113 113 message = self.recode(message)
114 114 l = c[:end].splitlines()
115 115 parents = []
116 116 author = committer = None
117 117 for e in l[1:]:
118 118 n, v = e.split(" ", 1)
119 119 if n == "author":
120 120 p = v.split()
121 121 tm, tz = p[-2:]
122 122 author = " ".join(p[:-2])
123 123 if author[0] == "<": author = author[1:-1]
124 124 author = self.recode(author)
125 125 if n == "committer":
126 126 p = v.split()
127 127 tm, tz = p[-2:]
128 128 committer = " ".join(p[:-2])
129 129 if committer[0] == "<": committer = committer[1:-1]
130 130 committer = self.recode(committer)
131 131 if n == "parent":
132 132 parents.append(v)
133 133
134 134 if committer and committer != author:
135 135 message += "\ncommitter: %s\n" % committer
136 136 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
137 137 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
138 138 date = tm + " " + str(tz)
139 139
140 140 c = commit(parents=parents, date=date, author=author, desc=message,
141 141 rev=version)
142 142 return c
143 143
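    # A worked example of the date handling in getcommit() above: an
    # author/committer line ending in '1316623049 +0530' yields tzs '+1',
    # tzh '05' and tzm '30', so tz = -1 * (5 * 3600 + 30 * 60) = -19800
    # and date becomes '1316623049 -19800' (Mercurial stores the offset
    # in seconds west of UTC, hence the flipped sign).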
144 144 def gettags(self):
145 145 tags = {}
146 146 fh = self.gitopen('git ls-remote --tags "%s"' % self.path)
147 147 prefix = 'refs/tags/'
148 148 for line in fh:
149 149 line = line.strip()
150 150 if not line.endswith("^{}"):
151 151 continue
152 152 node, tag = line.split(None, 1)
153 153 if not tag.startswith(prefix):
154 154 continue
155 155 tag = tag[len(prefix):-3]
156 156 tags[tag] = node
157 157 if fh.close():
158 158 raise util.Abort(_('cannot read tags from %s') % self.path)
159 159
160 160 return tags
161 161
162 162 def getchangedfiles(self, version, i):
163 163 changes = []
164 164 if i is None:
165 165 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
166 166 for l in fh:
167 167 if "\t" not in l:
168 168 continue
169 169 m, f = l[:-1].split("\t")
170 170 changes.append(f)
171 171 else:
172 172 fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
173 173 % (version, version, i + 1))
174 174 changes = [f.rstrip('\n') for f in fh]
175 175 if fh.close():
176 176 raise util.Abort(_('cannot read changes in %s') % version)
177 177
178 178 return changes
179 179
180 180 def getbookmarks(self):
181 181 bookmarks = {}
182 182
183 183 # Interesting references in git are prefixed with 'refs/heads/'
184 184 prefix = 'refs/heads/'
185 185 prefixlen = len(prefix)
186 186
187 187 # factor out the two ref-listing commands
188 188 gitcmd = { 'remote/': 'git ls-remote --heads origin',
189 189 '': 'git show-ref'}
190 190
191 191 # Origin heads
192 192 for reftype in gitcmd:
193 193 try:
194 194 fh = self.gitopen(gitcmd[reftype], noerr=True)
195 195 for line in fh:
196 196 line = line.strip()
197 197 rev, name = line.split(None, 1)
198 198 if not name.startswith(prefix):
199 199 continue
200 200 name = '%s%s' % (reftype, name[prefixlen:])
201 201 bookmarks[name] = rev
202 202 except:
203 203 pass
204 204
205 205 return bookmarks
hgext/convert/hg.py
@@ -1,388 +1,387
1 1 # hg.py - hg backend for convert extension
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # Notes for hg->hg conversion:
9 9 #
10 10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 11 # of commit messages, but new versions do. Changesets created by
12 12 # those older versions, then converted, may thus have different
13 13 # hashes for changesets that are otherwise identical.
14 14 #
15 15 # * Using "--config convert.hg.saverev=true" will store the source
16 16 # identifier in the converted revision. This will cause
17 17 # the converted revision to have a different identity than the
18 18 # source.
19 19
20 20
21 21 import os, time, cStringIO
22 22 from mercurial.i18n import _
23 23 from mercurial.node import bin, hex, nullid
24 24 from mercurial import hg, util, context, bookmarks, error
25 25
26 26 from common import NoRepo, commit, converter_source, converter_sink
27 27
28 28 class mercurial_sink(converter_sink):
29 29 def __init__(self, ui, path):
30 30 converter_sink.__init__(self, ui, path)
31 31 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
32 32 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
33 33 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
34 34 self.lastbranch = None
35 35 if os.path.isdir(path) and len(os.listdir(path)) > 0:
36 36 try:
37 37 self.repo = hg.repository(self.ui, path)
38 38 if not self.repo.local():
39 39 raise NoRepo(_('%s is not a local Mercurial repository')
40 40 % path)
41 41 except error.RepoError, err:
42 42 ui.traceback()
43 43 raise NoRepo(err.args[0])
44 44 else:
45 45 try:
46 46 ui.status(_('initializing destination %s repository\n') % path)
47 47 self.repo = hg.repository(self.ui, path, create=True)
48 48 if not self.repo.local():
49 49 raise NoRepo(_('%s is not a local Mercurial repository')
50 50 % path)
51 51 self.created.append(path)
52 52 except error.RepoError:
53 53 ui.traceback()
54 54 raise NoRepo(_("could not create hg repository %s as sink")
55 55 % path)
56 56 self.lock = None
57 57 self.wlock = None
58 58 self.filemapmode = False
59 59
60 60 def before(self):
61 61 self.ui.debug('run hg sink pre-conversion action\n')
62 62 self.wlock = self.repo.wlock()
63 63 self.lock = self.repo.lock()
64 64
65 65 def after(self):
66 66 self.ui.debug('run hg sink post-conversion action\n')
67 67 if self.lock:
68 68 self.lock.release()
69 69 if self.wlock:
70 70 self.wlock.release()
71 71
72 72 def revmapfile(self):
73 return os.path.join(self.path, ".hg", "shamap")
73 return self.repo.join("shamap")
74 74
75 75 def authorfile(self):
76 return os.path.join(self.path, ".hg", "authormap")
76 return self.repo.join("authormap")
77 77
78 78 def getheads(self):
79 79 h = self.repo.changelog.heads()
80 80 return [hex(x) for x in h]
81 81
82 82 def setbranch(self, branch, pbranches):
83 83 if not self.clonebranches:
84 84 return
85 85
86 86 setbranch = (branch != self.lastbranch)
87 87 self.lastbranch = branch
88 88 if not branch:
89 89 branch = 'default'
90 90 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
91 91 pbranch = pbranches and pbranches[0][1] or 'default'
92 92
93 93 branchpath = os.path.join(self.path, branch)
94 94 if setbranch:
95 95 self.after()
96 96 try:
97 97 self.repo = hg.repository(self.ui, branchpath)
98 98 except:
99 99 self.repo = hg.repository(self.ui, branchpath, create=True)
100 100 self.before()
101 101
102 102 # pbranches may bring revisions from other branches (merge parents)
103 103 # Make sure we have them, or pull them.
104 104 missings = {}
105 105 for b in pbranches:
106 106 try:
107 107 self.repo.lookup(b[0])
108 108 except:
109 109 missings.setdefault(b[1], []).append(b[0])
110 110
111 111 if missings:
112 112 self.after()
113 113 for pbranch, heads in missings.iteritems():
114 114 pbranchpath = os.path.join(self.path, pbranch)
115 115 prepo = hg.peer(self.ui, {}, pbranchpath)
116 116 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
117 117 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
118 118 self.before()
119 119
120 120 def _rewritetags(self, source, revmap, data):
121 121 fp = cStringIO.StringIO()
122 122 for line in data.splitlines():
123 123 s = line.split(' ', 1)
124 124 if len(s) != 2:
125 125 continue
126 126 revid = revmap.get(source.lookuprev(s[0]))
127 127 if not revid:
128 128 continue
129 129 fp.write('%s %s\n' % (revid, s[1]))
130 130 return fp.getvalue()
131 131
132 132 def putcommit(self, files, copies, parents, commit, source, revmap):
133 133
134 134 files = dict(files)
135 135 def getfilectx(repo, memctx, f):
136 136 v = files[f]
137 137 data, mode = source.getfile(f, v)
138 138 if f == '.hgtags':
139 139 data = self._rewritetags(source, revmap, data)
140 140 return context.memfilectx(f, data, 'l' in mode, 'x' in mode,
141 141 copies.get(f))
142 142
143 143 pl = []
144 144 for p in parents:
145 145 if p not in pl:
146 146 pl.append(p)
147 147 parents = pl
148 148 nparents = len(parents)
149 149 if self.filemapmode and nparents == 1:
150 150 m1node = self.repo.changelog.read(bin(parents[0]))[0]
151 151 parent = parents[0]
152 152
153 153 if len(parents) < 2: # pad the parent list to at least two entries;
154 154 parents.append(nullid)
155 155 if len(parents) < 2: # the repeat is intentional, not a copy-paste slip
156 156 parents.append(nullid)
157 157 p2 = parents.pop(0)
158 158
159 159 text = commit.desc
160 160 extra = commit.extra.copy()
161 161 if self.branchnames and commit.branch:
162 162 extra['branch'] = commit.branch
163 163 if commit.rev:
164 164 extra['convert_revision'] = commit.rev
165 165
166 166 while parents:
167 167 p1 = p2
168 168 p2 = parents.pop(0)
169 169 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
170 170 getfilectx, commit.author, commit.date, extra)
171 171 self.repo.commitctx(ctx)
172 172 text = "(octopus merge fixup)\n"
173 173 p2 = hex(self.repo.changelog.tip())
174 174
175 175 if self.filemapmode and nparents == 1:
176 176 man = self.repo.manifest
177 177 mnode = self.repo.changelog.read(bin(p2))[0]
178 178 closed = 'close' in commit.extra
179 179 if not closed and not man.cmp(m1node, man.revision(mnode)):
180 180 self.ui.status(_("filtering out empty revision\n"))
181 self.repo.rollback()
181 self.repo.rollback(force=True)
182 182 return parent
183 183 return p2
184 184
185 185 def puttags(self, tags):
186 186 try:
187 187 parentctx = self.repo[self.tagsbranch]
188 188 tagparent = parentctx.node()
189 189 except error.RepoError:
190 190 parentctx = None
191 191 tagparent = nullid
192 192
193 193 try:
194 194 oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
195 195 except:
196 196 oldlines = []
197 197
198 198 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
199 199 if newlines == oldlines:
200 200 return None, None
201 201 data = "".join(newlines)
202 202 def getfilectx(repo, memctx, f):
203 203 return context.memfilectx(f, data, False, False, None)
204 204
205 205 self.ui.status(_("updating tags\n"))
206 206 date = "%s 0" % int(time.mktime(time.gmtime()))
207 207 extra = {'branch': self.tagsbranch}
208 208 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
209 209 [".hgtags"], getfilectx, "convert-repo", date,
210 210 extra)
211 211 self.repo.commitctx(ctx)
212 212 return hex(self.repo.changelog.tip()), hex(tagparent)
213 213
214 214 def setfilemapmode(self, active):
215 215 self.filemapmode = active
216 216
217 217 def putbookmarks(self, updatedbookmark):
218 218 if not len(updatedbookmark):
219 219 return
220 220
221 221 self.ui.status(_("updating bookmarks\n"))
222 222 for bookmark in updatedbookmark:
223 223 self.repo._bookmarks[bookmark] = bin(updatedbookmark[bookmark])
224 224 bookmarks.write(self.repo)
225 225
226 226
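# A minimal, runnable sketch of the octopus fixup loop in putcommit()
# above (plain strings stand in for node ids): an n-parent commit becomes
# a chain of two-parent merges, each later link reusing the previous tip.
def _demooctopus(parents):
    parents = parents + [None] * (2 - len(parents))  # pad as putcommit does
    merges = []
    p2 = parents.pop(0)
    while parents:
        p1, p2 = p2, parents.pop(0)
        merges.append((p1, p2))
        p2 = 'tip after %d merges' % len(merges)
    return merges
# _demooctopus(['p1', 'p2', 'p3'])
#   -> [('p1', 'p2'), ('tip after 1 merges', 'p3')]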
227 227 class mercurial_source(converter_source):
228 228 def __init__(self, ui, path, rev=None):
229 229 converter_source.__init__(self, ui, path, rev)
230 230 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
231 231 self.ignored = set()
232 232 self.saverev = ui.configbool('convert', 'hg.saverev', False)
233 233 try:
234 234 self.repo = hg.repository(self.ui, path)
235 235 # try to provoke an exception if this isn't really a hg
236 236 # repo, but some other bogus compatible-looking url
237 237 if not self.repo.local():
238 238 raise error.RepoError()
239 239 except error.RepoError:
240 240 ui.traceback()
241 241 raise NoRepo(_("%s is not a local Mercurial repository") % path)
242 242 self.lastrev = None
243 243 self.lastctx = None
244 244 self._changescache = None
245 245 self.convertfp = None
246 246 # Restrict converted revisions to startrev descendants
247 247 startnode = ui.config('convert', 'hg.startrev')
248 248 if startnode is not None:
249 249 try:
250 250 startnode = self.repo.lookup(startnode)
251 251 except error.RepoError:
252 252 raise util.Abort(_('%s is not a valid start revision')
253 253 % startnode)
254 254 startrev = self.repo.changelog.rev(startnode)
255 255 children = {startnode: 1}
256 256 for rev in self.repo.changelog.descendants(startrev):
257 257 children[self.repo.changelog.node(rev)] = 1
258 258 self.keep = children.__contains__
259 259 else:
260 260 self.keep = util.always
261 261
262 262 def changectx(self, rev):
263 263 if self.lastrev != rev:
264 264 self.lastctx = self.repo[rev]
265 265 self.lastrev = rev
266 266 return self.lastctx
267 267
268 268 def parents(self, ctx):
269 269 return [p for p in ctx.parents() if p and self.keep(p.node())]
270 270
271 271 def getheads(self):
272 272 if self.rev:
273 273 heads = [self.repo[self.rev].node()]
274 274 else:
275 275 heads = self.repo.heads()
276 276 return [hex(h) for h in heads if self.keep(h)]
277 277
278 278 def getfile(self, name, rev):
279 279 try:
280 280 fctx = self.changectx(rev)[name]
281 281 return fctx.data(), fctx.flags()
282 282 except error.LookupError, err:
283 283 raise IOError(err)
284 284
285 285 def getchanges(self, rev):
286 286 ctx = self.changectx(rev)
287 287 parents = self.parents(ctx)
288 288 if not parents:
289 289 files = sorted(ctx.manifest())
290 290 # getcopies() is not needed for roots, but it is a simple way to
291 291 # detect missing revlogs and abort on errors or populate self.ignored
292 292 self.getcopies(ctx, parents, files)
293 293 return [(f, rev) for f in files if f not in self.ignored], {}
294 294 if self._changescache and self._changescache[0] == rev:
295 295 m, a, r = self._changescache[1]
296 296 else:
297 297 m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
298 298 # getcopies() detects missing revlogs early, run it before
299 299 # filtering the changes.
300 300 copies = self.getcopies(ctx, parents, m + a)
301 301 changes = [(name, rev) for name in m + a + r
302 302 if name not in self.ignored]
303 303 return sorted(changes), copies
304 304
305 305 def getcopies(self, ctx, parents, files):
306 306 copies = {}
307 307 for name in files:
308 308 if name in self.ignored:
309 309 continue
310 310 try:
311 311 copysource, copynode = ctx.filectx(name).renamed()
312 312 if copysource in self.ignored or not self.keep(copynode):
313 313 continue
314 314 # Ignore copy sources not in parent revisions
315 315 found = False
316 316 for p in parents:
317 317 if copysource in p:
318 318 found = True
319 319 break
320 320 if not found:
321 321 continue
322 322 copies[name] = copysource
323 323 except TypeError:
324 324 pass
325 325 except error.LookupError, e:
326 326 if not self.ignoreerrors:
327 327 raise
328 328 self.ignored.add(name)
329 329 self.ui.warn(_('ignoring: %s\n') % e)
330 330 return copies
331 331
332 332 def getcommit(self, rev):
333 333 ctx = self.changectx(rev)
334 334 parents = [p.hex() for p in self.parents(ctx)]
335 335 if self.saverev:
336 336 crev = rev
337 337 else:
338 338 crev = None
339 339 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
340 340 desc=ctx.description(), rev=crev, parents=parents,
341 341 branch=ctx.branch(), extra=ctx.extra(),
342 342 sortkey=ctx.rev())
343 343
344 344 def gettags(self):
345 345 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
346 346 return dict([(name, hex(node)) for name, node in tags
347 347 if self.keep(node)])
348 348
349 349 def getchangedfiles(self, rev, i):
350 350 ctx = self.changectx(rev)
351 351 parents = self.parents(ctx)
352 352 if not parents and i is None:
353 353 i = 0
354 354 changes = [], ctx.manifest().keys(), []
355 355 else:
356 356 i = i or 0
357 357 changes = self.repo.status(parents[i].node(), ctx.node())[:3]
358 358 changes = [[f for f in l if f not in self.ignored] for l in changes]
359 359
360 360 if i == 0:
361 361 self._changescache = (rev, changes)
362 362
363 363 return changes[0] + changes[1] + changes[2]
364 364
365 365 def converted(self, rev, destrev):
366 366 if self.convertfp is None:
367 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
368 'a')
367 self.convertfp = open(self.repo.join('shamap'), 'a')
369 368 self.convertfp.write('%s %s\n' % (destrev, rev))
370 369 self.convertfp.flush()
371 370
372 371 def before(self):
373 372 self.ui.debug('run hg source pre-conversion action\n')
374 373
375 374 def after(self):
376 375 self.ui.debug('run hg source post-conversion action\n')
377 376
378 377 def hasnativeorder(self):
379 378 return True
380 379
381 380 def lookuprev(self, rev):
382 381 try:
383 382 return hex(self.repo.lookup(rev))
384 383 except error.RepoError:
385 384 return None
386 385
387 386 def getbookmarks(self):
388 387 return bookmarks.listbookmarks(self.repo)
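# Note on the hg.startrev handling above: self.keep is a membership test
# over startnode and its descendants, so parents(), getheads() and
# gettags() silently drop anything outside that subtree; without
# hg.startrev configured it is util.always and nothing is filtered.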
hgext/convert/subversion.py
@@ -1,1175 +1,1175
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4
5 5 import os
6 6 import re
7 7 import sys
8 8 import cPickle as pickle
9 9 import tempfile
10 10 import urllib
11 11 import urllib2
12 12
13 13 from mercurial import strutil, scmutil, util, encoding
14 14 from mercurial.i18n import _
15 15
16 16 # Subversion stuff. Works best with very recent Python SVN bindings
17 17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
18 18 # these bindings.
19 19
20 20 from cStringIO import StringIO
21 21
22 22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
23 23 from common import commandline, converter_source, converter_sink, mapfile
24 24
25 25 try:
26 26 from svn.core import SubversionException, Pool
27 27 import svn
28 28 import svn.client
29 29 import svn.core
30 30 import svn.ra
31 31 import svn.delta
32 32 import transport
33 33 import warnings
34 34 warnings.filterwarnings('ignore',
35 35 module='svn.core',
36 36 category=DeprecationWarning)
37 37
38 38 except ImportError:
39 39 svn = None
40 40
41 41 class SvnPathNotFound(Exception):
42 42 pass
43 43
44 44 def revsplit(rev):
45 45 """Parse a revision string and return (uuid, path, revnum)."""
46 46 url, revnum = rev.rsplit('@', 1)
47 47 parts = url.split('/', 1)
48 48 mod = ''
49 49 if len(parts) > 1:
50 50 mod = '/' + parts[1]
51 51 return parts[0][4:], mod, int(revnum)
52 52
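# A quick sketch of the revision-id format revsplit() parses (the uuid
# here is hypothetical): ids look like 'svn:<uuid><module>@<revnum>', so
#
#   >>> revsplit('svn:12345678-abcd/trunk@42')
#   ('12345678-abcd', '/trunk', 42)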
53 53 def geturl(path):
54 54 try:
55 55 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
56 56 except SubversionException:
57 57 pass
58 58 if os.path.isdir(path):
59 59 path = os.path.normpath(os.path.abspath(path))
60 60 if os.name == 'nt':
61 61 path = '/' + util.normpath(path)
62 62 # Module URL is later compared with the repository URL returned
63 63 # by svn API, which is UTF-8.
64 64 path = encoding.tolocal(path)
65 65 return 'file://%s' % urllib.quote(path)
66 66 return path
67 67
68 68 def optrev(number):
69 69 optrev = svn.core.svn_opt_revision_t()
70 70 optrev.kind = svn.core.svn_opt_revision_number
71 71 optrev.value.number = number
72 72 return optrev
73 73
74 74 class changedpath(object):
75 75 def __init__(self, p):
76 76 self.copyfrom_path = p.copyfrom_path
77 77 self.copyfrom_rev = p.copyfrom_rev
78 78 self.action = p.action
79 79
80 80 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
81 81 strict_node_history=False):
82 82 protocol = -1
83 83 def receiver(orig_paths, revnum, author, date, message, pool):
84 84 if orig_paths is not None:
85 85 for k, v in orig_paths.iteritems():
86 86 orig_paths[k] = changedpath(v)
87 87 pickle.dump((orig_paths, revnum, author, date, message),
88 88 fp, protocol)
89 89
90 90 try:
91 91 # Use an ra of our own so that our parent can consume
92 92 # our results without confusing the server.
93 93 t = transport.SvnRaTransport(url=url)
94 94 svn.ra.get_log(t.ra, paths, start, end, limit,
95 95 discover_changed_paths,
96 96 strict_node_history,
97 97 receiver)
98 98 except SubversionException, (inst, num):
99 99 pickle.dump(num, fp, protocol)
100 100 except IOError:
101 101 # Caller may interrupt the iteration
102 102 pickle.dump(None, fp, protocol)
103 103 else:
104 104 pickle.dump(None, fp, protocol)
105 105 fp.close()
106 106 # With a large history, the cleanup process goes crazy and suddenly
107 107 # consumes a *huge* amount of memory. With the output file closed,
108 108 # there is no need for clean termination.
109 109 os._exit(0)
110 110
111 111 def debugsvnlog(ui, **opts):
112 112 """Fetch SVN log in a subprocess and channel them back to parent to
113 113 avoid memory collection issues.
114 114 """
115 115 util.setbinary(sys.stdin)
116 116 util.setbinary(sys.stdout)
117 117 args = decodeargs(sys.stdin.read())
118 118 get_log_child(sys.stdout, *args)
119 119
120 120 class logstream(object):
121 121 """Interruptible revision log iterator."""
122 122 def __init__(self, stdout):
123 123 self._stdout = stdout
124 124
125 125 def __iter__(self):
126 126 while True:
127 127 try:
128 128 entry = pickle.load(self._stdout)
129 129 except EOFError:
130 130 raise util.Abort(_('Mercurial failed to run itself, check'
131 131 ' hg executable is in PATH'))
132 132 try:
133 133 orig_paths, revnum, author, date, message = entry
134 134 except:
135 135 if entry is None:
136 136 break
137 137 raise SubversionException("child raised exception", entry)
138 138 yield entry
139 139
140 140 def close(self):
141 141 if self._stdout:
142 142 self._stdout.close()
143 143 self._stdout = None
144 144
145 145
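# The pair above (get_log_child feeding logstream) speaks a small pickle
# protocol over the pipe: zero or more (orig_paths, revnum, author, date,
# message) tuples, then None as the end-of-stream marker, or a bare error
# number if the svn call failed; __iter__ stops on the None and wraps
# anything else in a SubversionException.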
146 146 # Check to see if the given path is a local Subversion repo. Verify this by
147 147 # looking for several svn-specific files and directories in the given
148 148 # directory.
149 149 def filecheck(ui, path, proto):
150 150 for x in ('locks', 'hooks', 'format', 'db'):
151 151 if not os.path.exists(os.path.join(path, x)):
152 152 return False
153 153 return True
154 154
155 155 # Check to see if a given path is the root of an svn repo over http. We verify
156 156 # this by requesting a version-controlled URL we know can't exist and looking
157 157 # for the svn-specific "not found" XML.
158 158 def httpcheck(ui, path, proto):
159 159 try:
160 160 opener = urllib2.build_opener()
161 161 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
162 162 data = rsp.read()
163 163 except urllib2.HTTPError, inst:
164 164 if inst.code != 404:
165 165 # Except for 404 we cannot know for sure this is not an svn repo
166 166 ui.warn(_('svn: cannot probe remote repository, assume it could '
167 167 'be a subversion repository. Use --source-type if you '
168 168 'know better.\n'))
169 169 return True
170 170 data = inst.fp.read()
171 171 except:
172 172 # Could be urllib2.URLError if the URL is invalid or anything else.
173 173 return False
174 174 return '<m:human-readable errcode="160013">' in data
175 175
176 176 protomap = {'http': httpcheck,
177 177 'https': httpcheck,
178 178 'file': filecheck,
179 179 }
180 180 def issvnurl(ui, url):
181 181 try:
182 182 proto, path = url.split('://', 1)
183 183 if proto == 'file':
184 184 path = urllib.url2pathname(path)
185 185 except ValueError:
186 186 proto = 'file'
187 187 path = os.path.abspath(url)
188 188 if proto == 'file':
189 189 path = path.replace(os.sep, '/')
190 190 check = protomap.get(proto, lambda *args: False)
191 191 while '/' in path:
192 192 if check(ui, path, proto):
193 193 return True
194 194 path = path.rsplit('/', 1)[0]
195 195 return False
196 196
197 197 # SVN conversion code stolen from bzr-svn and tailor
198 198 #
199 199 # Subversion looks like a versioned filesystem, branches structures
200 200 # are defined by conventions and not enforced by the tool. First,
201 201 # we define the potential branches (modules) as "trunk" and "branches"
202 202 # children directories. Revisions are then identified by their
203 203 # module and revision number (and a repository identifier).
204 204 #
205 205 # The revision graph is really a tree (or a forest). By default, a
206 206 # revision parent is the previous revision in the same module. If the
207 207 # module directory is copied/moved from another module then the
208 208 # revision is the module root and its parent the source revision in
209 209 # the parent module. A revision has at most one parent.
210 210 #
211 211 class svn_source(converter_source):
212 212 def __init__(self, ui, url, rev=None):
213 213 super(svn_source, self).__init__(ui, url, rev=rev)
214 214
215 215 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
216 216 (os.path.exists(url) and
217 217 os.path.exists(os.path.join(url, '.svn'))) or
218 218 issvnurl(ui, url)):
219 219 raise NoRepo(_("%s does not look like a Subversion repository")
220 220 % url)
221 221 if svn is None:
222 222 raise MissingTool(_('Could not load Subversion python bindings'))
223 223
224 224 try:
225 225 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
226 226 if version < (1, 4):
227 227 raise MissingTool(_('Subversion python bindings %d.%d found, '
228 228 '1.4 or later required') % version)
229 229 except AttributeError:
230 230 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
231 231 'or later required'))
232 232
233 233 self.lastrevs = {}
234 234
235 235 latest = None
236 236 try:
237 237 # Support file://path@rev syntax. Useful e.g. to convert
238 238 # deleted branches.
239 239 at = url.rfind('@')
240 240 if at >= 0:
241 241 latest = int(url[at + 1:])
242 242 url = url[:at]
243 243 except ValueError:
244 244 pass
245 245 self.url = geturl(url)
246 246 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
247 247 try:
248 248 self.transport = transport.SvnRaTransport(url=self.url)
249 249 self.ra = self.transport.ra
250 250 self.ctx = self.transport.client
251 251 self.baseurl = svn.ra.get_repos_root(self.ra)
252 252 # Module is either empty or a repository path starting with
253 253 # a slash and not ending with a slash.
254 254 self.module = urllib.unquote(self.url[len(self.baseurl):])
255 255 self.prevmodule = None
256 256 self.rootmodule = self.module
257 257 self.commits = {}
258 258 self.paths = {}
259 259 self.uuid = svn.ra.get_uuid(self.ra)
260 260 except SubversionException:
261 261 ui.traceback()
262 262 raise NoRepo(_("%s does not look like a Subversion repository")
263 263 % self.url)
264 264
265 265 if rev:
266 266 try:
267 267 latest = int(rev)
268 268 except ValueError:
269 269 raise util.Abort(_('svn: revision %s is not an integer') % rev)
270 270
271 271 self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
272 272 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
273 273 try:
274 274 self.startrev = int(self.startrev)
275 275 if self.startrev < 0:
276 276 self.startrev = 0
277 277 except ValueError:
278 278 raise util.Abort(_('svn: start revision %s is not an integer')
279 279 % self.startrev)
280 280
281 281 try:
282 282 self.head = self.latest(self.module, latest)
283 283 except SvnPathNotFound:
284 284 self.head = None
285 285 if not self.head:
286 286 raise util.Abort(_('no revision found in module %s')
287 287 % self.module)
288 288 self.last_changed = self.revnum(self.head)
289 289
290 290 self._changescache = None
291 291
292 292 if os.path.exists(os.path.join(url, '.svn/entries')):
293 293 self.wc = url
294 294 else:
295 295 self.wc = None
296 296 self.convertfp = None
297 297
298 298 def setrevmap(self, revmap):
299 299 lastrevs = {}
300 300 for revid in revmap.iterkeys():
301 301 uuid, module, revnum = revsplit(revid)
302 302 lastrevnum = lastrevs.setdefault(module, revnum)
303 303 if revnum > lastrevnum:
304 304 lastrevs[module] = revnum
305 305 self.lastrevs = lastrevs
306 306
307 307 def exists(self, path, optrev):
308 308 try:
309 309 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
310 310 optrev, False, self.ctx)
311 311 return True
312 312 except SubversionException:
313 313 return False
314 314
315 315 def getheads(self):
316 316
317 317 def isdir(path, revnum):
318 318 kind = self._checkpath(path, revnum)
319 319 return kind == svn.core.svn_node_dir
320 320
321 321 def getcfgpath(name, rev):
322 322 cfgpath = self.ui.config('convert', 'svn.' + name)
323 323 if cfgpath is not None and cfgpath.strip() == '':
324 324 return None
325 325 path = (cfgpath or name).strip('/')
326 326 if not self.exists(path, rev):
327 327 if self.module.endswith(path) and name == 'trunk':
328 328 # we are converting from inside this directory
329 329 return None
330 330 if cfgpath:
331 331 raise util.Abort(_('expected %s to be at %r, but not found')
332 332 % (name, path))
333 333 return None
334 334 self.ui.note(_('found %s at %r\n') % (name, path))
335 335 return path
336 336
337 337 rev = optrev(self.last_changed)
338 338 oldmodule = ''
339 339 trunk = getcfgpath('trunk', rev)
340 340 self.tags = getcfgpath('tags', rev)
341 341 branches = getcfgpath('branches', rev)
342 342
343 343 # If the project has a trunk or branches, we will extract heads
344 344 # from them. We keep the project root otherwise.
345 345 if trunk:
346 346 oldmodule = self.module or ''
347 347 self.module += '/' + trunk
348 348 self.head = self.latest(self.module, self.last_changed)
349 349 if not self.head:
350 350 raise util.Abort(_('no revision found in module %s')
351 351 % self.module)
352 352
353 353 # First head in the list is the module's head
354 354 self.heads = [self.head]
355 355 if self.tags is not None:
356 356 self.tags = '%s/%s' % (oldmodule, (self.tags or 'tags'))
357 357
358 358 # Check if branches bring a few more heads to the list
359 359 if branches:
360 360 rpath = self.url.strip('/')
361 361 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
362 362 rev, False, self.ctx)
363 363 for branch in branchnames.keys():
364 364 module = '%s/%s/%s' % (oldmodule, branches, branch)
365 365 if not isdir(module, self.last_changed):
366 366 continue
367 367 brevid = self.latest(module, self.last_changed)
368 368 if not brevid:
369 369 self.ui.note(_('ignoring empty branch %s\n') % branch)
370 370 continue
371 371 self.ui.note(_('found branch %s at %d\n') %
372 372 (branch, self.revnum(brevid)))
373 373 self.heads.append(brevid)
374 374
375 375 if self.startrev and self.heads:
376 376 if len(self.heads) > 1:
377 377 raise util.Abort(_('svn: start revision is not supported '
378 378 'with more than one branch'))
379 379 revnum = self.revnum(self.heads[0])
380 380 if revnum < self.startrev:
381 381 raise util.Abort(
382 382 _('svn: no revision found after start revision %d')
383 383 % self.startrev)
384 384
385 385 return self.heads
386 386
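    # A sketch of the layout configuration consumed by getheads() above;
    # the option names come from the code, the values are illustrative:
    #
    #   [convert]
    #   svn.trunk = trunk
    #   svn.branches = branches
    #   svn.tags = tags
    #   svn.startrev = 0
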
387 387 def getchanges(self, rev):
388 388 if self._changescache and self._changescache[0] == rev:
389 389 return self._changescache[1]
390 390 self._changescache = None
391 391 (paths, parents) = self.paths[rev]
392 392 if parents:
393 393 files, self.removed, copies = self.expandpaths(rev, paths, parents)
394 394 else:
395 395 # Perform a full checkout on roots
396 396 uuid, module, revnum = revsplit(rev)
397 397 entries = svn.client.ls(self.baseurl + urllib.quote(module),
398 398 optrev(revnum), True, self.ctx)
399 399 files = [n for n, e in entries.iteritems()
400 400 if e.kind == svn.core.svn_node_file]
401 401 copies = {}
402 402 self.removed = set()
403 403
404 404 files.sort()
405 405 files = zip(files, [rev] * len(files))
406 406
407 407 # caller caches the result, so free it here to release memory
408 408 del self.paths[rev]
409 409 return (files, copies)
410 410
411 411 def getchangedfiles(self, rev, i):
412 412 changes = self.getchanges(rev)
413 413 self._changescache = (rev, changes)
414 414 return [f[0] for f in changes[0]]
415 415
416 416 def getcommit(self, rev):
417 417 if rev not in self.commits:
418 418 uuid, module, revnum = revsplit(rev)
419 419 self.module = module
420 420 self.reparent(module)
421 421 # We assume that:
422 422 # - requests for revisions after "stop" come from the
423 423 # revision graph backward traversal. Cache all of them
424 424 # down to stop, they will be used eventually.
425 425 # - requests for revisions before "stop" come to get
426 426 # isolated branches parents. Just fetch what is needed.
427 427 stop = self.lastrevs.get(module, 0)
428 428 if revnum < stop:
429 429 stop = revnum + 1
430 430 self._fetch_revisions(revnum, stop)
431 431 commit = self.commits[rev]
432 432 # caller caches the result, so free it here to release memory
433 433 del self.commits[rev]
434 434 return commit
435 435
436 436 def gettags(self):
437 437 tags = {}
438 438 if self.tags is None:
439 439 return tags
440 440
441 441 # svn tags are just a convention, project branches left in a
442 442 # 'tags' directory. There is no relationship other than
443 443 # ancestry, which is expensive to discover and makes them hard
444 444 # to update incrementally. Worse, past revisions may be
445 445 # referenced by tags far away in the future, requiring a deep
446 446 # history traversal on every calculation. Current code
447 447 # performs a single backward traversal, tracking moves within
448 448 # the tags directory (tag renaming) and recording a new tag
449 449 # every time a project is copied from outside the tags
450 450 # directory. It also lists deleted tags; this behaviour may
451 451 # change in the future.
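        # An illustrative (made-up) log for the backward traversal below:
        #   r12: A /tags/2.0 (from /tags/1.0:10) -> tracked as a tag rename
        #   r10: A /tags/1.0 (from /trunk:9)     -> recorded as tag '1.0'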
452 452 pendings = []
453 453 tagspath = self.tags
454 454 start = svn.ra.get_latest_revnum(self.ra)
455 455 stream = self._getlog([self.tags], start, self.startrev)
456 456 try:
457 457 for entry in stream:
458 458 origpaths, revnum, author, date, message = entry
459 459 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
460 460 in origpaths.iteritems() if e.copyfrom_path]
461 461 # Apply moves/copies from more specific to general
462 462 copies.sort(reverse=True)
463 463
464 464 srctagspath = tagspath
465 465 if copies and copies[-1][2] == tagspath:
466 466 # Track tags directory moves
467 467 srctagspath = copies.pop()[0]
468 468
469 469 for source, sourcerev, dest in copies:
470 470 if not dest.startswith(tagspath + '/'):
471 471 continue
472 472 for tag in pendings:
473 473 if tag[0].startswith(dest):
474 474 tagpath = source + tag[0][len(dest):]
475 475 tag[:2] = [tagpath, sourcerev]
476 476 break
477 477 else:
478 478 pendings.append([source, sourcerev, dest])
479 479
480 480 # Filter out tags with children coming from different
481 481 # parts of the repository like:
482 482 # /tags/tag.1 (from /trunk:10)
483 483 # /tags/tag.1/foo (from /branches/foo:12)
484 484 # Here /tags/tag.1 is discarded as well as its children.
485 485 # It happens with tools like cvs2svn. Such tags cannot
486 486 # be represented in Mercurial.
487 487 addeds = dict((p, e.copyfrom_path) for p, e
488 488 in origpaths.iteritems()
489 489 if e.action == 'A' and e.copyfrom_path)
490 490 badroots = set()
491 491 for destroot in addeds:
492 492 for source, sourcerev, dest in pendings:
493 493 if (not dest.startswith(destroot + '/')
494 494 or source.startswith(addeds[destroot] + '/')):
495 495 continue
496 496 badroots.add(destroot)
497 497 break
498 498
499 499 for badroot in badroots:
500 500 pendings = [p for p in pendings if p[2] != badroot
501 501 and not p[2].startswith(badroot + '/')]
502 502
503 503 # Tell tag renamings from tag creations
504 remainings = []
504 renamings = []
505 505 for source, sourcerev, dest in pendings:
506 506 tagname = dest.split('/')[-1]
507 507 if source.startswith(srctagspath):
508 remainings.append([source, sourcerev, tagname])
508 renamings.append([source, sourcerev, tagname])
509 509 continue
510 510 if tagname in tags:
511 511 # Keep the latest tag value
512 512 continue
513 513 # The copyfrom revision may be fake; get one with changes
514 514 try:
515 515 tagid = self.latest(source, sourcerev)
516 516 if tagid and tagname not in tags:
517 517 tags[tagname] = tagid
518 518 except SvnPathNotFound:
519 519 # It happens when we are following directories
520 520 # we assumed were copied with their parents
521 521 # but were really created in the tag
522 522 # directory.
523 523 pass
524 pendings = remainings
524 pendings = renamings
525 525 tagspath = srctagspath
526 526 finally:
527 527 stream.close()
528 528 return tags
529 529
530 530 def converted(self, rev, destrev):
531 531 if not self.wc:
532 532 return
533 533 if self.convertfp is None:
534 534 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
535 535 'a')
536 536 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
537 537 self.convertfp.flush()
538 538
539 539 def revid(self, revnum, module=None):
540 540 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
541 541
542 542 def revnum(self, rev):
543 543 return int(rev.split('@')[-1])
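    # Example with a hypothetical UUID: revid(42, '/trunk') builds the
    # string 'svn:38f2b7a0-uuid/trunk@42', and revnum() maps any such
    # string back to the integer 42.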
544 544
545 545 def latest(self, path, stop=0):
546 546 """Find the latest revid affecting path, up to stop. It may return
547 547 a revision in a different module, since a branch may be moved without
548 548 a change being reported. Return None if the computed module does not
549 549 belong to the rootmodule subtree.
550 550 """
551 551 if not path.startswith(self.rootmodule):
552 552 # Requests on foreign branches may be forbidden at server level
553 553 self.ui.debug('ignoring foreign branch %r\n' % path)
554 554 return None
555 555
556 556 if not stop:
557 557 stop = svn.ra.get_latest_revnum(self.ra)
558 558 try:
559 559 prevmodule = self.reparent('')
560 560 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
561 561 self.reparent(prevmodule)
562 562 except SubversionException:
563 563 dirent = None
564 564 if not dirent:
565 565 raise SvnPathNotFound(_('%s not found up to revision %d')
566 566 % (path, stop))
567 567
568 568 # stat() gives us the previous revision on this line of
569 569 # development, but it might be in *another module*. Fetch the
570 570 # log and detect renames down to the latest revision.
571 571 stream = self._getlog([path], stop, dirent.created_rev)
572 572 try:
573 573 for entry in stream:
574 574 paths, revnum, author, date, message = entry
575 575 if revnum <= dirent.created_rev:
576 576 break
577 577
578 578 for p in paths:
579 579 if not path.startswith(p) or not paths[p].copyfrom_path:
580 580 continue
581 581 newpath = paths[p].copyfrom_path + path[len(p):]
582 582 self.ui.debug("branch renamed from %s to %s at %d\n" %
583 583 (path, newpath, revnum))
584 584 path = newpath
585 585 break
586 586 finally:
587 587 stream.close()
588 588
589 589 if not path.startswith(self.rootmodule):
590 590 self.ui.debug('ignoring foreign branch %r\n' % path)
591 591 return None
592 592 return self.revid(dirent.created_rev, path)
593 593
594 594 def reparent(self, module):
595 595 """Reparent the svn transport and return the previous parent."""
596 596 if self.prevmodule == module:
597 597 return module
598 598 svnurl = self.baseurl + urllib.quote(module)
599 599 prevmodule = self.prevmodule
600 600 if prevmodule is None:
601 601 prevmodule = ''
602 602 self.ui.debug("reparent to %s\n" % svnurl)
603 603 svn.ra.reparent(self.ra, svnurl)
604 604 self.prevmodule = module
605 605 return prevmodule
606 606
607 607 def expandpaths(self, rev, paths, parents):
608 608 changed, removed = set(), set()
609 609 copies = {}
610 610
611 611 new_module, revnum = revsplit(rev)[1:]
612 612 if new_module != self.module:
613 613 self.module = new_module
614 614 self.reparent(self.module)
615 615
616 616 for i, (path, ent) in enumerate(paths):
617 617 self.ui.progress(_('scanning paths'), i, item=path,
618 618 total=len(paths))
619 619 entrypath = self.getrelpath(path)
620 620
621 621 kind = self._checkpath(entrypath, revnum)
622 622 if kind == svn.core.svn_node_file:
623 623 changed.add(self.recode(entrypath))
624 624 if not ent.copyfrom_path or not parents:
625 625 continue
626 626 # Copy sources not in parent revisions cannot be
627 627 # represented; ignore their origin for now
628 628 pmodule, prevnum = revsplit(parents[0])[1:]
629 629 if ent.copyfrom_rev < prevnum:
630 630 continue
631 631 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
632 632 if not copyfrom_path:
633 633 continue
634 634 self.ui.debug("copied to %s from %s@%s\n" %
635 635 (entrypath, copyfrom_path, ent.copyfrom_rev))
636 636 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
637 637 elif kind == 0: # gone, but had better be a deleted *file*
638 638 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
639 639 pmodule, prevnum = revsplit(parents[0])[1:]
640 640 parentpath = pmodule + "/" + entrypath
641 641 fromkind = self._checkpath(entrypath, prevnum, pmodule)
642 642
643 643 if fromkind == svn.core.svn_node_file:
644 644 removed.add(self.recode(entrypath))
645 645 elif fromkind == svn.core.svn_node_dir:
646 646 oroot = parentpath.strip('/')
647 647 nroot = path.strip('/')
648 648 children = self._iterfiles(oroot, prevnum)
649 649 for childpath in children:
650 650 childpath = childpath.replace(oroot, nroot)
651 651 childpath = self.getrelpath("/" + childpath, pmodule)
652 652 if childpath:
653 653 removed.add(self.recode(childpath))
654 654 else:
655 655 self.ui.debug('unknown path in revision %d: %s\n'
656 656 % (revnum, path))
657 657 elif kind == svn.core.svn_node_dir:
658 658 if ent.action == 'M':
659 659 # If the directory just had a prop change,
660 660 # then we shouldn't need to look for its children.
661 661 continue
662 662 if ent.action == 'R' and parents:
663 663 # If a directory is replacing a file, mark the previous
664 664 # file as deleted
665 665 pmodule, prevnum = revsplit(parents[0])[1:]
666 666 pkind = self._checkpath(entrypath, prevnum, pmodule)
667 667 if pkind == svn.core.svn_node_file:
668 668 removed.add(self.recode(entrypath))
669 669 elif pkind == svn.core.svn_node_dir:
670 670 # We do not know what files were kept or removed,
671 671 # mark them all as changed.
672 672 for childpath in self._iterfiles(pmodule, prevnum):
673 673 childpath = self.getrelpath("/" + childpath)
674 674 if childpath:
675 675 changed.add(self.recode(childpath))
676 676
677 677 for childpath in self._iterfiles(path, revnum):
678 678 childpath = self.getrelpath("/" + childpath)
679 679 if childpath:
680 680 changed.add(self.recode(childpath))
681 681
682 682 # Handle directory copies
683 683 if not ent.copyfrom_path or not parents:
684 684 continue
685 685 # Copy sources not in parent revisions cannot be
686 686 # represented; ignore their origin for now
687 687 pmodule, prevnum = revsplit(parents[0])[1:]
688 688 if ent.copyfrom_rev < prevnum:
689 689 continue
690 690 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
691 691 if not copyfrompath:
692 692 continue
693 693 self.ui.debug("mark %s came from %s:%d\n"
694 694 % (path, copyfrompath, ent.copyfrom_rev))
695 695 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
696 696 for childpath in children:
697 697 childpath = self.getrelpath("/" + childpath, pmodule)
698 698 if not childpath:
699 699 continue
700 700 copytopath = path + childpath[len(copyfrompath):]
701 701 copytopath = self.getrelpath(copytopath)
702 702 copies[self.recode(copytopath)] = self.recode(childpath)
703 703
704 704 self.ui.progress(_('scanning paths'), None)
705 705 changed.update(removed)
706 706 return (list(changed), removed, copies)
707 707
708 708 def _fetch_revisions(self, from_revnum, to_revnum):
709 709 if from_revnum < to_revnum:
710 710 from_revnum, to_revnum = to_revnum, from_revnum
711 711
712 712 self.child_cset = None
713 713
714 714 def parselogentry(orig_paths, revnum, author, date, message):
715 715 """Return the parsed commit object or None, and True if
716 716 the revision is a branch root.
717 717 """
718 718 self.ui.debug("parsing revision %d (%d changes)\n" %
719 719 (revnum, len(orig_paths)))
720 720
721 721 branched = False
722 722 rev = self.revid(revnum)
723 723 # branch log might return entries for a parent we already have
724 724
725 725 if rev in self.commits or revnum < to_revnum:
726 726 return None, branched
727 727
728 728 parents = []
729 729 # check whether this revision is the start of a branch or part
730 730 # of a branch renaming
731 731 orig_paths = sorted(orig_paths.iteritems())
732 732 root_paths = [(p, e) for p, e in orig_paths
733 733 if self.module.startswith(p)]
734 734 if root_paths:
735 735 path, ent = root_paths[-1]
736 736 if ent.copyfrom_path:
737 737 branched = True
738 738 newpath = ent.copyfrom_path + self.module[len(path):]
739 739 # ent.copyfrom_rev may not be the actual last revision
740 740 previd = self.latest(newpath, ent.copyfrom_rev)
741 741 if previd is not None:
742 742 prevmodule, prevnum = revsplit(previd)[1:]
743 743 if prevnum >= self.startrev:
744 744 parents = [previd]
745 745 self.ui.note(
746 746 _('found parent of branch %s at %d: %s\n') %
747 747 (self.module, prevnum, prevmodule))
748 748 else:
749 749 self.ui.debug("no copyfrom path, don't know what to do.\n")
750 750
751 751 paths = []
752 752 # filter out unrelated paths
753 753 for path, ent in orig_paths:
754 754 if self.getrelpath(path) is None:
755 755 continue
756 756 paths.append((path, ent))
757 757
758 758 # Example SVN datetime. Includes microseconds.
759 759 # ISO-8601 conformant
760 760 # '2007-01-04T17:35:00.902377Z'
761 761 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
762 762
763 763 log = message and self.recode(message) or ''
764 764 author = author and self.recode(author) or ''
765 765 try:
766 766 branch = self.module.split("/")[-1]
767 767 if branch == self.trunkname:
768 768 branch = None
769 769 except IndexError:
770 770 branch = None
771 771
772 772 cset = commit(author=author,
773 773 date=util.datestr(date),
774 774 desc=log,
775 775 parents=parents,
776 776 branch=branch,
777 777 rev=rev)
778 778
779 779 self.commits[rev] = cset
780 780 # The parents list is *shared* among self.paths and the
781 781 # commit object. Both will be updated below.
782 782 self.paths[rev] = (paths, cset.parents)
783 783 if self.child_cset and not self.child_cset.parents:
784 784 self.child_cset.parents[:] = [rev]
785 785 self.child_cset = cset
786 786 return cset, branched
787 787
788 788 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
789 789 (self.module, from_revnum, to_revnum))
790 790
791 791 try:
792 792 firstcset = None
793 793 lastonbranch = False
794 794 stream = self._getlog([self.module], from_revnum, to_revnum)
795 795 try:
796 796 for entry in stream:
797 797 paths, revnum, author, date, message = entry
798 798 if revnum < self.startrev:
799 799 lastonbranch = True
800 800 break
801 801 if not paths:
802 802 self.ui.debug('revision %d has no entries\n' % revnum)
803 803 # If we ever leave the loop on an empty
804 804 # revision, do not try to get a parent branch
805 805 lastonbranch = lastonbranch or revnum == 0
806 806 continue
807 807 cset, lastonbranch = parselogentry(paths, revnum, author,
808 808 date, message)
809 809 if cset:
810 810 firstcset = cset
811 811 if lastonbranch:
812 812 break
813 813 finally:
814 814 stream.close()
815 815
816 816 if not lastonbranch and firstcset and not firstcset.parents:
817 817 # The first revision of the sequence (the last fetched one)
818 818 # has invalid parents if not a branch root. Find the parent
819 819 # revision now, if any.
820 820 try:
821 821 firstrevnum = self.revnum(firstcset.rev)
822 822 if firstrevnum > 1:
823 823 latest = self.latest(self.module, firstrevnum - 1)
824 824 if latest:
825 825 firstcset.parents.append(latest)
826 826 except SvnPathNotFound:
827 827 pass
828 828 except SubversionException, (inst, num):
829 829 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
830 830 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
831 831 raise
832 832
833 833 def getfile(self, file, rev):
834 834 # TODO: ra.get_file transmits the whole file instead of diffs.
835 835 if file in self.removed:
836 836 raise IOError()
837 837 mode = ''
838 838 try:
839 839 new_module, revnum = revsplit(rev)[1:]
840 840 if self.module != new_module:
841 841 self.module = new_module
842 842 self.reparent(self.module)
843 843 io = StringIO()
844 844 info = svn.ra.get_file(self.ra, file, revnum, io)
845 845 data = io.getvalue()
846 846 # ra.get_file() seems to keep a reference on the input buffer,
847 847 # preventing collection. Release it explicitly.
848 848 io.close()
849 849 if isinstance(info, list):
850 850 info = info[-1]
851 851 mode = ("svn:executable" in info) and 'x' or ''
852 852 mode = ("svn:special" in info) and 'l' or mode
853 853 except SubversionException, e:
854 854 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
855 855 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
856 856 if e.apr_err in notfound: # File not found
857 857 raise IOError()
858 858 raise
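        # svn:special files store a symlink as the literal content
        # "link TARGET"; strip that marker so only the target remains.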
859 859 if mode == 'l':
860 860 link_prefix = "link "
861 861 if data.startswith(link_prefix):
862 862 data = data[len(link_prefix):]
863 863 return data, mode
864 864
865 865 def _iterfiles(self, path, revnum):
866 866 """Enumerate all files in path at revnum, recursively."""
867 867 path = path.strip('/')
868 868 pool = Pool()
869 869 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
870 870 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
871 871 if path:
872 872 path += '/'
873 873 return ((path + p) for p, e in entries.iteritems()
874 874 if e.kind == svn.core.svn_node_file)
875 875
876 876 def getrelpath(self, path, module=None):
877 877 if module is None:
878 878 module = self.module
879 879 # Given the repository url of this wc, say
880 880 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
881 881 # extract the "entry" portion (a relative path) from what
882 882 # svn log --xml says, i.e.
883 883 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
884 884 # that is to say "tests/PloneTestCase.py"
885 885 if path.startswith(module):
886 886 relative = path.rstrip('/')[len(module):]
887 887 if relative.startswith('/'):
888 888 return relative[1:]
889 889 elif relative == '':
890 890 return relative
891 891
892 892 # The path is outside our tracked tree...
893 893 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
894 894 return None
895 895
896 896 def _checkpath(self, path, revnum, module=None):
897 897 if module is not None:
898 898 prevmodule = self.reparent('')
899 899 path = module + '/' + path
900 900 try:
901 901 # ra.check_path does not like leading slashes very much; they lead
902 902 # to PROPFIND subversion errors
903 903 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
904 904 finally:
905 905 if module is not None:
906 906 self.reparent(prevmodule)
907 907
908 908 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
909 909 strict_node_history=False):
910 910 # Normalize path names; svn >= 1.5 only wants paths relative to
911 911 # the supplied URL
912 912 relpaths = []
913 913 for p in paths:
914 914 if not p.startswith('/'):
915 915 p = self.module + '/' + p
916 916 relpaths.append(p.strip('/'))
917 917 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
918 918 strict_node_history]
919 919 arg = encodeargs(args)
920 920 hgexe = util.hgexecutable()
921 921 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
922 922 stdin, stdout = util.popen2(util.quotecommand(cmd))
923 923 stdin.write(arg)
924 924 try:
925 925 stdin.close()
926 926 except IOError:
927 927 raise util.Abort(_('Mercurial failed to run itself; check that'
928 928 ' the hg executable is in PATH'))
929 929 return logstream(stdout)
930 930
931 931 pre_revprop_change = '''#!/bin/sh
932 932
933 933 REPOS="$1"
934 934 REV="$2"
935 935 USER="$3"
936 936 PROPNAME="$4"
937 937 ACTION="$5"
938 938
939 939 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
940 940 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
941 941 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
942 942
943 943 echo "Changing prohibited revision property" >&2
944 944 exit 1
945 945 '''
946 946
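# For illustration (the repository URL below is hypothetical), the hook
# above admits the properties written during conversion and rejects the
# rest:
#   $ svn propset --revprop -r 3 svn:log 'better message' file:///tmp/repo
#   $ svn propset --revprop -r 3 svn:author alice file:///tmp/repo  # denied
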
947 947 class svn_sink(converter_sink, commandline):
948 948 commit_re = re.compile(r'Committed revision (\d+)\.', re.M)
949 949 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
950 950
951 951 def prerun(self):
952 952 if self.wc:
953 953 os.chdir(self.wc)
954 954
955 955 def postrun(self):
956 956 if self.wc:
957 957 os.chdir(self.cwd)
958 958
959 959 def join(self, name):
960 960 return os.path.join(self.wc, '.svn', name)
961 961
962 962 def revmapfile(self):
963 963 return self.join('hg-shamap')
964 964
965 965 def authorfile(self):
966 966 return self.join('hg-authormap')
967 967
968 968 def __init__(self, ui, path):
969 969
970 970 converter_sink.__init__(self, ui, path)
971 971 commandline.__init__(self, ui, 'svn')
972 972 self.delete = []
973 973 self.setexec = []
974 974 self.delexec = []
975 975 self.copies = []
976 976 self.wc = None
977 977 self.cwd = os.getcwd()
978 978
979 979 path = os.path.realpath(path)
980 980
981 981 created = False
982 982 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
983 983 self.wc = path
984 984 self.run0('update')
985 985 else:
986 986 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
987 987
988 988 if os.path.isdir(os.path.dirname(path)):
989 989 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
990 990 ui.status(_('initializing svn repository %r\n') %
991 991 os.path.basename(path))
992 992 commandline(ui, 'svnadmin').run0('create', path)
993 993 created = path
994 994 path = util.normpath(path)
995 995 if not path.startswith('/'):
996 996 path = '/' + path
997 997 path = 'file://' + path
998 998
999 999 ui.status(_('initializing svn working copy %r\n')
1000 1000 % os.path.basename(wcpath))
1001 1001 self.run0('checkout', path, wcpath)
1002 1002
1003 1003 self.wc = wcpath
1004 1004 self.opener = scmutil.opener(self.wc)
1005 1005 self.wopener = scmutil.opener(self.wc)
1006 1006 self.childmap = mapfile(ui, self.join('hg-childmap'))
1007 1007 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1008 1008
1009 1009 if created:
1010 1010 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1011 1011 fp = open(hook, 'w')
1012 1012 fp.write(pre_revprop_change)
1013 1013 fp.close()
1014 1014 util.setflags(hook, False, True)
1015 1015
1016 1016 output = self.run0('info')
1017 1017 self.uuid = self.uuid_re.search(output).group(1).strip()
1018 1018
1019 1019 def wjoin(self, *names):
1020 1020 return os.path.join(self.wc, *names)
1021 1021
1022 1022 def putfile(self, filename, flags, data):
1023 1023 if 'l' in flags:
1024 1024 self.wopener.symlink(data, filename)
1025 1025 else:
1026 1026 try:
1027 1027 if os.path.islink(self.wjoin(filename)):
1028 1028 os.unlink(filename)
1029 1029 except OSError:
1030 1030 pass
1031 1031 self.wopener.write(filename, data)
1032 1032
1033 1033 if self.is_exec:
1034 1034 was_exec = self.is_exec(self.wjoin(filename))
1035 1035 else:
1036 1036 # On filesystems that do not support the execute bit, there is no
1037 1037 # way to know whether it is set without asking Subversion. Setting
1038 1038 # it systematically is just as expensive and much simpler.
1039 1039 was_exec = 'x' not in flags
1040 1040
1041 1041 util.setflags(self.wjoin(filename), False, 'x' in flags)
1042 1042 if was_exec:
1043 1043 if 'x' not in flags:
1044 1044 self.delexec.append(filename)
1045 1045 else:
1046 1046 if 'x' in flags:
1047 1047 self.setexec.append(filename)
1048 1048
1049 1049 def _copyfile(self, source, dest):
1050 1050 # SVN's copy command pukes if the destination file exists, but
1051 1051 # our copyfile method expects to record a copy that has
1052 1052 # already occurred. Cross the semantic gap.
1053 1053 wdest = self.wjoin(dest)
1054 1054 exists = os.path.lexists(wdest)
1055 1055 if exists:
1056 1056 fd, tempname = tempfile.mkstemp(
1057 1057 prefix='hg-copy-', dir=os.path.dirname(wdest))
1058 1058 os.close(fd)
1059 1059 os.unlink(tempname)
1060 1060 os.rename(wdest, tempname)
1061 1061 try:
1062 1062 self.run0('copy', source, dest)
1063 1063 finally:
1064 1064 if exists:
1065 1065 try:
1066 1066 os.unlink(wdest)
1067 1067 except OSError:
1068 1068 pass
1069 1069 os.rename(tempname, wdest)
1070 1070
1071 1071 def dirs_of(self, files):
1072 1072 dirs = set()
1073 1073 for f in files:
1074 1074 if os.path.isdir(self.wjoin(f)):
1075 1075 dirs.add(f)
1076 1076 for i in strutil.rfindall(f, '/'):
1077 1077 dirs.add(f[:i])
1078 1078 return dirs
1079 1079
1080 1080 def add_dirs(self, files):
1081 1081 add_dirs = [d for d in sorted(self.dirs_of(files))
1082 1082 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1083 1083 if add_dirs:
1084 1084 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1085 1085 return add_dirs
1086 1086
1087 1087 def add_files(self, files):
1088 1088 if files:
1089 1089 self.xargs(files, 'add', quiet=True)
1090 1090 return files
1091 1091
1092 1092 def tidy_dirs(self, names):
1093 1093 deleted = []
1094 1094 for d in sorted(self.dirs_of(names), reverse=True):
1095 1095 wd = self.wjoin(d)
1096 1096 if os.listdir(wd) == ['.svn']: # listdir returns a list, not a string
1097 1097 self.run0('delete', d)
1098 1098 deleted.append(d)
1099 1099 return deleted
1100 1100
1101 1101 def addchild(self, parent, child):
1102 1102 self.childmap[parent] = child
1103 1103
1104 1104 def revid(self, rev):
1105 1105 return u"svn:%s@%s" % (self.uuid, rev)
1106 1106
1107 1107 def putcommit(self, files, copies, parents, commit, source, revmap):
1108 1108 # Apply changes to working copy
1109 1109 for f, v in files:
1110 1110 try:
1111 1111 data, mode = source.getfile(f, v)
1112 1112 except IOError:
1113 1113 self.delete.append(f)
1114 1114 else:
1115 1115 self.putfile(f, mode, data)
1116 1116 if f in copies:
1117 1117 self.copies.append([copies[f], f])
1118 1118 files = [f[0] for f in files]
1119 1119
1120 1120 for parent in parents:
1121 1121 try:
1122 1122 return self.revid(self.childmap[parent])
1123 1123 except KeyError:
1124 1124 pass
1125 1125 entries = set(self.delete)
1126 1126 files = frozenset(files)
1127 1127 entries.update(self.add_dirs(files.difference(entries)))
1128 1128 if self.copies:
1129 1129 for s, d in self.copies:
1130 1130 self._copyfile(s, d)
1131 1131 self.copies = []
1132 1132 if self.delete:
1133 1133 self.xargs(self.delete, 'delete')
1134 1134 self.delete = []
1135 1135 entries.update(self.add_files(files.difference(entries)))
1136 1136 entries.update(self.tidy_dirs(entries))
1137 1137 if self.delexec:
1138 1138 self.xargs(self.delexec, 'propdel', 'svn:executable')
1139 1139 self.delexec = []
1140 1140 if self.setexec:
1141 1141 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1142 1142 self.setexec = []
1143 1143
1144 1144 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1145 1145 fp = os.fdopen(fd, 'w')
1146 1146 fp.write(commit.desc)
1147 1147 fp.close()
1148 1148 try:
1149 1149 output = self.run0('commit',
1150 1150 username=util.shortuser(commit.author),
1151 1151 file=messagefile,
1152 1152 encoding='utf-8')
1153 1153 try:
1154 1154 rev = self.commit_re.search(output).group(1)
1155 1155 except AttributeError:
1156 1156 if not files:
1157 1157 return parents[0]
1158 1158 self.ui.warn(_('unexpected svn output:\n'))
1159 1159 self.ui.warn(output)
1160 1160 raise util.Abort(_('unable to cope with svn output'))
1161 1161 if commit.rev:
1162 1162 self.run('propset', 'hg:convert-rev', commit.rev,
1163 1163 revprop=True, revision=rev)
1164 1164 if commit.branch and commit.branch != 'default':
1165 1165 self.run('propset', 'hg:convert-branch', commit.branch,
1166 1166 revprop=True, revision=rev)
1167 1167 for parent in parents:
1168 1168 self.addchild(parent, rev)
1169 1169 return self.revid(rev)
1170 1170 finally:
1171 1171 os.unlink(messagefile)
1172 1172
1173 1173 def puttags(self, tags):
1174 1174 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1175 1175 return None, None
@@ -1,128 +1,128
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
4 4 # This is a stripped-down version of the original bzr-svn transport.py,
5 5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
6 6
7 7 # This program is free software; you can redistribute it and/or modify
8 8 # it under the terms of the GNU General Public License as published by
9 9 # the Free Software Foundation; either version 2 of the License, or
10 10 # (at your option) any later version.
11 11
12 12 # This program is distributed in the hope that it will be useful,
13 13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 15 # GNU General Public License for more details.
16 16
17 17 # You should have received a copy of the GNU General Public License
18 18 # along with this program; if not, write to the Free Software
19 19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 20
21 21 from svn.core import SubversionException, Pool
22 22 import svn.ra
23 23 import svn.client
24 24 import svn.core
from mercurial import util  # needed for util.safehasattr below
25 25
26 26 # Some older versions of the Python bindings need to be
27 27 # explicitly initialized. But what we want to do probably
28 28 # won't work worth a darn against those libraries anyway!
29 29 svn.ra.initialize()
30 30
31 31 svn_config = svn.core.svn_config_get_config(None)
32 32
33 33
34 34 def _create_auth_baton(pool):
35 35 """Create a Subversion authentication baton. """
36 36 import svn.client
37 37 # Give the client context baton a suite of authentication
38 38 # providers.
39 39 providers = [
40 40 svn.client.get_simple_provider(pool),
41 41 svn.client.get_username_provider(pool),
42 42 svn.client.get_ssl_client_cert_file_provider(pool),
43 43 svn.client.get_ssl_client_cert_pw_file_provider(pool),
44 44 svn.client.get_ssl_server_trust_file_provider(pool),
45 45 ]
46 46 # Platform-dependent authentication methods
47 47 getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider',
48 48 None)
49 49 if getprovider:
50 50 # Available in svn >= 1.6
51 51 for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'):
52 52 for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'):
53 53 p = getprovider(name, type, pool)
54 54 if p:
55 55 providers.append(p)
56 56 else:
57 if hasattr(svn.client, 'get_windows_simple_provider'):
57 if util.safehasattr(svn.client, 'get_windows_simple_provider'):
58 58 providers.append(svn.client.get_windows_simple_provider(pool))
59 59
60 60 return svn.core.svn_auth_open(providers, pool)
61 61
62 62 class NotBranchError(SubversionException):
63 63 pass
64 64
65 65 class SvnRaTransport(object):
66 66 """
67 67 Open an ra connection to a Subversion repository.
68 68 """
69 69 def __init__(self, url="", ra=None):
70 70 self.pool = Pool()
71 71 self.svn_url = url
72 72 self.username = ''
73 73 self.password = ''
74 74
75 75 # Only Subversion 1.4 has reparent()
76 if ra is None or not hasattr(svn.ra, 'reparent'):
76 if ra is None or not util.safehasattr(svn.ra, 'reparent'):
77 77 self.client = svn.client.create_context(self.pool)
78 78 ab = _create_auth_baton(self.pool)
79 79 if False:
80 80 svn.core.svn_auth_set_parameter(
81 81 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
82 82 svn.core.svn_auth_set_parameter(
83 83 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
84 84 self.client.auth_baton = ab
85 85 self.client.config = svn_config
86 86 try:
87 87 self.ra = svn.client.open_ra_session(
88 88 self.svn_url.encode('utf8'),
89 89 self.client, self.pool)
90 90 except SubversionException, (inst, num):
91 91 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
92 92 svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
93 93 svn.core.SVN_ERR_BAD_URL):
94 94 raise NotBranchError(url)
95 95 raise
96 96 else:
97 97 self.ra = ra
98 98 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
99 99
100 100 class Reporter(object):
101 101 def __init__(self, reporter_data):
102 102 self._reporter, self._baton = reporter_data
103 103
104 104 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
105 105 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
106 106 path, revnum, start_empty, lock_token, pool)
107 107
108 108 def delete_path(self, path, pool=None):
109 109 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
110 110 path, pool)
111 111
112 112 def link_path(self, path, url, revision, start_empty, lock_token,
113 113 pool=None):
114 114 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
115 115 path, url, revision, start_empty, lock_token,
116 116 pool)
117 117
118 118 def finish_report(self, pool=None):
119 119 svn.ra.reporter2_invoke_finish_report(self._reporter,
120 120 self._baton, pool)
121 121
122 122 def abort_report(self, pool=None):
123 123 svn.ra.reporter2_invoke_abort_report(self._reporter,
124 124 self._baton, pool)
125 125
126 126 def do_update(self, revnum, path, *args, **kwargs):
127 127 return self.Reporter(svn.ra.do_update(self.ra, revnum, path,
128 128 *args, **kwargs))
@@ -1,335 +1,347
1 1 """automatically manage newlines in repository files
2 2
3 3 This extension allows you to manage the type of line endings (CRLF or
4 4 LF) that are used in the repository and in the local working
5 5 directory. That way you can get CRLF line endings on Windows and LF on
6 6 Unix/Mac, thereby letting everybody use their OS native line endings.
7 7
8 8 The extension reads its configuration from a versioned ``.hgeol``
9 9 configuration file found in the root of the working copy. The
10 10 ``.hgeol`` file use the same syntax as all other Mercurial
11 11 configuration files. It uses two sections, ``[patterns]`` and
12 12 ``[repository]``.
13 13
14 14 The ``[patterns]`` section specifies how line endings should be
15 15 converted between the working copy and the repository. The format is
16 16 specified by a file pattern. The first match is used, so put more
17 17 specific patterns first. The available line endings are ``LF``,
18 18 ``CRLF``, and ``BIN``.
19 19
20 20 Files with the declared format of ``CRLF`` or ``LF`` are always
21 21 checked out and stored in the repository in that format and files
22 22 declared to be binary (``BIN``) are left unchanged. Additionally,
23 23 ``native`` is an alias for checking out in the platform's default line
24 24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
25 25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
26 26 default behaviour; it is only needed if you need to override a later,
27 27 more general pattern.
28 28
29 29 The optional ``[repository]`` section specifies the line endings to
30 30 use for files stored in the repository. It has a single setting,
31 31 ``native``, which determines the storage line endings for files
32 32 declared as ``native`` in the ``[patterns]`` section. It can be set to
33 33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
34 34 that on Windows, files configured as ``native`` (``CRLF`` by default)
35 35 will be converted to ``LF`` when stored in the repository. Files
36 36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
37 37 are always stored as-is in the repository.
38 38
39 39 Example versioned ``.hgeol`` file::
40 40
41 41 [patterns]
42 42 **.py = native
43 43 **.vcproj = CRLF
44 44 **.txt = native
45 45 Makefile = LF
46 46 **.jpg = BIN
47 47
48 48 [repository]
49 49 native = LF
50 50
51 51 .. note::
52 52 The rules will first apply when files are touched in the working
53 53 copy, e.g. by updating to null and back to tip to touch all files.
54 54
55 The extension uses an optional ``[eol]`` section in your hgrc file
56 (not the ``.hgeol`` file) for settings that control the overall
57 behavior. There are two settings:
55 The extension uses an optional ``[eol]`` section read from both the
56 normal Mercurial configuration files and the ``.hgeol`` file, with the
57 latter overriding the former. You can use that section to control the
58 overall behavior. There are three settings:
58 59
59 60 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
60 61 ``CRLF`` to override the default interpretation of ``native`` for
61 62 checkout. This can be used with :hg:`archive` on Unix, say, to
62 63 generate an archive where files have line endings for Windows.
63 64
64 65 - ``eol.only-consistent`` (default True) can be set to False to make
65 66 the extension convert files with inconsistent EOLs. Inconsistent
66 67 means that there is both ``CRLF`` and ``LF`` present in the file.
67 68 Such files are normally not touched under the assumption that they
68 69 have mixed EOLs on purpose.
69 70
71 - ``eol.fix-trailing-newline`` (default False) can be set to True to
72 ensure that converted files end with an EOL character (either ``\\n``
73 or ``\\r\\n`` as per the configured patterns).
74
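Purely as an illustration, the three settings above would sit together
in an hgrc like this::

  [eol]
  native = LF
  only-consistent = False
  fix-trailing-newline = True
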
70 75 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
71 76 like the deprecated win32text extension does. This means that you can
72 77 disable win32text and enable eol and your filters will still work. You
73 78 only need to these filters until you have prepared a ``.hgeol`` file.
74 79
75 80 The ``win32text.forbid*`` hooks provided by the win32text extension
76 81 have been unified into a single hook named ``eol.checkheadshook``. The
77 82 hook will look up the expected line endings from the ``.hgeol`` file,
78 83 which means you must migrate to a ``.hgeol`` file before using
79 84 the hook. ``eol.checkheadshook`` only checks heads; intermediate
80 85 invalid revisions will be pushed. To forbid them completely, use the
81 86 ``eol.checkallhook`` hook. These hooks are best used as
82 87 ``pretxnchangegroup`` hooks.
83 88
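For instance, the stricter of the two hooks can be enabled with
something like::

  [hooks]
  pretxnchangegroup = python:hgext.eol.checkallhook
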
84 89 See :hg:`help patterns` for more information about the glob patterns
85 90 used.
86 91 """
87 92
88 93 from mercurial.i18n import _
89 94 from mercurial import util, config, extensions, match, error
90 95 import re, os
91 96
92 97 # Matches a lone LF, i.e., one that is not part of CRLF.
93 98 singlelf = re.compile('(^|[^\r])\n')
94 99 # Matches a single EOL which can either be a CRLF where repeated CR
95 100 # are removed or a LF. We do not care about old Macintosh files, so a
96 101 # stray CR is an error.
97 102 eolre = re.compile('\r*\n')
98 103
99 104
100 105 def inconsistenteol(data):
101 106 return '\r\n' in data and singlelf.search(data)
102 107
103 108 def tolf(s, params, ui, **kwargs):
104 109 """Filter to convert to LF EOLs."""
105 110 if util.binary(s):
106 111 return s
107 112 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
108 113 return s
114 if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
115 s = s + '\n'
109 116 return eolre.sub('\n', s)
110 117
111 118 def tocrlf(s, params, ui, **kwargs):
112 119 """Filter to convert to CRLF EOLs."""
113 120 if util.binary(s):
114 121 return s
115 122 if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s):
116 123 return s
124 if ui.configbool('eol', 'fix-trailing-newline', False) and s and s[-1] != '\n':
125 s = s + '\n'
117 126 return eolre.sub('\r\n', s)
118 127
119 128 def isbinary(s, params):
120 129 """Filter to do nothing with the file."""
121 130 return s
122 131
123 132 filters = {
124 133 'to-lf': tolf,
125 134 'to-crlf': tocrlf,
126 135 'is-binary': isbinary,
127 136 # The following provide backwards compatibility with win32text
128 137 'cleverencode:': tolf,
129 138 'cleverdecode:': tocrlf
130 139 }
131 140
132 141 class eolfile(object):
133 142 def __init__(self, ui, root, data):
134 143 self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
135 144 self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
136 145
137 146 self.cfg = config.config()
138 147 # Our files should not be touched. The pattern must be
139 148 # inserted first to override a '** = native' pattern.
140 149 self.cfg.set('patterns', '.hg*', 'BIN')
141 150 # We can then parse the user's patterns.
142 151 self.cfg.parse('.hgeol', data)
143 152
144 153 isrepolf = self.cfg.get('repository', 'native') != 'CRLF'
145 154 self._encode['NATIVE'] = isrepolf and 'to-lf' or 'to-crlf'
146 155 iswdlf = ui.config('eol', 'native', os.linesep) in ('LF', '\n')
147 156 self._decode['NATIVE'] = iswdlf and 'to-lf' or 'to-crlf'
148 157
149 158 include = []
150 159 exclude = []
151 160 for pattern, style in self.cfg.items('patterns'):
152 161 key = style.upper()
153 162 if key == 'BIN':
154 163 exclude.append(pattern)
155 164 else:
156 165 include.append(pattern)
157 166 # This will match the files for which we need to care
158 167 # about inconsistent newlines.
159 168 self.match = match.match(root, '', [], include, exclude)
160 169
161 def setfilters(self, ui):
170 def copytoui(self, ui):
162 171 for pattern, style in self.cfg.items('patterns'):
163 172 key = style.upper()
164 173 try:
165 174 ui.setconfig('decode', pattern, self._decode[key])
166 175 ui.setconfig('encode', pattern, self._encode[key])
167 176 except KeyError:
168 177 ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
169 178 % (style, self.cfg.source('patterns', pattern)))
179 # eol.only-consistent can be specified in ~/.hgrc or .hgeol
180 for k, v in self.cfg.items('eol'):
181 ui.setconfig('eol', k, v)
170 182
171 183 def checkrev(self, repo, ctx, files):
172 184 failed = []
173 185 for f in (files or ctx.files()):
174 186 if f not in ctx:
175 187 continue
176 188 for pattern, style in self.cfg.items('patterns'):
177 189 if not match.match(repo.root, '', [pattern])(f):
178 190 continue
179 191 target = self._encode[style.upper()]
180 192 data = ctx[f].data()
181 193 if (target == "to-lf" and "\r\n" in data
182 194 or target == "to-crlf" and singlelf.search(data)):
183 195 failed.append((str(ctx), target, f))
184 196 break
185 197 return failed
186 198
187 199 def parseeol(ui, repo, nodes):
188 200 try:
189 201 for node in nodes:
190 202 try:
191 203 if node is None:
192 204 # Cannot use workingctx.data() since it would load
193 205 # and cache the filters before we configure them.
194 206 data = repo.wfile('.hgeol').read()
195 207 else:
196 208 data = repo[node]['.hgeol'].data()
197 209 return eolfile(ui, repo.root, data)
198 210 except (IOError, LookupError):
199 211 pass
200 212 except error.ParseError, inst:
201 213 ui.warn(_("warning: ignoring .hgeol file due to parse error "
202 214 "at %s: %s\n") % (inst.args[1], inst.args[0]))
203 215 return None
204 216
205 217 def _checkhook(ui, repo, node, headsonly):
206 218 # Get revisions to check and touched files at the same time
207 219 files = set()
208 220 revs = set()
209 221 for rev in xrange(repo[node].rev(), len(repo)):
210 222 revs.add(rev)
211 223 if headsonly:
212 224 ctx = repo[rev]
213 225 files.update(ctx.files())
214 226 for pctx in ctx.parents():
215 227 revs.discard(pctx.rev())
216 228 failed = []
217 229 for rev in revs:
218 230 ctx = repo[rev]
219 231 eol = parseeol(ui, repo, [ctx.node()])
220 232 if eol:
221 233 failed.extend(eol.checkrev(repo, ctx, files))
222 234
223 235 if failed:
224 236 eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
225 237 msgs = []
226 238 for node, target, f in failed:
227 239 msgs.append(_(" %s in %s should not have %s line endings") %
228 240 (f, node, eols[target]))
229 241 raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
230 242
231 243 def checkallhook(ui, repo, node, hooktype, **kwargs):
232 244 """verify that files have expected EOLs"""
233 245 _checkhook(ui, repo, node, False)
234 246
235 247 def checkheadshook(ui, repo, node, hooktype, **kwargs):
236 248 """verify that files have expected EOLs"""
237 249 _checkhook(ui, repo, node, True)
238 250
239 251 # "checkheadshook" used to be called "hook"
240 252 hook = checkheadshook
241 253
242 254 def preupdate(ui, repo, hooktype, parent1, parent2):
243 255 #print "preupdate for %s: %s -> %s" % (repo.root, parent1, parent2)
244 256 repo.loadeol([parent1])
245 257 return False
246 258
247 259 def uisetup(ui):
248 260 ui.setconfig('hooks', 'preupdate.eol', preupdate)
249 261
250 262 def extsetup(ui):
251 263 try:
252 264 extensions.find('win32text')
253 265 ui.warn(_("the eol extension is incompatible with the "
254 266 "win32text extension\n"))
255 267 except KeyError:
256 268 pass
257 269
258 270
259 271 def reposetup(ui, repo):
260 272 uisetup(repo.ui)
261 273 #print "reposetup for", repo.root
262 274
263 275 if not repo.local():
264 276 return
265 277 for name, fn in filters.iteritems():
266 278 repo.adddatafilter(name, fn)
267 279
268 280 ui.setconfig('patch', 'eol', 'auto')
269 281
270 282 class eolrepo(repo.__class__):
271 283
272 284 def loadeol(self, nodes):
273 285 eol = parseeol(self.ui, self, nodes)
274 286 if eol is None:
275 287 return None
276 eol.setfilters(self.ui)
288 eol.copytoui(self.ui)
277 289 return eol.match
278 290
279 291 def _hgcleardirstate(self):
280 292 self._eolfile = self.loadeol([None, 'tip'])
281 293 if not self._eolfile:
282 294 self._eolfile = util.never
283 295 return
284 296
285 297 try:
286 298 cachemtime = os.path.getmtime(self.join("eol.cache"))
287 299 except OSError:
288 300 cachemtime = 0
289 301
290 302 try:
291 303 eolmtime = os.path.getmtime(self.wjoin(".hgeol"))
292 304 except OSError:
293 305 eolmtime = 0
294 306
295 307 if eolmtime > cachemtime:
296 308 ui.debug("eol: detected change in .hgeol\n")
297 309 wlock = None
298 310 try:
299 311 wlock = self.wlock()
300 312 for f in self.dirstate:
301 313 if self.dirstate[f] == 'n':
302 314 # all normal files need to be looked at
303 315 # again since the new .hgeol file might no
304 316 # longer match a file it matched before
305 317 self.dirstate.normallookup(f)
306 318 # Touch the cache to update mtime.
307 319 self.opener("eol.cache", "w").close()
308 320 wlock.release()
309 321 except error.LockUnavailable:
310 322 # If we cannot lock the repository and clear the
311 323 # dirstate, then a commit might not see all files
312 324 # as modified. But if we cannot lock the
313 325 # repository, then we can also not make a commit,
314 326 # so ignore the error.
315 327 pass
316 328
317 329 def commitctx(self, ctx, error=False):
318 330 for f in sorted(ctx.added() + ctx.modified()):
319 331 if not self._eolfile(f):
320 332 continue
321 333 try:
322 334 data = ctx[f].data()
323 335 except IOError:
324 336 continue
325 337 if util.binary(data):
326 338 # We should not abort here, since the user should
327 339 # be able to say "** = native" to automatically
328 340 # have all non-binary files taken care of.
329 341 continue
330 342 if inconsistenteol(data):
331 343 raise util.Abort(_("inconsistent newline style "
332 344 "in %s\n") % f)
333 345 return super(eolrepo, self).commitctx(ctx, error)
334 346 repo.__class__ = eolrepo
335 347 repo._hgcleardirstate()
@@ -1,89 +1,90
1 1 # __init__.py - inotify-based status acceleration for Linux
2 2 #
3 3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
4 4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''accelerate status report using Linux's inotify service'''
10 10
11 11 # todo: socket permissions
12 12
13 13 from mercurial.i18n import _
14 from mercurial import util
14 15 import server
15 16 from client import client, QueryFailed
16 17
17 18 def serve(ui, repo, **opts):
18 19 '''start an inotify server for this repository'''
19 20 server.start(ui, repo.dirstate, repo.root, opts)
20 21
21 22 def debuginotify(ui, repo, **opts):
22 23 '''debugging information for inotify extension
23 24
24 25 Prints the list of directories being watched by the inotify server.
25 26 '''
26 27 cli = client(ui, repo)
27 28 response = cli.debugquery()
28 29
29 30 ui.write(_('directories being watched:\n'))
30 31 for path in response:
31 32 ui.write((' %s/\n') % path)
32 33
33 34 def reposetup(ui, repo):
34 if not hasattr(repo, 'dirstate'):
35 if not util.safehasattr(repo, 'dirstate'):
35 36 return
36 37
37 38 class inotifydirstate(repo.dirstate.__class__):
38 39
39 40 # We'll set this to false after an unsuccessful attempt so that
40 41 # next calls of status() within the same instance don't try again
41 42 # to start an inotify server if it won't start.
42 43 _inotifyon = True
43 44
44 45 def status(self, match, subrepos, ignored, clean, unknown):
45 46 files = match.files()
46 47 if '.' in files:
47 48 files = []
48 49 if self._inotifyon and not ignored and not subrepos and not self._dirty:
49 50 cli = client(ui, repo)
50 51 try:
51 52 result = cli.statusquery(files, match, False,
52 53 clean, unknown)
53 54 except QueryFailed, instr:
54 55 ui.debug(str(instr))
55 56 # don't retry within the same hg instance
56 57 inotifydirstate._inotifyon = False
57 58 pass
58 59 else:
59 60 if ui.config('inotify', 'debug'):
60 61 r2 = super(inotifydirstate, self).status(
61 62 match, [], False, clean, unknown)
62 63 for c, a, b in zip('LMARDUIC', result, r2):
63 64 for f in a:
64 65 if f not in b:
65 66 ui.warn('*** inotify: %s +%s\n' % (c, f))
66 67 for f in b:
67 68 if f not in a:
68 69 ui.warn('*** inotify: %s -%s\n' % (c, f))
69 70 result = r2
70 71 return result
71 72 return super(inotifydirstate, self).status(
72 73 match, subrepos, ignored, clean, unknown)
73 74
74 75 repo.dirstate.__class__ = inotifydirstate
75 76
76 77 cmdtable = {
77 78 'debuginotify':
78 79 (debuginotify, [], ('hg debuginotify')),
79 80 '^inserve':
80 81 (serve,
81 82 [('d', 'daemon', None, _('run server in background')),
82 83 ('', 'daemon-pipefds', '',
83 84 _('used internally by daemon mode'), _('NUM')),
84 85 ('t', 'idle-timeout', '',
85 86 _('minutes to sit idle before exiting'), _('NUM')),
86 87 ('', 'pid-file', '',
87 88 _('name of file to write process ID to'), _('FILE'))],
88 89 _('hg inserve [OPTION]...')),
89 90 }
@@ -1,693 +1,701
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007-2010 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a DSCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an
15 15 # audience not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <http://mercurial.selenic.com/wiki/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Files to act upon/ignore are specified in the [keyword] section.
25 25 # Customized keyword template mappings in the [keywordmaps] section.
26 26 #
27 27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
28 28
29 29 '''expand keywords in tracked files
30 30
31 31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 32 tracked text files selected by your configuration.
33 33
34 34 Keywords are only expanded in local repositories and not stored in the
35 35 change history. The mechanism can be regarded as a convenience for the
36 36 current user or for archive distribution.
37 37
38 38 Keywords expand to the changeset data pertaining to the latest change
39 39 relative to the working directory parent of each file.
40 40
41 41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 42 sections of hgrc files.
43 43
44 44 Example::
45 45
46 46 [keyword]
47 47 # expand keywords in every python file except those matching "x*"
48 48 **.py =
49 49 x* = ignore
50 50
51 51 [keywordset]
52 52 # prefer svn- over cvs-like default keywordmaps
53 53 svn = True
54 54
55 55 .. note::
56 56 The more specific your filename patterns, the less speed you
57 57 lose in huge repositories.
58 58
59 59 For [keywordmaps] template mapping and expansion demonstration and
60 60 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 61 available templates and filters.
62 62
63 63 Three additional date template filters are provided:
64 64
65 65 :``utcdate``: "2006/09/18 15:13:13"
66 66 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 67 :``svnisodate``: "2006-09-18 08:13:13 -0700 (Mon, 18 Sep 2006)"
68 68
69 69 The default template mappings (view with :hg:`kwdemo -d`) can be
70 70 replaced with customized keywords and templates. Again, run
71 71 :hg:`kwdemo` to control the results of your configuration changes.
72 72
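As an illustration, a customized map reusing one of the defaults shown
by :hg:`kwdemo -d` could look like::

  [keywordmaps]
  Id = {file|basename},v {node|short} {date|utcdate} {author|user}
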
73 73 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 74 to avoid storing expanded keywords in the change history.
75 75
76 76 To force expansion after enabling it, or a configuration change, run
77 77 :hg:`kwexpand`.
78 78
79 79 Expansions spanning more than one line and incremental expansions,
80 80 like CVS' $Log$, are not supported. A keyword template map "Log =
81 81 {desc}" expands to the first line of the changeset description.
82 82 '''
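# A hypothetical session illustrating the docstring above; the file name,
# short hash, date and user are made-up values following the default
# cvs-like 'Id' template:
#
#   $ echo '# $Id$' > demo.py
#   $ hg add demo.py && hg commit -m 'add demo.py'
#   $ hg kwexpand demo.py
#   $ cat demo.py
#   # $Id: demo.py,v 9a4b5c6d7e8f 2006/09/18 15:13:13 alice $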
83 83
84 84 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
85 85 from mercurial import localrepo, match, patch, templatefilters, templater, util
86 86 from mercurial import scmutil
87 87 from mercurial.hgweb import webcommands
88 88 from mercurial.i18n import _
89 89 import os, re, shutil, tempfile
90 90
91 91 commands.optionalrepo += ' kwdemo'
92 92
93 93 cmdtable = {}
94 94 command = cmdutil.command(cmdtable)
95 95
96 96 # hg commands that do not act on keywords
97 97 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
98 98 ' outgoing push tip verify convert email glog')
99 99
100 100 # hg commands that trigger expansion only when writing to working dir,
101 101 # not when reading filelog, and unexpand when reading from working dir
102 102 restricted = 'merge kwexpand kwshrink record qrecord resolve transplant'
103 103
104 104 # names of extensions using dorecord
105 105 recordextensions = 'record'
106 106
107 107 colortable = {
108 108 'kwfiles.enabled': 'green bold',
109 109 'kwfiles.deleted': 'cyan bold underline',
110 110 'kwfiles.enabledunknown': 'green',
111 111 'kwfiles.ignored': 'bold',
112 112 'kwfiles.ignoredunknown': 'none'
113 113 }
114 114
115 115 # date like in cvs' $Date
116 116 def utcdate(text):
117 117 ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
118 118 '''
119 119 return util.datestr((text[0], 0), '%Y/%m/%d %H:%M:%S')
120 120 # date like in svn's $Date
121 121 def svnisodate(text):
122 122 ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
123 123 +0200 (Tue, 18 Aug 2009)".
124 124 '''
125 125 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
126 126 # date like in svn's $Id
127 127 def svnutcdate(text):
128 128 ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
129 129 11:00:13Z".
130 130 '''
131 131 return util.datestr((text[0], 0), '%Y-%m-%d %H:%M:%SZ')
132 132
133 133 templatefilters.filters.update({'utcdate': utcdate,
134 134 'svnisodate': svnisodate,
135 135 'svnutcdate': svnutcdate})
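# A minimal stdlib-only approximation (not Mercurial API) of what the two
# UTC filters above do: each receives a Mercurial-style (unixtime,
# tzoffset) pair, drops the offset, and formats the timestamp as UTC.
import time

def utcdate_approx(date):
    unixtime, _offset = date
    return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(unixtime))

assert utcdate_approx((1158592393, 0)) == '2006/09/18 15:13:13'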
136 136
137 137 # make keyword tools accessible
138 138 kwtools = {'templater': None, 'hgcmd': ''}
139 139
140 140 def _defaultkwmaps(ui):
141 141 '''Returns default keywordmaps according to keywordset configuration.'''
142 142 templates = {
143 143 'Revision': '{node|short}',
144 144 'Author': '{author|user}',
145 145 }
146 146 kwsets = ({
147 147 'Date': '{date|utcdate}',
148 148 'RCSfile': '{file|basename},v',
149 149 'RCSFile': '{file|basename},v', # kept for backwards compatibility
150 150 # with hg-keyword
151 151 'Source': '{root}/{file},v',
152 152 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
153 153 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
154 154 }, {
155 155 'Date': '{date|svnisodate}',
156 156 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
157 157 'LastChangedRevision': '{node|short}',
158 158 'LastChangedBy': '{author|user}',
159 159 'LastChangedDate': '{date|svnisodate}',
160 160 })
161 161 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
162 162 return templates
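# Note on the lookup above: ui.configbool() returns False or True, and a
# bool indexes the two-element kwsets tuple directly -- False selects the
# cvs-like set, True the svn-like one. With stand-in values:
#
#   assert ('cvs', 'svn')[False] == 'cvs'
#   assert ('cvs', 'svn')[True] == 'svn'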
163 163
164 164 def _shrinktext(text, subfunc):
165 165 '''Helper for keyword expansion removal in text.
166 166 Depending on subfunc also returns number of substitutions.'''
167 167 return subfunc(r'$\1$', text)
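# A standalone sketch of the shrink direction with re.subn; the regex and
# sample text are hypothetical but mirror rekwexp defined further down:
import re

_rekwexp = re.compile(r'\$(Id|Revision): [^$\n\r]*? \$')
_shrunk, _n = _rekwexp.subn(r'$\1$', '# $Id: demo.py,v 9a4b5c6d7e8f $\n')
assert _shrunk == '# $Id$\n' and _n == 1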
168 168
169 169 def _preselect(wstatus, changed):
170 170 '''Retrieves modified and added files from a working directory state
171 171 and returns the subset of each contained in the given changed files
172 172 retrieved from a change context.'''
173 173 modified, added = wstatus[:2]
174 174 modified = [f for f in modified if f in changed]
175 175 added = [f for f in added if f in changed]
176 176 return modified, added
177 177
178 178
179 179 class kwtemplater(object):
180 180 '''
181 181 Sets up keyword templates, corresponding keyword regex, and
182 182 provides keyword substitution functions.
183 183 '''
184 184
185 185 def __init__(self, ui, repo, inc, exc):
186 186 self.ui = ui
187 187 self.repo = repo
188 188 self.match = match.match(repo.root, '', [], inc, exc)
189 189 self.restrict = kwtools['hgcmd'] in restricted.split()
190 190 self.record = False
191 191
192 192 kwmaps = self.ui.configitems('keywordmaps')
193 193 if kwmaps: # override default templates
194 194 self.templates = dict((k, templater.parsestring(v, False))
195 195 for k, v in kwmaps)
196 196 else:
197 197 self.templates = _defaultkwmaps(self.ui)
198 198
199 199 @util.propertycache
200 200 def escape(self):
201 201 '''Returns bar-separated and escaped keywords.'''
202 202 return '|'.join(map(re.escape, self.templates.keys()))
203 203
204 204 @util.propertycache
205 205 def rekw(self):
206 206 '''Returns regex for unexpanded keywords.'''
207 207 return re.compile(r'\$(%s)\$' % self.escape)
208 208
209 209 @util.propertycache
210 210 def rekwexp(self):
211 211 '''Returns regex for expanded keywords.'''
212 212 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
213 213
214 214 def substitute(self, data, path, ctx, subfunc):
215 215 '''Replaces keywords in data with expanded template.'''
216 216 def kwsub(mobj):
217 217 kw = mobj.group(1)
218 218 ct = cmdutil.changeset_templater(self.ui, self.repo,
219 219 False, None, '', False)
220 220 ct.use_template(self.templates[kw])
221 221 self.ui.pushbuffer()
222 222 ct.show(ctx, root=self.repo.root, file=path)
223 223 ekw = templatefilters.firstline(self.ui.popbuffer())
224 224 return '$%s: %s $' % (kw, ekw)
225 225 return subfunc(kwsub, data)
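# A standalone sketch of the expand direction: in the extension the
# replacement text comes from the changeset templater, but a plain dict
# of made-up values shows the mechanics:
import re

_vals = {'Id': 'demo.py,v 9a4b5c6d7e8f 2006/09/18 15:13:13 alice'}
_rekw = re.compile(r'\$(%s)\$' % '|'.join(map(re.escape, _vals)))

def _kwsub(mobj):
    return '$%s: %s $' % (mobj.group(1), _vals[mobj.group(1)])

assert (_rekw.sub(_kwsub, '# $Id$') ==
        '# $Id: demo.py,v 9a4b5c6d7e8f 2006/09/18 15:13:13 alice $')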
226 226
227 227 def linkctx(self, path, fileid):
228 228 '''Similar to filelog.linkrev, but returns a changectx.'''
229 229 return self.repo.filectx(path, fileid=fileid).changectx()
230 230
231 231 def expand(self, path, node, data):
232 232 '''Returns data with keywords expanded.'''
233 233 if not self.restrict and self.match(path) and not util.binary(data):
234 234 ctx = self.linkctx(path, node)
235 235 return self.substitute(data, path, ctx, self.rekw.sub)
236 236 return data
237 237
238 238 def iskwfile(self, cand, ctx):
239 239 '''Returns subset of candidates which are configured for keyword
240 240 expansion and are not symbolic links.'''
241 241 return [f for f in cand if self.match(f) and not 'l' in ctx.flags(f)]
242 242
243 243 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
244 244 '''Overwrites selected files expanding/shrinking keywords.'''
245 245 if self.restrict or lookup or self.record: # exclude kw_copy
246 246 candidates = self.iskwfile(candidates, ctx)
247 247 if not candidates:
248 248 return
249 249 kwcmd = self.restrict and lookup # kwexpand/kwshrink
250 250 if self.restrict or expand and lookup:
251 251 mf = ctx.manifest()
252 lctx = ctx
253 re_kw = (self.restrict or rekw) and self.rekw or self.rekwexp
254 msg = (expand and _('overwriting %s expanding keywords\n')
255 or _('overwriting %s shrinking keywords\n'))
252 if self.restrict or rekw:
253 re_kw = self.rekw
254 else:
255 re_kw = self.rekwexp
256 if expand:
257 msg = _('overwriting %s expanding keywords\n')
258 else:
259 msg = _('overwriting %s shrinking keywords\n')
256 260 for f in candidates:
257 261 if self.restrict:
258 262 data = self.repo.file(f).read(mf[f])
259 263 else:
260 264 data = self.repo.wread(f)
261 265 if util.binary(data):
262 266 continue
263 267 if expand:
264 268 if lookup:
265 lctx = self.linkctx(f, mf[f])
266 data, found = self.substitute(data, f, lctx, re_kw.subn)
269 ctx = self.linkctx(f, mf[f])
270 data, found = self.substitute(data, f, ctx, re_kw.subn)
267 271 elif self.restrict:
268 272 found = re_kw.search(data)
269 273 else:
270 274 data, found = _shrinktext(data, re_kw.subn)
271 275 if found:
272 276 self.ui.note(msg % f)
273 fpath = self.repo.wjoin(f)
274 mode = os.lstat(fpath).st_mode
275 self.repo.wwrite(f, data, ctx.flags(f))
276 os.chmod(fpath, mode)
277 fp = self.repo.wopener(f, "wb", atomictemp=True)
278 fp.write(data)
279 fp.close()
277 280 if kwcmd:
278 281 self.repo.dirstate.normal(f)
279 282 elif self.record:
280 283 self.repo.dirstate.normallookup(f)
281 284
282 285 def shrink(self, fname, text):
283 286 '''Returns text with all keyword substitutions removed.'''
284 287 if self.match(fname) and not util.binary(text):
285 288 return _shrinktext(text, self.rekwexp.sub)
286 289 return text
287 290
288 291 def shrinklines(self, fname, lines):
289 292 '''Returns lines with keyword substitutions removed.'''
290 293 if self.match(fname):
291 294 text = ''.join(lines)
292 295 if not util.binary(text):
293 296 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
294 297 return lines
295 298
296 299 def wread(self, fname, data):
297 300 '''If in restricted mode returns data read from wdir with
298 301 keyword substitutions removed.'''
299 return self.restrict and self.shrink(fname, data) or data
302 if self.restrict:
303 return self.shrink(fname, data)
304 return data
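# The rewrite above trades the 'a and b or c' idiom for an explicit if:
# the idiom silently falls through to c whenever b is falsy (for example
# an empty string), so the explicit form is safer as well as clearer:
def _pick_old(flag, data):
    return flag and data or 'fallback'

def _pick_new(flag, data):
    if flag:
        return data
    return 'fallback'

assert _pick_old(True, '') == 'fallback'   # surprising result
assert _pick_new(True, '') == ''           # intended result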
300 305
301 306 class kwfilelog(filelog.filelog):
302 307 '''
303 308 Subclass of filelog to hook into its read, add, cmp methods.
304 309 Keywords are "stored" unexpanded, and processed on reading.
305 310 '''
306 311 def __init__(self, opener, kwt, path):
307 312 super(kwfilelog, self).__init__(opener, path)
308 313 self.kwt = kwt
309 314 self.path = path
310 315
311 316 def read(self, node):
312 317 '''Expands keywords when reading filelog.'''
313 318 data = super(kwfilelog, self).read(node)
314 319 if self.renamed(node):
315 320 return data
316 321 return self.kwt.expand(self.path, node, data)
317 322
318 323 def add(self, text, meta, tr, link, p1=None, p2=None):
319 324 '''Removes keyword substitutions when adding to filelog.'''
320 325 text = self.kwt.shrink(self.path, text)
321 326 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
322 327
323 328 def cmp(self, node, text):
324 329 '''Removes keyword substitutions for comparison.'''
325 330 text = self.kwt.shrink(self.path, text)
326 331 return super(kwfilelog, self).cmp(node, text)
327 332
328 def _status(ui, repo, kwt, *pats, **opts):
333 def _status(ui, repo, wctx, kwt, *pats, **opts):
329 334 '''Bails out if [keyword] configuration is not active.
330 335 Returns status of working directory.'''
331 336 if kwt:
332 return repo.status(match=scmutil.match(repo[None], pats, opts), clean=True,
337 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
333 338 unknown=opts.get('unknown') or opts.get('all'))
334 339 if ui.configitems('keyword'):
335 340 raise util.Abort(_('[keyword] patterns cannot match'))
336 341 raise util.Abort(_('no [keyword] patterns configured'))
337 342
338 343 def _kwfwrite(ui, repo, expand, *pats, **opts):
339 344 '''Selects files and passes them to kwtemplater.overwrite.'''
340 345 wctx = repo[None]
341 346 if len(wctx.parents()) > 1:
342 347 raise util.Abort(_('outstanding uncommitted merge'))
343 348 kwt = kwtools['templater']
344 349 wlock = repo.wlock()
345 350 try:
346 status = _status(ui, repo, kwt, *pats, **opts)
351 status = _status(ui, repo, wctx, kwt, *pats, **opts)
347 352 modified, added, removed, deleted, unknown, ignored, clean = status
348 353 if modified or added or removed or deleted:
349 354 raise util.Abort(_('outstanding uncommitted changes'))
350 355 kwt.overwrite(wctx, clean, True, expand)
351 356 finally:
352 357 wlock.release()
353 358
354 359 @command('kwdemo',
355 360 [('d', 'default', None, _('show default keyword template maps')),
356 361 ('f', 'rcfile', '',
357 362 _('read maps from rcfile'), _('FILE'))],
358 363 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'))
359 364 def demo(ui, repo, *args, **opts):
360 365 '''print [keywordmaps] configuration and an expansion example
361 366
362 367 Show current, custom, or default keyword template maps and their
363 368 expansions.
364 369
365 370 Extend the current configuration by specifying maps as arguments
366 371 and using -f/--rcfile to source an external hgrc file.
367 372
368 373 Use -d/--default to disable current configuration.
369 374
370 375 See :hg:`help templates` for information on templates and filters.
371 376 '''
372 377 def demoitems(section, items):
373 378 ui.write('[%s]\n' % section)
374 379 for k, v in sorted(items):
375 380 ui.write('%s = %s\n' % (k, v))
376 381
377 382 fn = 'demo.txt'
378 383 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
379 384 ui.note(_('creating temporary repository at %s\n') % tmpdir)
380 385 repo = localrepo.localrepository(ui, tmpdir, True)
381 386 ui.setconfig('keyword', fn, '')
382 387 svn = ui.configbool('keywordset', 'svn')
383 388 # explicitly set keywordset for demo output
384 389 ui.setconfig('keywordset', 'svn', svn)
385 390
386 391 uikwmaps = ui.configitems('keywordmaps')
387 392 if args or opts.get('rcfile'):
388 393 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
389 394 if uikwmaps:
390 395 ui.status(_('\textending current template maps\n'))
391 396 if opts.get('default') or not uikwmaps:
392 397 if svn:
393 398 ui.status(_('\toverriding default svn keywordset\n'))
394 399 else:
395 400 ui.status(_('\toverriding default cvs keywordset\n'))
396 401 if opts.get('rcfile'):
397 402 ui.readconfig(opts.get('rcfile'))
398 403 if args:
399 404 # simulate hgrc parsing
400 405 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
401 406 fp = repo.opener('hgrc', 'w')
402 407 fp.writelines(rcmaps)
403 408 fp.close()
404 409 ui.readconfig(repo.join('hgrc'))
405 410 kwmaps = dict(ui.configitems('keywordmaps'))
406 411 elif opts.get('default'):
407 412 if svn:
408 413 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
409 414 else:
410 415 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
411 416 kwmaps = _defaultkwmaps(ui)
412 417 if uikwmaps:
413 418 ui.status(_('\tdisabling current template maps\n'))
414 419 for k, v in kwmaps.iteritems():
415 420 ui.setconfig('keywordmaps', k, v)
416 421 else:
417 422 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
418 kwmaps = dict(uikwmaps) or _defaultkwmaps(ui)
423 if uikwmaps:
424 kwmaps = dict(uikwmaps)
425 else:
426 kwmaps = _defaultkwmaps(ui)
419 427
420 428 uisetup(ui)
421 429 reposetup(ui, repo)
422 430 ui.write('[extensions]\nkeyword =\n')
423 431 demoitems('keyword', ui.configitems('keyword'))
424 432 demoitems('keywordset', ui.configitems('keywordset'))
425 433 demoitems('keywordmaps', kwmaps.iteritems())
426 434 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
427 435 repo.wopener.write(fn, keywords)
428 436 repo[None].add([fn])
429 437 ui.note(_('\nkeywords written to %s:\n') % fn)
430 438 ui.note(keywords)
431 439 repo.dirstate.setbranch('demobranch')
432 440 for name, cmd in ui.configitems('hooks'):
433 441 if name.split('.', 1)[0].find('commit') > -1:
434 442 repo.ui.setconfig('hooks', name, '')
435 443 msg = _('hg keyword configuration and expansion example')
436 444 ui.note("hg ci -m '%s'\n" % msg)
437 445 repo.commit(text=msg)
438 446 ui.status(_('\n\tkeywords expanded\n'))
439 447 ui.write(repo.wread(fn))
440 448 shutil.rmtree(tmpdir, ignore_errors=True)
441 449
442 450 @command('kwexpand', commands.walkopts, _('hg kwexpand [OPTION]... [FILE]...'))
443 451 def expand(ui, repo, *pats, **opts):
444 452 '''expand keywords in the working directory
445 453
446 454 Run after (re)enabling keyword expansion.
447 455
448 456 kwexpand refuses to run if given files contain local changes.
449 457 '''
450 458 # 3rd argument sets expansion to True
451 459 _kwfwrite(ui, repo, True, *pats, **opts)
452 460
453 461 @command('kwfiles',
454 462 [('A', 'all', None, _('show keyword status flags of all files')),
455 463 ('i', 'ignore', None, _('show files excluded from expansion')),
456 464 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
457 465 ] + commands.walkopts,
458 466 _('hg kwfiles [OPTION]... [FILE]...'))
459 467 def files(ui, repo, *pats, **opts):
460 468 '''show files configured for keyword expansion
461 469
462 470 List which files in the working directory are matched by the
463 471 [keyword] configuration patterns.
464 472
465 473 Useful to prevent inadvertent keyword expansion and to speed up
466 474 execution by including only files that are actual candidates for
467 475 expansion.
468 476
469 477 See :hg:`help keyword` on how to construct patterns both for
470 478 inclusion and exclusion of files.
471 479
472 480 With -A/--all and -v/--verbose the codes used to show the status
473 481 of files are::
474 482
475 483 K = keyword expansion candidate
476 484 k = keyword expansion candidate (not tracked)
477 485 I = ignored
478 486 i = ignored (not tracked)
479 487 '''
480 488 kwt = kwtools['templater']
481 status = _status(ui, repo, kwt, *pats, **opts)
489 wctx = repo[None]
490 status = _status(ui, repo, wctx, kwt, *pats, **opts)
482 491 cwd = pats and repo.getcwd() or ''
483 492 modified, added, removed, deleted, unknown, ignored, clean = status
484 493 files = []
485 494 if not opts.get('unknown') or opts.get('all'):
486 495 files = sorted(modified + added + clean)
487 wctx = repo[None]
488 496 kwfiles = kwt.iskwfile(files, wctx)
489 497 kwdeleted = kwt.iskwfile(deleted, wctx)
490 498 kwunknown = kwt.iskwfile(unknown, wctx)
491 499 if not opts.get('ignore') or opts.get('all'):
492 500 showfiles = kwfiles, kwdeleted, kwunknown
493 501 else:
494 502 showfiles = [], [], []
495 503 if opts.get('all') or opts.get('ignore'):
496 504 showfiles += ([f for f in files if f not in kwfiles],
497 505 [f for f in unknown if f not in kwunknown])
498 506 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
499 507 kwstates = zip('K!kIi', showfiles, kwlabels)
500 508 for char, filenames, kwstate in kwstates:
501 509 fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
502 510 for f in filenames:
503 511 ui.write(fmt % repo.pathto(f, cwd), label='kwfiles.' + kwstate)
504 512
505 513 @command('kwshrink', commands.walkopts, _('hg kwshrink [OPTION]... [FILE]...'))
506 514 def shrink(ui, repo, *pats, **opts):
507 515 '''revert expanded keywords in the working directory
508 516
509 517 Must be run before changing/disabling active keywords.
510 518
511 519 kwshrink refuses to run if given files contain local changes.
512 520 '''
513 521 # 3rd argument sets expansion to False
514 522 _kwfwrite(ui, repo, False, *pats, **opts)
515 523
516 524
517 525 def uisetup(ui):
518 526 ''' Monkeypatches dispatch._parse to retrieve user command.'''
519 527
520 528 def kwdispatch_parse(orig, ui, args):
521 529 '''Monkeypatch dispatch._parse to obtain running hg command.'''
522 530 cmd, func, args, options, cmdoptions = orig(ui, args)
523 531 kwtools['hgcmd'] = cmd
524 532 return cmd, func, args, options, cmdoptions
525 533
526 534 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
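# A simplified standalone sketch of the wrapfunction pattern used above
# (the real mercurial.extensions.wrapfunction does more bookkeeping):
# the wrapper receives the original callable as its first argument.
def _wrapfunction(container, funcname, wrapper):
    origfn = getattr(container, funcname)
    def wrap(*args, **kwargs):
        return wrapper(origfn, *args, **kwargs)
    setattr(container, funcname, wrap)
    return origfn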
527 535
528 536 def reposetup(ui, repo):
529 537 '''Sets up repo as kwrepo for keyword substitution.
530 538 Overrides file method to return kwfilelog instead of filelog
531 539 if file matches user configuration.
532 540 Wraps commit to overwrite configured files with updated
533 541 keyword substitutions.
534 542 Monkeypatches patch and webcommands.'''
535 543
536 544 try:
537 545 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
538 546 or '.hg' in util.splitpath(repo.root)
539 547 or repo._url.startswith('bundle:')):
540 548 return
541 549 except AttributeError:
542 550 pass
543 551
544 552 inc, exc = [], ['.hg*']
545 553 for pat, opt in ui.configitems('keyword'):
546 554 if opt != 'ignore':
547 555 inc.append(pat)
548 556 else:
549 557 exc.append(pat)
550 558 if not inc:
551 559 return
552 560
553 561 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
554 562
555 563 class kwrepo(repo.__class__):
556 564 def file(self, f):
557 565 if f[0] == '/':
558 566 f = f[1:]
559 567 return kwfilelog(self.sopener, kwt, f)
560 568
561 569 def wread(self, filename):
562 570 data = super(kwrepo, self).wread(filename)
563 571 return kwt.wread(filename, data)
564 572
565 573 def commit(self, *args, **opts):
566 574 # use custom commitctx for user commands
567 575 # other extensions can still wrap repo.commitctx directly
568 576 self.commitctx = self.kwcommitctx
569 577 try:
570 578 return super(kwrepo, self).commit(*args, **opts)
571 579 finally:
572 580 del self.commitctx
573 581
574 582 def kwcommitctx(self, ctx, error=False):
575 583 n = super(kwrepo, self).commitctx(ctx, error)
576 584 # no lock needed, only called from repo.commit() which already locks
577 585 if not kwt.record:
578 586 restrict = kwt.restrict
579 587 kwt.restrict = True
580 588 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
581 589 False, True)
582 590 kwt.restrict = restrict
583 591 return n
584 592
585 def rollback(self, dryrun=False):
593 def rollback(self, dryrun=False, force=False):
586 594 wlock = self.wlock()
587 595 try:
588 596 if not dryrun:
589 597 changed = self['.'].files()
590 ret = super(kwrepo, self).rollback(dryrun)
598 ret = super(kwrepo, self).rollback(dryrun, force)
591 599 if not dryrun:
592 600 ctx = self['.']
593 601 modified, added = _preselect(self[None].status(), changed)
594 602 kwt.overwrite(ctx, modified, True, True)
595 603 kwt.overwrite(ctx, added, True, False)
596 604 return ret
597 605 finally:
598 606 wlock.release()
599 607
600 608 # monkeypatches
601 609 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
602 610 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
603 611 rejects or conflicts due to expanded keywords in working dir.'''
604 612 orig(self, ui, gp, backend, store, eolmode)
605 613 # shrink keywords read from working dir
606 614 self.lines = kwt.shrinklines(self.fname, self.lines)
607 615
608 616 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
609 617 opts=None, prefix=''):
610 618 '''Monkeypatch patch.diff to avoid expansion.'''
611 619 kwt.restrict = True
612 620 return orig(repo, node1, node2, match, changes, opts, prefix)
613 621
614 622 def kwweb_skip(orig, web, req, tmpl):
615 623 '''Wraps webcommands.x turning off keyword expansion.'''
616 624 kwt.match = util.never
617 625 return orig(web, req, tmpl)
618 626
619 627 def kw_copy(orig, ui, repo, pats, opts, rename=False):
620 628 '''Wraps cmdutil.copy so that copy/rename destinations do not
621 629 contain expanded keywords.
622 630 Note that the source of a regular file destination may also be a
623 631 symlink:
624 632 hg cp sym x -> x is symlink
625 633 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
626 634 For the latter we have to follow the symlink to find out whether its
627 635 target is configured for expansion and we therefore must unexpand the
628 636 keywords in the destination.'''
629 637 orig(ui, repo, pats, opts, rename)
630 638 if opts.get('dry_run'):
631 639 return
632 640 wctx = repo[None]
633 641 cwd = repo.getcwd()
634 642
635 643 def haskwsource(dest):
636 644 '''Returns true if dest is a regular file and configured for
637 645 expansion or a symlink which points to a file configured for
638 646 expansion. '''
639 647 source = repo.dirstate.copied(dest)
640 648 if 'l' in wctx.flags(source):
641 649 source = scmutil.canonpath(repo.root, cwd,
642 650 os.path.realpath(source))
643 651 return kwt.match(source)
644 652
645 653 candidates = [f for f in repo.dirstate.copies() if
646 654 not 'l' in wctx.flags(f) and haskwsource(f)]
647 655 kwt.overwrite(wctx, candidates, False, False)
648 656
649 657 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
650 658 '''Wraps record.dorecord expanding keywords after recording.'''
651 659 wlock = repo.wlock()
652 660 try:
653 661 # record returns 0 even when nothing has changed
654 662 # therefore compare nodes before and after
655 663 kwt.record = True
656 664 ctx = repo['.']
657 665 wstatus = repo[None].status()
658 666 ret = orig(ui, repo, commitfunc, *pats, **opts)
659 667 recctx = repo['.']
660 668 if ctx != recctx:
661 669 modified, added = _preselect(wstatus, recctx.files())
662 670 kwt.restrict = False
663 671 kwt.overwrite(recctx, modified, False, True)
664 672 kwt.overwrite(recctx, added, False, True, True)
665 673 kwt.restrict = True
666 674 return ret
667 675 finally:
668 676 wlock.release()
669 677
670 678 def kwfilectx_cmp(orig, self, fctx):
671 679 # keyword affects data size, comparing wdir and filelog size does
672 680 # not make sense
673 681 if (fctx._filerev is None and
674 682 (self._repo._encodefilterpats or
675 683 kwt.match(fctx.path()) and not 'l' in fctx.flags()) or
676 684 self.size() == fctx.size()):
677 685 return self._filelog.cmp(self._filenode, fctx.data())
678 686 return True
679 687
680 688 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
681 689 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
682 690 extensions.wrapfunction(patch, 'diff', kw_diff)
683 691 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
684 692 for c in 'annotate changeset rev filediff diff'.split():
685 693 extensions.wrapfunction(webcommands, c, kwweb_skip)
686 694 for name in recordextensions.split():
687 695 try:
688 696 record = extensions.find(name)
689 697 extensions.wrapfunction(record, 'dorecord', kw_dorecord)
690 698 except KeyError:
691 699 pass
692 700
693 701 repo.__class__ = kwrepo
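# A standalone sketch of the repo.__class__ swap performed above: an
# existing instance picks up the overriding methods without being rebuilt.
class _Base(object):
    def ping(self):
        return 'base'

_obj = _Base()

class _Patched(_obj.__class__):
    def ping(self):
        return 'patched ' + super(_Patched, self).ping()

_obj.__class__ = _Patched
assert _obj.ping() == 'patched base'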
(The remaining files in this changeset were truncated by the viewer and their diffs are not shown; among them, mercurial/templates/map-cmdline.bisect was copied from mercurial/templates/map-cmdline.default.)