##// END OF EJS Templates
move encoding bits from util to encoding...
Matt Mackall -
r7948:de377b1a default
parent child Browse files
Show More
@@ -0,0 +1,77 b''
1 """
2 encoding.py - character transcoding support for Mercurial
3
4 Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
5
6 This software may be used and distributed according to the terms of
7 the GNU General Public License version 2, incorporated herein by
8 reference.
9 """
10
11 import sys, unicodedata, locale, os, error
12
# Some platforms report the ASCII codec under nonstandard names;
# normalize those to 'ascii'.
_encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'}

try:
    # HGENCODING, when set, overrides any locale-derived guess.
    encoding = os.environ.get("HGENCODING")
    if sys.platform == 'darwin' and not encoding:
        # On darwin, getpreferredencoding ignores the locale environment and
        # always returns mac-roman. We override this if the environment is
        # not C (has been customized by the user).
        locale.setlocale(locale.LC_CTYPE, '')
        encoding = locale.getlocale()[1]
    if not encoding:
        encoding = locale.getpreferredencoding() or 'ascii'
    encoding = _encodingfixup.get(encoding, encoding)
except locale.Error:
    # an unsupported/garbled locale: fall back to plain ASCII
    encoding = 'ascii'
# error handler for decoding local strings: 'strict', 'replace' or 'ignore'
encodingmode = os.environ.get("HGENCODINGMODE", "strict")
# encoding tried for repository data written before locale support existed
fallbackencoding = 'ISO-8859-1'
30
31 def tolocal(s):
32 """
33 Convert a string from internal UTF-8 to local encoding
34
35 All internal strings should be UTF-8 but some repos before the
36 implementation of locale support may contain latin1 or possibly
37 other character sets. We attempt to decode everything strictly
38 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
39 replace unknown characters.
40 """
41 for e in ('UTF-8', fallbackencoding):
42 try:
43 u = s.decode(e) # attempt strict decoding
44 return u.encode(encoding, "replace")
45 except LookupError, k:
46 raise error.Abort("%s, please check your locale settings" % k)
47 except UnicodeDecodeError:
48 pass
49 u = s.decode("utf-8", "replace") # last ditch
50 return u.encode(encoding, "replace")
51
52 def fromlocal(s):
53 """
54 Convert a string from the local character encoding to UTF-8
55
56 We attempt to decode strings using the encoding mode set by
57 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
58 characters will cause an error message. Other modes include
59 'replace', which replaces unknown characters with a special
60 Unicode character, and 'ignore', which drops the character.
61 """
62 try:
63 return s.decode(encoding, encodingmode).encode("utf-8")
64 except UnicodeDecodeError, inst:
65 sub = s[max(0, inst.start-10):inst.start+10]
66 raise error.Abort("decoding near '%s': %s!" % (sub, inst))
67 except LookupError, k:
68 raise error.Abort("%s, please check your locale settings" % k)
69
def colwidth(s):
    "Find the column width of a UTF-8 string for display"
    decoded = s.decode(encoding, 'replace')
    eaw = getattr(unicodedata, 'east_asian_width', None)
    if eaw is None:
        # old Python without east_asian_width: assume one column each
        return len(decoded)
    # Wide (W) and Fullwidth (F) characters occupy two columns.
    return sum([eaw(c) in 'WF' and 2 or 1 for c in decoded])
77
@@ -1,341 +1,341 b''
1 1 # convcmd - convert extension commands definition
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from common import NoRepo, MissingTool, SKIPREV, mapfile
9 9 from cvs import convert_cvs
10 10 from darcs import darcs_source
11 11 from git import convert_git
12 12 from hg import mercurial_source, mercurial_sink
13 13 from subversion import svn_source, svn_sink
14 14 from monotone import monotone_source
15 15 from gnuarch import gnuarch_source
16 16 from bzr import bzr_source
17 17 from p4 import p4_source
18 18 import filemap
19 19
20 20 import os, shutil
21 from mercurial import hg, util
21 from mercurial import hg, util, encoding
22 22 from mercurial.i18n import _
23 23
24 24 orig_encoding = 'ascii'
25 25
26 26 def recode(s):
27 27 if isinstance(s, unicode):
28 28 return s.encode(orig_encoding, 'replace')
29 29 else:
30 30 return s.decode('utf-8').encode(orig_encoding, 'replace')
31 31
32 32 source_converters = [
33 33 ('cvs', convert_cvs),
34 34 ('git', convert_git),
35 35 ('svn', svn_source),
36 36 ('hg', mercurial_source),
37 37 ('darcs', darcs_source),
38 38 ('mtn', monotone_source),
39 39 ('gnuarch', gnuarch_source),
40 40 ('bzr', bzr_source),
41 41 ('p4', p4_source),
42 42 ]
43 43
44 44 sink_converters = [
45 45 ('hg', mercurial_sink),
46 46 ('svn', svn_sink),
47 47 ]
48 48
49 49 def convertsource(ui, path, type, rev):
50 50 exceptions = []
51 51 for name, source in source_converters:
52 52 try:
53 53 if not type or name == type:
54 54 return source(ui, path, rev)
55 55 except (NoRepo, MissingTool), inst:
56 56 exceptions.append(inst)
57 57 if not ui.quiet:
58 58 for inst in exceptions:
59 59 ui.write("%s\n" % inst)
60 60 raise util.Abort(_('%s: missing or unsupported repository') % path)
61 61
62 62 def convertsink(ui, path, type):
63 63 for name, sink in sink_converters:
64 64 try:
65 65 if not type or name == type:
66 66 return sink(ui, path)
67 67 except NoRepo, inst:
68 68 ui.note(_("convert: %s\n") % inst)
69 69 raise util.Abort(_('%s: unknown repository type') % path)
70 70
71 71 class converter(object):
72 72 def __init__(self, ui, source, dest, revmapfile, opts):
73 73
74 74 self.source = source
75 75 self.dest = dest
76 76 self.ui = ui
77 77 self.opts = opts
78 78 self.commitcache = {}
79 79 self.authors = {}
80 80 self.authorfile = None
81 81
82 82 self.map = mapfile(ui, revmapfile)
83 83
84 84 # Read first the dst author map if any
85 85 authorfile = self.dest.authorfile()
86 86 if authorfile and os.path.exists(authorfile):
87 87 self.readauthormap(authorfile)
88 88 # Extend/Override with new author map if necessary
89 89 if opts.get('authors'):
90 90 self.readauthormap(opts.get('authors'))
91 91 self.authorfile = self.dest.authorfile()
92 92
93 93 self.splicemap = mapfile(ui, opts.get('splicemap'))
94 94
95 95 def walktree(self, heads):
96 96 '''Return a mapping that identifies the uncommitted parents of every
97 97 uncommitted changeset.'''
98 98 visit = heads
99 99 known = {}
100 100 parents = {}
101 101 while visit:
102 102 n = visit.pop(0)
103 103 if n in known or n in self.map: continue
104 104 known[n] = 1
105 105 commit = self.cachecommit(n)
106 106 parents[n] = []
107 107 for p in commit.parents:
108 108 parents[n].append(p)
109 109 visit.append(p)
110 110
111 111 return parents
112 112
113 113 def toposort(self, parents):
114 114 '''Return an ordering such that every uncommitted changeset is
115 115         preceeded by all its uncommitted ancestors.'''
116 116 visit = parents.keys()
117 117 seen = {}
118 118 children = {}
119 119 actives = []
120 120
121 121 while visit:
122 122 n = visit.pop(0)
123 123 if n in seen: continue
124 124 seen[n] = 1
125 125 # Ensure that nodes without parents are present in the 'children'
126 126 # mapping.
127 127 children.setdefault(n, [])
128 128 hasparent = False
129 129 for p in parents[n]:
130 130 if not p in self.map:
131 131 visit.append(p)
132 132 hasparent = True
133 133 children.setdefault(p, []).append(n)
134 134 if not hasparent:
135 135 actives.append(n)
136 136
137 137 del seen
138 138 del visit
139 139
140 140 if self.opts.get('datesort'):
141 141 dates = {}
142 142 def getdate(n):
143 143 if n not in dates:
144 144 dates[n] = util.parsedate(self.commitcache[n].date)
145 145 return dates[n]
146 146
147 147 def picknext(nodes):
148 148 return min([(getdate(n), n) for n in nodes])[1]
149 149 else:
150 150 prev = [None]
151 151 def picknext(nodes):
152 152 # Return the first eligible child of the previously converted
153 153 # revision, or any of them.
154 154 next = nodes[0]
155 155 for n in nodes:
156 156 if prev[0] in parents[n]:
157 157 next = n
158 158 break
159 159 prev[0] = next
160 160 return next
161 161
162 162 s = []
163 163 pendings = {}
164 164 while actives:
165 165 n = picknext(actives)
166 166 actives.remove(n)
167 167 s.append(n)
168 168
169 169 # Update dependents list
170 170 for c in children.get(n, []):
171 171 if c not in pendings:
172 172 pendings[c] = [p for p in parents[c] if p not in self.map]
173 173 try:
174 174 pendings[c].remove(n)
175 175 except ValueError:
176 176 raise util.Abort(_('cycle detected between %s and %s')
177 177 % (recode(c), recode(n)))
178 178 if not pendings[c]:
179 179 # Parents are converted, node is eligible
180 180 actives.insert(0, c)
181 181 pendings[c] = None
182 182
183 183 if len(s) != len(parents):
184 184 raise util.Abort(_("not all revisions were sorted"))
185 185
186 186 return s
187 187
188 188 def writeauthormap(self):
189 189 authorfile = self.authorfile
190 190 if authorfile:
191 191 self.ui.status(_('Writing author map file %s\n') % authorfile)
192 192 ofile = open(authorfile, 'w+')
193 193 for author in self.authors:
194 194 ofile.write("%s=%s\n" % (author, self.authors[author]))
195 195 ofile.close()
196 196
197 197 def readauthormap(self, authorfile):
198 198 afile = open(authorfile, 'r')
199 199 for line in afile:
200 200 if line.strip() == '':
201 201 continue
202 202 try:
203 203 srcauthor, dstauthor = line.split('=', 1)
204 204 srcauthor = srcauthor.strip()
205 205 dstauthor = dstauthor.strip()
206 206 if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
207 207 self.ui.status(
208 208 _('Overriding mapping for author %s, was %s, will be %s\n')
209 209 % (srcauthor, self.authors[srcauthor], dstauthor))
210 210 else:
211 211 self.ui.debug(_('mapping author %s to %s\n')
212 212 % (srcauthor, dstauthor))
213 213 self.authors[srcauthor] = dstauthor
214 214 except IndexError:
215 215 self.ui.warn(
216 216 _('Ignoring bad line in author map file %s: %s\n')
217 217 % (authorfile, line.rstrip()))
218 218 afile.close()
219 219
220 220 def cachecommit(self, rev):
221 221 commit = self.source.getcommit(rev)
222 222 commit.author = self.authors.get(commit.author, commit.author)
223 223 self.commitcache[rev] = commit
224 224 return commit
225 225
226 226 def copy(self, rev):
227 227 commit = self.commitcache[rev]
228 228
229 229 changes = self.source.getchanges(rev)
230 230 if isinstance(changes, basestring):
231 231 if changes == SKIPREV:
232 232 dest = SKIPREV
233 233 else:
234 234 dest = self.map[changes]
235 235 self.map[rev] = dest
236 236 return
237 237 files, copies = changes
238 238 pbranches = []
239 239 if commit.parents:
240 240 for prev in commit.parents:
241 241 if prev not in self.commitcache:
242 242 self.cachecommit(prev)
243 243 pbranches.append((self.map[prev],
244 244 self.commitcache[prev].branch))
245 245 self.dest.setbranch(commit.branch, pbranches)
246 246 try:
247 247 parents = self.splicemap[rev].replace(',', ' ').split()
248 248 self.ui.status(_('spliced in %s as parents of %s\n') %
249 249 (parents, rev))
250 250 parents = [self.map.get(p, p) for p in parents]
251 251 except KeyError:
252 252 parents = [b[0] for b in pbranches]
253 253 newnode = self.dest.putcommit(files, copies, parents, commit, self.source)
254 254 self.source.converted(rev, newnode)
255 255 self.map[rev] = newnode
256 256
257 257 def convert(self):
258 258
259 259 try:
260 260 self.source.before()
261 261 self.dest.before()
262 262 self.source.setrevmap(self.map)
263 263 self.ui.status(_("scanning source...\n"))
264 264 heads = self.source.getheads()
265 265 parents = self.walktree(heads)
266 266 self.ui.status(_("sorting...\n"))
267 267 t = self.toposort(parents)
268 268 num = len(t)
269 269 c = None
270 270
271 271 self.ui.status(_("converting...\n"))
272 272 for c in t:
273 273 num -= 1
274 274 desc = self.commitcache[c].desc
275 275 if "\n" in desc:
276 276 desc = desc.splitlines()[0]
277 277 # convert log message to local encoding without using
278 # tolocal() because util._encoding conver() use it as
278 # tolocal() because convert() sets encoding.encoding to
279 279 # 'utf-8'
280 280 self.ui.status("%d %s\n" % (num, recode(desc)))
281 281 self.ui.note(_("source: %s\n") % recode(c))
282 282 self.copy(c)
283 283
284 284 tags = self.source.gettags()
285 285 ctags = {}
286 286 for k in tags:
287 287 v = tags[k]
288 288 if self.map.get(v, SKIPREV) != SKIPREV:
289 289 ctags[k] = self.map[v]
290 290
291 291 if c and ctags:
292 292 nrev = self.dest.puttags(ctags)
293 293 # write another hash correspondence to override the previous
294 294 # one so we don't end up with extra tag heads
295 295 if nrev:
296 296 self.map[c] = nrev
297 297
298 298 self.writeauthormap()
299 299 finally:
300 300 self.cleanup()
301 301
302 302 def cleanup(self):
303 303 try:
304 304 self.dest.after()
305 305 finally:
306 306 self.source.after()
307 307 self.map.close()
308 308
309 309 def convert(ui, src, dest=None, revmapfile=None, **opts):
310 310 global orig_encoding
311 orig_encoding = util._encoding
312 util._encoding = 'UTF-8'
311 orig_encoding = encoding.encoding
312 encoding.encoding = 'UTF-8'
313 313
314 314 if not dest:
315 315 dest = hg.defaultdest(src) + "-hg"
316 316 ui.status(_("assuming destination %s\n") % dest)
317 317
318 318 destc = convertsink(ui, dest, opts.get('dest_type'))
319 319
320 320 try:
321 321 srcc = convertsource(ui, src, opts.get('source_type'),
322 322 opts.get('rev'))
323 323 except Exception:
324 324 for path in destc.created:
325 325 shutil.rmtree(path, True)
326 326 raise
327 327
328 328 fmap = opts.get('filemap')
329 329 if fmap:
330 330 srcc = filemap.filemap_source(ui, srcc, fmap)
331 331 destc.setfilemapmode(True)
332 332
333 333 if not revmapfile:
334 334 try:
335 335 revmapfile = destc.revmapfile()
336 336 except:
337 337 revmapfile = os.path.join(destc, "map")
338 338
339 339 c = converter(ui, srcc, destc, revmapfile, opts)
340 340 c.convert()
341 341
@@ -1,57 +1,57 b''
1 1 # highlight extension implementation file
2 2 #
3 3 # The original module was split in an interface and an implementation
4 4 # file to defer pygments loading and speedup extension setup.
5 5
6 6 from mercurial import demandimport
7 7 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__',])
8 8
9 from mercurial import util
9 from mercurial import util, encoding
10 10 from mercurial.templatefilters import filters
11 11
12 12 from pygments import highlight
13 13 from pygments.util import ClassNotFound
14 14 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
15 15 from pygments.formatters import HtmlFormatter
16 16
17 17 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
18 18 'type="text/css" />')
19 19
20 20 def pygmentize(field, fctx, style, tmpl):
21 21
22 22 # append a <link ...> to the syntax highlighting css
23 23 old_header = ''.join(tmpl('header'))
24 24 if SYNTAX_CSS not in old_header:
25 25 new_header = old_header + SYNTAX_CSS
26 26 tmpl.cache['header'] = new_header
27 27
28 28 text = fctx.data()
29 29 if util.binary(text):
30 30 return
31 31
32 32 # avoid UnicodeDecodeError in pygments
33 text = util.tolocal(text)
33 text = encoding.tolocal(text)
34 34
35 35 # To get multi-line strings right, we can't format line-by-line
36 36 try:
37 37 lexer = guess_lexer_for_filename(fctx.path(), text[:1024],
38 encoding=util._encoding)
38 encoding=encoding.encoding)
39 39 except (ClassNotFound, ValueError):
40 40 try:
41 lexer = guess_lexer(text[:1024], encoding=util._encoding)
41 lexer = guess_lexer(text[:1024], encoding=encoding.encoding)
42 42 except (ClassNotFound, ValueError):
43 lexer = TextLexer(encoding=util._encoding)
43 lexer = TextLexer(encoding=encoding.encoding)
44 44
45 formatter = HtmlFormatter(style=style, encoding=util._encoding)
45 formatter = HtmlFormatter(style=style, encoding=encoding.encoding)
46 46
47 47 colorized = highlight(text, lexer, formatter)
48 48 # strip wrapping div
49 49 colorized = colorized[:colorized.find('\n</pre>')]
50 50 colorized = colorized[colorized.find('<pre>')+5:]
51 51 coloriter = iter(colorized.splitlines())
52 52
53 53 filters['colorize'] = lambda x: coloriter.next()
54 54
55 55 oldl = tmpl.cache[field]
56 56 newl = oldl.replace('line|escape', 'line|colorize')
57 57 tmpl.cache[field] = newl
@@ -1,122 +1,123 b''
1 1 # win32mbcs.py -- MBCS filename support for Mercurial
2 2 #
3 3 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
4 4 #
5 5 # Version: 0.2
6 6 # Author: Shun-ichi Goto <shunichi.goto@gmail.com>
7 7 #
8 8 # This software may be used and distributed according to the terms
9 9 # of the GNU General Public License, incorporated herein by reference.
10 10 #
11 11 """allow to use MBCS path with problematic encoding.
12 12
13 13 Some MBCS encodings are not good for some path operations
14 14 (i.e. splitting path, case conversion, etc.) with its encoded bytes.
15 15 We call such an encoding (e.g. shift_jis and big5) a "problematic
16 16 encoding". This extension can be used to fix the issue with those
17 17 encodings by wrapping some functions to convert to unicode string
18 18 before path operation.
19 19
20 20 This extension is useful for:
21 21 * Japanese Windows users using shift_jis encoding.
22 22 * Chinese Windows users using big5 encoding.
23 23 * All users who use a repository with one of problematic encodings
24 24 on case-insensitive file system.
25 25
26 26 This extension is not needed for:
27 27 * Any user who use only ascii chars in path.
28 28 * Any user who do not use any of problematic encodings.
29 29
30 30 Note that there are some limitations on using this extension:
31 31 * You should use single encoding in one repository.
32 32 * You should set same encoding for the repository by locale or HGENCODING.
33 33
34 34 To use this extension, enable the extension in .hg/hgrc or ~/.hgrc:
35 35
36 36 [extensions]
37 37 hgext.win32mbcs =
38 38
39 Path encoding conversion are done between unicode and util._encoding
39 Path encoding conversions are done between unicode and encoding.encoding
40 40 which is decided by mercurial from current locale setting or HGENCODING.
41 41
42 42 """
43 43
44 44 import os
45 45 from mercurial.i18n import _
46 from mercurial import util
46 from mercurial import util, encoding
47 47
48 48 def decode(arg):
49 49 if isinstance(arg, str):
50 uarg = arg.decode(util._encoding)
51 if arg == uarg.encode(util._encoding):
50 uarg = arg.decode(encoding.encoding)
51 if arg == uarg.encode(encoding.encoding):
52 52 return uarg
53 53 raise UnicodeError("Not local encoding")
54 54 elif isinstance(arg, tuple):
55 55 return tuple(map(decode, arg))
56 56 elif isinstance(arg, list):
57 57 return map(decode, arg)
58 58 return arg
59 59
60 60 def encode(arg):
61 61 if isinstance(arg, unicode):
62 return arg.encode(util._encoding)
62 return arg.encode(encoding.encoding)
63 63 elif isinstance(arg, tuple):
64 64 return tuple(map(encode, arg))
65 65 elif isinstance(arg, list):
66 66 return map(encode, arg)
67 67 return arg
68 68
69 69 def wrapper(func, args):
70 70 # check argument is unicode, then call original
71 71 for arg in args:
72 72 if isinstance(arg, unicode):
73 73 return func(*args)
74 74
75 75 try:
76 76 # convert arguments to unicode, call func, then convert back
77 77 return encode(func(*decode(args)))
78 78 except UnicodeError:
79 # If not encoded with util._encoding, report it then
79 # If not encoded with encoding.encoding, report it then
80 80 # continue with calling original function.
81 81 raise util.Abort(_("[win32mbcs] filename conversion fail with"
82 " %s encoding\n") % (util._encoding))
82 " %s encoding\n") % (encoding.encoding))
83 83
84 84 def wrapname(name):
85 85 idx = name.rfind('.')
86 86 module = name[:idx]
87 87 name = name[idx+1:]
88 88 module = eval(module)
89 89 func = getattr(module, name)
90 90 def f(*args):
91 91 return wrapper(func, args)
92 92 try:
93 93 f.__name__ = func.__name__ # fail with python23
94 94 except Exception:
95 95 pass
96 96 setattr(module, name, f)
97 97
98 98 # List of functions to be wrapped.
99 99 # NOTE: os.path.dirname() and os.path.basename() are safe because
100 100 # they use result of os.path.split()
101 101 funcs = '''os.path.join os.path.split os.path.splitext
102 102 os.path.splitunc os.path.normpath os.path.normcase os.makedirs
103 103 util.endswithsep util.splitpath util.checkcase util.fspath'''
104 104
105 105 # codec and alias names of sjis and big5 to be faked.
106 106 problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
107 107 hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
108 108 sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
109 109 shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213'''
110 110
111 111 def reposetup(ui, repo):
112 112 # TODO: decide use of config section for this extension
113 113 if not os.path.supports_unicode_filenames:
114 114 ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
115 115 return
116 116
117 117 # fake is only for relevant environment.
118 if util._encoding.lower() in problematic_encodings.split():
118 if encoding.encoding.lower() in problematic_encodings.split():
119 119 for f in funcs.split():
120 120 wrapname(f)
121 ui.debug(_("[win32mbcs] activated with encoding: %s\n") % util._encoding)
121 ui.debug(_("[win32mbcs] activated with encoding: %s\n")
122 % encoding.encoding)
122 123
@@ -1,221 +1,221 b''
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid
9 9 from i18n import _
10 import util, error, revlog
10 import util, error, revlog, encoding
11 11
12 12 def _string_escape(text):
13 13 """
14 14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 16 >>> s
17 17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 18 >>> res = _string_escape(s)
19 19 >>> s == res.decode('string_escape')
20 20 True
21 21 """
22 22 # subset of the string_escape codec
23 23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 24 return text.replace('\0', '\\0')
25 25
26 26 class appender:
27 27 '''the changelog index must be updated last on disk, so we use this class
28 28 to delay writes to it'''
29 29 def __init__(self, fp, buf):
30 30 self.data = buf
31 31 self.fp = fp
32 32 self.offset = fp.tell()
33 33 self.size = util.fstat(fp).st_size
34 34
35 35 def end(self):
36 36 return self.size + len("".join(self.data))
37 37 def tell(self):
38 38 return self.offset
39 39 def flush(self):
40 40 pass
41 41 def close(self):
42 42 self.fp.close()
43 43
44 44 def seek(self, offset, whence=0):
45 45 '''virtual file offset spans real file and data'''
46 46 if whence == 0:
47 47 self.offset = offset
48 48 elif whence == 1:
49 49 self.offset += offset
50 50 elif whence == 2:
51 51 self.offset = self.end() + offset
52 52 if self.offset < self.size:
53 53 self.fp.seek(self.offset)
54 54
55 55 def read(self, count=-1):
56 56 '''only trick here is reads that span real file and data'''
57 57 ret = ""
58 58 if self.offset < self.size:
59 59 s = self.fp.read(count)
60 60 ret = s
61 61 self.offset += len(s)
62 62 if count > 0:
63 63 count -= len(s)
64 64 if count != 0:
65 65 doff = self.offset - self.size
66 66 self.data.insert(0, "".join(self.data))
67 67 del self.data[1:]
68 68 s = self.data[0][doff:doff+count]
69 69 self.offset += len(s)
70 70 ret += s
71 71 return ret
72 72
73 73 def write(self, s):
74 74 self.data.append(str(s))
75 75 self.offset += len(s)
76 76
77 77 class changelog(revlog.revlog):
78 78 def __init__(self, opener):
79 79 revlog.revlog.__init__(self, opener, "00changelog.i")
80 80
81 81 def delayupdate(self):
82 82 "delay visibility of index updates to other readers"
83 83 self._realopener = self.opener
84 84 self.opener = self._delayopener
85 85 self._delaycount = len(self)
86 86 self._delaybuf = []
87 87 self._delayname = None
88 88
89 89 def finalize(self, tr):
90 90 "finalize index updates"
91 91 self.opener = self._realopener
92 92 # move redirected index data back into place
93 93 if self._delayname:
94 94 util.rename(self._delayname + ".a", self._delayname)
95 95 elif self._delaybuf:
96 96 fp = self.opener(self.indexfile, 'a')
97 97 fp.write("".join(self._delaybuf))
98 98 fp.close()
99 99 self._delaybuf = []
100 100 # split when we're done
101 101 self.checkinlinesize(tr)
102 102
103 103 def _delayopener(self, name, mode='r'):
104 104 fp = self._realopener(name, mode)
105 105 # only divert the index
106 106 if not name == self.indexfile:
107 107 return fp
108 108 # if we're doing an initial clone, divert to another file
109 109 if self._delaycount == 0:
110 110 self._delayname = fp.name
111 111 if not len(self):
112 112 # make sure to truncate the file
113 113 mode = mode.replace('a', 'w')
114 114 return self._realopener(name + ".a", mode)
115 115 # otherwise, divert to memory
116 116 return appender(fp, self._delaybuf)
117 117
118 118 def readpending(self, file):
119 119 r = revlog.revlog(self.opener, file)
120 120 self.index = r.index
121 121 self.nodemap = r.nodemap
122 122 self._chunkcache = r._chunkcache
123 123
124 124 def writepending(self):
125 125 "create a file containing the unfinalized state for pretxnchangegroup"
126 126 if self._delaybuf:
127 127 # make a temporary copy of the index
128 128 fp1 = self._realopener(self.indexfile)
129 129 fp2 = self._realopener(self.indexfile + ".a", "w")
130 130 fp2.write(fp1.read())
131 131 # add pending data
132 132 fp2.write("".join(self._delaybuf))
133 133 fp2.close()
134 134 # switch modes so finalize can simply rename
135 135 self._delaybuf = []
136 136 self._delayname = fp1.name
137 137
138 138 if self._delayname:
139 139 return True
140 140
141 141 return False
142 142
143 143 def checkinlinesize(self, tr, fp=None):
144 144 if self.opener == self._delayopener:
145 145 return
146 146 return revlog.revlog.checkinlinesize(self, tr, fp)
147 147
148 148 def decode_extra(self, text):
149 149 extra = {}
150 150 for l in text.split('\0'):
151 151 if l:
152 152 k, v = l.decode('string_escape').split(':', 1)
153 153 extra[k] = v
154 154 return extra
155 155
156 156 def encode_extra(self, d):
157 157 # keys must be sorted to produce a deterministic changelog entry
158 158 items = [_string_escape('%s:%s' % (k, d[k])) for k in util.sort(d)]
159 159 return "\0".join(items)
160 160
161 161 def read(self, node):
162 162 """
163 163 format used:
164 164 nodeid\n : manifest node in ascii
165 165 user\n : user, no \n or \r allowed
166 166 time tz extra\n : date (time is int or float, timezone is int)
167 167     : extra is metadata, encoded and separated by '\0'
168 168 : older versions ignore it
169 169 files\n\n : files modified by the cset, no \n or \r allowed
170 170 (.*) : comment (free text, ideally utf-8)
171 171
172 172 changelog v0 doesn't use extra
173 173 """
174 174 text = self.revision(node)
175 175 if not text:
176 176 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
177 177 last = text.index("\n\n")
178 desc = util.tolocal(text[last + 2:])
178 desc = encoding.tolocal(text[last + 2:])
179 179 l = text[:last].split('\n')
180 180 manifest = bin(l[0])
181 user = util.tolocal(l[1])
181 user = encoding.tolocal(l[1])
182 182
183 183 extra_data = l[2].split(' ', 2)
184 184 if len(extra_data) != 3:
185 185 time = float(extra_data.pop(0))
186 186 try:
187 187 # various tools did silly things with the time zone field.
188 188 timezone = int(extra_data[0])
189 189 except:
190 190 timezone = 0
191 191 extra = {}
192 192 else:
193 193 time, timezone, extra = extra_data
194 194 time, timezone = float(time), int(timezone)
195 195 extra = self.decode_extra(extra)
196 196 if not extra.get('branch'):
197 197 extra['branch'] = 'default'
198 198 files = l[3:]
199 199 return (manifest, user, (time, timezone), files, desc, extra)
200 200
201 201 def add(self, manifest, files, desc, transaction, p1=None, p2=None,
202 202 user=None, date=None, extra={}):
203 203
204 204 user = user.strip()
205 205 if "\n" in user:
206 206 raise error.RevlogError(_("username %s contains a newline")
207 207 % repr(user))
208 user, desc = util.fromlocal(user), util.fromlocal(desc)
208 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
209 209
210 210 if date:
211 211 parseddate = "%d %d" % util.parsedate(date)
212 212 else:
213 213 parseddate = "%d %d" % util.makedate()
214 214 if extra and extra.get("branch") in ("default", ""):
215 215 del extra["branch"]
216 216 if extra:
217 217 extra = self.encode_extra(extra)
218 218 parseddate = "%s %s" % (parseddate, extra)
219 219 l = [hex(manifest), user, parseddate] + util.sort(files) + ["", desc]
220 220 text = "\n".join(l)
221 221 return self.addrevision(text, transaction, len(self), p1, p2)
@@ -1,1206 +1,1206 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 import os, sys, bisect, stat
10 import os, sys, bisect, stat, encoding
11 11 import mdiff, bdiff, util, templater, templatefilters, patch, errno, error
12 12 import match as _match
13 13
14 14 revrangesep = ':'
15 15
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debug = {}
    for key in table.keys():
        names = key.lstrip("^").split("|")
        matched = None
        if cmd in names:
            matched = cmd
        elif not strict:
            # prefix match: first alias (in declaration order) wins
            prefixed = [n for n in names if n.startswith(cmd)]
            if prefixed:
                matched = prefixed[0]
        if matched is None:
            continue
        isdebug = names[0].startswith("debug") or matched.startswith("debug")
        bucket = debug if isdebug else normal
        bucket[matched] = (names, table[key])

    # debug commands are only offered when nothing normal matched
    return normal or debug
44 44
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    matches = findpossible(cmd, table, strict)

    # an exact name match always wins
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, sorted(matches.keys()))

    # exactly one prefix match left
    for entry in matches.values():
        return entry

    raise error.UnknownCommand(cmd)
61 61
def bail_if_changed(repo):
    """Abort when the working directory has an uncommitted merge or changes."""
    # a non-null second parent means a merge is in progress
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    # modified, added, removed, deleted are the first four status lists
    if any(repo.status()[:4]):
        raise util.Abort(_("outstanding uncommitted changes"))
68 68
def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if logfile and not message:
        # read the message from the given file, '-' meaning stdin
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError as inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
87 87
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    raw = opts.get('limit')
    if not raw:
        # no limit requested: effectively unbounded
        return sys.maxint
    try:
        limit = int(raw)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
100 100
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    # only set keys the user actually supplied
    for key in ('ssh', 'remotecmd'):
        if opts.get(key):
            ui.setconfig("ui", key, opts[key])
107 107
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def tonode(val, defval):
        # an empty spec (but not revision 0) falls back to the default
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        return repo.dirstate.parents()[0], None

    if len(revs) == 1:
        if revrangesep not in revs[0]:
            return tonode(revs[0], None), None
        first, second = revs[0].split(revrangesep, 1)
        return tonode(first, 0), tonode(second, len(repo) - 1)

    if len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        return tonode(revs[0], None), tonode(revs[1], None)

    raise util.Abort(_('too many revisions specified'))
135 135
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def torev(val, defval):
        # an empty spec (but not revision 0) falls back to the default
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen = set()
    result = []

    def record(rev):
        # preserve first-seen order, dropping duplicates
        if rev not in seen:
            seen.add(rev)
            result.append(rev)

    for spec in revs:
        if revrangesep in spec:
            first, second = spec.split(revrangesep, 1)
            first = torev(first, 0)
            second = torev(second, len(repo) - 1)
            step = first > second and -1 or 1
            for rev in xrange(first, second + step, step):
                record(rev)
        else:
            record(torev(spec, None))

    return result
164 164
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the %-escapes in an output file name pattern.

    Supported escapes (each only when its data is available):
    %% literal %, %b repo basename, %H/%h/%R/%r node forms,
    %N total count, %n sequence number, %s/%d/%p pathname parts.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }
    if node:
        expander['H'] = lambda: hex(node)
        expander['R'] = lambda: str(repo.changelog.rev(node))
        expander['h'] = lambda: short(node)
        expander['r'] = (lambda:
              str(repo.changelog.rev(node)).zfill(revwidth or 0))
    if total is not None:
        expander['N'] = lambda: str(total)
    if seqno is not None:
        expander['n'] = lambda: str(seqno)
    if total is not None and seqno is not None:
        # zero-pad the sequence number to the width of the total
        expander['n'] = lambda: str(seqno).zfill(len(str(total)))
    if pathname is not None:
        expander['s'] = lambda: os.path.basename(pathname)
        expander['d'] = lambda: os.path.dirname(pathname) or '.'
        expander['p'] = lambda: pathname

    try:
        pieces = []
        i, end = 0, len(pat)
        while i < end:
            ch = pat[i]
            if ch == '%':
                i += 1
                ch = expander[pat[i]]()
            pieces.append(ch)
            i += 1
        return ''.join(pieces)
    except KeyError as inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name") %
                         inst.args[0])
209 209
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Open (or pass through) the output target described by pat."""
    writable = 'w' in mode or 'a' in mode

    # no pattern or '-': use the standard streams
    if not pat or pat == '-':
        if writable:
            return sys.stdout
        return sys.stdin
    # already a file-like object compatible with the requested mode
    if writable and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    fname = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(fname, mode)
224 224
def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
    """Build a matcher from command-line patterns and -I/-X options."""
    if not globbed and default == 'relpath':
        pats = util.expand_glob(pats or [])
    matcher = _match.match(repo.root, repo.getcwd(), pats,
                           opts.get('include'), opts.get('exclude'), default)

    def badfn(f, msg):
        # warn about unreadable/bad files but keep walking
        repo.ui.warn("%s: %s\n" % (matcher.rel(f), msg))
        return False

    matcher.bad = badfn
    return matcher
235 235
def matchall(repo):
    """Return a matcher that selects every file under the repository root."""
    return _match.always(repo.root, repo.getcwd())
238 238
def matchfiles(repo, files):
    """Return a matcher that selects exactly the given list of files."""
    return _match.exact(repo.root, repo.getcwd(), files)
241 241
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples'''
    # default to the working directory's added/removed file lists
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo['.']
    for a in added:
        aa = repo.wread(a)
        # only candidates scoring at least `threshold` are considered;
        # later candidates with an equal-or-better score replace earlier ones
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            alines = mdiff.splitnewlines(aa)
            matches = bdiff.blocks(aa, rr)
            for x1,x2,y1,y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            # similarity = 2 * matched bytes / total bytes of both files
            lengths = len(aa) + len(rr)
            if lengths:
                myscore = equal*2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
269 269
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal.

    When similarity > 0, also record rename information for
    removed/added pairs whose content similarity reaches that score.
    With dry_run, only report what would be done.
    """
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    mapping = {}
    audit_path = util.path_auditor(repo.root)
    m = match(repo, pats, opts)
    for abs in repo.walk(m):
        target = repo.wjoin(abs)
        # a path that fails the audit (e.g. escapes the repo) is treated
        # as gone and becomes a removal candidate below
        good = True
        try:
            audit_path(abs)
        except:
            good = False
        rel = m.rel(abs)
        exact = m.exact(abs)
        if good and abs not in repo.dirstate:
            add.append(abs)
            mapping[abs] = rel, m.exact(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        # tracked but no longer present (or replaced by a real directory)
        if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.remove(remove)
        repo.add(add)
    if similarity > 0:
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)
312 312
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) pats[:-1] to the destination
    pats[-1], updating dirstate copy records.  Returns the number of
    failed copies (non-zero means at least one source could not be
    copied)."""
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) source tuples,
        # skipping unmanaged or removal-marked files
        srcs = []
        m = match(repo, [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in '?r':
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy; returns True on failure, None otherwise
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.exists(target)
        if (not after and exists or after and state in 'mn'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        origsrc = repo.dirstate.copied(abssrc) or abssrc
        if abstarget == origsrc: # copying back a copy?
            if state not in 'mn' and not dryrun:
                repo.dirstate.normallookup(abstarget)
        else:
            if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if repo.dirstate[abstarget] in '?r' and not dryrun:
                    repo.add([abstarget])
            elif not dryrun:
                repo.copy(origsrc, abstarget)

        if rename and not dryrun:
            repo.remove([abssrc], not after)

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist in dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                                 os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or util.patkind(pats[0], None)[0]:
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors
517 517
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.

    With --daemon, the process re-executes itself detached; parent and
    child synchronize over a pipe so the parent only returns (or exits)
    once the child signals readiness.  initfn runs before
    daemonization completes, runfn is the service body, and parentfn
    (if given) is invoked in the parent with the child's pid.
    '''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side: spawn a detached copy of ourselves and wait for
        # the child to write one byte on the pipe before continuing
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        # Don't pass --cwd to the child process, because we've already
        # changed directory.
        for i in xrange(1,len(args)):
            if args[i].startswith('--cwd='):
                del args[i]
                break
            elif args[i].startswith('--cwd'):
                del args[i:i+2]
                break
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child side: detach from the controlling terminal, notify the
        # parent via the pipe, and redirect std streams to the null device
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            # setsid is not available on all platforms (e.g. Windows)
            pass
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()
570 570
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.patch = patch          # matcher for --patch output, or False
        self.diffopts = diffopts
        self.header = {}            # rev -> rendered header (buffered mode)
        self.hunk = {}              # rev -> rendered body (buffered mode)
        self.lastheader = None      # last header emitted, to avoid repeats

    def flush(self, rev):
        # emit buffered output for rev; returns 1 if a hunk was written
        if rev in self.header:
            h = self.header[rev]
            # only write a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, ctx, copies=(), **props):
        # in buffered mode, capture the rendering for later flush()
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, props)

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        log = self.repo.changelog
        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        # full node hashes with --debug, short ones otherwise
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = encoding.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            for key, value in util.sort(extra.items()):
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        # append the diff against the first parent when --patch was given
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            chunks = patch.diff(self.repo, prev, node, match=self.patch,
                                opts=patch.diffopts(self.ui, self.diffopts))
            for chunk in chunks:
                self.ui.write(chunk)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
695 695
696 696
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
        filters = templatefilters.filters.copy()
        # full node hashes with --debug, 12-char short form otherwise
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _meaningful_parentrevs(self, ctx):
        """Return list of meaningful (or all if debug) parentrevs for rev.
        """
        parents = ctx.parents()
        if len(parents) > 1:
            return parents
        if self.ui.debugflag:
            return [parents[0], self.repo['null']]
        if parents[0].rev() >= ctx.rev() - 1:
            return []
        return parents

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                # merge v into the template args, whatever its shape
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            # the default branch name is implicit and not shown
            branch = ctx.branch()
            if branch != 'default':
                branch = encoding.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p.rev()), ('node', p.hex())]
                       for p in self._meaningful_parentrevs(ctx)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', ctx.tags(), **args)

        def showextras(**args):
            for key, value in util.sort(ctx.extra().items()):
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        # status is computed lazily and cached in this list
        files = []
        def getfiles():
            if not files:
                files[:] = self.repo.status(ctx.parents()[0].node(),
                                            ctx.node())[:3]
            return files
        def showfiles(**args):
            return showlist('file', ctx.files(), **args)
        def showmods(**args):
            return showlist('file_mod', getfiles()[0], **args)
        def showadds(**args):
            return showlist('file_add', getfiles()[1], **args)
        def showdels(**args):
            return showlist('file_del', getfiles()[2], **args)
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
                             node=hex(ctx.changeset()[0])))
            return self.t('manifest', **args)

        def showdiffstat(**args):
            diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
            files, adds, removes = 0, 0, 0
            for i in patch.diffstatdata(util.iterlines(diff)):
                files += 1
                adds += i[1]
                removes += i[2]
            return '%s: +%s/-%s' % (files, adds, removes)

        # default template keywords; caller-supplied props take no
        # precedence here because defprops is applied last
        defprops = {
            'author': ctx.user(),
            'branches': showbranches,
            'date': ctx.date(),
            'desc': ctx.description().strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'file_mods': showmods,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': ctx.hex(),
            'parents': showparents,
            'rev': ctx.rev(),
            'tags': showtags,
            'extras': showextras,
            'diffstat': showdiffstat,
            }
        props = props.copy()
        props.update(defprops)

        try:
            # pick the most specific header template available
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    self.ui.write(h)
            # pick the most specific changeset template available
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx.node())
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
897 897
898 898 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
899 899 """show one changeset using template or regular display.
900 900
901 901 Display format will be the first non-empty hit of:
902 902 1. option 'template'
903 903 2. option 'style'
904 904 3. [ui] setting 'logtemplate'
905 905 4. [ui] setting 'style'
906 906 If all of these values are either the unset or the empty string,
907 907 regular display via changeset_printer() is done.
908 908 """
909 909 # options
910 910 patch = False
911 911 if opts.get('patch'):
912 912 patch = matchfn or matchall(repo)
913 913
914 914 tmpl = opts.get('template')
915 915 mapfile = None
916 916 if tmpl:
917 917 tmpl = templater.parsestring(tmpl, quoted=False)
918 918 else:
919 919 mapfile = opts.get('style')
920 920 # ui settings
921 921 if not mapfile:
922 922 tmpl = ui.config('ui', 'logtemplate')
923 923 if tmpl:
924 924 tmpl = templater.parsestring(tmpl)
925 925 else:
926 926 mapfile = ui.config('ui', 'style')
927 927
928 928 if tmpl or mapfile:
929 929 if mapfile:
930 930 if not os.path.split(mapfile)[0]:
931 931 mapname = (templater.templatepath('map-cmdline.' + mapfile)
932 932 or templater.templatepath(mapfile))
933 933 if mapname: mapfile = mapname
934 934 try:
935 935 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
936 936 except SyntaxError, inst:
937 937 raise util.Abort(inst.args[0])
938 938 if tmpl: t.use_template(tmpl)
939 939 return t
940 940 return changeset_printer(ui, repo, patch, opts, buffered)
941 941
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    df = util.matchdate(date)
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev': None})
    results = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            # remember every revision whose commit date matches
            d = get(rev)[2]
            if df(d[0]):
                results[rev] = d
        elif st == 'iter' and rev in results:
            # in-order pass: the first recorded hit is the tipmost match
            ui.status(_("Found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))
960 960
def walkchangerevs(ui, repo, pats, change, opts):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, matchfn) tuple. The iterator
    yields 3-tuples. They will be of one of the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        # Yield (start, size) pairs covering [start, end), doubling the
        # window size each step (capped at sizelimit).  Works in either
        # direction depending on whether start < end.
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    m = match(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    # empty repository: nothing to walk
    if not len(repo):
        return [], m

    if follow:
        defrange = '%s:0' % repo['.'].rev()
    else:
        defrange = '-1:0'
    revs = revrange(repo, opts['rev'] or [defrange])
    wanted = {}                      # revs to display (used as a set)
    # fast path works only for plain file names; patterns or --removed
    # force a scan of every changeset
    slowpath = m.anypats() or (m.files() and opts.get('removed'))
    fncache = {}                     # rev -> matching file names

    if not slowpath and not m.files():
        # No files, no patterns.  Display all revs.
        wanted = dict.fromkeys(revs)
    copies = []
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog, node):
            # Yield (linkrev, renamed-or-False) pairs for the given
            # filelog, newest window first, oldest rev first inside a
            # window.
            cl_count = len(repo)
            if node is None:
                last = len(filelog) - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, nullrev):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(j),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            # explicit files first, then copy sources discovered while
            # following renames (copies grows during the loop below)
            for filename in m.files():
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            if not len(filelog):
                if node is None:
                    # A zero count may be a directory or deleted file, so
                    # try to find matching entries on the slow path.
                    if follow:
                        raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
                    slowpath = True
                    break
                else:
                    ui.warn(_('%s:%s copy source revision cannot be found!\n')
                            % (file_, short(node)))
                    continue
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(len(repo) - 1, nullrev):
                for j in xrange(i - window, i + 1):
                    yield j, change(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(m, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    class followfilter:
        # Incrementally decides whether a rev is an ancestor (walking
        # backwards) or descendant (walking forwards) of the first rev
        # it was asked about.  Must be fed revs in monotonic order.
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                # only first parents when following --follow_first
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x) and x in wanted:
                del wanted[x]

    def iterate():
        # the generator actually returned to the caller; closes over
        # m, wanted, fncache, revs computed above
        if follow and not m.files():
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            for rev in util.sort(list(nrevs)):
                fns = fncache.get(rev)
                if not fns:
                    # slow/any-rev case: filter the changeset's file
                    # list lazily
                    def fns_generator():
                        for f in change(rev)[3]:
                            if m(f):
                                yield f
                    fns = fns_generator()
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), m
1158 1158
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes

    Shared driver used by commands that commit: parses the date,
    builds the log message, optionally runs addremove, validates any
    explicitly named files against the working directory status, and
    finally delegates to commitfunc(ui, repo, message, match, opts).
    '''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    m = match(repo, pats, opts)
    if pats:
        # explicit file list: restrict the commit to the named files
        # and sanity-check each of them
        modified, added, removed = repo.status(match=m)[:3]
        files = util.sort(modified + added + removed)

        def is_dir(f):
            # True if f is a directory prefix of some changed file
            # (files is sorted, so a binary search suffices)
            name = f + '/'
            i = bisect.bisect(files, name)
            return i < len(files) and files[i].startswith(name)

        for f in m.files():
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                rel = repo.pathto(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    if is_dir(f): # deleted directory ?
                        continue
                    raise util.Abort(_("file %s not found!") % rel)
                if stat.S_ISDIR(mode):
                    if not is_dir(f):
                        raise util.Abort(_("no match under directory %s!")
                                         % rel)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                      "unsupported file type!") % rel)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rel)
        m = matchfiles(repo, files)
    try:
        return commitfunc(ui, repo, message, m, opts)
    except ValueError, inst:
        raise util.Abort(str(inst))
@@ -1,3432 +1,3433 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _, gettext
10 10 import os, re, sys
11 11 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 import difflib, patch, time, help, mdiff, tempfile, url
12 import difflib, patch, time, help, mdiff, tempfile, url, encoding
13 13 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
14 14 import merge as merge_
15 15
16 16 # Commands start here, listed alphabetically
17 17
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files to the repository.
    """

    rejected = None
    exact = {}
    names = []
    matcher = cmdutil.match(repo, pats, opts)
    # silence complaints about names missing from the working directory
    matcher.bad = lambda x, y: True
    for f in repo.walk(matcher):
        if matcher.exact(f):
            # explicitly named: always add, report only when verbose
            if ui.verbose:
                ui.status(_('adding %s\n') % matcher.rel(f))
            names.append(f)
            exact[f] = 1
        elif f not in repo.dirstate:
            # picked up by a pattern/walk: add only if untracked
            ui.status(_('adding %s\n') % matcher.rel(f))
            names.append(f)
    if not opts.get('dry_run'):
        # only explicitly named rejects affect the exit status
        rejected = [p for p in repo.add(names) if p in exact]
    return rejected and 1 or 0
47 47
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # validate -s/--similarity before delegating to cmdutil
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=similarity / 100.)
69 69
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # quiet mode shortens the date format
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # option name -> formatter; each formatter maps an annotation entry
    # (fctx, lineno) to the string shown in that column
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    # default to showing revision numbers when no column was requested
    if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
        and not opts.get('follow')):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # append ":lineno" to the last requested column
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo[opts.get('rev')]

    m = cmdutil.match(repo, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # skip binaries unless -a/--text was given
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # build one right-aligned column per requested formatter
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                ml = max(map(len, l))
                pieces.append(["%*s" % (ml, x) for x in l])

        if pieces:
            # transpose columns back into per-line prefixes
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
130 130
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo[opts.get('rev')]
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or 'files'
    prefix = opts.get('prefix')
    matchfn = cmdutil.match(repo, [], opts)

    if dest == '-':
        # writing to stdout only makes sense for single-file formats
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.make_filename(repo, prefix, node)
    decode = not opts.get('no_decode')
    archival.archive(repo, dest, node, kind, decode, matchfn, prefix)
173 173
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head will be the new tip and you should merge this
    backout changeset with another head (current one by default).

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as with a normal
    merge.

    See \'hg help dates\' for a list of formats valid for -d/--date.
    '''
    # 'node' is the deprecated positional form of -r/--rev
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    # the target must be an ancestor of the working directory parent
    op1, op2 = repo.dirstate.parents()
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot back out change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge requires choosing which parent to revert to
        if not opts.get('parent'):
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    hg.clean(repo, node, show_stats=False)
    repo.dirstate.setbranch(branch)
    # revert everything to the chosen parent, then commit the result
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
    commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        # "rev:shorthash" form for status messages
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # restore the original working directory parent, optionally
        # merging the freshly committed backout into it
        hg.clean(repo, op1, show_stats=False)
        if opts.get('merge'):
            ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
260 260
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems.
    To use, mark the earliest changeset you know exhibits the problem
    as bad, then mark the latest changeset which is free from the
    problem as good. Bisect will update your working directory to a
    revision for testing (unless the --noupdate option is specified).
    Once you have performed tests, mark the working directory as bad
    or good and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command it will be used for automatic bisection. Its exit
    status will be used as flag to mark revision as bad or good. In case exit
    status is 0 the revision is marked as good, 125 - skipped, 127 (command not
    found) - bisection will be aborted; any other status bigger than 0 will
    mark revision as bad.
    """
    def print_result(nodes, good):
        # report the culprit(s) once the search has converged
        displayer = cmdutil.show_changeset(ui, repo, {})
        transition = (good and "good" or "bad")
        if len(nodes) == 1:
            # narrowed it down to a single revision
            ui.write(_("The first %s revision is:\n") % transition)
            displayer.show(repo[nodes[0]])
        else:
            # multiple possible revisions
            ui.write(_("Due to skipped revisions, the first "
                       "%s revision could be any of:\n") % transition)
            for n in nodes:
                displayer.show(repo[n])

    def check_state(state, interactive=True):
        # bisection needs at least one good and one bad revision;
        # interactively we silently wait for more marks
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise util.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise util.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + bool(command) > 1:
        raise util.Abort(_('incompatible arguments'))

    if reset:
        # forget all bisection state
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        # automatic mode: run the command, interpret its exit status,
        # and keep narrowing until a single candidate remains
        commandpath = util.find_exe(command)
        changesets = 1
        try:
            while changesets:
                # update state
                status = os.spawnl(os.P_WAIT, commandpath, commandpath)
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise util.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise util.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                node = repo.lookup(rev or '.')
                state[transition].append(node)
                ui.note(_('Changeset %s: %s\n') % (short(node), transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, good = hbisect.bisect(repo.changelog, state)
                # update to next check
                cmdutil.bail_if_changed(repo)
                hg.clean(repo, nodes[0], show_stats=False)
        finally:
            # persist whatever progress was made, even on abort
            hbisect.save_state(repo, state)
        return print_result(nodes, not status)

    # update state
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %s:%s "
                   "(%s changesets remaining, ~%s tests)\n")
                 % (rev, short(node), changesets, tests))
        if not noupdate:
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
391 391
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist
    in the repository until the next commit). It is recommended to use
    the 'default' branch as your primary development branch.

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use --clean to reset the working directory branch to that of the
    parent of the working directory, negating a previous branch change.

    Use the command 'hg update' to switch to an existing branch.
    """

    if opts.get('clean'):
        # revert to the first parent's branch
        label = repo[None].parents()[0].branch()
        repo.dirstate.setbranch(label)
        ui.status(_('reset working directory to branch %s\n') % label)
    elif label:
        if not opts.get('force') and label in repo.branchtags():
            # shadowing an existing branch we are not already on
            if label not in [p.branch() for p in repo.parents()]:
                raise util.Abort(_('a branch of the same name already exists'
                                   ' (use --force to override)'))
        # branch names are stored as UTF-8 internally
        repo.dirstate.setbranch(encoding.fromlocal(label))
        ui.status(_('marked working directory as branch %s\n') % label)
    else:
        # no argument: just print the current branch
        ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
422 422
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains repository heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    hexfunc = ui.debugflag and hex or short
    # branches that still have open heads
    activebranches = [encoding.tolocal(repo[n].branch())
                      for n in repo.heads(closed=False)]
    # sort by (active, rev, name) then reverse: active/newest first
    branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
                          for tag, node in repo.branchtags().items()])
    branches.reverse()

    for isactive, node, tag in branches:
        if (not active) or isactive:
            if ui.quiet:
                ui.write("%s\n" % tag)
            else:
                hn = repo.lookup(node)
                if isactive:
                    notice = ''
                elif hn not in repo.branchheads(tag, closed=False):
                    notice = ' (closed)'
                else:
                    notice = ' (inactive)'
                # pad by display width so multi-byte names line up
                rev = str(node).rjust(31 - encoding.colwidth(tag))
                data = tag, rev, hexfunc(hn), notice
                ui.write("%s %s:%s%s\n" % data)
455 455
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    known to be in another repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by one or more --base
    parameters. To create a bundle containing all changesets, use
    --all (or --base null). To change the compression method applied,
    use the -t option (by default, bundles are compressed using bz2).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            # fix: error message previously misspelled "specifiying"
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # breadth-first walk from the requested heads down to the first
        # nodes whose parents are all reachable from a --base node; those
        # are the roots of the outgoing set
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: compare against the destination repository
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts.get('force'))

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')

    # map user-facing compression names to on-disk bundle headers
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)
528 528
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo[opts.get('rev')]
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    # exit status 1 unless at least one file was printed
    err = 1
    for path in ctx.walk(matcher):
        fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(),
                               pathname=path)
        data = ctx[path].data()
        if opts.get('decode'):
            # apply decode filters configured for this file
            data = repo.wwritedata(path, data)
        fp.write(data)
        err = 0
    return err
555 555
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions (including subsequent tags) will be present
    in the cloned repository. This option implies --pull, even on
    local repositories.

    By default, clone will check out the head of the 'default' branch.
    If the -U option is used, the new clone will contain only a repository
    (.hg) and no working copy (the working copy parent is the null revision).

    See 'hg help urls' for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for urls for important details about ssh:// URLs.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    In some cases, you can clone repositories and checked out files
    using full hardlinks with

    $ cp -al REPO REPOCLONE

    This is the fastest way to clone, but it is not always safe. The
    operation is not atomic (making sure REPO is not modified during
    the operation is up to you) and you have to make sure your editor
    breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
    this is not compatible with certain extensions that place their
    metadata under the .hg directory, such as mq.

    """
    cmdutil.setremoteconfig(ui, opts)
    # translate command-line options into hg.clone keyword arguments
    kwargs = dict(pull=opts.get('pull'),
                  stream=opts.get('uncompressed'),
                  rev=opts.get('rev'),
                  update=not opts.get('noupdate'))
    hg.clone(ui, source, dest, **kwargs)
608 608
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If you are committing the result of a merge, do not provide any
    file names or -I/-X filters.

    If no commit message is specified, the configured editor is started to
    prompt you for a message.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    extra = {}
    if opts.get('close_branch'):
        # mark the changeset as closing its branch
        extra['close'] = 1
    def commitfunc(ui, repo, message, match, opts):
        # delegate the actual commit to the repository object
        return repo.commit(match.files(), message, opts.get('user'),
            opts.get('date'), match, force_editor=opts.get('force_editor'),
            extra=extra)

    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
    if not node:
        # nothing was committed
        return
    cl = repo.changelog
    rev = cl.rev(node)
    parents = cl.parentrevs(rev)
    if rev - 1 in parents:
        # one of the parents was the old tip
        pass
    elif (parents == (nullrev, nullrev) or
            len(cl.heads(cl.node(parents[0]))) > 1 and
            (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
        # warn when the new changeset did not simply extend a head:
        # either it is the very first commit, or each of its parents
        # already has other head descendants
        ui.status(_('created new head\n'))

    if ui.debugflag:
        # debug mode shows the full 40-char hash
        ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
    elif ui.verbose:
        ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
651 651
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see hg revert.
    """
    # hold the working-dir lock for the duration of the copy
    # NOTE(review): wlock(False) — presumably non-blocking acquire; confirm
    lock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        # dropping the last reference releases the lock
        del lock
671 671
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # an explicit index file was given: open it as a standalone revlog
        index, rev1, rev2 = args
        rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif len(args) == 2:
        # no index file: use the changelog of the local repository
        if not repo:
            raise util.Abort(_("There is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise util.Abort(_('either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(anc), hex(anc)))
689 689
def debugcommands(ui, cmd='', *args):
    # print every command in the table with its option names
    for name, entry in util.sort(table.iteritems()):
        name = name.split('|')[0].strip('^')
        optnames = ', '.join([spec[1] for spec in entry[1]])
        ui.write('%s: %s\n' % (name, optnames))
695 695
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # with --options, list the flags of the command (plus global flags)
        opttables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            opttables.append(entry[1])
        flags = []
        for tbl in opttables:
            for opt in tbl:
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    cmdlist = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        # verbose mode shows all aliases of each matching command
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(util.sort(cmdlist)))
717 717
def debugfsinfo(ui, path = "."):
    # Probe the filesystem for exec-bit, symlink and case-sensitivity
    # support, using a throwaway file named .debugfsinfo.
    # Fix: open/close the probe file explicitly instead of leaving the
    # handle from file(...).write(...) to the garbage collector (an open
    # handle can make the unlink below fail on some platforms).
    probe = open('.debugfsinfo', 'w')
    probe.write('')
    probe.close()
    ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
    ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
    ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
                and 'yes' or 'no'))
    os.unlink('.debugfsinfo')
725 725
def debugrebuildstate(ui, repo, rev="tip"):
    """rebuild the dirstate as it would look like for the given revision"""
    target = repo[rev]
    # hold the working-directory lock while rewriting the dirstate
    lock = repo.wlock()
    try:
        repo.dirstate.rebuild(target.node(), target.manifest())
    finally:
        # dropping the reference releases the lock
        del lock
734 734
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # pass 1: every dirstate entry must be consistent with the manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n' (normal) and 'r' (removed) entries must exist in manifest1
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a' (added) entries must not already be in manifest1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm' (merged) entries must come from at least one parent manifest
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # pass 2: every file in manifest1 must be tracked by the dirstate
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
761 761
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    # at most one fully-qualified section.name item may be requested
    if values and len([v for v in values if '.' in v]) > 1:
        raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            # no filter: dump everything
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # bare section name: show every item in the section
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # exact section.name match: show the value alone
                ui.write(value, '\n')
787 787
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # a missing second parent defaults to the null revision
    rev2 = rev2 or hex(nullid)

    lock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        # dropping the reference releases the lock
        del lock
803 803
def debugstate(ui, repo, nodates=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    for file_, ent in util.sort(repo.dirstate._map.iteritems()):
        # ent fields: [0]=state char, [1]=mode, [2]=size, [3]=mtime
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
                timestr = 'unset'
                timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
        if ent[1] & 020000:
            # symlink bit set in the recorded mode
            mode = 'lnk'
        else:
            # show the permission bits as octal
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
824 824
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # map the ".d" data file name to its ".i" index file
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False),
                         file_[:-2] + ".i")
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
832 832
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        # --extended accepts additional date formats
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
844 844
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev"
             " nodeid p1 p2\n")
    for i in r:
        node = r.node(i)
        try:
            pp = r.parents(node)
        except:
            # damaged entries still get a row, with null parents
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
            short(node), short(pp[0]), short(pp[1])))
859 859
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in rlog:
        node = rlog.node(rev)
        parents = rlog.parents(node)
        # one edge per non-null parent
        ui.write("\t%d -> %d\n" % (rlog.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(parents[1]), rev))
    ui.write("}\n")
871 871
def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        # write contents to a fresh temp file and return its name
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        # round-trip a trivial string through the local encoding
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
        os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        # apply the generated diff and verify the result matches b
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
            problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
983 983
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo[opts.get('rev')]
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (oldname, oldnode) or a false value
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = matcher.rel(path)
        if renamed:
            ui.write(_("%s renamed from %s:%s\n") %
                     (rel, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
997 997
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = cmdutil.match(repo, pats, opts)
    walked = list(repo.walk(matcher))
    if not walked:
        return
    # size the columns to the widest absolute and relative paths
    widest_abs = max([len(p) for p in walked])
    widest_rel = max([len(matcher.rel(p)) for p in walked])
    fmt = 'f %%-%ds %%-%ds %%s' % (widest_abs, widest_rel)
    for p in walked:
        line = fmt % (p, matcher.rel(p), matcher.exact(p) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
1010 1010
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.

    Use the --git option to generate diffs in the git extended diff
    format. For more information, read hg help diffs.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    if revs and change:
        raise util.Abort(
            _('cannot specify --rev and --change at the same time'))
    if change:
        # --change REV: diff REV against its first parent
        node2 = repo.lookup(change)
        node1 = repo[node2].parents()[0].node()
    else:
        node1, node2 = cmdutil.revpair(repo, revs)

    matcher = cmdutil.match(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    for chunk in patch.diff(repo, node1, node2, match=matcher, opts=diffopts):
        repo.ui.write(chunk)
1052 1052
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    Use the --git option to generate diffs in the git extended diff
    format. Read the diffs help topic for more information.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # pluralize the progress note based on how many revisions we export
    ui.note(len(revs) > 1 and _('exporting patches:\n')
            or _('exporting patch:\n'))
    patch.export(repo, revs, template=opts.get('output'),
                 switch_parent=opts.get('switch_parent'),
                 opts=patch.diffopts(ui, opts))
1096 1096
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    sep, eol = ':', '\n'
    if opts.get('print0'):
        # NUL-separated output for scripting (like grep -Z)
        sep = eol = '\0'

    # cache filelog objects, one per file name
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every match in body
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body)
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        # one matched line; equality is on line text only so that
        # difflinestates() can diff two revisions' match lists
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        # record every matching line of fn at rev into matches[rev][fn]
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('+'/'-', linestate) for match-status changes between a and b
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    prev = {}
    def display(fn, rev, states, prevstates):
        # print matches for one file/revision; returns True if anything shown
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts.get('all'):
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(get(r)[1]))
            if opts.get('date'):
                cols.append(datefunc(get(r)[2]))
            if opts.get('files_with_matches'):
                # only one output line per (file, rev) pair
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs drives us with 'window'/'add'/'iter' events
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            # collect matches for every file touched at this revision
            ctx = repo[rev]
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                try:
                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
                    fstate.setdefault(fn, [])
                    if follow:
                        # with --follow, track renames/copies across revisions
                        copied = getfile(fn).renamed(ctx.filenode(fn))
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except error.LookupError:
                    pass
        elif st == 'iter':
            # emit output now that the window is complete
            for fn, m in util.sort(matches[rev].items()):
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts.get('all'):
                        # without --all, stop after the first matching rev
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush any remaining per-file state not yet displayed
    for fn, state in util.sort(fstate.items()):
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    return (not found and 1) or 0
1262 1262
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    if opts.get('rev'):
        # only consider heads reachable from --rev
        start = repo.lookup(opts['rev'])
    else:
        start = None
    # --active inverts 'closed' (presumably excluding closed branch
    # heads from the listing) — NOTE(review): confirm against repo.heads
    closed = not opts.get('active')
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start, closed=closed)
    else:
        heads = []
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo[branchrev].branch()
            if branch in visitedset:
                # report each branch only once, even if named repeatedly
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start, closed=closed)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts.get('rev')))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts.get('rev')))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(repo[n])
1311 1311
1312 1312 def help_(ui, name=None, with_version=False):
1313 1313 """show help for a given topic or a help overview
1314 1314
1315 1315 With no arguments, print a list of commands and short help.
1316 1316
1317 1317 Given a topic, extension, or command name, print help for that topic."""
1318 1318 option_lists = []
1319 1319
1320 1320 def addglobalopts(aliases):
1321 1321 if ui.verbose:
1322 1322 option_lists.append((_("global options:"), globalopts))
1323 1323 if name == 'shortlist':
1324 1324 option_lists.append((_('use "hg help" for the full list '
1325 1325 'of commands'), ()))
1326 1326 else:
1327 1327 if name == 'shortlist':
1328 1328 msg = _('use "hg help" for the full list of commands '
1329 1329 'or "hg -v" for details')
1330 1330 elif aliases:
1331 1331 msg = _('use "hg -v help%s" to show aliases and '
1332 1332 'global options') % (name and " " + name or "")
1333 1333 else:
1334 1334 msg = _('use "hg -v help %s" to show global options') % name
1335 1335 option_lists.append((msg, ()))
1336 1336
1337 1337 def helpcmd(name):
1338 1338 if with_version:
1339 1339 version_(ui)
1340 1340 ui.write('\n')
1341 1341
1342 1342 try:
1343 1343 aliases, i = cmdutil.findcmd(name, table, False)
1344 1344 except error.AmbiguousCommand, inst:
1345 1345 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1346 1346 helplist(_('list of commands:\n\n'), select)
1347 1347 return
1348 1348
1349 1349 # synopsis
1350 1350 if len(i) > 2:
1351 1351 if i[2].startswith('hg'):
1352 1352 ui.write("%s\n" % i[2])
1353 1353 else:
1354 1354 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1355 1355 else:
1356 1356 ui.write('hg %s\n' % aliases[0])
1357 1357
1358 1358 # aliases
1359 1359 if not ui.quiet and len(aliases) > 1:
1360 1360 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1361 1361
1362 1362 # description
1363 1363 doc = gettext(i[0].__doc__)
1364 1364 if not doc:
1365 1365 doc = _("(no help text available)")
1366 1366 if ui.quiet:
1367 1367 doc = doc.splitlines(0)[0]
1368 1368 ui.write("\n%s\n" % doc.rstrip())
1369 1369
1370 1370 if not ui.quiet:
1371 1371 # options
1372 1372 if i[1]:
1373 1373 option_lists.append((_("options:\n"), i[1]))
1374 1374
1375 1375 addglobalopts(False)
1376 1376
1377 1377 def helplist(header, select=None):
1378 1378 h = {}
1379 1379 cmds = {}
1380 1380 for c, e in table.iteritems():
1381 1381 f = c.split("|", 1)[0]
1382 1382 if select and not select(f):
1383 1383 continue
1384 1384 if (not select and name != 'shortlist' and
1385 1385 e[0].__module__ != __name__):
1386 1386 continue
1387 1387 if name == "shortlist" and not f.startswith("^"):
1388 1388 continue
1389 1389 f = f.lstrip("^")
1390 1390 if not ui.debugflag and f.startswith("debug"):
1391 1391 continue
1392 1392 doc = gettext(e[0].__doc__)
1393 1393 if not doc:
1394 1394 doc = _("(no help text available)")
1395 1395 h[f] = doc.splitlines(0)[0].rstrip()
1396 1396 cmds[f] = c.lstrip("^")
1397 1397
1398 1398 if not h:
1399 1399 ui.status(_('no commands defined\n'))
1400 1400 return
1401 1401
1402 1402 ui.status(header)
1403 1403 fns = util.sort(h)
1404 1404 m = max(map(len, fns))
1405 1405 for f in fns:
1406 1406 if ui.verbose:
1407 1407 commands = cmds[f].replace("|",", ")
1408 1408 ui.write(" %s:\n %s\n"%(commands, h[f]))
1409 1409 else:
1410 1410 ui.write(' %-*s %s\n' % (m, f, h[f]))
1411 1411
1412 1412 exts = list(extensions.extensions())
1413 1413 if exts and name != 'shortlist':
1414 1414 ui.write(_('\nenabled extensions:\n\n'))
1415 1415 maxlength = 0
1416 1416 exthelps = []
1417 1417 for ename, ext in exts:
1418 1418 doc = (ext.__doc__ or _('(no help text available)'))
1419 1419 ename = ename.split('.')[-1]
1420 1420 maxlength = max(len(ename), maxlength)
1421 1421 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1422 1422 for ename, text in exthelps:
1423 1423 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1424 1424
1425 1425 if not ui.quiet:
1426 1426 addglobalopts(True)
1427 1427
1428 1428 def helptopic(name):
1429 1429 for names, header, doc in help.helptable:
1430 1430 if name in names:
1431 1431 break
1432 1432 else:
1433 1433 raise error.UnknownCommand(name)
1434 1434
1435 1435 # description
1436 1436 if not doc:
1437 1437 doc = _("(no help text available)")
1438 1438 if callable(doc):
1439 1439 doc = doc()
1440 1440
1441 1441 ui.write("%s\n" % header)
1442 1442 ui.write("%s\n" % doc.rstrip())
1443 1443
1444 1444 def helpext(name):
1445 1445 try:
1446 1446 mod = extensions.find(name)
1447 1447 except KeyError:
1448 1448 raise error.UnknownCommand(name)
1449 1449
1450 1450 doc = gettext(mod.__doc__) or _('no help text available')
1451 1451 doc = doc.splitlines(0)
1452 1452 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1453 1453 for d in doc[1:]:
1454 1454 ui.write(d, '\n')
1455 1455
1456 1456 ui.status('\n')
1457 1457
1458 1458 try:
1459 1459 ct = mod.cmdtable
1460 1460 except AttributeError:
1461 1461 ct = {}
1462 1462
1463 1463 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1464 1464 helplist(_('list of commands:\n\n'), modcmds.has_key)
1465 1465
1466 1466 if name and name != 'shortlist':
1467 1467 i = None
1468 1468 for f in (helptopic, helpcmd, helpext):
1469 1469 try:
1470 1470 f(name)
1471 1471 i = None
1472 1472 break
1473 1473 except error.UnknownCommand, inst:
1474 1474 i = inst
1475 1475 if i:
1476 1476 raise i
1477 1477
1478 1478 else:
1479 1479 # program name
1480 1480 if ui.verbose or with_version:
1481 1481 version_(ui)
1482 1482 else:
1483 1483 ui.status(_("Mercurial Distributed SCM\n"))
1484 1484 ui.status('\n')
1485 1485
1486 1486 # list of commands
1487 1487 if name == "shortlist":
1488 1488 header = _('basic commands:\n\n')
1489 1489 else:
1490 1490 header = _('list of commands:\n\n')
1491 1491
1492 1492 helplist(header)
1493 1493
1494 1494 # list all option lists
1495 1495 opt_output = []
1496 1496 for title, options in option_lists:
1497 1497 opt_output.append(("\n%s" % title, None))
1498 1498 for shortopt, longopt, default, desc in options:
1499 1499 if "DEPRECATED" in desc and not ui.verbose: continue
1500 1500 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1501 1501 longopt and " --%s" % longopt),
1502 1502 "%s%s" % (desc,
1503 1503 default
1504 1504 and _(" (default: %s)") % default
1505 1505 or "")))
1506 1506
1507 1507 if not name:
1508 1508 ui.write(_("\nadditional help topics:\n\n"))
1509 1509 topics = []
1510 1510 for names, header, doc in help.helptable:
1511 1511 names = [(-len(name), name) for name in names]
1512 1512 names.sort()
1513 1513 topics.append((names[0][1], header))
1514 1514 topics_len = max([len(s[0]) for s in topics])
1515 1515 for t, desc in topics:
1516 1516 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1517 1517
1518 1518 if opt_output:
1519 1519 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1520 1520 for first, second in opt_output:
1521 1521 if second:
1522 1522 ui.write(" %-*s %s\n" % (opts_len, first, second))
1523 1523 else:
1524 1524 ui.write("%s\n" % first)
1525 1525
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """
    # NOTE(review): the diff view carried both the pre- and post-rename
    # lines for tolocal; only the encoding.tolocal calls are kept, matching
    # the move of the encoding helpers out of util (see file header).

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags)
    output = []

    revs = []
    if source:
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        repo = hg.repository(ui, source)

    if not repo.local():
        # a remote repo can only answer a node-id lookup
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(repo.lookup(rev))]
    elif not rev:
        # working directory: parent hash(es), "+" appended when dirty
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo[rev]
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if repo.local() and default and not ui.quiet:
        b = encoding.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(encoding.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1598 1598
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    With --similarity, hg will attempt to discover renames and copies
    in the patch in the same way as 'addremove'.

    To read a patch from standard input, use patch name "-".
    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --similarity is given as a percentage; validated here, used as a
    # 0..1 fraction by patch.updatedir below
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))

    # --exact always requires a clean working dir; otherwise -f skips it
    if opts.get('exact') or not opts.get('force'):
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        # hold both working-dir and store locks across the whole batch
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                pf = sys.stdin
            else:
                ui.status(_("applying %s\n") % p)
                pf = url.open(ui, pf)
            # extract writes the diff to a temp file and pulls metadata
            # (message, user, date, branch, node ids) out of the mail/patch
            data = patch.extract(ui, pf)
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # commit message precedence: command line, then patch body,
                # then None (which makes commit launch the editor)
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    # move the working dir to the patch's recorded parent
                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # merge patch: adopt its parents only if they resolve
                    # and the first one matches the working dir parent
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except error.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                files=files)
                finally:
                    # record adds/removes even if the patch partially failed
                    files = patch.updatedir(ui, repo, files, similarity=sim/100.)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        # --exact guarantees the resulting node id
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-do-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        # dropping the references releases the locks — presumably via the
        # lock objects' finalizers; TODO confirm against the lock class
        del lock, wlock
1722 1722
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
                                                force=opts["force"])
    if not incoming:
        # best-effort removal of a stale --bundle file; only filesystem
        # errors are ignored (the previous bare "except:" also swallowed
        # KeyboardInterrupt and programming errors)
        try:
            os.unlink(opts["bundle"])
        except OSError:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)

            if revs is None and other.capable('changegroupsubset'):
                revs = rheads

            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts.get('newest_first'):
            o.reverse()
        # display at most `limit` changesets, skipping merges on request
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1793 1793
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    See 'hg help urls' for more information.
    """
    # apply remote-related options before touching dest, which may be
    # an ssh:// URL rather than a local path
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1807 1807
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts.get('print0') and '\0' or '\n'
    rev = opts.get('rev') or None

    ret = 1
    m = cmdutil.match(repo, pats, opts, default='relglob')
    # suppress warnings about nonexistent files
    m.bad = lambda x,y: False
    # `abspath` (was `abs`, which shadowed the builtin) is the
    # repo-relative path of each matched file
    for abspath in repo[rev].walk(m):
        if not rev and abspath not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abspath), end)
        else:
            ui.write(((pats and m.rel(abspath)) or abspath), end)
        ret = 0  # at least one match: success exit code

    return ret
1842 1842
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will only compare the merge changeset against
    its first parent. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoized accessor for a revision's changeset data tuple
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # endrev bounds how far the rename caches below need to scan
    if opts.get('copies') and opts.get('rev'):
        endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
    else:
        endrev = len(repo)
    rcache = {}  # filename -> {changerev: rename info}
    ncache = {}  # filename -> {filenode: rename info}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first sight of fn: walk its filelog once and fill both caches
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(i)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            pass
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts.get('only_branch')

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            # apply the -M/-m, -b, -d, -k and -u filters before queueing
            # the revision for display
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            if opts.get('only_merges') and len(parents) != 2:
                continue

            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts.get('keyword'):
                # every keyword must appear somewhere in the changeset data
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            if opts['user']:
                # exact match against any of the -u values
                changes = get(rev)
                miss = 0
                for k in opts['user']:
                    if k != changes[1]:
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(context.changectx(repo, rev), copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1974 1974
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    node = node or rev

    # flag character -> mode/marker prefix printed in verbose mode
    modemap = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
    ctx = repo[node]
    for path in ctx:
        if ui.debugflag:
            # file revision hash, right-aligned like the default template
            ui.write("%40s " % hex(ctx.manifest()[path]))
        if ui.verbose:
            ui.write(modemap[ctx.flags(path)])
        ui.write("%s\n" % path)
2003 2003
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # no explicit target: choose the other head of the current
        # branch, but only when that choice is unambiguous
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            # a single branch head: nothing to merge on this branch
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                # working dir is not at the branch tip
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # merge with whichever branch head is not the working dir parent
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    return hg.merge(repo, node, force=force)
2047 2047
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    # resolve the destination alias/URL and any -r arguments
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    nodes = repo.findoutgoing(other, force=opts.get('force'))
    if not nodes:
        ui.status(_("no changes found\n"))
        return 1

    nodes = repo.changelog.nodesbetween(nodes, revs)[0]
    if opts.get('newest_first'):
        nodes.reverse()

    # display at most `limit` changesets, skipping merges on request
    displayer = cmdutil.show_changeset(ui, repo, opts)
    shown = 0
    for node in nodes:
        if shown >= limit:
            break
        parents = [p for p in repo.changelog.parents(node) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        shown += 1
        displayer.show(repo[node])
2083 2083
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    # opts.get('rev') is falsy when unset; repo[None] is the working dir
    ctx = repo[opts.get('rev') or None]

    if file_:
        matcher = cmdutil.match(repo, (file_,), opts)
        if matcher.anypats() or len(matcher.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = matcher.files()[0]
        # collect the file revision in each parent that actually has it
        filenodes = []
        for pctx in ctx.parents():
            if not pctx:
                continue
            try:
                filenodes.append(pctx.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # map each file revision back to its introducing changeset
        fl = repo.file(file_)
        nodes = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        nodes = [pctx.node() for pctx in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        if node != nullid:
            displayer.show(repo[node])
2123 2123
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.

    See 'hg help urls' for more information.
    """
    pathitems = ui.configitems("paths")
    if not search:
        # no argument: dump the whole alias table
        for alias, location in pathitems:
            ui.write("%s = %s\n" % (alias, url.hidepassword(location)))
        return
    # look up a single alias; non-zero exit when it is undefined
    for alias, location in pathitems:
        if alias == search:
            ui.write("%s\n" % url.hidepassword(location))
            return
    ui.warn(_("not found!\n"))
    return 1
2145 2145
def postincoming(ui, repo, modheads, optupdate, checkout):
    # Shared tail for commands that add changesets: optionally update the
    # working directory and print a next-step hint.  modheads is the
    # number of heads added; zero means nothing arrived and nothing is
    # printed.
    if modheads == 0:
        return
    if optupdate:
        if modheads <= 1 or len(repo.branchheads()) == 1 or checkout:
            return hg.update(repo, checkout)
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2158 2158
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    If SOURCE is omitted, the 'default' path will be used.
    See 'hg help urls' for more information.
    """
    # expand path aliases and split off any #rev fragment / -r options
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    cmdutil.setremoteconfig(ui, opts)

    remote = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % url.hidepassword(source))
    if revs:
        # resolve symbolic revs remotely; old servers can't do lookups
        try:
            revs = [remote.lookup(rev) for rev in revs]
        except error.CapabilityError:
            raise util.Abort(_("Other repository doesn't support revision lookup, "
                               "so a rev cannot be specified."))

    modheads = repo.pull(remote, heads=revs, force=opts.get('force'))
    return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2186 2186
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    the client has forgotten to pull and merge before pushing.

    If -r is used, the named changeset and all its ancestors will be pushed
    to the remote repository.

    Look at the help text for urls for important details about ssh:// URLs.
    If DESTINATION is omitted, a default path will be used.
    See 'hg help urls' for more information.
    """
    # resolve the destination alias/URL and any -r arguments
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    cmdutil.setremoteconfig(ui, opts)

    remote = hg.repository(ui, dest)
    ui.status(_('pushing to %s\n') % url.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    # fold repo.push's result into the command's success/failure value
    return repo.push(remote, opts.get('force'), revs=revs) == 0
2218 2218
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = cmdutil.logmessage(opts)

    # files named on the command line, plus any listed (one per line)
    # in the file given via --files
    files = cmdutil.match(repo, pats, opts).files()
    if opts.get('files'):
        files += open(opts['files']).read().splitlines()

    # explicit parent changesets supplied via --parent
    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))
2246 2246
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # repo.recover() returned a falsy value: report failure to the shell
    if not repo.recover():
        return 1
    # recovery succeeded: immediately re-verify the repository
    return hg.verify(repo)
2258 2258
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the entire
    project history. -A can be used to remove only files that have already
    been deleted, -f can be used to force deletion, and -Af can be used
    to remove files from the next revision without deleting them.

    The following table details the behavior of remove for different file
    states (columns) and option combinations (rows). The file states are
    Added, Clean, Modified and Missing (as reported by hg status). The
    actions are Warn, Remove (from branch) and Delete (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    def warn(files, reason):
        # explain why each file was left alone
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    # implement the A/C/M/! behavior table from the docstring:
    # `remove` is dropped from the branch, `forget` is unscheduled
    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        remove, forget = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in util.sort(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(forget)
    repo.remove(remove, unlink=not after)
2313 2313
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see hg revert.
    """
    # hold the working-directory lock while recording the copy + removal
    wlock = repo.wlock(False)
    try:
        result = cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        # dropping the last reference releases the lock (the
        # pre-context-manager locking idiom used throughout this file)
        del wlock
    return result
2333 2333
def resolve(ui, repo, *pats, **opts):
    """retry file merges from a merge or update

    This command will cleanly retry unresolved file merges using file
    revisions preserved from the last update or merge. To attempt to
    resolve all unresolved files, use the -a switch.

    This command will also allow listing resolved files and manually
    marking and unmarking files as resolved.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """

    # NOTE: 'all' shadows the builtin here; local to this function only
    all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]

    # --list, --mark and --unmark are mutually exclusive modes
    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    # the merge state tracks each conflicting file with a 'u'/'r' flag
    ms = merge_.mergestate(repo)
    m = cmdutil.match(repo, pats, opts)

    for f in ms:
        if m(f):
            if show:
                # print the resolution flag in upper case plus the name
                ui.write("%s %s\n" % (ms[f].upper(), f))
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # default mode: re-run the file merge between the working
                # context and the other merge parent
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                # resolve file
                ms.resolve(f, wctx, mctx)

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")
2383 2383
def revert(ui, repo, *pats, **opts):
    """restore individual files or dirs to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working dir parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of an earlier change.
    See 'hg help dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    # --date is translated into the newest revision matching that date
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts.get('all'):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo[opts.get('rev')]
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        # parent manifest is loaded lazily below, only if actually needed
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # names: abs path -> (relative path for display, exact-match flag)
    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.

        m = cmdutil.match(repo, pats, opts)
        m.bad = lambda x,y: False  # suppress missing-file warnings here
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            # only warn about names found in neither walk; a directory
            # prefix of an already-collected file is fine too
            if path in names:
                return False
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return False
            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
            return False

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        # dicts give O(1) membership tests in the dispatch loop below
        modified, added, removed, deleted = map(dict.fromkeys, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed[src] = None
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            # choose the status message for files leaving the dirstate
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # each action: ([files collected], status message or callable)
        # NOTE: the local 'revert' shadows this function's own name
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in util.sort(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # record the file under the chosen action, optionally
                # saving the current version as <file>.orig first
                xlist[0].append(abs)
                if dobackup and not opts.get('no_backup') and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for/else: no dispatch row matched, i.e. the file is
                # unchanged according to the dirstate
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            # apply the collected actions to the working directory
            def checkout(f):
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    # file may already be gone; unlinking is best-effort
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        del wlock
2600 2600
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work (journal replay, dirstate restore) happens in the
    # repository object
    repo.rollback()
2628 2628
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # repo.root is the repository's top-level directory; emit it on a
    # line of its own
    ui.write("%s\n" % repo.root)
2635 2635
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: speak the ssh wire protocol on stdin/stdout instead of
    # serving HTTP (used by remote 'hg serve --stdio' invocations)
    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    # mirror the relevant command-line options into the [web] config
    # section so hgweb picks them up
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise error.RepoError(_("There is no Mercurial repository here"
                                " (.hg not found)"))

    class service:
        def init(self):
            # runs (possibly after daemonization) before serving starts
            util.set_signal_handler()
            self.httpd = hgweb.server.create_server(parentui, repo)

            if not ui.verbose: return

            # pretty-print the effective URL for the user
            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                # default HTTP port needs no suffix
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles --daemon forking/pid-file, then init/run
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2699 2699
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/-ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    # paths are printed relative to cwd only when patterns were given
    cwd = (pats and repo.getcwd()) or ''
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    # order matches both repo.status()'s tuple layout and 'MAR!?IC'
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # default selection: -mard, plus unknown unless --quiet
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        # map copy/rename destinations to their sources for display
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            # "<code> <path><end>", or just "<path><end>" with -n
            format = "%s %%s%s" % (char, end)
            if opts.get('no_status'):
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    # indented follow-up line naming the copy source
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))
2770 2770
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    rev_ = "."
    names = (name1,) + names
    # dict.fromkeys dedupes; a shorter dict means a repeated name
    if len(names) != len(dict.fromkeys(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        # these names always resolve to revisions and may not be tags
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts.get('rev') and opts.get('remove'):
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts.get('rev'):
        rev_ = opts['rev']
    message = opts.get('message')
    if opts.get('remove'):
        # removing a tag means committing it pointed at the null revision
        expectedtype = opts.get('local') and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                raise util.Abort(_('tag \'%s\' is not a %s tag') %
                                 (n, expectedtype))
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts.get('force'):
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    # repo.tag does the actual .hgtags/localtags edit and commit
    repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2834 2834
def tags(ui, repo):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    l.reverse()  # newest first
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except error.LookupError:
            # the tag points at a node missing from the changelog (e.g.
            # after a strip); show '?' in place of the revision number
            # instead of silently skipping the entry
            r = "    ?:%s" % hn
        # pad names to a 30-column field, accounting for wide characters
        spaces = " " * (30 - encoding.colwidth(t))
        if ui.verbose:
            if repo.tagtype(t) == 'local':
                tagtype = " local"
            else:
                tagtype = ""
        ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2865 2865
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    # the tip is always the changeset with the highest revision number
    displayer = cmdutil.show_changeset(ui, repo, opts)
    tiprev = len(repo) - 1
    displayer.show(repo[tiprev])
2879 2879
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = None
    try:
        lock = repo.lock()
        for fname in fnames:
            # fname may be a local path or a URL
            f = url.open(ui, fname)
            gen = changegroup.readbundle(f, fname)
            # modheads from the *last* bundle drives the message below
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        # dropping the last reference releases the repository lock
        del lock

    return postincoming(ui, repo, modheads, opts.get('update'), None)
2899 2899
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the repository's working directory to the specified revision,
    or the tip of the current branch if none is specified. Use null as
    the revision to remove the working copy (like 'hg clone -U').

    When the working dir contains no uncommitted changes, it will be
    replaced by the state of the requested revision from the repo. When
    the requested revision is on a different branch, the working dir
    will additionally be switched to that branch.

    When there are uncommitted changes, use option -C to discard them,
    forcibly replacing the state of the working dir with the requested
    revision.

    When there are uncommitted changes and option -C is not used, and
    the parent revision and requested revision are on the same branch,
    and one of them is an ancestor of the other, then the new working
    directory will contain the requested revision merged with the
    uncommitted changes. Otherwise, the update will fail with a
    suggestion to use 'merge' or 'update -C' instead.

    If you want to update just one file to an older revision, use revert.

    See 'hg help dates' for a list of formats valid for --date.
    """
    # the target may arrive as a positional argument (node) or via -r
    # (rev), but never both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    rev = rev or node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # pick the newest revision matching the date spec
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2942 2942
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # all the work happens in hg.verify; its return value becomes the
    # command's exit status
    return hg.verify(repo)
2954 2954
def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    # the copyright notice goes through ui.status so -q suppresses it
    ui.status(_(
        "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2965 2965
2966 2966 # Command options and aliases are listed here, alphabetically
2967 2967
# global options recognized before any command; the --encoding and
# --encodingmode defaults come from the encoding module (this span of
# the rendered diff contained both the old util._encoding lines and
# their encoding.* replacements; only the post-change version is kept)
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]
2988 2989
# option lists shared by several commands; each command's entry in the
# table below concatenates the ones it needs

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'))
]
3045 3046
3046 3047 table = {
3047 3048 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3048 3049 "addremove":
3049 3050 (addremove, similarityopts + walkopts + dryrunopts,
3050 3051 _('[OPTION]... [FILE]...')),
3051 3052 "^annotate|blame":
3052 3053 (annotate,
3053 3054 [('r', 'rev', '', _('annotate the specified revision')),
3054 3055 ('f', 'follow', None, _('follow file copies and renames')),
3055 3056 ('a', 'text', None, _('treat all files as text')),
3056 3057 ('u', 'user', None, _('list the author (long with -v)')),
3057 3058 ('d', 'date', None, _('list the date (short with -q)')),
3058 3059 ('n', 'number', None, _('list the revision number (default)')),
3059 3060 ('c', 'changeset', None, _('list the changeset')),
3060 3061 ('l', 'line-number', None,
3061 3062 _('show line number at the first appearance'))
3062 3063 ] + walkopts,
3063 3064 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3064 3065 "archive":
3065 3066 (archive,
3066 3067 [('', 'no-decode', None, _('do not pass files through decoders')),
3067 3068 ('p', 'prefix', '', _('directory prefix for files in archive')),
3068 3069 ('r', 'rev', '', _('revision to distribute')),
3069 3070 ('t', 'type', '', _('type of distribution to create')),
3070 3071 ] + walkopts,
3071 3072 _('[OPTION]... DEST')),
3072 3073 "backout":
3073 3074 (backout,
3074 3075 [('', 'merge', None,
3075 3076 _('merge with old dirstate parent after backout')),
3076 3077 ('', 'parent', '', _('parent to choose when backing out merge')),
3077 3078 ('r', 'rev', '', _('revision to backout')),
3078 3079 ] + walkopts + commitopts + commitopts2,
3079 3080 _('[OPTION]... [-r] REV')),
3080 3081 "bisect":
3081 3082 (bisect,
3082 3083 [('r', 'reset', False, _('reset bisect state')),
3083 3084 ('g', 'good', False, _('mark changeset good')),
3084 3085 ('b', 'bad', False, _('mark changeset bad')),
3085 3086 ('s', 'skip', False, _('skip testing changeset')),
3086 3087 ('c', 'command', '', _('use command to check changeset state')),
3087 3088 ('U', 'noupdate', False, _('do not update to target'))],
3088 3089 _("[-gbsr] [-c CMD] [REV]")),
3089 3090 "branch":
3090 3091 (branch,
3091 3092 [('f', 'force', None,
3092 3093 _('set branch name even if it shadows an existing branch')),
3093 3094 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3094 3095 _('[-fC] [NAME]')),
3095 3096 "branches":
3096 3097 (branches,
3097 3098 [('a', 'active', False,
3098 3099 _('show only branches that have unmerged heads'))],
3099 3100 _('[-a]')),
3100 3101 "bundle":
3101 3102 (bundle,
3102 3103 [('f', 'force', None,
3103 3104 _('run even when remote repository is unrelated')),
3104 3105 ('r', 'rev', [],
3105 3106 _('a changeset up to which you would like to bundle')),
3106 3107 ('', 'base', [],
3107 3108 _('a base changeset to specify instead of a destination')),
3108 3109 ('a', 'all', None, _('bundle all changesets in the repository')),
3109 3110 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3110 3111 ] + remoteopts,
3111 3112 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3112 3113 "cat":
3113 3114 (cat,
3114 3115 [('o', 'output', '', _('print output to file with formatted name')),
3115 3116 ('r', 'rev', '', _('print the given revision')),
3116 3117 ('', 'decode', None, _('apply any matching decode filter')),
3117 3118 ] + walkopts,
3118 3119 _('[OPTION]... FILE...')),
3119 3120 "^clone":
3120 3121 (clone,
3121 3122 [('U', 'noupdate', None,
3122 3123 _('the clone will only contain a repository (no working copy)')),
3123 3124 ('r', 'rev', [],
3124 3125 _('a changeset you would like to have after cloning')),
3125 3126 ('', 'pull', None, _('use pull protocol to copy metadata')),
3126 3127 ('', 'uncompressed', None,
3127 3128 _('use uncompressed transfer (fast over LAN)')),
3128 3129 ] + remoteopts,
3129 3130 _('[OPTION]... SOURCE [DEST]')),
3130 3131 "^commit|ci":
3131 3132 (commit,
3132 3133 [('A', 'addremove', None,
3133 3134 _('mark new/missing files as added/removed before committing')),
3134 3135 ('', 'close-branch', None,
3135 3136 _('mark a branch as closed, hiding it from the branch list')),
3136 3137 ] + walkopts + commitopts + commitopts2,
3137 3138 _('[OPTION]... [FILE]...')),
3138 3139 "copy|cp":
3139 3140 (copy,
3140 3141 [('A', 'after', None, _('record a copy that has already occurred')),
3141 3142 ('f', 'force', None,
3142 3143 _('forcibly copy over an existing managed file')),
3143 3144 ] + walkopts + dryrunopts,
3144 3145 _('[OPTION]... [SOURCE]... DEST')),
3145 3146 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3146 3147 "debugcheckstate": (debugcheckstate, []),
3147 3148 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3148 3149 "debugcomplete":
3149 3150 (debugcomplete,
3150 3151 [('o', 'options', None, _('show the command options'))],
3151 3152 _('[-o] CMD')),
3152 3153 "debugdate":
3153 3154 (debugdate,
3154 3155 [('e', 'extended', None, _('try extended date formats'))],
3155 3156 _('[-e] DATE [RANGE]')),
3156 3157 "debugdata": (debugdata, [], _('FILE REV')),
3157 3158 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3158 3159 "debugindex": (debugindex, [], _('FILE')),
3159 3160 "debugindexdot": (debugindexdot, [], _('FILE')),
3160 3161 "debuginstall": (debuginstall, []),
3161 3162 "debugrawcommit|rawcommit":
3162 3163 (rawcommit,
3163 3164 [('p', 'parent', [], _('parent')),
3164 3165 ('F', 'files', '', _('file list'))
3165 3166 ] + commitopts + commitopts2,
3166 3167 _('[OPTION]... [FILE]...')),
3167 3168 "debugrebuildstate":
3168 3169 (debugrebuildstate,
3169 3170 [('r', 'rev', '', _('revision to rebuild to'))],
3170 3171 _('[-r REV] [REV]')),
3171 3172 "debugrename":
3172 3173 (debugrename,
3173 3174 [('r', 'rev', '', _('revision to debug'))],
3174 3175 _('[-r REV] FILE')),
3175 3176 "debugsetparents":
3176 3177 (debugsetparents, [], _('REV1 [REV2]')),
3177 3178 "debugstate":
3178 3179 (debugstate,
3179 3180 [('', 'nodates', None, _('do not display the saved mtime'))],
3180 3181 _('[OPTION]...')),
3181 3182 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3182 3183 "^diff":
3183 3184 (diff,
3184 3185 [('r', 'rev', [], _('revision')),
3185 3186 ('c', 'change', '', _('change made by revision'))
3186 3187 ] + diffopts + diffopts2 + walkopts,
3187 3188 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3188 3189 "^export":
3189 3190 (export,
3190 3191 [('o', 'output', '', _('print output to file with formatted name')),
3191 3192 ('', 'switch-parent', None, _('diff against the second parent'))
3192 3193 ] + diffopts,
3193 3194 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3194 3195 "grep":
3195 3196 (grep,
3196 3197 [('0', 'print0', None, _('end fields with NUL')),
3197 3198 ('', 'all', None, _('print all revisions that match')),
3198 3199 ('f', 'follow', None,
3199 3200 _('follow changeset history, or file history across copies and renames')),
3200 3201 ('i', 'ignore-case', None, _('ignore case when matching')),
3201 3202 ('l', 'files-with-matches', None,
3202 3203 _('print only filenames and revs that match')),
3203 3204 ('n', 'line-number', None, _('print matching line numbers')),
3204 3205 ('r', 'rev', [], _('search in given revision range')),
3205 3206 ('u', 'user', None, _('list the author (long with -v)')),
3206 3207 ('d', 'date', None, _('list the date (short with -q)')),
3207 3208 ] + walkopts,
3208 3209 _('[OPTION]... PATTERN [FILE]...')),
3209 3210 "heads":
3210 3211 (heads,
3211 3212 [('r', 'rev', '', _('show only heads which are descendants of rev')),
3212 3213 ('a', 'active', False,
3213 3214 _('show only the active heads from open branches')),
3214 3215 ] + templateopts,
3215 3216 _('[-r REV] [REV]...')),
3216 3217 "help": (help_, [], _('[TOPIC]')),
3217 3218 "identify|id":
3218 3219 (identify,
3219 3220 [('r', 'rev', '', _('identify the specified rev')),
3220 3221 ('n', 'num', None, _('show local revision number')),
3221 3222 ('i', 'id', None, _('show global revision id')),
3222 3223 ('b', 'branch', None, _('show branch')),
3223 3224 ('t', 'tags', None, _('show tags'))],
3224 3225 _('[-nibt] [-r REV] [SOURCE]')),
3225 3226 "import|patch":
3226 3227 (import_,
3227 3228 [('p', 'strip', 1,
3228 3229 _('directory strip option for patch. This has the same\n'
3229 3230 'meaning as the corresponding patch option')),
3230 3231 ('b', 'base', '', _('base path')),
3231 3232 ('f', 'force', None,
3232 3233 _('skip check for outstanding uncommitted changes')),
3233 3234 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3234 3235 ('', 'exact', None,
3235 3236 _('apply patch to the nodes from which it was generated')),
3236 3237 ('', 'import-branch', None,
3237 3238 _('Use any branch information in patch (implied by --exact)'))] +
3238 3239 commitopts + commitopts2 + similarityopts,
3239 3240 _('[OPTION]... PATCH...')),
3240 3241 "incoming|in":
3241 3242 (incoming,
3242 3243 [('f', 'force', None,
3243 3244 _('run even when remote repository is unrelated')),
3244 3245 ('n', 'newest-first', None, _('show newest record first')),
3245 3246 ('', 'bundle', '', _('file to store the bundles into')),
3246 3247 ('r', 'rev', [],
3247 3248 _('a specific revision up to which you would like to pull')),
3248 3249 ] + logopts + remoteopts,
3249 3250 _('[-p] [-n] [-M] [-f] [-r REV]...'
3250 3251 ' [--bundle FILENAME] [SOURCE]')),
3251 3252 "^init":
3252 3253 (init,
3253 3254 remoteopts,
3254 3255 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3255 3256 "locate":
3256 3257 (locate,
3257 3258 [('r', 'rev', '', _('search the repository as it stood at rev')),
3258 3259 ('0', 'print0', None,
3259 3260 _('end filenames with NUL, for use with xargs')),
3260 3261 ('f', 'fullpath', None,
3261 3262 _('print complete paths from the filesystem root')),
3262 3263 ] + walkopts,
3263 3264 _('[OPTION]... [PATTERN]...')),
3264 3265 "^log|history":
3265 3266 (log,
3266 3267 [('f', 'follow', None,
3267 3268 _('follow changeset history, or file history across copies and renames')),
3268 3269 ('', 'follow-first', None,
3269 3270 _('only follow the first parent of merge changesets')),
3270 3271 ('d', 'date', '', _('show revs matching date spec')),
3271 3272 ('C', 'copies', None, _('show copied files')),
3272 3273 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3273 3274 ('r', 'rev', [], _('show the specified revision or range')),
3274 3275 ('', 'removed', None, _('include revs where files were removed')),
3275 3276 ('m', 'only-merges', None, _('show only merges')),
3276 3277 ('u', 'user', [], _('revs committed by user')),
3277 3278 ('b', 'only-branch', [],
3278 3279 _('show only changesets within the given named branch')),
3279 3280 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3280 3281 ] + logopts + walkopts,
3281 3282 _('[OPTION]... [FILE]')),
3282 3283 "manifest":
3283 3284 (manifest,
3284 3285 [('r', 'rev', '', _('revision to display'))],
3285 3286 _('[-r REV]')),
3286 3287 "^merge":
3287 3288 (merge,
3288 3289 [('f', 'force', None, _('force a merge with outstanding changes')),
3289 3290 ('r', 'rev', '', _('revision to merge')),
3290 3291 ],
3291 3292 _('[-f] [[-r] REV]')),
3292 3293 "outgoing|out":
3293 3294 (outgoing,
3294 3295 [('f', 'force', None,
3295 3296 _('run even when remote repository is unrelated')),
3296 3297 ('r', 'rev', [],
3297 3298 _('a specific revision up to which you would like to push')),
3298 3299 ('n', 'newest-first', None, _('show newest record first')),
3299 3300 ] + logopts + remoteopts,
3300 3301 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3301 3302 "^parents":
3302 3303 (parents,
3303 3304 [('r', 'rev', '', _('show parents from the specified rev')),
3304 3305 ] + templateopts,
3305 3306 _('hg parents [-r REV] [FILE]')),
3306 3307 "paths": (paths, [], _('[NAME]')),
3307 3308 "^pull":
3308 3309 (pull,
3309 3310 [('u', 'update', None,
3310 3311 _('update to new tip if changesets were pulled')),
3311 3312 ('f', 'force', None,
3312 3313 _('run even when remote repository is unrelated')),
3313 3314 ('r', 'rev', [],
3314 3315 _('a specific revision up to which you would like to pull')),
3315 3316 ] + remoteopts,
3316 3317 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3317 3318 "^push":
3318 3319 (push,
3319 3320 [('f', 'force', None, _('force push')),
3320 3321 ('r', 'rev', [],
3321 3322 _('a specific revision up to which you would like to push')),
3322 3323 ] + remoteopts,
3323 3324 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3324 3325 "recover": (recover, []),
3325 3326 "^remove|rm":
3326 3327 (remove,
3327 3328 [('A', 'after', None, _('record delete for missing files')),
3328 3329 ('f', 'force', None,
3329 3330 _('remove (and delete) file even if added or modified')),
3330 3331 ] + walkopts,
3331 3332 _('[OPTION]... FILE...')),
3332 3333 "rename|mv":
3333 3334 (rename,
3334 3335 [('A', 'after', None, _('record a rename that has already occurred')),
3335 3336 ('f', 'force', None,
3336 3337 _('forcibly copy over an existing managed file')),
3337 3338 ] + walkopts + dryrunopts,
3338 3339 _('[OPTION]... SOURCE... DEST')),
3339 3340 "resolve":
3340 3341 (resolve,
3341 3342 [('a', 'all', None, _('remerge all unresolved files')),
3342 3343 ('l', 'list', None, _('list state of files needing merge')),
3343 3344 ('m', 'mark', None, _('mark files as resolved')),
3344 3345 ('u', 'unmark', None, _('unmark files as resolved'))]
3345 3346 + walkopts,
3346 3347 _('[OPTION]... [FILE]...')),
3347 3348 "revert":
3348 3349 (revert,
3349 3350 [('a', 'all', None, _('revert all changes when no arguments given')),
3350 3351 ('d', 'date', '', _('tipmost revision matching date')),
3351 3352 ('r', 'rev', '', _('revision to revert to')),
3352 3353 ('', 'no-backup', None, _('do not save backup copies of files')),
3353 3354 ] + walkopts + dryrunopts,
3354 3355 _('[OPTION]... [-r REV] [NAME]...')),
3355 3356 "rollback": (rollback, []),
3356 3357 "root": (root, []),
3357 3358 "^serve":
3358 3359 (serve,
3359 3360 [('A', 'accesslog', '', _('name of access log file to write to')),
3360 3361 ('d', 'daemon', None, _('run server in background')),
3361 3362 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3362 3363 ('E', 'errorlog', '', _('name of error log file to write to')),
3363 3364 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3364 3365 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3365 3366 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3366 3367 ('n', 'name', '',
3367 3368 _('name to show in web pages (default: working dir)')),
3368 3369 ('', 'webdir-conf', '', _('name of the webdir config file'
3369 3370 ' (serve more than one repo)')),
3370 3371 ('', 'pid-file', '', _('name of file to write process ID to')),
3371 3372 ('', 'stdio', None, _('for remote clients')),
3372 3373 ('t', 'templates', '', _('web templates to use')),
3373 3374 ('', 'style', '', _('template style to use')),
3374 3375 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3375 3376 ('', 'certificate', '', _('SSL certificate file'))],
3376 3377 _('[OPTION]...')),
3377 3378 "showconfig|debugconfig":
3378 3379 (showconfig,
3379 3380 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3380 3381 _('[-u] [NAME]...')),
3381 3382 "^status|st":
3382 3383 (status,
3383 3384 [('A', 'all', None, _('show status of all files')),
3384 3385 ('m', 'modified', None, _('show only modified files')),
3385 3386 ('a', 'added', None, _('show only added files')),
3386 3387 ('r', 'removed', None, _('show only removed files')),
3387 3388 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3388 3389 ('c', 'clean', None, _('show only files without changes')),
3389 3390 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3390 3391 ('i', 'ignored', None, _('show only ignored files')),
3391 3392 ('n', 'no-status', None, _('hide status prefix')),
3392 3393 ('C', 'copies', None, _('show source of copied files')),
3393 3394 ('0', 'print0', None,
3394 3395 _('end filenames with NUL, for use with xargs')),
3395 3396 ('', 'rev', [], _('show difference from revision')),
3396 3397 ] + walkopts,
3397 3398 _('[OPTION]... [FILE]...')),
3398 3399 "tag":
3399 3400 (tag,
3400 3401 [('f', 'force', None, _('replace existing tag')),
3401 3402 ('l', 'local', None, _('make the tag local')),
3402 3403 ('r', 'rev', '', _('revision to tag')),
3403 3404 ('', 'remove', None, _('remove a tag')),
3404 3405 # -l/--local is already there, commitopts cannot be used
3405 3406 ('m', 'message', '', _('use <text> as commit message')),
3406 3407 ] + commitopts2,
3407 3408 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3408 3409 "tags": (tags, []),
3409 3410 "tip":
3410 3411 (tip,
3411 3412 [('p', 'patch', None, _('show patch')),
3412 3413 ('g', 'git', None, _('use git extended diff format')),
3413 3414 ] + templateopts,
3414 3415 _('[-p]')),
3415 3416 "unbundle":
3416 3417 (unbundle,
3417 3418 [('u', 'update', None,
3418 3419 _('update to new tip if changesets were unbundled'))],
3419 3420 _('[-u] FILE...')),
3420 3421 "^update|up|checkout|co":
3421 3422 (update,
3422 3423 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3423 3424 ('d', 'date', '', _('tipmost revision matching date')),
3424 3425 ('r', 'rev', '', _('revision'))],
3425 3426 _('[-C] [-d DATE] [[-r] REV]')),
3426 3427 "verify": (verify, []),
3427 3428 "version": (version_, []),
3428 3429 }
3429 3430
3430 3431 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3431 3432 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3432 3433 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,417 +1,417 b''
1 1 # dispatch.py - command dispatching for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time
10 10 import util, commands, hg, fancyopts, extensions, hook, error
11 import cmdutil
11 import cmdutil, encoding
12 12 import ui as _ui
13 13
14 14 def run():
15 15 "run the command in sys.argv"
16 16 sys.exit(dispatch(sys.argv[1:]))
17 17
18 18 def dispatch(args):
19 19 "run the command specified in args"
20 20 try:
21 21 u = _ui.ui(traceback='--traceback' in args)
22 22 except util.Abort, inst:
23 23 sys.stderr.write(_("abort: %s\n") % inst)
24 24 return -1
25 25 return _runcatch(u, args)
26 26
27 27 def _runcatch(ui, args):
28 28 def catchterm(*args):
29 29 raise error.SignalInterrupt
30 30
31 31 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
32 32 num = getattr(signal, name, None)
33 33 if num: signal.signal(num, catchterm)
34 34
35 35 try:
36 36 try:
37 37 # enter the debugger before command execution
38 38 if '--debugger' in args:
39 39 pdb.set_trace()
40 40 try:
41 41 return _dispatch(ui, args)
42 42 finally:
43 43 ui.flush()
44 44 except:
45 45 # enter the debugger when we hit an exception
46 46 if '--debugger' in args:
47 47 pdb.post_mortem(sys.exc_info()[2])
48 48 ui.print_exc()
49 49 raise
50 50
51 51 # Global exception handling, alphabetically
52 52 # Mercurial-specific first, followed by built-in and library exceptions
53 53 except error.AmbiguousCommand, inst:
54 54 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
55 55 (inst.args[0], " ".join(inst.args[1])))
56 56 except error.LockHeld, inst:
57 57 if inst.errno == errno.ETIMEDOUT:
58 58 reason = _('timed out waiting for lock held by %s') % inst.locker
59 59 else:
60 60 reason = _('lock held by %s') % inst.locker
61 61 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
62 62 except error.LockUnavailable, inst:
63 63 ui.warn(_("abort: could not lock %s: %s\n") %
64 64 (inst.desc or inst.filename, inst.strerror))
65 65 except error.ParseError, inst:
66 66 if inst.args[0]:
67 67 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
68 68 commands.help_(ui, inst.args[0])
69 69 else:
70 70 ui.warn(_("hg: %s\n") % inst.args[1])
71 71 commands.help_(ui, 'shortlist')
72 72 except error.RepoError, inst:
73 73 ui.warn(_("abort: %s!\n") % inst)
74 74 except error.ResponseError, inst:
75 75 ui.warn(_("abort: %s") % inst.args[0])
76 76 if not isinstance(inst.args[1], basestring):
77 77 ui.warn(" %r\n" % (inst.args[1],))
78 78 elif not inst.args[1]:
79 79 ui.warn(_(" empty string\n"))
80 80 else:
81 81 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
82 82 except error.RevlogError, inst:
83 83 ui.warn(_("abort: %s!\n") % inst)
84 84 except error.SignalInterrupt:
85 85 ui.warn(_("killed!\n"))
86 86 except error.UnknownCommand, inst:
87 87 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
88 88 commands.help_(ui, 'shortlist')
89 89 except util.Abort, inst:
90 90 ui.warn(_("abort: %s\n") % inst)
91 91 except ImportError, inst:
92 92 m = str(inst).split()[-1]
93 93 ui.warn(_("abort: could not import module %s!\n") % m)
94 94 if m in "mpatch bdiff".split():
95 95 ui.warn(_("(did you forget to compile extensions?)\n"))
96 96 elif m in "zlib".split():
97 97 ui.warn(_("(is your Python install correct?)\n"))
98 98 except IOError, inst:
99 99 if hasattr(inst, "code"):
100 100 ui.warn(_("abort: %s\n") % inst)
101 101 elif hasattr(inst, "reason"):
102 102 try: # usually it is in the form (errno, strerror)
103 103 reason = inst.reason.args[1]
104 104 except: # it might be anything, for example a string
105 105 reason = inst.reason
106 106 ui.warn(_("abort: error: %s\n") % reason)
107 107 elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE:
108 108 if ui.debugflag:
109 109 ui.warn(_("broken pipe\n"))
110 110 elif getattr(inst, "strerror", None):
111 111 if getattr(inst, "filename", None):
112 112 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
113 113 else:
114 114 ui.warn(_("abort: %s\n") % inst.strerror)
115 115 else:
116 116 raise
117 117 except OSError, inst:
118 118 if getattr(inst, "filename", None):
119 119 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
120 120 else:
121 121 ui.warn(_("abort: %s\n") % inst.strerror)
122 122 except KeyboardInterrupt:
123 123 try:
124 124 ui.warn(_("interrupted!\n"))
125 125 except IOError, inst:
126 126 if inst.errno == errno.EPIPE:
127 127 if ui.debugflag:
128 128 ui.warn(_("\nbroken pipe\n"))
129 129 else:
130 130 raise
131 131 except MemoryError:
132 132 ui.warn(_("abort: out of memory\n"))
133 133 except SystemExit, inst:
134 134 # Commands shouldn't sys.exit directly, but give a return code.
135 135 # Just in case catch this and and pass exit code to caller.
136 136 return inst.code
137 137 except socket.error, inst:
138 138 ui.warn(_("abort: %s\n") % inst.args[-1])
139 139 except:
140 140 ui.warn(_("** unknown exception encountered, details follow\n"))
141 141 ui.warn(_("** report bug details to "
142 142 "http://www.selenic.com/mercurial/bts\n"))
143 143 ui.warn(_("** or mercurial@selenic.com\n"))
144 144 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
145 145 % util.version())
146 146 ui.warn(_("** Extensions loaded: %s\n")
147 147 % ", ".join([x[0] for x in extensions.extensions()]))
148 148 raise
149 149
150 150 return -1
151 151
152 152 def _findrepo(p):
153 153 while not os.path.isdir(os.path.join(p, ".hg")):
154 154 oldp, p = p, os.path.dirname(p)
155 155 if p == oldp:
156 156 return None
157 157
158 158 return p
159 159
160 160 def _parse(ui, args):
161 161 options = {}
162 162 cmdoptions = {}
163 163
164 164 try:
165 165 args = fancyopts.fancyopts(args, commands.globalopts, options)
166 166 except fancyopts.getopt.GetoptError, inst:
167 167 raise error.ParseError(None, inst)
168 168
169 169 if args:
170 170 cmd, args = args[0], args[1:]
171 171 aliases, i = cmdutil.findcmd(cmd, commands.table,
172 172 ui.config("ui", "strict"))
173 173 cmd = aliases[0]
174 174 defaults = ui.config("defaults", cmd)
175 175 if defaults:
176 176 args = shlex.split(defaults) + args
177 177 c = list(i[1])
178 178 else:
179 179 cmd = None
180 180 c = []
181 181
182 182 # combine global options into local
183 183 for o in commands.globalopts:
184 184 c.append((o[0], o[1], options[o[1]], o[3]))
185 185
186 186 try:
187 187 args = fancyopts.fancyopts(args, c, cmdoptions, True)
188 188 except fancyopts.getopt.GetoptError, inst:
189 189 raise error.ParseError(cmd, inst)
190 190
191 191 # separate global options back out
192 192 for o in commands.globalopts:
193 193 n = o[1]
194 194 options[n] = cmdoptions[n]
195 195 del cmdoptions[n]
196 196
197 197 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
198 198
199 199 def _parseconfig(config):
200 200 """parse the --config options from the command line"""
201 201 parsed = []
202 202 for cfg in config:
203 203 try:
204 204 name, value = cfg.split('=', 1)
205 205 section, name = name.split('.', 1)
206 206 if not section or not name:
207 207 raise IndexError
208 208 parsed.append((section, name, value))
209 209 except (IndexError, ValueError):
210 210 raise util.Abort(_('malformed --config option: %s') % cfg)
211 211 return parsed
212 212
213 213 def _earlygetopt(aliases, args):
214 214 """Return list of values for an option (or aliases).
215 215
216 216 The values are listed in the order they appear in args.
217 217 The options and values are removed from args.
218 218 """
219 219 try:
220 220 argcount = args.index("--")
221 221 except ValueError:
222 222 argcount = len(args)
223 223 shortopts = [opt for opt in aliases if len(opt) == 2]
224 224 values = []
225 225 pos = 0
226 226 while pos < argcount:
227 227 if args[pos] in aliases:
228 228 if pos + 1 >= argcount:
229 229 # ignore and let getopt report an error if there is no value
230 230 break
231 231 del args[pos]
232 232 values.append(args.pop(pos))
233 233 argcount -= 2
234 234 elif args[pos][:2] in shortopts:
235 235 # short option can have no following space, e.g. hg log -Rfoo
236 236 values.append(args.pop(pos)[2:])
237 237 argcount -= 1
238 238 else:
239 239 pos += 1
240 240 return values
241 241
242 242 def runcommand(lui, repo, cmd, fullargs, ui, options, d):
243 243 # run pre-hook, and abort if it fails
244 244 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
245 245 if ret:
246 246 return ret
247 247 ret = _runcommand(ui, options, cmd, d)
248 248 # run post-hook, passing command result
249 249 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
250 250 result = ret)
251 251 return ret
252 252
253 253 _loaded = {}
254 254 def _dispatch(ui, args):
255 255 # read --config before doing anything else
256 256 # (e.g. to change trust settings for reading .hg/hgrc)
257 257 config = _earlygetopt(['--config'], args)
258 258 if config:
259 259 ui.updateopts(config=_parseconfig(config))
260 260
261 261 # check for cwd
262 262 cwd = _earlygetopt(['--cwd'], args)
263 263 if cwd:
264 264 os.chdir(cwd[-1])
265 265
266 266 # read the local repository .hgrc into a local ui object
267 267 path = _findrepo(os.getcwd()) or ""
268 268 if not path:
269 269 lui = ui
270 270 if path:
271 271 try:
272 272 lui = _ui.ui(parentui=ui)
273 273 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
274 274 except IOError:
275 275 pass
276 276
277 277 # now we can expand paths, even ones in .hg/hgrc
278 278 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
279 279 if rpath:
280 280 path = lui.expandpath(rpath[-1])
281 281 lui = _ui.ui(parentui=ui)
282 282 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
283 283
284 284 extensions.loadall(lui)
285 285 for name, module in extensions.extensions():
286 286 if name in _loaded:
287 287 continue
288 288
289 289 # setup extensions
290 290 # TODO this should be generalized to scheme, where extensions can
291 291 # redepend on other extensions. then we should toposort them, and
292 292 # do initialization in correct order
293 293 extsetup = getattr(module, 'extsetup', None)
294 294 if extsetup:
295 295 extsetup()
296 296
297 297 cmdtable = getattr(module, 'cmdtable', {})
298 298 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
299 299 if overrides:
300 300 ui.warn(_("extension '%s' overrides commands: %s\n")
301 301 % (name, " ".join(overrides)))
302 302 commands.table.update(cmdtable)
303 303 _loaded[name] = 1
304 304 # check for fallback encoding
305 305 fallback = lui.config('ui', 'fallbackencoding')
306 306 if fallback:
307 util._fallbackencoding = fallback
307 encoding.fallbackencoding = fallback
308 308
309 309 fullargs = args
310 310 cmd, func, args, options, cmdoptions = _parse(lui, args)
311 311
312 312 if options["config"]:
313 313 raise util.Abort(_("Option --config may not be abbreviated!"))
314 314 if options["cwd"]:
315 315 raise util.Abort(_("Option --cwd may not be abbreviated!"))
316 316 if options["repository"]:
317 317 raise util.Abort(_(
318 318 "Option -R has to be separated from other options (i.e. not -qR) "
319 319 "and --repository may only be abbreviated as --repo!"))
320 320
321 321 if options["encoding"]:
322 util._encoding = options["encoding"]
322 encoding.encoding = options["encoding"]
323 323 if options["encodingmode"]:
324 util._encodingmode = options["encodingmode"]
324 encoding.encodingmode = options["encodingmode"]
325 325 if options["time"]:
326 326 def get_times():
327 327 t = os.times()
328 328 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
329 329 t = (t[0], t[1], t[2], t[3], time.clock())
330 330 return t
331 331 s = get_times()
332 332 def print_time():
333 333 t = get_times()
334 334 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
335 335 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
336 336 atexit.register(print_time)
337 337
338 338 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
339 339 not options["noninteractive"], options["traceback"])
340 340
341 341 if options['help']:
342 342 return commands.help_(ui, cmd, options['version'])
343 343 elif options['version']:
344 344 return commands.version_(ui)
345 345 elif not cmd:
346 346 return commands.help_(ui, 'shortlist')
347 347
348 348 repo = None
349 349 if cmd not in commands.norepo.split():
350 350 try:
351 351 repo = hg.repository(ui, path=path)
352 352 ui = repo.ui
353 353 if not repo.local():
354 354 raise util.Abort(_("repository '%s' is not local") % path)
355 355 ui.setconfig("bundle", "mainreporoot", repo.root)
356 356 except error.RepoError:
357 357 if cmd not in commands.optionalrepo.split():
358 358 if args and not path: # try to infer -R from command args
359 359 repos = map(_findrepo, args)
360 360 guess = repos[0]
361 361 if guess and repos.count(guess) == len(repos):
362 362 return _dispatch(ui, ['--repository', guess] + fullargs)
363 363 if not path:
364 364 raise error.RepoError(_("There is no Mercurial repository"
365 365 " here (.hg not found)"))
366 366 raise
367 367 args.insert(0, repo)
368 368 elif rpath:
369 369 ui.warn("warning: --repository ignored\n")
370 370
371 371 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
372 372 return runcommand(lui, repo, cmd, fullargs, ui, options, d)
373 373
374 374 def _runcommand(ui, options, cmd, cmdfunc):
375 375 def checkargs():
376 376 try:
377 377 return cmdfunc()
378 378 except error.SignatureError:
379 379 raise error.ParseError(cmd, _("invalid arguments"))
380 380
381 381 if options['profile']:
382 382 import hotshot, hotshot.stats
383 383 prof = hotshot.Profile("hg.prof")
384 384 try:
385 385 try:
386 386 return prof.runcall(checkargs)
387 387 except:
388 388 try:
389 389 ui.warn(_('exception raised - generating '
390 390 'profile anyway\n'))
391 391 except:
392 392 pass
393 393 raise
394 394 finally:
395 395 prof.close()
396 396 stats = hotshot.stats.load("hg.prof")
397 397 stats.strip_dirs()
398 398 stats.sort_stats('time', 'calls')
399 399 stats.print_stats(40)
400 400 elif options['lsprof']:
401 401 try:
402 402 from mercurial import lsprof
403 403 except ImportError:
404 404 raise util.Abort(_(
405 405 'lsprof not available - install from '
406 406 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
407 407 p = lsprof.Profiler()
408 408 p.enable(subcalls=True)
409 409 try:
410 410 return checkargs()
411 411 finally:
412 412 p.disable()
413 413 stats = lsprof.Stats(p.getstats())
414 414 stats.sort()
415 415 stats.pprint(top=10, file=sys.stderr, climit=5)
416 416 else:
417 417 return checkargs()
@@ -1,312 +1,312 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os
10 from mercurial import ui, hg, util, hook, error
10 from mercurial import ui, hg, util, hook, error, encoding
11 11 from mercurial import templater, templatefilters
12 12 from common import get_mtime, style_map, ErrorResponse
13 13 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 14 from common import HTTP_UNAUTHORIZED, HTTP_METHOD_NOT_ALLOWED
15 15 from request import wsgirequest
16 16 import webcommands, protocol, webutil
17 17
18 18 perms = {
19 19 'changegroup': 'pull',
20 20 'changegroupsubset': 'pull',
21 21 'unbundle': 'push',
22 22 'stream_out': 'pull',
23 23 }
24 24
class hgweb(object):
    """WSGI/CGI application serving the web interface for one repository.

    Instances are WSGI callables; run() additionally supports classic
    CGI invocation.
    """

    def __init__(self, repo, name=None):
        # 'repo' may be a filesystem path or an already-open repository.
        if isinstance(repo, str):
            parentui = ui.ui(report_untrusted=False, interactive=False)
            self.repo = hg.repository(parentui, repo)
        else:
            self.repo = repo

        hook.redirect(True)
        self.mtime = -1  # sentinel: forces a refresh() on first request
        self.reponame = name
        self.archives = 'zip', 'gz', 'bz2'
        self.stripecount = 1
        # a repo owner may set web.templates in .hg/hgrc to get any file
        # readable by the user running the CGI script
        self.templatepath = self.config("web", "templates",
                                        templater.templatepath(),
                                        untrusted=False)

    # The CGI scripts are often run by a user different from the repo owner.
    # Trust the settings from the .hg/hgrc files by default.
    def config(self, section, name, default=None, untrusted=True):
        """Read a string config value via the repository's ui."""
        return self.repo.ui.config(section, name, default,
                                   untrusted=untrusted)

    def configbool(self, section, name, default=False, untrusted=True):
        """Read a boolean config value via the repository's ui."""
        return self.repo.ui.configbool(section, name, default,
                                       untrusted=untrusted)

    def configlist(self, section, name, default=None, untrusted=True):
        """Read a list config value via the repository's ui."""
        return self.repo.ui.configlist(section, name, default,
                                       untrusted=untrusted)

    def refresh(self):
        """Reopen the repository if it changed on disk; reread web limits."""
        mtime = get_mtime(self.repo.root)
        if mtime != self.mtime:
            self.mtime = mtime
            self.repo = hg.repository(self.repo.ui, self.repo.root)
        self.maxchanges = int(self.config("web", "maxchanges", 10))
        self.stripecount = int(self.config("web", "stripes", 1))
        self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
        self.maxfiles = int(self.config("web", "maxfiles", 10))
        self.allowpull = self.configbool("web", "allowpull", True)
        self.encoding = self.config("web", "encoding", encoding.encoding)

    def run(self):
        """Serve a single request as a CGI script."""
        if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be called while running as a CGI script.")
        import mercurial.hgweb.wsgicgi as wsgicgi
        wsgicgi.launch(self)

    def __call__(self, env, respond):
        # WSGI entry point.
        req = wsgirequest(env, respond)
        return self.run_wsgi(req)

    def run_wsgi(self, req):
        """Dispatch one request: wire protocol command, static file or page."""

        self.refresh()

        # process this if it's a protocol request
        # protocol bits don't need to create any URLs
        # and the clients always use the old URL structure

        cmd = req.form.get('cmd', [''])[0]
        if cmd and cmd in protocol.__all__:
            try:
                if cmd in perms:
                    try:
                        self.check_perm(req, perms[cmd])
                    except ErrorResponse, inst:
                        # drain the request body so the client sees our
                        # error instead of a broken pipe
                        if cmd == 'unbundle':
                            req.drain()
                        raise
                method = getattr(protocol, cmd)
                return method(self.repo, req)
            except ErrorResponse, inst:
                req.respond(inst, protocol.HGTYPE)
                if not inst.message:
                    return []
                return '0\n%s\n' % inst.message,

        # work with CGI variables to create coherent structure
        # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME

        req.url = req.env['SCRIPT_NAME']
        if not req.url.endswith('/'):
            req.url += '/'
        if 'REPO_NAME' in req.env:
            req.url += req.env['REPO_NAME'] + '/'

        if 'PATH_INFO' in req.env:
            parts = req.env['PATH_INFO'].strip('/').split('/')
            repo_parts = req.env.get('REPO_NAME', '').split('/')
            if parts[:len(repo_parts)] == repo_parts:
                parts = parts[len(repo_parts):]
            query = '/'.join(parts)
        else:
            query = req.env['QUERY_STRING'].split('&', 1)[0]
            query = query.split(';', 1)[0]

        # translate user-visible url structure to internal structure

        args = query.split('/', 2)
        if 'cmd' not in req.form and args and args[0]:

            cmd = args.pop(0)
            # a leading "style-" prefix selects the template style
            style = cmd.rfind('-')
            if style != -1:
                req.form['style'] = [cmd[:style]]
                cmd = cmd[style+1:]

            # avoid accepting e.g. style parameter as command
            if hasattr(webcommands, cmd):
                req.form['cmd'] = [cmd]
            else:
                cmd = ''

            if cmd == 'static':
                req.form['file'] = ['/'.join(args)]
            else:
                if args and args[0]:
                    node = args.pop(0)
                    req.form['node'] = [node]
                if args:
                    req.form['file'] = args

            if cmd == 'archive':
                # split "node.tar.gz"-style names into node + archive type
                fn = req.form['node'][0]
                for type_, spec in self.archive_specs.iteritems():
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.form['node'] = [fn[:-len(ext)]]
                        req.form['type'] = [type_]

        # process the web interface request

        try:
            tmpl = self.templater(req)
            ctype = tmpl('mimetype', encoding=self.encoding)
            ctype = templater.stringify(ctype)

            # check read permissions non-static content
            if cmd != 'static':
                self.check_perm(req, None)

            if cmd == '':
                req.form['cmd'] = [tmpl.cache['default']]
                cmd = req.form['cmd'][0]

            if cmd not in webcommands.__all__:
                msg = 'no such method: %s' % cmd
                raise ErrorResponse(HTTP_BAD_REQUEST, msg)
            elif cmd == 'file' and 'raw' in req.form.get('style', []):
                self.ctype = ctype
                content = webcommands.rawfile(self, req, tmpl)
            else:
                content = getattr(webcommands, cmd)(self, req, tmpl)
            req.respond(HTTP_OK, ctype)

            return content

        # NOTE(review): if self.templater() itself raises, 'tmpl' and
        # 'ctype' are unbound in the handlers below -- confirm upstream.
        except error.LookupError, err:
            req.respond(HTTP_NOT_FOUND, ctype)
            msg = str(err)
            if 'manifest' not in msg:
                msg = 'revision not found: %s' % err.name
            return tmpl('error', error=msg)
        except (error.RepoError, error.RevlogError), inst:
            req.respond(HTTP_SERVER_ERROR, ctype)
            return tmpl('error', error=str(inst))
        except ErrorResponse, inst:
            req.respond(inst, ctype)
            return tmpl('error', error=inst.message)

    def templater(self, req):
        """Build the templater used to render pages for this request."""

        # determine scheme, port and server name
        # this is needed to create absolute urls

        proto = req.env.get('wsgi.url_scheme')
        if proto == 'https':
            proto = 'https'
            default_port = "443"
        else:
            proto = 'http'
            default_port = "80"

        # only include the port in URLs when it is non-standard
        port = req.env["SERVER_PORT"]
        port = port != default_port and (":" + port) or ""
        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
        staticurl = self.config("web", "staticurl") or req.url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'

        # some functions for the templater

        def header(**map):
            yield tmpl('header', encoding=self.encoding, **map)

        def footer(**map):
            yield tmpl("footer", **map)

        def motd(**map):
            yield self.config("web", "motd", "")

        # figure out which style to use

        vars = {}
        style = self.config("web", "style", "paper")
        if 'style' in req.form:
            style = req.form['style'][0]
            vars['style'] = style

        start = req.url[-1] == '?' and '&' or '?'
        sessionvars = webutil.sessionvars(vars, start)
        mapfile = style_map(self.templatepath, style)

        if not self.reponame:
            self.reponame = (self.config("web", "name")
                             or req.env.get('REPO_NAME')
                             or req.url.strip('/') or self.repo.root)

        # create the templater

        tmpl = templater.templater(mapfile, templatefilters.filters,
                                   defaults={"url": req.url,
                                             "staticurl": staticurl,
                                             "urlbase": urlbase,
                                             "repo": self.reponame,
                                             "header": header,
                                             "footer": footer,
                                             "motd": motd,
                                             "sessionvars": sessionvars
                                             })
        return tmpl

    def archivelist(self, nodeid):
        """Yield an entry per archive type enabled by the configuration."""
        allowed = self.configlist("web", "allow_archive")
        for i, spec in self.archive_specs.iteritems():
            if i in allowed or self.configbool("web", "allow" + i):
                yield {"type" : i, "extension" : spec[2], "node" : nodeid}

    # per archive type: (MIME type, archiver name, file extension, kwargs)
    archive_specs = {
        'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
        'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
        'zip': ('application/zip', 'zip', '.zip', None),
        }

    def check_perm(self, req, op):
        '''Check permission for operation based on request data (including
        authentication info). Return if op allowed, else raise an ErrorResponse
        exception.'''

        user = req.env.get('REMOTE_USER')

        # deny list wins over allow list
        deny_read = self.configlist('web', 'deny_read')
        if deny_read and (not user or deny_read == ['*'] or user in deny_read):
            raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')

        allow_read = self.configlist('web', 'allow_read')
        result = (not allow_read) or (allow_read == ['*'])
        if not (result or user in allow_read):
            raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')

        if op == 'pull' and not self.allowpull:
            raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized')
        elif op == 'pull' or op is None: # op is None for interface requests
            return

        # enforce that you can only push using POST requests
        if req.env['REQUEST_METHOD'] != 'POST':
            msg = 'push requires POST request'
            raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)

        # require ssl by default for pushing, auth info cannot be sniffed
        # and replayed
        scheme = req.env.get('wsgi.url_scheme')
        if self.configbool('web', 'push_ssl', True) and scheme != 'https':
            raise ErrorResponse(HTTP_OK, 'ssl required')

        deny = self.configlist('web', 'deny_push')
        if deny and (not user or deny == ['*'] or user in deny):
            raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')

        allow = self.configlist('web', 'allow_push')
        result = allow and (allow == ['*'] or user in allow)
        if not result:
            raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
@@ -1,327 +1,327 b''
1 1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os
10 10 from mercurial.i18n import _
11 from mercurial import ui, hg, util, templater, templatefilters, error
11 from mercurial import ui, hg, util, templater, templatefilters, error, encoding
12 12 from common import ErrorResponse, get_mtime, staticfile, style_map, paritygen,\
13 13 get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 14 from hgweb_mod import hgweb
15 15 from request import wsgirequest
16 16
# This is a stopgap
class hgwebdir(object):
    """WSGI/CGI application serving an index over multiple repositories."""

    def __init__(self, config, parentui=None):
        # 'config' may be a list/tuple of (name, path) pairs, a dict of
        # the same, or a config-file path / parsed config object.
        def cleannames(items):
            return [(util.pconvert(name).strip('/'), path)
                    for name, path in items]

        self.parentui = parentui or ui.ui(report_untrusted=False,
                                          interactive = False)
        self.motd = None
        self.style = 'paper'
        self.stripecount = None
        self.repos_sorted = ('name', False)
        self._baseurl = None
        if isinstance(config, (list, tuple)):
            self.repos = cleannames(config)
            self.repos_sorted = ('', False)
        elif isinstance(config, dict):
            self.repos = util.sort(cleannames(config.items()))
        else:
            if isinstance(config, util.configparser):
                cp = config
            else:
                cp = util.configparser()
                cp.read(config)
            self.repos = []
            if cp.has_section('web'):
                if cp.has_option('web', 'motd'):
                    self.motd = cp.get('web', 'motd')
                if cp.has_option('web', 'style'):
                    self.style = cp.get('web', 'style')
                if cp.has_option('web', 'stripes'):
                    self.stripecount = int(cp.get('web', 'stripes'))
                if cp.has_option('web', 'baseurl'):
                    self._baseurl = cp.get('web', 'baseurl')
            if cp.has_section('paths'):
                paths = cleannames(cp.items('paths'))
                for prefix, root in paths:
                    roothead, roottail = os.path.split(root)
                    # "foo = /bar/*" makes every subrepo of /bar/ to be
                    # mounted as foo/subrepo
                    # and "foo = /bar/**" does even recurse inside the
                    # subdirectories, remember to use it without working dir.
                    try:
                        recurse = {'*': False, '**': True}[roottail]
                    except KeyError:
                        # plain path, no wildcard: mount as-is
                        self.repos.append((prefix, root))
                        continue
                    roothead = os.path.normpath(roothead)
                    for path in util.walkrepos(roothead, followsym=True,
                                               recurse=recurse):
                        path = os.path.normpath(path)
                        name = util.pconvert(path[len(roothead):]).strip('/')
                        if prefix:
                            name = prefix + '/' + name
                        self.repos.append((name, path))
            if cp.has_section('collections'):
                for prefix, root in cp.items('collections'):
                    for path in util.walkrepos(root, followsym=True):
                        repo = os.path.normpath(path)
                        name = repo
                        if name.startswith(prefix):
                            name = name[len(prefix):]
                        self.repos.append((name.lstrip(os.sep), repo))
            self.repos.sort()

    def run(self):
        """Serve a single request as a CGI script."""
        if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be called while running as a CGI script.")
        import mercurial.hgweb.wsgicgi as wsgicgi
        wsgicgi.launch(self)

    def __call__(self, env, respond):
        # WSGI entry point.
        req = wsgirequest(env, respond)
        return self.run_wsgi(req)

    def read_allowed(self, ui, req):
        """Check allow_read and deny_read config options of a repo's ui object
        to determine user permissions. By default, with neither option set (or
        both empty), allow all users to read the repo. There are two ways a
        user can be denied read access: (1) deny_read is not empty, and the
        user is unauthenticated or deny_read contains user (or *), and (2)
        allow_read is not empty and the user is not in allow_read. Return True
        if user is allowed to read the repo, else return False."""

        user = req.env.get('REMOTE_USER')

        deny_read = ui.configlist('web', 'deny_read', untrusted=True)
        if deny_read and (not user or deny_read == ['*'] or user in deny_read):
            return False

        allow_read = ui.configlist('web', 'allow_read', untrusted=True)
        # by default, allow reading if no allow_read option has been set
        if (not allow_read) or (allow_read == ['*']) or (user in allow_read):
            return True

        return False

    def run_wsgi(self, req):
        """Dispatch a request: static file, index page, or a nested hgweb."""

        try:
            try:

                virtual = req.env.get("PATH_INFO", "").strip('/')
                tmpl = self.templater(req)
                ctype = tmpl('mimetype', encoding=encoding.encoding)
                ctype = templater.stringify(ctype)

                # a static file
                if virtual.startswith('static/') or 'static' in req.form:
                    if virtual.startswith('static/'):
                        fname = virtual[7:]
                    else:
                        fname = req.form['static'][0]
                    static = templater.templatepath('static')
                    return (staticfile(static, fname, req),)

                # top-level index
                elif not virtual:
                    req.respond(HTTP_OK, ctype)
                    return self.makeindex(req, tmpl)

                # nested indexes and hgwebs

                repos = dict(self.repos)
                while virtual:
                    # exact match: delegate the request to a per-repo hgweb
                    real = repos.get(virtual)
                    if real:
                        req.env['REPO_NAME'] = virtual
                        try:
                            repo = hg.repository(self.parentui, real)
                            return hgweb(repo).run_wsgi(req)
                        except IOError, inst:
                            msg = inst.strerror
                            raise ErrorResponse(HTTP_SERVER_ERROR, msg)
                        except error.RepoError, inst:
                            raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))

                    # browse subdirectories
                    subdir = virtual + '/'
                    if [r for r in repos if r.startswith(subdir)]:
                        req.respond(HTTP_OK, ctype)
                        return self.makeindex(req, tmpl, subdir)

                    # retry with the trailing path component stripped
                    up = virtual.rfind('/')
                    if up < 0:
                        break
                    virtual = virtual[:up]

                # prefixes not found
                req.respond(HTTP_NOT_FOUND, ctype)
                return tmpl("notfound", repo=virtual)

            except ErrorResponse, err:
                req.respond(err, ctype)
                return tmpl('error', error=err.message or '')
        finally:
            # break the reference cycle through the closure-holding templater
            tmpl = None

    def makeindex(self, req, tmpl, subdir=""):
        """Render the repository index page, optionally limited to subdir."""

        def archivelist(ui, nodeid, url):
            allowed = ui.configlist("web", "allow_archive", untrusted=True)
            for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
                if i[0] in allowed or ui.configbool("web", "allow" + i[0],
                                                    untrusted=True):
                    yield {"type" : i[0], "extension": i[1],
                           "node": nodeid, "url": url}

        def entries(sortcolumn="", descending=False, subdir="", **map):
            # yields one row dict per visible repository
            def sessionvars(**map):
                fields = []
                if 'style' in req.form:
                    style = req.form['style'][0]
                    if style != get('web', 'style', ''):
                        fields.append(('style', style))

                separator = url[-1] == '?' and ';' or '?'
                for name, value in fields:
                    yield dict(name=name, value=value, separator=separator)
                    separator = ';'

            rows = []
            parity = paritygen(self.stripecount)
            for name, path in self.repos:
                if not name.startswith(subdir):
                    continue
                name = name[len(subdir):]

                u = ui.ui(parentui=self.parentui)
                try:
                    u.readconfig(os.path.join(path, '.hg', 'hgrc'))
                except Exception, e:
                    u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
                    continue
                def get(section, name, default=None):
                    return u.config(section, name, default, untrusted=True)

                if u.configbool("web", "hidden", untrusted=True):
                    continue

                if not self.read_allowed(u, req):
                    continue

                parts = [name]
                if 'PATH_INFO' in req.env:
                    parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
                if req.env['SCRIPT_NAME']:
                    parts.insert(0, req.env['SCRIPT_NAME'])
                url = ('/'.join(parts).replace("//", "/")) + '/'

                # update time with local timezone
                try:
                    d = (get_mtime(path), util.makedate()[1])
                except OSError:
                    continue

                contact = get_contact(get)
                description = get("web", "description", "")
                name = get("web", "name", name)
                row = dict(contact=contact or "unknown",
                           contact_sort=contact.upper() or "unknown",
                           name=name,
                           name_sort=name,
                           url=url,
                           description=description or "unknown",
                           description_sort=description.upper() or "unknown",
                           lastchange=d,
                           lastchange_sort=d[1]-d[0],
                           sessionvars=sessionvars,
                           archives=archivelist(u, "tip", url))
                if (not sortcolumn
                    or (sortcolumn, descending) == self.repos_sorted):
                    # fast path for unsorted output
                    row['parity'] = parity.next()
                    yield row
                else:
                    rows.append((row["%s_sort" % sortcolumn], row))
            if rows:
                rows.sort()
                if descending:
                    rows.reverse()
                for key, row in rows:
                    row['parity'] = parity.next()
                    yield row

        sortable = ["name", "description", "contact", "lastchange"]
        sortcolumn, descending = self.repos_sorted
        if 'sort' in req.form:
            # "-column" requests a descending sort on column
            sortcolumn = req.form['sort'][0]
            descending = sortcolumn.startswith('-')
            if descending:
                sortcolumn = sortcolumn[1:]
            if sortcolumn not in sortable:
                sortcolumn = ""

        sort = [("sort_%s" % column,
                 "%s%s" % ((not descending and column == sortcolumn)
                           and "-" or "", column))
                for column in sortable]

        if self._baseurl is not None:
            req.env['SCRIPT_NAME'] = self._baseurl

        return tmpl("index", entries=entries, subdir=subdir,
                    sortcolumn=sortcolumn, descending=descending,
                    **dict(sort))

    def templater(self, req):
        """Build the templater used to render the index pages."""

        def header(**map):
            yield tmpl('header', encoding=encoding.encoding, **map)

        def footer(**map):
            yield tmpl("footer", **map)

        def motd(**map):
            if self.motd is not None:
                yield self.motd
            else:
                yield config('web', 'motd', '')

        def config(section, name, default=None, untrusted=True):
            return self.parentui.config(section, name, default, untrusted)

        if self._baseurl is not None:
            req.env['SCRIPT_NAME'] = self._baseurl

        url = req.env.get('SCRIPT_NAME', '')
        if not url.endswith('/'):
            url += '/'

        staticurl = config('web', 'staticurl') or url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'

        # request parameter overrides the configured style
        style = self.style
        if style is None:
            style = config('web', 'style', '')
        if 'style' in req.form:
            style = req.form['style'][0]
        if self.stripecount is None:
            self.stripecount = int(config('web', 'stripes', 1))
        mapfile = style_map(templater.templatepath(), style)
        tmpl = templater.templater(mapfile, templatefilters.filters,
                                   defaults={"header": header,
                                             "footer": footer,
                                             "motd": motd,
                                             "url": url,
                                             "staticurl": staticurl})
        return tmpl
@@ -1,51 +1,49 b''
1 1 """
2 2 i18n.py - internationalization support for mercurial
3 3
4 4 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 import gettext, sys, os
10 import gettext, sys, os, encoding
11 11
# modelled after templater.templatepath:
# locate the 'locale' directory next to (or one level above) this module;
# sys.frozen indicates a frozen/bundled install where __file__ is not usable.
if hasattr(sys, 'frozen'):
    module = sys.executable
else:
    module = __file__

base = os.path.dirname(module)
for dir in ('.', '..'):
    localedir = os.path.normpath(os.path.join(base, dir, 'locale'))
    if os.path.isdir(localedir):
        break

# fallback=True: if no catalog is found, translation is the identity.
t = gettext.translation('hg', localedir, fallback=True)
25 25
def gettext(message):
    """Translate message.

    The message is looked up in the catalog to get a Unicode string,
    which is encoded in the local encoding before being returned.

    Important: message is restricted to characters in the encoding
    given by sys.getdefaultencoding() which is most likely 'ascii'.
    """
    # t.ugettext(None) would come back as u'None'; callers expect None
    # to pass through unchanged.
    if message is None:
        return message

    # We cannot just run the text through encoding.tolocal since that
    # leads to infinite recursion when encoding._encoding is invalid.
    try:
        return t.ugettext(message).encode(encoding.encoding, "replace")
    except LookupError:
        # unknown encoding name: fall back to the untranslated message
        return message

_ = gettext
49 49
50 # Moved after _ because of circular import.
51 import util
@@ -1,2166 +1,2167 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui, store
12 import lock, transaction, stat, errno, ui, store, encoding
13 13 import os, time, util, extensions, hook, inspect, error
14 14 import match as match_
15 15 import merge as merge_
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = util.set(('lookup', 'changegroupsubset'))
19 19 supported = ('revlogv1', 'store', 'fncache')
20 20
    def __init__(self, parentui, path=None, create=0):
        """Open the repository at path; with create=1, initialize it first."""
        repo.repository.__init__(self)
        self.root = os.path.realpath(path)
        self.path = os.path.join(self.root, ".hg")
        self.origroot = path
        self.opener = util.opener(self.path)     # opener rooted at .hg
        self.wopener = util.opener(self.root)    # opener rooted at working dir

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                requirements = ["revlogv1"]
                if parentui.configbool('format', 'usestore', True):
                    os.mkdir(os.path.join(self.path, "store"))
                    requirements.append("store")
                    if parentui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                    # create an invalid changelog
                    self.opener("00changelog.i", "a").write(
                        '\0\0\0\2' # represents revlogv2
                        ' dummy changelog to prevent using the old repo layout'
                    )
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            requirements = []
            try:
                requirements = self.opener("requires").read().splitlines()
                for r in requirements:
                    if r not in self.supported:
                        raise error.RepoError(_("requirement '%s' not supported") % r)
            except IOError, inst:
                # a missing requires file means an old-style repo; anything
                # else is a real error
                if inst.errno != errno.ENOENT:
                    raise

        self.store = store.store(requirements, self.path, util.opener)
        self.spath = self.store.path
        self.sopener = self.store.opener
        self.sjoin = self.store.join
        self.opener.createmode = self.store.createmode

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            pass

        # lazily-populated caches
        self.tagscache = None
        self._tagstypecache = None
        self.branchcache = None
        self._ubranchcache = None # UTF-8 version of branchcache
        self._branchcachetip = None
        self.nodetagscache = None
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None
87 87
    def __getattr__(self, name):
        # Lazily construct the expensive changelog/manifest/dirstate
        # attributes on first access; the result is cached on the instance
        # so __getattr__ only fires once per attribute.
        if name == 'changelog':
            self.changelog = changelog.changelog(self.sopener)
            if 'HG_PENDING' in os.environ:
                # a transaction in a parent process may have pending
                # (uncommitted) changelog data for us to see
                p = os.environ['HG_PENDING']
                if p.startswith(self.root):
                    self.changelog.readpending('00changelog.i.a')
            self.sopener.defversion = self.changelog.version
            return self.changelog
        if name == 'manifest':
            # touch the changelog first so sopener.defversion is set
            self.changelog
            self.manifest = manifest.manifest(self.sopener)
            return self.manifest
        if name == 'dirstate':
            self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
            return self.dirstate
        else:
            raise AttributeError(name)
106 106
107 107 def __getitem__(self, changeid):
108 108 if changeid == None:
109 109 return context.workingctx(self)
110 110 return context.changectx(self, changeid)
111 111
    def __nonzero__(self):
        # a repository object is always truthy, even when it has no revisions
        return True
114 114
    def __len__(self):
        # number of revisions in the changelog
        return len(self.changelog)
117 117
    def __iter__(self):
        # yield every revision number in ascending order
        for i in xrange(len(self)):
            yield i
121 121
    def url(self):
        """Return this repository's URL (file: scheme on the local root)."""
        return 'file:' + self.root
124 124
    def hook(self, name, throw=False, **args):
        # delegate to the hook machinery with this repo's ui and the
        # given hook arguments
        return hook.hook(self.ui, self, name, throw, **args)
127 127
128 128 tag_disallowed = ':\r\n'
129 129
    def _tag(self, names, node, message, local, user, date, parent=None,
             extra={}):
        """Record one or more tags for node (see tag() for the public API).

        With local=True the tags go into the uncommitted 'localtags' file;
        otherwise .hgtags is rewritten and committed.  parent selects the
        commit parent (None means use the dirstate parent).
        NOTE(review): extra={} is a shared mutable default; it appears to be
        passed through to commit() unmodified -- confirm it is never mutated.
        """
        use_dirstate = parent is None

        # accept a single name or an iterable of names
        if isinstance(names, str):
            allchars = names
            names = (names,)
        else:
            allchars = ''.join(names)
        for c in self.tag_disallowed:
            if c in allchars:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)

        def writetags(fp, names, munge, prevtags):
            # append tag lines, recording the overridden old node (if any)
            # so readers can reconstruct tag history
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if self._tagstypecache and name in self._tagstypecache:
                    old = self.tagscache.get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        if use_dirstate:
            try:
                fp = self.wfile('.hgtags', 'rb+')
            except IOError:
                fp = self.wfile('.hgtags', 'ab')
            else:
                prevtags = fp.read()
        else:
            # tagging an explicit parent: start from that revision's .hgtags
            try:
                prevtags = self.filectx('.hgtags', parent).data()
            except error.LookupError:
                pass
            fp = self.wfile('.hgtags', 'wb')
            if prevtags:
                fp.write(prevtags)

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        if use_dirstate and '.hgtags' not in self.dirstate:
            self.add(['.hgtags'])

        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
                              extra=extra)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode
203 203
    def tag(self, names, node, message, local, user, date):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # refuse to clobber uncommitted .hgtags edits in the working copy
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date)
232 232
    def tags(self):
        '''return a mapping of tag to node'''
        # serve from the cache when already built; invalidate() resets it
        if self.tagscache:
            return self.tagscache

        globaltags = {}   # tag name -> (node, list of superseded nodes)
        tagtypes = {}     # tag name -> "global" or "local"

        def readtags(lines, fn, tagtype):
            # parse "hexnode tagname" lines from one source (fn is only
            # used for warning messages) and merge them into globaltags
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = encoding.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # h accumulates older nodes this tag used to point at,
                # later used to rank competing definitions
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.iteritems():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                    ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = encoding.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            # no localtags file: nothing to merge
            pass

        self.tagscache = {}
        self._tagstypecache = {}
        for k, nh in globaltags.iteritems():
            n = nh[0]
            # a tag pointing at nullid has been deleted; keep its type
            # recorded but drop it from the visible table
            if n != nullid:
                self.tagscache[k] = n
            self._tagstypecache[k] = tagtypes[k]
        self.tagscache['tip'] = self.changelog.tip()
        return self.tagscache
316 316
317 317 def tagtype(self, tagname):
318 318 '''
319 319 return the type of the given tag. result can be:
320 320
321 321 'local' : a local tag
322 322 'global' : a global tag
323 323 None : tag does not exist
324 324 '''
325 325
326 326 self.tags()
327 327
328 328 return self._tagstypecache.get(tagname)
329 329
    def _hgtagsnodes(self):
        # Return (rev, node, .hgtags filenode) for each head that has a
        # .hgtags file, tip last.  When several heads share the same
        # .hgtags filenode, only the last (tip-most) occurrence is kept.
        heads = self.heads()
        heads.reverse()          # heads() is rev-descending; walk tip-last
        last = {}                # .hgtags filenode -> index into ret
        ret = []
        for node in heads:
            c = self[node]
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except error.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # same .hgtags content already queued: drop the older entry
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
347 347
348 348 def tagslist(self):
349 349 '''return a list of tags ordered by revision'''
350 350 l = []
351 351 for t, n in self.tags().iteritems():
352 352 try:
353 353 r = self.changelog.rev(n)
354 354 except:
355 355 r = -2 # sort to the beginning of the list if unknown
356 356 l.append((r, t, n))
357 357 return [(t, n) for r, t, n in util.sort(l)]
358 358
359 359 def nodetags(self, node):
360 360 '''return the tags associated with a node'''
361 361 if not self.nodetagscache:
362 362 self.nodetagscache = {}
363 363 for t, n in self.tags().iteritems():
364 364 self.nodetagscache.setdefault(n, []).append(t)
365 365 return self.nodetagscache.get(node, [])
366 366
367 367 def _branchtags(self, partial, lrev):
368 368 # TODO: rename this function?
369 369 tiprev = len(self) - 1
370 370 if lrev != tiprev:
371 371 self._updatebranchcache(partial, lrev+1, tiprev+1)
372 372 self._writebranchcache(partial, self.changelog.tip(), tiprev)
373 373
374 374 return partial
375 375
    def _branchheads(self):
        # Return a dict of branch name (local encoding) -> list of head
        # nodes.  The result is cached in self.branchcache until the
        # changelog tip changes; the unconverted (on-disk encoding) form
        # is kept in self._ubranchcache for incremental updates.
        tip = self.changelog.tip()
        if self.branchcache is not None and self._branchcachetip == tip:
            return self.branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if self.branchcache is None:
            self.branchcache = {} # avoid recursion in changectx
        else:
            self.branchcache.clear() # keep using the same dict
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # no previous cache, or it was built from a node the changelog
            # no longer knows (e.g. after strip): reread from disk
            partial, last, lrev = self._readbranchcache()
        else:
            # incremental update starting from the previously cached tip
            lrev = self.changelog.rev(oldtip)
            partial = self._ubranchcache

        self._branchtags(partial, lrev)
        # this private cache holds all heads (not just tips)
        self._ubranchcache = partial

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.iteritems():
            self.branchcache[encoding.tolocal(k)] = v
        return self.branchcache
402 402
403 403
404 404 def branchtags(self):
405 405 '''return a dict where branch names map to the tipmost head of
406 406 the branch, open heads come before closed'''
407 407 bt = {}
408 408 for bn, heads in self._branchheads().iteritems():
409 409 head = None
410 410 for i in range(len(heads)-1, -1, -1):
411 411 h = heads[i]
412 412 if 'close' not in self.changelog.read(h)[5]:
413 413 head = h
414 414 break
415 415 # no open heads were found
416 416 if head is None:
417 417 head = heads[-1]
418 418 bt[bn] = head
419 419 return bt
420 420
421 421
    def _readbranchcache(self):
        # Read branchheads.cache from disk and return (partial, last, lrev)
        # where partial maps branch name -> list of head nodes, and
        # last/lrev identify the tip the cache was valid for.  On any
        # problem an empty, invalid cache ({}, nullid, nullrev) is
        # returned instead -- the cache is an optimization only.
        partial = {}
        try:
            f = self.opener("branchheads.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            # missing or unreadable cache file
            return {}, nullid, nullrev

        try:
            # first line is "<tip hex> <tip rev>"
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            # remaining lines are "<node hex> <branch name>"
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial.setdefault(label.strip(), []).append(bin(node))
        except KeyboardInterrupt:
            raise
        except Exception, inst:
            # a corrupt cache is not fatal; report it only when debugging
            # and fall back to an empty cache
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
448 448
    def _writebranchcache(self, branches, tip, tiprev):
        # Atomically write the branch-head cache for the given tip.
        # Write failures are deliberately ignored: the cache will simply
        # be rebuilt on the next read.
        try:
            f = self.opener("branchheads.cache", "w", atomictemp=True)
            # header records which tip this cache is valid for
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, nodes in branches.iteritems():
                for node in nodes:
                    f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass
459 459
460 460 def _updatebranchcache(self, partial, start, end):
461 461 for r in xrange(start, end):
462 462 c = self[r]
463 463 b = c.branch()
464 464 bheads = partial.setdefault(b, [])
465 465 bheads.append(c.node())
466 466 for p in c.parents():
467 467 pn = p.node()
468 468 if pn in bheads:
469 469 bheads.remove(pn)
470 470
471 471 def lookup(self, key):
472 472 if isinstance(key, int):
473 473 return self.changelog.node(key)
474 474 elif key == '.':
475 475 return self.dirstate.parents()[0]
476 476 elif key == 'null':
477 477 return nullid
478 478 elif key == 'tip':
479 479 return self.changelog.tip()
480 480 n = self.changelog._match(key)
481 481 if n:
482 482 return n
483 483 if key in self.tags():
484 484 return self.tags()[key]
485 485 if key in self.branchtags():
486 486 return self.branchtags()[key]
487 487 n = self.changelog._partialmatch(key)
488 488 if n:
489 489 return n
490 490 try:
491 491 if len(key) == 20:
492 492 key = hex(key)
493 493 except:
494 494 pass
495 495 raise error.RepoError(_("unknown revision '%s'") % key)
496 496
    def local(self):
        # this is a live local repository, as opposed to a remote peer
        return True
499 499
    def join(self, f):
        # return the path of f under the repository metadata directory
        return os.path.join(self.path, f)
502 502
    def wjoin(self, f):
        # return the path of f under the working directory root
        return os.path.join(self.root, f)
505 505
    def rjoin(self, f):
        # like wjoin, but f is passed through util.pconvert first
        return os.path.join(self.root, util.pconvert(f))
508 508
    def file(self, f):
        # return the filelog for tracked file f (a leading '/' is stripped)
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)
513 513
    def changectx(self, changeid):
        # equivalent to self[changeid]
        return self[changeid]
516 516
    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()
520 520
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
           fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
525 525
    def getcwd(self):
        # delegate to the dirstate's notion of the current directory
        return self.dirstate.getcwd()
528 528
    def pathto(self, f, cwd=None):
        # delegate relative-path computation to the dirstate
        return self.dirstate.pathto(f, cwd)
531 531
    def wfile(self, f, mode='r'):
        # open file f from the working directory
        return self.wopener(f, mode)
534 534
    def _link(self, f):
        # True when working-directory file f is a symlink
        return os.path.islink(self.wjoin(f))
537 537
538 538 def _filter(self, filter, filename, data):
539 539 if filter not in self.filterpats:
540 540 l = []
541 541 for pat, cmd in self.ui.configitems(filter):
542 542 if cmd == '!':
543 543 continue
544 544 mf = util.matcher(self.root, "", [pat], [], [])[1]
545 545 fn = None
546 546 params = cmd
547 547 for name, filterfn in self._datafilters.iteritems():
548 548 if cmd.startswith(name):
549 549 fn = filterfn
550 550 params = cmd[len(name):].lstrip()
551 551 break
552 552 if not fn:
553 553 fn = lambda s, c, **kwargs: util.filter(s, c)
554 554 # Wrap old filters not supporting keyword arguments
555 555 if not inspect.getargspec(fn)[2]:
556 556 oldfn = fn
557 557 fn = lambda s, c, **kwargs: oldfn(s, c)
558 558 l.append((mf, fn, params))
559 559 self.filterpats[filter] = l
560 560
561 561 for mf, fn, cmd in self.filterpats[filter]:
562 562 if mf(filename):
563 563 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
564 564 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
565 565 break
566 566
567 567 return data
568 568
569 569 def adddatafilter(self, name, filter):
570 570 self._datafilters[name] = filter
571 571
572 572 def wread(self, filename):
573 573 if self._link(filename):
574 574 data = os.readlink(self.wjoin(filename))
575 575 else:
576 576 data = self.wopener(filename, 'r').read()
577 577 return self._filter("encode", filename, data)
578 578
579 579 def wwrite(self, filename, data, flags):
580 580 data = self._filter("decode", filename, data)
581 581 try:
582 582 os.unlink(self.wjoin(filename))
583 583 except OSError:
584 584 pass
585 585 if 'l' in flags:
586 586 self.wopener.symlink(data, filename)
587 587 else:
588 588 self.wopener(filename, 'w').write(data)
589 589 if 'x' in flags:
590 590 util.set_flags(self.wjoin(filename), False, True)
591 591
592 592 def wwritedata(self, filename, data):
593 593 return self._filter("decode", filename, data)
594 594
595 595 def transaction(self):
596 596 if self._transref and self._transref():
597 597 return self._transref().nest()
598 598
599 599 # abort here if the journal already exists
600 600 if os.path.exists(self.sjoin("journal")):
601 601 raise error.RepoError(_("journal already exists - run hg recover"))
602 602
603 603 # save dirstate for rollback
604 604 try:
605 605 ds = self.opener("dirstate").read()
606 606 except IOError:
607 607 ds = ""
608 608 self.opener("journal.dirstate", "w").write(ds)
609 609 self.opener("journal.branch", "w").write(self.dirstate.branch())
610 610
611 611 renames = [(self.sjoin("journal"), self.sjoin("undo")),
612 612 (self.join("journal.dirstate"), self.join("undo.dirstate")),
613 613 (self.join("journal.branch"), self.join("undo.branch"))]
614 614 tr = transaction.transaction(self.ui.warn, self.sopener,
615 615 self.sjoin("journal"),
616 616 aftertrans(renames),
617 617 self.store.createmode)
618 618 self._transref = weakref.ref(tr)
619 619 return tr
620 620
621 621 def recover(self):
622 622 l = self.lock()
623 623 try:
624 624 if os.path.exists(self.sjoin("journal")):
625 625 self.ui.status(_("rolling back interrupted transaction\n"))
626 626 transaction.rollback(self.sopener, self.sjoin("journal"))
627 627 self.invalidate()
628 628 return True
629 629 else:
630 630 self.ui.warn(_("no interrupted transaction available\n"))
631 631 return False
632 632 finally:
633 633 del l
634 634
635 635 def rollback(self):
636 636 wlock = lock = None
637 637 try:
638 638 wlock = self.wlock()
639 639 lock = self.lock()
640 640 if os.path.exists(self.sjoin("undo")):
641 641 self.ui.status(_("rolling back last transaction\n"))
642 642 transaction.rollback(self.sopener, self.sjoin("undo"))
643 643 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
644 644 try:
645 645 branch = self.opener("undo.branch").read()
646 646 self.dirstate.setbranch(branch)
647 647 except IOError:
648 648 self.ui.warn(_("Named branch could not be reset, "
649 649 "current branch still is: %s\n")
650 % util.tolocal(self.dirstate.branch()))
650 % encoding.tolocal(self.dirstate.branch()))
651 651 self.invalidate()
652 652 self.dirstate.invalidate()
653 653 else:
654 654 self.ui.warn(_("no rollback information available\n"))
655 655 finally:
656 656 del lock, wlock
657 657
658 658 def invalidate(self):
659 659 for a in "changelog manifest".split():
660 660 if a in self.__dict__:
661 661 delattr(self, a)
662 662 self.tagscache = None
663 663 self._tagstypecache = None
664 664 self.nodetagscache = None
665 665 self.branchcache = None
666 666 self._ubranchcache = None
667 667 self._branchcachetip = None
668 668
669 669 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
670 670 try:
671 671 l = lock.lock(lockname, 0, releasefn, desc=desc)
672 672 except error.LockHeld, inst:
673 673 if not wait:
674 674 raise
675 675 self.ui.warn(_("waiting for lock on %s held by %r\n") %
676 676 (desc, inst.locker))
677 677 # default to 600 seconds timeout
678 678 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
679 679 releasefn, desc=desc)
680 680 if acquirefn:
681 681 acquirefn()
682 682 return l
683 683
684 684 def lock(self, wait=True):
685 685 if self._lockref and self._lockref():
686 686 return self._lockref()
687 687
688 688 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
689 689 _('repository %s') % self.origroot)
690 690 self._lockref = weakref.ref(l)
691 691 return l
692 692
693 693 def wlock(self, wait=True):
694 694 if self._wlockref and self._wlockref():
695 695 return self._wlockref()
696 696
697 697 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
698 698 self.dirstate.invalidate, _('working directory of %s') %
699 699 self.origroot)
700 700 self._wlockref = weakref.ref(l)
701 701 return l
702 702
703 703 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
704 704 """
705 705 commit an individual file as part of a larger transaction
706 706 """
707 707
708 708 fn = fctx.path()
709 709 t = fctx.data()
710 710 fl = self.file(fn)
711 711 fp1 = manifest1.get(fn, nullid)
712 712 fp2 = manifest2.get(fn, nullid)
713 713
714 714 meta = {}
715 715 cp = fctx.renamed()
716 716 if cp and cp[0] != fn:
717 717 # Mark the new revision of this file as a copy of another
718 718 # file. This copy data will effectively act as a parent
719 719 # of this new revision. If this is a merge, the first
720 720 # parent will be the nullid (meaning "look up the copy data")
721 721 # and the second one will be the other parent. For example:
722 722 #
723 723 # 0 --- 1 --- 3 rev1 changes file foo
724 724 # \ / rev2 renames foo to bar and changes it
725 725 # \- 2 -/ rev3 should have bar with all changes and
726 726 # should record that bar descends from
727 727 # bar in rev2 and foo in rev1
728 728 #
729 729 # this allows this merge to succeed:
730 730 #
731 731 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
732 732 # \ / merging rev3 and rev4 should use bar@rev2
733 733 # \- 2 --- 4 as the merge base
734 734 #
735 735
736 736 cf = cp[0]
737 737 cr = manifest1.get(cf)
738 738 nfp = fp2
739 739
740 740 if manifest2: # branch merge
741 741 if fp2 == nullid or cr is None: # copied on remote side
742 742 if cf in manifest2:
743 743 cr = manifest2[cf]
744 744 nfp = fp1
745 745
746 746 # find source in nearest ancestor if we've lost track
747 747 if not cr:
748 748 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
749 749 (fn, cf))
750 750 for a in self['.'].ancestors():
751 751 if cf in a:
752 752 cr = a[cf].filenode()
753 753 break
754 754
755 755 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
756 756 meta["copy"] = cf
757 757 meta["copyrev"] = hex(cr)
758 758 fp1, fp2 = nullid, nfp
759 759 elif fp2 != nullid:
760 760 # is one parent an ancestor of the other?
761 761 fpa = fl.ancestor(fp1, fp2)
762 762 if fpa == fp1:
763 763 fp1, fp2 = fp2, nullid
764 764 elif fpa == fp2:
765 765 fp2 = nullid
766 766
767 767 # is the file unmodified from the parent? report existing entry
768 768 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
769 769 return fp1
770 770
771 771 changelist.append(fn)
772 772 return fl.add(t, meta, tr, linkrev, fp1, fp2)
773 773
774 774 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
775 775 if p1 is None:
776 776 p1, p2 = self.dirstate.parents()
777 777 return self.commit(files=files, text=text, user=user, date=date,
778 778 p1=p1, p2=p2, extra=extra, empty_ok=True)
779 779
780 780 def commit(self, files=None, text="", user=None, date=None,
781 781 match=None, force=False, force_editor=False,
782 782 p1=None, p2=None, extra={}, empty_ok=False):
783 783 wlock = lock = None
784 784 if extra.get("close"):
785 785 force = True
786 786 if files:
787 787 files = util.unique(files)
788 788 try:
789 789 wlock = self.wlock()
790 790 lock = self.lock()
791 791 use_dirstate = (p1 is None) # not rawcommit
792 792
793 793 if use_dirstate:
794 794 p1, p2 = self.dirstate.parents()
795 795 update_dirstate = True
796 796
797 797 if (not force and p2 != nullid and
798 798 (match and (match.files() or match.anypats()))):
799 799 raise util.Abort(_('cannot partially commit a merge '
800 800 '(do not specify files or patterns)'))
801 801
802 802 if files:
803 803 modified, removed = [], []
804 804 for f in files:
805 805 s = self.dirstate[f]
806 806 if s in 'nma':
807 807 modified.append(f)
808 808 elif s == 'r':
809 809 removed.append(f)
810 810 else:
811 811 self.ui.warn(_("%s not tracked!\n") % f)
812 812 changes = [modified, [], removed, [], []]
813 813 else:
814 814 changes = self.status(match=match)
815 815 else:
816 816 p1, p2 = p1, p2 or nullid
817 817 update_dirstate = (self.dirstate.parents()[0] == p1)
818 818 changes = [files, [], [], [], []]
819 819
820 820 ms = merge_.mergestate(self)
821 821 for f in changes[0]:
822 822 if f in ms and ms[f] == 'u':
823 823 raise util.Abort(_("unresolved merge conflicts "
824 824 "(see hg resolve)"))
825 825 wctx = context.workingctx(self, (p1, p2), text, user, date,
826 826 extra, changes)
827 827 r = self._commitctx(wctx, force, force_editor, empty_ok,
828 828 use_dirstate, update_dirstate)
829 829 ms.reset()
830 830 return r
831 831
832 832 finally:
833 833 del lock, wlock
834 834
835 835 def commitctx(self, ctx):
836 836 """Add a new revision to current repository.
837 837
838 838 Revision information is passed in the context.memctx argument.
839 839 commitctx() does not touch the working directory.
840 840 """
841 841 wlock = lock = None
842 842 try:
843 843 wlock = self.wlock()
844 844 lock = self.lock()
845 845 return self._commitctx(ctx, force=True, force_editor=False,
846 846 empty_ok=True, use_dirstate=False,
847 847 update_dirstate=False)
848 848 finally:
849 849 del lock, wlock
850 850
851 851 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
852 852 use_dirstate=True, update_dirstate=True):
853 853 tr = None
854 854 valid = 0 # don't save the dirstate if this isn't set
855 855 try:
856 856 commit = util.sort(wctx.modified() + wctx.added())
857 857 remove = wctx.removed()
858 858 extra = wctx.extra().copy()
859 859 branchname = extra['branch']
860 860 user = wctx.user()
861 861 text = wctx.description()
862 862
863 863 p1, p2 = [p.node() for p in wctx.parents()]
864 864 c1 = self.changelog.read(p1)
865 865 c2 = self.changelog.read(p2)
866 866 m1 = self.manifest.read(c1[0]).copy()
867 867 m2 = self.manifest.read(c2[0])
868 868
869 869 if use_dirstate:
870 870 oldname = c1[5].get("branch") # stored in UTF-8
871 871 if (not commit and not remove and not force and p2 == nullid
872 872 and branchname == oldname):
873 873 self.ui.status(_("nothing changed\n"))
874 874 return None
875 875
876 876 xp1 = hex(p1)
877 877 if p2 == nullid: xp2 = ''
878 878 else: xp2 = hex(p2)
879 879
880 880 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
881 881
882 882 tr = self.transaction()
883 883 trp = weakref.proxy(tr)
884 884
885 885 # check in files
886 886 new = {}
887 887 changed = []
888 888 linkrev = len(self)
889 889 for f in commit:
890 890 self.ui.note(f + "\n")
891 891 try:
892 892 fctx = wctx.filectx(f)
893 893 newflags = fctx.flags()
894 894 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
895 895 if ((not changed or changed[-1] != f) and
896 896 m2.get(f) != new[f]):
897 897 # mention the file in the changelog if some
898 898 # flag changed, even if there was no content
899 899 # change.
900 900 if m1.flags(f) != newflags:
901 901 changed.append(f)
902 902 m1.set(f, newflags)
903 903 if use_dirstate:
904 904 self.dirstate.normal(f)
905 905
906 906 except (OSError, IOError):
907 907 if use_dirstate:
908 908 self.ui.warn(_("trouble committing %s!\n") % f)
909 909 raise
910 910 else:
911 911 remove.append(f)
912 912
913 913 updated, added = [], []
914 914 for f in util.sort(changed):
915 915 if f in m1 or f in m2:
916 916 updated.append(f)
917 917 else:
918 918 added.append(f)
919 919
920 920 # update manifest
921 921 m1.update(new)
922 922 removed = [f for f in util.sort(remove) if f in m1 or f in m2]
923 923 removed1 = []
924 924
925 925 for f in removed:
926 926 if f in m1:
927 927 del m1[f]
928 928 removed1.append(f)
929 929 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
930 930 (new, removed1))
931 931
932 932 # add changeset
933 933 if (not empty_ok and not text) or force_editor:
934 934 edittext = []
935 935 if text:
936 936 edittext.append(text)
937 937 edittext.append("")
938 938 edittext.append("") # Empty line between message and comments.
939 939 edittext.append(_("HG: Enter commit message."
940 940 " Lines beginning with 'HG:' are removed."))
941 941 edittext.append("HG: --")
942 942 edittext.append("HG: user: %s" % user)
943 943 if p2 != nullid:
944 944 edittext.append("HG: branch merge")
945 945 if branchname:
946 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
946 edittext.append("HG: branch '%s'"
947 % encoding.tolocal(branchname))
947 948 edittext.extend(["HG: added %s" % f for f in added])
948 949 edittext.extend(["HG: changed %s" % f for f in updated])
949 950 edittext.extend(["HG: removed %s" % f for f in removed])
950 951 if not added and not updated and not removed:
951 952 edittext.append("HG: no files changed")
952 953 edittext.append("")
953 954 # run editor in the repository root
954 955 olddir = os.getcwd()
955 956 os.chdir(self.root)
956 957 text = self.ui.edit("\n".join(edittext), user)
957 958 os.chdir(olddir)
958 959
959 960 lines = [line.rstrip() for line in text.rstrip().splitlines()]
960 961 while lines and not lines[0]:
961 962 del lines[0]
962 963 if not lines and use_dirstate:
963 964 raise util.Abort(_("empty commit message"))
964 965 text = '\n'.join(lines)
965 966
966 967 self.changelog.delayupdate()
967 968 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
968 969 user, wctx.date(), extra)
969 970 p = lambda: self.changelog.writepending() and self.root or ""
970 971 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
971 972 parent2=xp2, pending=p)
972 973 self.changelog.finalize(trp)
973 974 tr.close()
974 975
975 976 if self.branchcache:
976 977 self.branchtags()
977 978
978 979 if use_dirstate or update_dirstate:
979 980 self.dirstate.setparents(n)
980 981 if use_dirstate:
981 982 for f in removed:
982 983 self.dirstate.forget(f)
983 984 valid = 1 # our dirstate updates are complete
984 985
985 986 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
986 987 return n
987 988 finally:
988 989 if not valid: # don't save our updated dirstate
989 990 self.dirstate.invalidate()
990 991 del tr
991 992
992 993 def walk(self, match, node=None):
993 994 '''
994 995 walk recursively through the directory tree or a given
995 996 changeset, finding all files matched by the match
996 997 function
997 998 '''
998 999 return self[node].walk(match)
999 1000
1000 1001 def status(self, node1='.', node2=None, match=None,
1001 1002 ignored=False, clean=False, unknown=False):
1002 1003 """return status of files between two nodes or node and working directory
1003 1004
1004 1005 If node1 is None, use the first dirstate parent instead.
1005 1006 If node2 is None, compare node1 with working directory.
1006 1007 """
1007 1008
1008 1009 def mfmatches(ctx):
1009 1010 mf = ctx.manifest().copy()
1010 1011 for fn in mf.keys():
1011 1012 if not match(fn):
1012 1013 del mf[fn]
1013 1014 return mf
1014 1015
1015 1016 if isinstance(node1, context.changectx):
1016 1017 ctx1 = node1
1017 1018 else:
1018 1019 ctx1 = self[node1]
1019 1020 if isinstance(node2, context.changectx):
1020 1021 ctx2 = node2
1021 1022 else:
1022 1023 ctx2 = self[node2]
1023 1024
1024 1025 working = ctx2.rev() is None
1025 1026 parentworking = working and ctx1 == self['.']
1026 1027 match = match or match_.always(self.root, self.getcwd())
1027 1028 listignored, listclean, listunknown = ignored, clean, unknown
1028 1029
1029 1030 # load earliest manifest first for caching reasons
1030 1031 if not working and ctx2.rev() < ctx1.rev():
1031 1032 ctx2.manifest()
1032 1033
1033 1034 if not parentworking:
1034 1035 def bad(f, msg):
1035 1036 if f not in ctx1:
1036 1037 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1037 1038 return False
1038 1039 match.bad = bad
1039 1040
1040 1041 if working: # we need to scan the working dir
1041 1042 s = self.dirstate.status(match, listignored, listclean, listunknown)
1042 1043 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1043 1044
1044 1045 # check for any possibly clean files
1045 1046 if parentworking and cmp:
1046 1047 fixup = []
1047 1048 # do a full compare of any files that might have changed
1048 1049 for f in cmp:
1049 1050 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1050 1051 or ctx1[f].cmp(ctx2[f].data())):
1051 1052 modified.append(f)
1052 1053 else:
1053 1054 fixup.append(f)
1054 1055
1055 1056 if listclean:
1056 1057 clean += fixup
1057 1058
1058 1059 # update dirstate for files that are actually clean
1059 1060 if fixup:
1060 1061 wlock = None
1061 1062 try:
1062 1063 try:
1063 1064 wlock = self.wlock(False)
1064 1065 for f in fixup:
1065 1066 self.dirstate.normal(f)
1066 1067 except error.LockError:
1067 1068 pass
1068 1069 finally:
1069 1070 del wlock
1070 1071
1071 1072 if not parentworking:
1072 1073 mf1 = mfmatches(ctx1)
1073 1074 if working:
1074 1075 # we are comparing working dir against non-parent
1075 1076 # generate a pseudo-manifest for the working dir
1076 1077 mf2 = mfmatches(self['.'])
1077 1078 for f in cmp + modified + added:
1078 1079 mf2[f] = None
1079 1080 mf2.set(f, ctx2.flags(f))
1080 1081 for f in removed:
1081 1082 if f in mf2:
1082 1083 del mf2[f]
1083 1084 else:
1084 1085 # we are comparing two revisions
1085 1086 deleted, unknown, ignored = [], [], []
1086 1087 mf2 = mfmatches(ctx2)
1087 1088
1088 1089 modified, added, clean = [], [], []
1089 1090 for fn in mf2:
1090 1091 if fn in mf1:
1091 1092 if (mf1.flags(fn) != mf2.flags(fn) or
1092 1093 (mf1[fn] != mf2[fn] and
1093 1094 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1094 1095 modified.append(fn)
1095 1096 elif listclean:
1096 1097 clean.append(fn)
1097 1098 del mf1[fn]
1098 1099 else:
1099 1100 added.append(fn)
1100 1101 removed = mf1.keys()
1101 1102
1102 1103 r = modified, added, removed, deleted, unknown, ignored, clean
1103 1104 [l.sort() for l in r]
1104 1105 return r
1105 1106
1106 1107 def add(self, list):
1107 1108 wlock = self.wlock()
1108 1109 try:
1109 1110 rejected = []
1110 1111 for f in list:
1111 1112 p = self.wjoin(f)
1112 1113 try:
1113 1114 st = os.lstat(p)
1114 1115 except:
1115 1116 self.ui.warn(_("%s does not exist!\n") % f)
1116 1117 rejected.append(f)
1117 1118 continue
1118 1119 if st.st_size > 10000000:
1119 1120 self.ui.warn(_("%s: files over 10MB may cause memory and"
1120 1121 " performance problems\n"
1121 1122 "(use 'hg revert %s' to unadd the file)\n")
1122 1123 % (f, f))
1123 1124 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1124 1125 self.ui.warn(_("%s not added: only files and symlinks "
1125 1126 "supported currently\n") % f)
1126 1127 rejected.append(p)
1127 1128 elif self.dirstate[f] in 'amn':
1128 1129 self.ui.warn(_("%s already tracked!\n") % f)
1129 1130 elif self.dirstate[f] == 'r':
1130 1131 self.dirstate.normallookup(f)
1131 1132 else:
1132 1133 self.dirstate.add(f)
1133 1134 return rejected
1134 1135 finally:
1135 1136 del wlock
1136 1137
1137 1138 def forget(self, list):
1138 1139 wlock = self.wlock()
1139 1140 try:
1140 1141 for f in list:
1141 1142 if self.dirstate[f] != 'a':
1142 1143 self.ui.warn(_("%s not added!\n") % f)
1143 1144 else:
1144 1145 self.dirstate.forget(f)
1145 1146 finally:
1146 1147 del wlock
1147 1148
1148 1149 def remove(self, list, unlink=False):
1149 1150 wlock = None
1150 1151 try:
1151 1152 if unlink:
1152 1153 for f in list:
1153 1154 try:
1154 1155 util.unlink(self.wjoin(f))
1155 1156 except OSError, inst:
1156 1157 if inst.errno != errno.ENOENT:
1157 1158 raise
1158 1159 wlock = self.wlock()
1159 1160 for f in list:
1160 1161 if unlink and os.path.exists(self.wjoin(f)):
1161 1162 self.ui.warn(_("%s still exists!\n") % f)
1162 1163 elif self.dirstate[f] == 'a':
1163 1164 self.dirstate.forget(f)
1164 1165 elif f not in self.dirstate:
1165 1166 self.ui.warn(_("%s not tracked!\n") % f)
1166 1167 else:
1167 1168 self.dirstate.remove(f)
1168 1169 finally:
1169 1170 del wlock
1170 1171
    def undelete(self, list):
        """Restore files that are marked as removed ('r') in the dirstate.

        The content is recovered from whichever dirstate-parent manifest
        contains the file (first parent preferred), written back to the
        working directory, and the file is marked normal again.
        """
        wlock = None
        try:
            # read the manifests of both dirstate parents up front
            manifests = [self.manifest.read(self.changelog.read(p)[0])
                         for p in self.dirstate.parents() if p != nullid]
            wlock = self.wlock()
            for f in list:
                if self.dirstate[f] != 'r':
                    self.ui.warn(_("%s not removed!\n") % f)
                else:
                    # prefer the first parent's copy when both have it
                    m = f in manifests[0] and manifests[0] or manifests[1]
                    t = self.file(f).read(m[f])
                    self.wwrite(f, t, m.flags(f))
                    self.dirstate.normal(f)
        finally:
            del wlock
1187 1188
1188 1189 def copy(self, source, dest):
1189 1190 wlock = None
1190 1191 try:
1191 1192 p = self.wjoin(dest)
1192 1193 if not (os.path.exists(p) or os.path.islink(p)):
1193 1194 self.ui.warn(_("%s does not exist!\n") % dest)
1194 1195 elif not (os.path.isfile(p) or os.path.islink(p)):
1195 1196 self.ui.warn(_("copy failed: %s is not a file or a "
1196 1197 "symbolic link\n") % dest)
1197 1198 else:
1198 1199 wlock = self.wlock()
1199 1200 if self.dirstate[dest] in '?r':
1200 1201 self.dirstate.add(dest)
1201 1202 self.dirstate.copy(source, dest)
1202 1203 finally:
1203 1204 del wlock
1204 1205
1205 1206 def heads(self, start=None, closed=True):
1206 1207 heads = self.changelog.heads(start)
1207 1208 def display(head):
1208 1209 if closed:
1209 1210 return True
1210 1211 extras = self.changelog.read(head)[5]
1211 1212 return ('close' not in extras)
1212 1213 # sort the output in rev descending order
1213 1214 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1214 1215 return [n for (r, n) in util.sort(heads)]
1215 1216
    def branchheads(self, branch=None, start=None, closed=True):
        """Return the heads of a named branch, highest revision first.

        branch defaults to the working directory branch.  If start is
        given, only heads reachable from it are kept; if closed is
        false, closed heads are filtered out.  Returns [] for an
        unknown branch.
        """
        if branch is None:
            branch = self[None].branch()
        branches = self._branchheads()
        if branch not in branches:
            return []
        bheads = branches[branch]
        # the cache returns heads ordered lowest to highest
        # NOTE(review): this reverses the cached list in place — verify
        # the branch-heads cache tolerates the mutation
        bheads.reverse()
        if start is not None:
            # filter out the heads that cannot be reached from startrev
            bheads = self.changelog.nodesbetween([start], bheads)[2]
        if not closed:
            # drop heads whose changeset extra dict marks them closed
            bheads = [h for h in bheads if
                      ('close' not in self.changelog.read(h)[5])]
        return bheads
1232 1233
1233 1234 def branches(self, nodes):
1234 1235 if not nodes:
1235 1236 nodes = [self.changelog.tip()]
1236 1237 b = []
1237 1238 for n in nodes:
1238 1239 t = n
1239 1240 while 1:
1240 1241 p = self.changelog.parents(n)
1241 1242 if p[1] != nullid or p[0] == nullid:
1242 1243 b.append((t, n, p[0], p[1]))
1243 1244 break
1244 1245 n = p[0]
1245 1246 return b
1246 1247
1247 1248 def between(self, pairs):
1248 1249 r = []
1249 1250
1250 1251 for top, bottom in pairs:
1251 1252 n, l, i = top, [], 0
1252 1253 f = 1
1253 1254
1254 1255 while n != bottom and n != nullid:
1255 1256 p = self.changelog.parents(n)[0]
1256 1257 if i == f:
1257 1258 l.append(n)
1258 1259 f = f * 2
1259 1260 n = p
1260 1261 i += 1
1261 1262
1262 1263 r.append(l)
1263 1264
1264 1265 return r
1265 1266
1266 1267 def findincoming(self, remote, base=None, heads=None, force=False):
1267 1268 """Return list of roots of the subsets of missing nodes from remote
1268 1269
1269 1270 If base dict is specified, assume that these nodes and their parents
1270 1271 exist on the remote side and that no child of a node of base exists
1271 1272 in both remote and self.
1272 1273 Furthermore base will be updated to include the nodes that exists
1273 1274 in self and remote but no children exists in self and remote.
1274 1275 If a list of heads is specified, return only nodes which are heads
1275 1276 or ancestors of these heads.
1276 1277
1277 1278 All the ancestors of base are in self and in remote.
1278 1279 All the descendants of the list returned are missing in self.
1279 1280 (and so we know that the rest of the nodes are missing in remote, see
1280 1281 outgoing)
1281 1282 """
1282 1283 return self.findcommonincoming(remote, base, heads, force)[1]
1283 1284
1284 1285 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1285 1286 """Return a tuple (common, missing roots, heads) used to identify
1286 1287 missing nodes from remote.
1287 1288
1288 1289 If base dict is specified, assume that these nodes and their parents
1289 1290 exist on the remote side and that no child of a node of base exists
1290 1291 in both remote and self.
1291 1292 Furthermore base will be updated to include the nodes that exists
1292 1293 in self and remote but no children exists in self and remote.
1293 1294 If a list of heads is specified, return only nodes which are heads
1294 1295 or ancestors of these heads.
1295 1296
1296 1297 All the ancestors of base are in self and in remote.
1297 1298 """
1298 1299 m = self.changelog.nodemap
1299 1300 search = []
1300 1301 fetch = {}
1301 1302 seen = {}
1302 1303 seenbranch = {}
1303 1304 if base == None:
1304 1305 base = {}
1305 1306
1306 1307 if not heads:
1307 1308 heads = remote.heads()
1308 1309
1309 1310 if self.changelog.tip() == nullid:
1310 1311 base[nullid] = 1
1311 1312 if heads != [nullid]:
1312 1313 return [nullid], [nullid], list(heads)
1313 1314 return [nullid], [], []
1314 1315
1315 1316 # assume we're closer to the tip than the root
1316 1317 # and start by examining the heads
1317 1318 self.ui.status(_("searching for changes\n"))
1318 1319
1319 1320 unknown = []
1320 1321 for h in heads:
1321 1322 if h not in m:
1322 1323 unknown.append(h)
1323 1324 else:
1324 1325 base[h] = 1
1325 1326
1326 1327 heads = unknown
1327 1328 if not unknown:
1328 1329 return base.keys(), [], []
1329 1330
1330 1331 req = dict.fromkeys(unknown)
1331 1332 reqcnt = 0
1332 1333
1333 1334 # search through remote branches
1334 1335 # a 'branch' here is a linear segment of history, with four parts:
1335 1336 # head, root, first parent, second parent
1336 1337 # (a branch always has two parents (or none) by definition)
1337 1338 unknown = remote.branches(unknown)
1338 1339 while unknown:
1339 1340 r = []
1340 1341 while unknown:
1341 1342 n = unknown.pop(0)
1342 1343 if n[0] in seen:
1343 1344 continue
1344 1345
1345 1346 self.ui.debug(_("examining %s:%s\n")
1346 1347 % (short(n[0]), short(n[1])))
1347 1348 if n[0] == nullid: # found the end of the branch
1348 1349 pass
1349 1350 elif n in seenbranch:
1350 1351 self.ui.debug(_("branch already found\n"))
1351 1352 continue
1352 1353 elif n[1] and n[1] in m: # do we know the base?
1353 1354 self.ui.debug(_("found incomplete branch %s:%s\n")
1354 1355 % (short(n[0]), short(n[1])))
1355 1356 search.append(n[0:2]) # schedule branch range for scanning
1356 1357 seenbranch[n] = 1
1357 1358 else:
1358 1359 if n[1] not in seen and n[1] not in fetch:
1359 1360 if n[2] in m and n[3] in m:
1360 1361 self.ui.debug(_("found new changeset %s\n") %
1361 1362 short(n[1]))
1362 1363 fetch[n[1]] = 1 # earliest unknown
1363 1364 for p in n[2:4]:
1364 1365 if p in m:
1365 1366 base[p] = 1 # latest known
1366 1367
1367 1368 for p in n[2:4]:
1368 1369 if p not in req and p not in m:
1369 1370 r.append(p)
1370 1371 req[p] = 1
1371 1372 seen[n[0]] = 1
1372 1373
1373 1374 if r:
1374 1375 reqcnt += 1
1375 1376 self.ui.debug(_("request %d: %s\n") %
1376 1377 (reqcnt, " ".join(map(short, r))))
1377 1378 for p in xrange(0, len(r), 10):
1378 1379 for b in remote.branches(r[p:p+10]):
1379 1380 self.ui.debug(_("received %s:%s\n") %
1380 1381 (short(b[0]), short(b[1])))
1381 1382 unknown.append(b)
1382 1383
1383 1384 # do binary search on the branches we found
1384 1385 while search:
1385 1386 newsearch = []
1386 1387 reqcnt += 1
1387 1388 for n, l in zip(search, remote.between(search)):
1388 1389 l.append(n[1])
1389 1390 p = n[0]
1390 1391 f = 1
1391 1392 for i in l:
1392 1393 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1393 1394 if i in m:
1394 1395 if f <= 2:
1395 1396 self.ui.debug(_("found new branch changeset %s\n") %
1396 1397 short(p))
1397 1398 fetch[p] = 1
1398 1399 base[i] = 1
1399 1400 else:
1400 1401 self.ui.debug(_("narrowed branch search to %s:%s\n")
1401 1402 % (short(p), short(i)))
1402 1403 newsearch.append((p, i))
1403 1404 break
1404 1405 p, f = i, f * 2
1405 1406 search = newsearch
1406 1407
1407 1408 # sanity check our fetch list
1408 1409 for f in fetch.keys():
1409 1410 if f in m:
1410 1411 raise error.RepoError(_("already have changeset ")
1411 1412 + short(f[:4]))
1412 1413
1413 1414 if base.keys() == [nullid]:
1414 1415 if force:
1415 1416 self.ui.warn(_("warning: repository is unrelated\n"))
1416 1417 else:
1417 1418 raise util.Abort(_("repository is unrelated"))
1418 1419
1419 1420 self.ui.debug(_("found new changesets starting at ") +
1420 1421 " ".join([short(f) for f in fetch]) + "\n")
1421 1422
1422 1423 self.ui.debug(_("%d total queries\n") % reqcnt)
1423 1424
1424 1425 return base.keys(), fetch.keys(), heads
1425 1426
1426 1427 def findoutgoing(self, remote, base=None, heads=None, force=False):
1427 1428 """Return list of nodes that are roots of subsets not in remote
1428 1429
1429 1430 If base dict is specified, assume that these nodes and their parents
1430 1431 exist on the remote side.
1431 1432 If a list of heads is specified, return only nodes which are heads
1432 1433 or ancestors of these heads, and return a second element which
1433 1434 contains all remote heads which get new children.
1434 1435 """
1435 1436 if base == None:
1436 1437 base = {}
1437 1438 self.findincoming(remote, base, heads, force=force)
1438 1439
1439 1440 self.ui.debug(_("common changesets up to ")
1440 1441 + " ".join(map(short, base.keys())) + "\n")
1441 1442
1442 1443 remain = dict.fromkeys(self.changelog.nodemap)
1443 1444
1444 1445 # prune everything remote has from the tree
1445 1446 del remain[nullid]
1446 1447 remove = base.keys()
1447 1448 while remove:
1448 1449 n = remove.pop(0)
1449 1450 if n in remain:
1450 1451 del remain[n]
1451 1452 for p in self.changelog.parents(n):
1452 1453 remove.append(p)
1453 1454
1454 1455 # find every node whose parents have been pruned
1455 1456 subset = []
1456 1457 # find every remote head that will get new children
1457 1458 updated_heads = {}
1458 1459 for n in remain:
1459 1460 p1, p2 = self.changelog.parents(n)
1460 1461 if p1 not in remain and p2 not in remain:
1461 1462 subset.append(n)
1462 1463 if heads:
1463 1464 if p1 in heads:
1464 1465 updated_heads[p1] = True
1465 1466 if p2 in heads:
1466 1467 updated_heads[p2] = True
1467 1468
1468 1469 # this is the set of all roots we have to push
1469 1470 if heads:
1470 1471 return subset, updated_heads.keys()
1471 1472 else:
1472 1473 return subset
1473 1474
    def pull(self, remote, heads=None, force=False):
        """Pull changesets from remote into this repository.

        heads restricts the pull to the given remote heads, which
        requires the remote 'changegroupsubset' capability.  Returns
        the result of addchangegroup() (0 for no changes, otherwise a
        heads-delta code).
        """
        lock = self.lock()
        try:
            common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
                                                            force=force)
            if fetch == [nullid]:
                # nothing in common at all: full clone-style pull
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None and remote.capable('changegroupsubset'):
                # no explicit heads requested: pull up to the remote
                # heads found during discovery
                heads = rheads

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if not remote.capable('changegroupsubset'):
                    raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            del lock
1498 1499
1499 1500 def push(self, remote, force=False, revs=None):
1500 1501 # there are two ways to push to remote repo:
1501 1502 #
1502 1503 # addchangegroup assumes local user can lock remote
1503 1504 # repo (local filesystem, old ssh servers).
1504 1505 #
1505 1506 # unbundle assumes local user cannot lock remote repo (new ssh
1506 1507 # servers, http servers).
1507 1508
1508 1509 if remote.capable('unbundle'):
1509 1510 return self.push_unbundle(remote, force, revs)
1510 1511 return self.push_addchangegroup(remote, force, revs)
1511 1512
    def prepush(self, remote, force, revs):
        """Compute the changegroup to push to remote.

        Returns (changegroup, remote heads) on success, or (None,
        status) where status is 1 for "no changes found" and 0 for
        "push refused because it would create new remote heads".
        """
        common = {}
        remote_heads = remote.heads()
        # inc is true if the remote has changes we do not
        inc = self.findincoming(remote, common, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, common, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # pushing into an empty repo can never add heads
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            # r stays a head: no outgoing head descends
                            # from it
                            newheads.append(r)
                    else:
                        # remote head unknown locally: it stays a head
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote heads!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 0
            elif inc:
                self.ui.warn(_("note: unsynced remote changes!\n"))

        if revs is None:
            # use the fast path, no race possible on push
            cg = self._changegroup(common.keys(), 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1568 1569
1569 1570 def push_addchangegroup(self, remote, force, revs):
1570 1571 lock = remote.lock()
1571 1572 try:
1572 1573 ret = self.prepush(remote, force, revs)
1573 1574 if ret[0] is not None:
1574 1575 cg, remote_heads = ret
1575 1576 return remote.addchangegroup(cg, 'push', self.url())
1576 1577 return ret[1]
1577 1578 finally:
1578 1579 del lock
1579 1580
1580 1581 def push_unbundle(self, remote, force, revs):
1581 1582 # local repo finds heads on server, finds out what revs it
1582 1583 # must push. once revs transferred, if server finds it has
1583 1584 # different heads (someone else won commit/push race), server
1584 1585 # aborts.
1585 1586
1586 1587 ret = self.prepush(remote, force, revs)
1587 1588 if ret[0] is not None:
1588 1589 cg, remote_heads = ret
1589 1590 if force: remote_heads = ['force']
1590 1591 return remote.unbundle(cg, remote_heads, 'push')
1591 1592 return ret[1]
1592 1593
1593 1594 def changegroupinfo(self, nodes, source):
1594 1595 if self.ui.verbose or source == 'bundle':
1595 1596 self.ui.status(_("%d changesets found\n") % len(nodes))
1596 1597 if self.ui.debugflag:
1597 1598 self.ui.debug(_("list of changesets:\n"))
1598 1599 for node in nodes:
1599 1600 self.ui.debug("%s\n" % hex(node))
1600 1601
    def changegroupsubset(self, bases, heads, source, extranodes=None):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        The caller can specify some nodes that must be included in the
        changegroup using the extranodes argument.  It should be a dict
        where the keys are the filenames (or 1 for the manifest), and the
        values are lists of (node, linknode) tuples, where node is a wanted
        node and linknode is the changelog node that should be transmitted as
        the linkrev.
        """

        if extranodes is None:
            # can we go through the fast path ?
            # NOTE(review): this sorts the caller's heads list in place
            heads.sort()
            allheads = self.heads()
            allheads.sort()
            if heads == allheads:
                common = []
                # parents of bases are known from both sides
                for n in bases:
                    for p in self.changelog.parents(n):
                        if p != nullid:
                            common.append(p)
                return self._changegroup(common, source)

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst, source)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming the a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    deltamf = mnfst.readdelta(mnfstnode)
                    # For each line in the delta
                    for f, fnode in deltamf.iteritems():
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file in we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we now the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up the a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Add the nodes that were explicitly requested.
        def add_extra_nodes(name, nodes):
            if not extranodes or name not in extranodes:
                return

            for node, linknode in extranodes[name]:
                if node not in nodes:
                    nodes[node] = linknode

        # Now that we have all theses utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            add_extra_nodes(1, msng_mnfst_set)
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            if extranodes:
                for fname in extranodes:
                    if isinstance(fname, int):
                        # 1 is the manifest key, already handled above
                        continue
                    msng_filenode_set.setdefault(fname, {})
                    changedfiles[fname] = 1
            # Go through all our files in order sorted by name.
            for fname in util.sort(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if fname in msng_filenode_set:
                    prune_filenodes(fname, filerevlog)
                    add_extra_nodes(fname, msng_filenode_set[fname])
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if fname in msng_filenode_set:
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1909 1910
1910 1911 def changegroup(self, basenodes, source):
1911 1912 # to avoid a race we use changegroupsubset() (issue1320)
1912 1913 return self.changegroupsubset(basenodes, self.heads(), source)
1913 1914
    def _changegroup(self, common, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        common is the set of common nodes between remote and self"""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.findmissing(common)
        # set of outgoing revs, for fast linkrev membership tests below
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes, source)

        def identity(x):
            # a changeset's linknode is itself
            return x

        def gennodelst(log):
            # yield the nodes of log whose linkrev points at an
            # outgoing changeset
            for r in log:
                if log.linkrev(r) in revset:
                    yield log.node(r)

        def changed_file_collector(changedfileset):
            # record every file touched by an outgoing changeset as the
            # changelog group is streamed
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            # map a node of revlog to the changelog node it belongs to
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(revlog.rev(n)))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            # changelog chunks first (this also fills changedfiles)
            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk

            # then the manifest chunks
            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            # finally one group per changed file, sorted by name
            for fname in util.sort(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

            if nodes:
                self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1982 1983
1983 1984 def addchangegroup(self, source, srctype, url, emptyok=False):
1984 1985 """add changegroup to repo.
1985 1986
1986 1987 return values:
1987 1988 - nothing changed or no source: 0
1988 1989 - more heads than before: 1+added heads (2..n)
1989 1990 - less heads than before: -1-removed heads (-2..-n)
1990 1991 - number of heads stays the same: 1
1991 1992 """
1992 1993 def csmap(x):
1993 1994 self.ui.debug(_("add changeset %s\n") % short(x))
1994 1995 return len(cl)
1995 1996
1996 1997 def revmap(x):
1997 1998 return cl.rev(x)
1998 1999
1999 2000 if not source:
2000 2001 return 0
2001 2002
2002 2003 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2003 2004
2004 2005 changesets = files = revisions = 0
2005 2006
2006 2007 # write changelog data to temp files so concurrent readers will not see
2007 2008 # inconsistent view
2008 2009 cl = self.changelog
2009 2010 cl.delayupdate()
2010 2011 oldheads = len(cl.heads())
2011 2012
2012 2013 tr = self.transaction()
2013 2014 try:
2014 2015 trp = weakref.proxy(tr)
2015 2016 # pull off the changeset group
2016 2017 self.ui.status(_("adding changesets\n"))
2017 2018 cor = len(cl) - 1
2018 2019 chunkiter = changegroup.chunkiter(source)
2019 2020 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2020 2021 raise util.Abort(_("received changelog group is empty"))
2021 2022 cnr = len(cl) - 1
2022 2023 changesets = cnr - cor
2023 2024
2024 2025 # pull off the manifest group
2025 2026 self.ui.status(_("adding manifests\n"))
2026 2027 chunkiter = changegroup.chunkiter(source)
2027 2028 # no need to check for empty manifest group here:
2028 2029 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2029 2030 # no new manifest will be created and the manifest group will
2030 2031 # be empty during the pull
2031 2032 self.manifest.addgroup(chunkiter, revmap, trp)
2032 2033
2033 2034 # process the files
2034 2035 self.ui.status(_("adding file changes\n"))
2035 2036 while 1:
2036 2037 f = changegroup.getchunk(source)
2037 2038 if not f:
2038 2039 break
2039 2040 self.ui.debug(_("adding %s revisions\n") % f)
2040 2041 fl = self.file(f)
2041 2042 o = len(fl)
2042 2043 chunkiter = changegroup.chunkiter(source)
2043 2044 if fl.addgroup(chunkiter, revmap, trp) is None:
2044 2045 raise util.Abort(_("received file revlog group is empty"))
2045 2046 revisions += len(fl) - o
2046 2047 files += 1
2047 2048
2048 2049 newheads = len(self.changelog.heads())
2049 2050 heads = ""
2050 2051 if oldheads and newheads != oldheads:
2051 2052 heads = _(" (%+d heads)") % (newheads - oldheads)
2052 2053
2053 2054 self.ui.status(_("added %d changesets"
2054 2055 " with %d changes to %d files%s\n")
2055 2056 % (changesets, revisions, files, heads))
2056 2057
2057 2058 if changesets > 0:
2058 2059 p = lambda: self.changelog.writepending() and self.root or ""
2059 2060 self.hook('pretxnchangegroup', throw=True,
2060 2061 node=hex(self.changelog.node(cor+1)), source=srctype,
2061 2062 url=url, pending=p)
2062 2063
2063 2064 # make changelog see real files again
2064 2065 cl.finalize(trp)
2065 2066
2066 2067 tr.close()
2067 2068 finally:
2068 2069 del tr
2069 2070
2070 2071 if changesets > 0:
2071 2072 # forcefully update the on-disk branch cache
2072 2073 self.ui.debug(_("updating the branch cache\n"))
2073 2074 self.branchtags()
2074 2075 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2075 2076 source=srctype, url=url)
2076 2077
2077 2078 for i in xrange(cor + 1, cnr + 1):
2078 2079 self.hook("incoming", node=hex(self.changelog.node(i)),
2079 2080 source=srctype, url=url)
2080 2081
2081 2082 # never return 0 here:
2082 2083 if newheads < oldheads:
2083 2084 return newheads - oldheads - 1
2084 2085 else:
2085 2086 return newheads - oldheads + 1
2086 2087
2087 2088
2088 2089 def stream_in(self, remote):
2089 2090 fp = remote.stream_out()
2090 2091 l = fp.readline()
2091 2092 try:
2092 2093 resp = int(l)
2093 2094 except ValueError:
2094 2095 raise error.ResponseError(
2095 2096 _('Unexpected response from remote server:'), l)
2096 2097 if resp == 1:
2097 2098 raise util.Abort(_('operation forbidden by server'))
2098 2099 elif resp == 2:
2099 2100 raise util.Abort(_('locking the remote repository failed'))
2100 2101 elif resp != 0:
2101 2102 raise util.Abort(_('the server sent an unknown error code'))
2102 2103 self.ui.status(_('streaming all changes\n'))
2103 2104 l = fp.readline()
2104 2105 try:
2105 2106 total_files, total_bytes = map(int, l.split(' ', 1))
2106 2107 except (ValueError, TypeError):
2107 2108 raise error.ResponseError(
2108 2109 _('Unexpected response from remote server:'), l)
2109 2110 self.ui.status(_('%d files to transfer, %s of data\n') %
2110 2111 (total_files, util.bytecount(total_bytes)))
2111 2112 start = time.time()
2112 2113 for i in xrange(total_files):
2113 2114 # XXX doesn't support '\n' or '\r' in filenames
2114 2115 l = fp.readline()
2115 2116 try:
2116 2117 name, size = l.split('\0', 1)
2117 2118 size = int(size)
2118 2119 except (ValueError, TypeError):
2119 2120 raise error.ResponseError(
2120 2121 _('Unexpected response from remote server:'), l)
2121 2122 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2122 2123 ofp = self.sopener(name, 'w')
2123 2124 for chunk in util.filechunkiter(fp, limit=size):
2124 2125 ofp.write(chunk)
2125 2126 ofp.close()
2126 2127 elapsed = time.time() - start
2127 2128 if elapsed <= 0:
2128 2129 elapsed = 0.001
2129 2130 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2130 2131 (util.bytecount(total_bytes), elapsed,
2131 2132 util.bytecount(total_bytes / elapsed)))
2132 2133 self.invalidate()
2133 2134 return len(self.heads()) + 1
2134 2135
2135 2136 def clone(self, remote, heads=[], stream=False):
2136 2137 '''clone remote repository.
2137 2138
2138 2139 keyword arguments:
2139 2140 heads: list of revs to clone (forces use of pull)
2140 2141 stream: use streaming clone if possible'''
2141 2142
2142 2143 # now, all clients that can request uncompressed clones can
2143 2144 # read repo formats supported by all servers that can serve
2144 2145 # them.
2145 2146
2146 2147 # if revlog format changes, client will have to check version
2147 2148 # and format flags on "stream" capability, and use
2148 2149 # uncompressed only if compatible.
2149 2150
2150 2151 if stream and not heads and remote.capable('stream'):
2151 2152 return self.stream_in(remote)
2152 2153 return self.pull(remote, heads)
2153 2154
2154 2155 # used to avoid circular references so destructors work
2155 2156 def aftertrans(files):
2156 2157 renamefiles = [tuple(t) for t in files]
2157 2158 def a():
2158 2159 for src, dest in renamefiles:
2159 2160 util.rename(src, dest)
2160 2161 return a
2161 2162
2162 2163 def instance(ui, path, create):
2163 2164 return localrepository(ui, util.drop_scheme('file', path), create)
2164 2165
2165 2166 def islocal(path):
2166 2167 return True
@@ -1,170 +1,170 b''
1 1 # mail.py - mail sending bits for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import os, smtplib, socket
10 10 import email.Header, email.MIMEText, email.Utils
11 import util
11 import util, encoding
12 12
13 13 def _smtp(ui):
14 14 '''build an smtp connection and return a function to send mail'''
15 15 local_hostname = ui.config('smtp', 'local_hostname')
16 16 s = smtplib.SMTP(local_hostname=local_hostname)
17 17 mailhost = ui.config('smtp', 'host')
18 18 if not mailhost:
19 19 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
20 20 mailport = int(ui.config('smtp', 'port', 25))
21 21 ui.note(_('sending mail: smtp host %s, port %s\n') %
22 22 (mailhost, mailport))
23 23 s.connect(host=mailhost, port=mailport)
24 24 if ui.configbool('smtp', 'tls'):
25 25 if not hasattr(socket, 'ssl'):
26 26 raise util.Abort(_("can't use TLS: Python SSL support "
27 27 "not installed"))
28 28 ui.note(_('(using tls)\n'))
29 29 s.ehlo()
30 30 s.starttls()
31 31 s.ehlo()
32 32 username = ui.config('smtp', 'username')
33 33 password = ui.config('smtp', 'password')
34 34 if username and not password:
35 35 password = ui.getpass()
36 36 if username and password:
37 37 ui.note(_('(authenticating to mail server as %s)\n') %
38 38 (username))
39 39 s.login(username, password)
40 40
41 41 def send(sender, recipients, msg):
42 42 try:
43 43 return s.sendmail(sender, recipients, msg)
44 44 except smtplib.SMTPRecipientsRefused, inst:
45 45 recipients = [r[1] for r in inst.recipients.values()]
46 46 raise util.Abort('\n' + '\n'.join(recipients))
47 47 except smtplib.SMTPException, inst:
48 48 raise util.Abort(inst)
49 49
50 50 return send
51 51
52 52 def _sendmail(ui, sender, recipients, msg):
53 53 '''send mail using sendmail.'''
54 54 program = ui.config('email', 'method')
55 55 cmdline = '%s -f %s %s' % (program, util.email(sender),
56 56 ' '.join(map(util.email, recipients)))
57 57 ui.note(_('sending mail: %s\n') % cmdline)
58 58 fp = util.popen(cmdline, 'w')
59 59 fp.write(msg)
60 60 ret = fp.close()
61 61 if ret:
62 62 raise util.Abort('%s %s' % (
63 63 os.path.basename(program.split(None, 1)[0]),
64 64 util.explain_exit(ret)[0]))
65 65
66 66 def connect(ui):
67 67 '''make a mail connection. return a function to send mail.
68 68 call as sendmail(sender, list-of-recipients, msg).'''
69 69 if ui.config('email', 'method', 'smtp') == 'smtp':
70 70 return _smtp(ui)
71 71 return lambda s, r, m: _sendmail(ui, s, r, m)
72 72
73 73 def sendmail(ui, sender, recipients, msg):
74 74 send = connect(ui)
75 75 return send(sender, recipients, msg)
76 76
77 77 def validateconfig(ui):
78 78 '''determine if we have enough config data to try sending email.'''
79 79 method = ui.config('email', 'method', 'smtp')
80 80 if method == 'smtp':
81 81 if not ui.config('smtp', 'host'):
82 82 raise util.Abort(_('smtp specified as email transport, '
83 83 'but no smtp host configured'))
84 84 else:
85 85 if not util.find_exe(method):
86 86 raise util.Abort(_('%r specified as email transport, '
87 87 'but not in PATH') % method)
88 88
89 89 def mimetextpatch(s, subtype='plain', display=False):
90 90 '''If patch in utf-8 transfer-encode it.'''
91 91 if not display:
92 92 for cs in ('us-ascii', 'utf-8'):
93 93 try:
94 94 s.decode(cs)
95 95 return email.MIMEText.MIMEText(s, subtype, cs)
96 96 except UnicodeDecodeError:
97 97 pass
98 98 return email.MIMEText.MIMEText(s, subtype)
99 99
100 100 def _charsets(ui):
101 101 '''Obtains charsets to send mail parts not containing patches.'''
102 102 charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
103 fallbacks = [util._fallbackencoding.lower(),
104 util._encoding.lower(), 'utf-8']
103 fallbacks = [encoding.fallbackencoding.lower(),
104 encoding.encoding.lower(), 'utf-8']
105 105 for cs in fallbacks: # util.unique does not keep order
106 106 if cs not in charsets:
107 107 charsets.append(cs)
108 108 return [cs for cs in charsets if not cs.endswith('ascii')]
109 109
110 110 def _encode(ui, s, charsets):
111 111 '''Returns (converted) string, charset tuple.
112 112 Finds out best charset by cycling through sendcharsets in descending
113 order. Tries both _encoding and _fallbackencoding for input. Only as
113 order. Tries both encoding and fallbackencoding for input. Only as
114 114 last resort send as is in fake ascii.
115 115 Caveat: Do not use for mail parts containing patches!'''
116 116 try:
117 117 s.decode('ascii')
118 118 except UnicodeDecodeError:
119 119 sendcharsets = charsets or _charsets(ui)
120 for ics in (util._encoding, util._fallbackencoding):
120 for ics in (encoding.encoding, encoding.fallbackencoding):
121 121 try:
122 122 u = s.decode(ics)
123 123 except UnicodeDecodeError:
124 124 continue
125 125 for ocs in sendcharsets:
126 126 try:
127 127 return u.encode(ocs), ocs
128 128 except UnicodeEncodeError:
129 129 pass
130 130 except LookupError:
131 131 ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
132 132 # if ascii, or all conversion attempts fail, send (broken) ascii
133 133 return s, 'us-ascii'
134 134
135 135 def headencode(ui, s, charsets=None, display=False):
136 136 '''Returns RFC-2047 compliant header from given string.'''
137 137 if not display:
138 138 # split into words?
139 139 s, cs = _encode(ui, s, charsets)
140 140 return str(email.Header.Header(s, cs))
141 141 return s
142 142
143 143 def addressencode(ui, address, charsets=None, display=False):
144 144 '''Turns address into RFC-2047 compliant header.'''
145 145 if display or not address:
146 146 return address or ''
147 147 name, addr = email.Utils.parseaddr(address)
148 148 name = headencode(ui, name, charsets)
149 149 try:
150 150 acc, dom = addr.split('@')
151 151 acc = acc.encode('ascii')
152 152 dom = dom.encode('idna')
153 153 addr = '%s@%s' % (acc, dom)
154 154 except UnicodeDecodeError:
155 155 raise util.Abort(_('invalid email address: %s') % addr)
156 156 except ValueError:
157 157 try:
158 158 # too strict?
159 159 addr = addr.encode('ascii')
160 160 except UnicodeDecodeError:
161 161 raise util.Abort(_('invalid local address: %s') % addr)
162 162 return email.Utils.formataddr((name, addr))
163 163
164 164 def mimeencode(ui, s, charsets=None, display=False):
165 165 '''creates mime text object, encodes it if needed, and sets
166 166 charset and transfer-encoding accordingly.'''
167 167 cs = 'us-ascii'
168 168 if not display:
169 169 s, cs = _encode(ui, s, charsets)
170 170 return email.MIMEText.MIMEText(s, 'plain', cs)
@@ -1,186 +1,186 b''
1 1 # template-filters.py - common template expansion filters
2 2 #
3 3 # Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import cgi, re, os, time, urllib, textwrap
9 import util, templater
9 import util, templater, encoding
10 10
11 11 agescales = [("second", 1),
12 12 ("minute", 60),
13 13 ("hour", 3600),
14 14 ("day", 3600 * 24),
15 15 ("week", 3600 * 24 * 7),
16 16 ("month", 3600 * 24 * 30),
17 17 ("year", 3600 * 24 * 365)]
18 18
19 19 agescales.reverse()
20 20
21 21 def age(date):
22 22 '''turn a (timestamp, tzoff) tuple into an age string.'''
23 23
24 24 def plural(t, c):
25 25 if c == 1:
26 26 return t
27 27 return t + "s"
28 28 def fmt(t, c):
29 29 return "%d %s" % (c, plural(t, c))
30 30
31 31 now = time.time()
32 32 then = date[0]
33 33 if then > now:
34 34 return 'in the future'
35 35
36 36 delta = max(1, int(now - then))
37 37 for t, s in agescales:
38 38 n = delta / s
39 39 if n >= 2 or s == 1:
40 40 return fmt(t, n)
41 41
42 42 para_re = None
43 43 space_re = None
44 44
45 45 def fill(text, width):
46 46 '''fill many paragraphs.'''
47 47 global para_re, space_re
48 48 if para_re is None:
49 49 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
50 50 space_re = re.compile(r' +')
51 51
52 52 def findparas():
53 53 start = 0
54 54 while True:
55 55 m = para_re.search(text, start)
56 56 if not m:
57 57 w = len(text)
58 58 while w > start and text[w-1].isspace(): w -= 1
59 59 yield text[start:w], text[w:]
60 60 break
61 61 yield text[start:m.start(0)], m.group(1)
62 62 start = m.end(1)
63 63
64 64 return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest
65 65 for para, rest in findparas()])
66 66
67 67 def firstline(text):
68 68 '''return the first line of text'''
69 69 try:
70 70 return text.splitlines(1)[0].rstrip('\r\n')
71 71 except IndexError:
72 72 return ''
73 73
74 74 def nl2br(text):
75 75 '''replace raw newlines with xhtml line breaks.'''
76 76 return text.replace('\n', '<br/>\n')
77 77
78 78 def obfuscate(text):
79 text = unicode(text, util._encoding, 'replace')
79 text = unicode(text, encoding.encoding, 'replace')
80 80 return ''.join(['&#%d;' % ord(c) for c in text])
81 81
82 82 def domain(author):
83 83 '''get domain of author, or empty string if none.'''
84 84 f = author.find('@')
85 85 if f == -1: return ''
86 86 author = author[f+1:]
87 87 f = author.find('>')
88 88 if f >= 0: author = author[:f]
89 89 return author
90 90
91 91 def person(author):
92 92 '''get name of author, or else username.'''
93 93 f = author.find('<')
94 94 if f == -1: return util.shortuser(author)
95 95 return author[:f].rstrip()
96 96
97 97 def indent(text, prefix):
98 98 '''indent each non-empty line of text after first with prefix.'''
99 99 lines = text.splitlines()
100 100 num_lines = len(lines)
101 101 def indenter():
102 102 for i in xrange(num_lines):
103 103 l = lines[i]
104 104 if i and l.strip():
105 105 yield prefix
106 106 yield l
107 107 if i < num_lines - 1 or text.endswith('\n'):
108 108 yield '\n'
109 109 return "".join(indenter())
110 110
111 111 def permissions(flags):
112 112 if "l" in flags:
113 113 return "lrwxrwxrwx"
114 114 if "x" in flags:
115 115 return "-rwxr-xr-x"
116 116 return "-rw-r--r--"
117 117
118 118 def xmlescape(text):
119 119 text = (text
120 120 .replace('&', '&amp;')
121 121 .replace('<', '&lt;')
122 122 .replace('>', '&gt;')
123 123 .replace('"', '&quot;')
124 124 .replace("'", '&#39;')) # &apos; invalid in HTML
125 125 return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text)
126 126
127 127 _escapes = [
128 128 ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'),
129 129 ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'),
130 130 ]
131 131
132 132 def json(obj):
133 133 if obj is None or obj is False or obj is True:
134 134 return {None: 'null', False: 'false', True: 'true'}[obj]
135 135 elif isinstance(obj, int) or isinstance(obj, float):
136 136 return str(obj)
137 137 elif isinstance(obj, str):
138 138 for k, v in _escapes:
139 139 obj = obj.replace(k, v)
140 140 return '"%s"' % obj
141 141 elif isinstance(obj, unicode):
142 142 return json(obj.encode('utf-8'))
143 143 elif hasattr(obj, 'keys'):
144 144 out = []
145 145 for k, v in obj.iteritems():
146 146 s = '%s: %s' % (json(k), json(v))
147 147 out.append(s)
148 148 return '{' + ', '.join(out) + '}'
149 149 elif hasattr(obj, '__iter__'):
150 150 out = []
151 151 for i in obj:
152 152 out.append(json(i))
153 153 return '[' + ', '.join(out) + ']'
154 154 else:
155 155 raise TypeError('cannot encode type %s' % obj.__class__.__name__)
156 156
157 157 filters = {
158 158 "addbreaks": nl2br,
159 159 "basename": os.path.basename,
160 160 "age": age,
161 161 "date": lambda x: util.datestr(x),
162 162 "domain": domain,
163 163 "email": util.email,
164 164 "escape": lambda x: cgi.escape(x, True),
165 165 "fill68": lambda x: fill(x, width=68),
166 166 "fill76": lambda x: fill(x, width=76),
167 167 "firstline": firstline,
168 168 "tabindent": lambda x: indent(x, '\t'),
169 169 "hgdate": lambda x: "%d %d" % x,
170 170 "isodate": lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2'),
171 171 "isodatesec": lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2'),
172 172 "obfuscate": obfuscate,
173 173 "permissions": permissions,
174 174 "person": person,
175 175 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2"),
176 176 "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2"),
177 177 "short": lambda x: x[:12],
178 178 "shortdate": util.shortdate,
179 179 "stringify": templater.stringify,
180 180 "strip": lambda x: x.strip(),
181 181 "urlescape": lambda x: urllib.quote(x),
182 182 "user": lambda x: util.shortuser(x),
183 183 "stringescape": lambda x: x.encode('string_escape'),
184 184 "xmlescape": xmlescape,
185 185 "json": json,
186 186 }
@@ -1,1567 +1,1502 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specfic implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 import imp, unicodedata
17 import os, stat, threading, time, calendar, ConfigParser, glob, osutil
18 import imp
19 19
20 20 # Python compatibility
21 21
22 22 try:
23 23 set = set
24 24 frozenset = frozenset
25 25 except NameError:
26 26 from sets import Set as set, ImmutableSet as frozenset
27 27
28 28 _md5 = None
29 29 def md5(s):
30 30 global _md5
31 31 if _md5 is None:
32 32 try:
33 33 import hashlib
34 34 _md5 = hashlib.md5
35 35 except ImportError:
36 36 import md5
37 37 _md5 = md5.md5
38 38 return _md5(s)
39 39
40 40 _sha1 = None
41 41 def sha1(s):
42 42 global _sha1
43 43 if _sha1 is None:
44 44 try:
45 45 import hashlib
46 46 _sha1 = hashlib.sha1
47 47 except ImportError:
48 48 import sha
49 49 _sha1 = sha.sha
50 50 return _sha1(s)
51 51
52 52 try:
53 53 import subprocess
54 54 subprocess.Popen # trigger ImportError early
55 55 closefds = os.name == 'posix'
56 56 def popen2(cmd, mode='t', bufsize=-1):
57 57 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
58 58 close_fds=closefds,
59 59 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
60 60 return p.stdin, p.stdout
61 61 def popen3(cmd, mode='t', bufsize=-1):
62 62 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
63 63 close_fds=closefds,
64 64 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
65 65 stderr=subprocess.PIPE)
66 66 return p.stdin, p.stdout, p.stderr
67 67 def Popen3(cmd, capturestderr=False, bufsize=-1):
68 68 stderr = capturestderr and subprocess.PIPE or None
69 69 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
70 70 close_fds=closefds,
71 71 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
72 72 stderr=stderr)
73 73 p.fromchild = p.stdout
74 74 p.tochild = p.stdin
75 75 p.childerr = p.stderr
76 76 return p
77 77 except ImportError:
78 78 subprocess = None
79 79 from popen2 import Popen3
80 80 popen2 = os.popen2
81 81 popen3 = os.popen3
82 82
83 83
84 _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'}
85
86 try:
87 _encoding = os.environ.get("HGENCODING")
88 if sys.platform == 'darwin' and not _encoding:
89 # On darwin, getpreferredencoding ignores the locale environment and
90 # always returns mac-roman. We override this if the environment is
91 # not C (has been customized by the user).
92 locale.setlocale(locale.LC_CTYPE, '')
93 _encoding = locale.getlocale()[1]
94 if not _encoding:
95 _encoding = locale.getpreferredencoding() or 'ascii'
96 _encoding = _encodingfixup.get(_encoding, _encoding)
97 except locale.Error:
98 _encoding = 'ascii'
99 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
100 _fallbackencoding = 'ISO-8859-1'
101
102 def tolocal(s):
103 """
104 Convert a string from internal UTF-8 to local encoding
105
106 All internal strings should be UTF-8 but some repos before the
107 implementation of locale support may contain latin1 or possibly
108 other character sets. We attempt to decode everything strictly
109 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
110 replace unknown characters.
111 """
112 for e in ('UTF-8', _fallbackencoding):
113 try:
114 u = s.decode(e) # attempt strict decoding
115 return u.encode(_encoding, "replace")
116 except LookupError, k:
117 raise Abort(_("%s, please check your locale settings") % k)
118 except UnicodeDecodeError:
119 pass
120 u = s.decode("utf-8", "replace") # last ditch
121 return u.encode(_encoding, "replace")
122
123 def fromlocal(s):
124 """
125 Convert a string from the local character encoding to UTF-8
126
127 We attempt to decode strings using the encoding mode set by
128 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
129 characters will cause an error message. Other modes include
130 'replace', which replaces unknown characters with a special
131 Unicode character, and 'ignore', which drops the character.
132 """
133 try:
134 return s.decode(_encoding, _encodingmode).encode("utf-8")
135 except UnicodeDecodeError, inst:
136 sub = s[max(0, inst.start-10):inst.start+10]
137 raise Abort("decoding near '%s': %s!" % (sub, inst))
138 except LookupError, k:
139 raise Abort(_("%s, please check your locale settings") % k)
140
141 def colwidth(s):
142 "Find the column width of a UTF-8 string for display"
143 d = s.decode(_encoding, 'replace')
144 if hasattr(unicodedata, 'east_asian_width'):
145 w = unicodedata.east_asian_width
146 return sum([w(c) in 'WF' and 2 or 1 for c in d])
147 return len(d)
148
149 84 def version():
150 85 """Return version information if available."""
151 86 try:
152 87 import __version__
153 88 return __version__.version
154 89 except ImportError:
155 90 return 'unknown'
156 91
157 92 # used by parsedate
158 93 defaultdateformats = (
159 94 '%Y-%m-%d %H:%M:%S',
160 95 '%Y-%m-%d %I:%M:%S%p',
161 96 '%Y-%m-%d %H:%M',
162 97 '%Y-%m-%d %I:%M%p',
163 98 '%Y-%m-%d',
164 99 '%m-%d',
165 100 '%m/%d',
166 101 '%m/%d/%y',
167 102 '%m/%d/%Y',
168 103 '%a %b %d %H:%M:%S %Y',
169 104 '%a %b %d %I:%M:%S%p %Y',
170 105 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
171 106 '%b %d %H:%M:%S %Y',
172 107 '%b %d %I:%M:%S%p %Y',
173 108 '%b %d %H:%M:%S',
174 109 '%b %d %I:%M:%S%p',
175 110 '%b %d %H:%M',
176 111 '%b %d %I:%M%p',
177 112 '%b %d %Y',
178 113 '%b %d',
179 114 '%H:%M:%S',
180 115 '%I:%M:%SP',
181 116 '%H:%M',
182 117 '%I:%M%p',
183 118 )
184 119
185 120 extendeddateformats = defaultdateformats + (
186 121 "%Y",
187 122 "%Y-%m",
188 123 "%b",
189 124 "%b %Y",
190 125 )
191 126
192 127 # differences from SafeConfigParser:
193 128 # - case-sensitive keys
194 129 # - allows values that are not strings (this means that you may not
195 130 # be able to save the configuration to a file)
196 131 class configparser(ConfigParser.SafeConfigParser):
197 132 def optionxform(self, optionstr):
198 133 return optionstr
199 134
200 135 def set(self, section, option, value):
201 136 return ConfigParser.ConfigParser.set(self, section, option, value)
202 137
203 138 def _interpolate(self, section, option, rawval, vars):
204 139 if not isinstance(rawval, basestring):
205 140 return rawval
206 141 return ConfigParser.SafeConfigParser._interpolate(self, section,
207 142 option, rawval, vars)
208 143
209 144 def cachefunc(func):
210 145 '''cache the result of function calls'''
211 146 # XXX doesn't handle keywords args
212 147 cache = {}
213 148 if func.func_code.co_argcount == 1:
214 149 # we gain a small amount of time because
215 150 # we don't need to pack/unpack the list
216 151 def f(arg):
217 152 if arg not in cache:
218 153 cache[arg] = func(arg)
219 154 return cache[arg]
220 155 else:
221 156 def f(*args):
222 157 if args not in cache:
223 158 cache[args] = func(*args)
224 159 return cache[args]
225 160
226 161 return f
227 162
228 163 def pipefilter(s, cmd):
229 164 '''filter string S through command CMD, returning its output'''
230 165 (pin, pout) = popen2(cmd, 'b')
231 166 def writer():
232 167 try:
233 168 pin.write(s)
234 169 pin.close()
235 170 except IOError, inst:
236 171 if inst.errno != errno.EPIPE:
237 172 raise
238 173
239 174 # we should use select instead on UNIX, but this will work on most
240 175 # systems, including Windows
241 176 w = threading.Thread(target=writer)
242 177 w.start()
243 178 f = pout.read()
244 179 pout.close()
245 180 w.join()
246 181 return f
247 182
248 183 def tempfilter(s, cmd):
249 184 '''filter string S through a pair of temporary files with CMD.
250 185 CMD is used as a template to create the real command to be run,
251 186 with the strings INFILE and OUTFILE replaced by the real names of
252 187 the temporary files generated.'''
253 188 inname, outname = None, None
254 189 try:
255 190 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
256 191 fp = os.fdopen(infd, 'wb')
257 192 fp.write(s)
258 193 fp.close()
259 194 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
260 195 os.close(outfd)
261 196 cmd = cmd.replace('INFILE', inname)
262 197 cmd = cmd.replace('OUTFILE', outname)
263 198 code = os.system(cmd)
264 199 if sys.platform == 'OpenVMS' and code & 1:
265 200 code = 0
266 201 if code: raise Abort(_("command '%s' failed: %s") %
267 202 (cmd, explain_exit(code)))
268 203 return open(outname, 'rb').read()
269 204 finally:
270 205 try:
271 206 if inname: os.unlink(inname)
272 207 except: pass
273 208 try:
274 209 if outname: os.unlink(outname)
275 210 except: pass
276 211
277 212 filtertable = {
278 213 'tempfile:': tempfilter,
279 214 'pipe:': pipefilter,
280 215 }
281 216
282 217 def filter(s, cmd):
283 218 "filter a string through a command that transforms its input to its output"
284 219 for name, fn in filtertable.iteritems():
285 220 if cmd.startswith(name):
286 221 return fn(s, cmd[len(name):].lstrip())
287 222 return pipefilter(s, cmd)
288 223
289 224 def binary(s):
290 225 """return true if a string is binary data"""
291 226 if s and '\0' in s:
292 227 return True
293 228 return False
294 229
295 230 def unique(g):
296 231 """return the uniq elements of iterable g"""
297 232 return dict.fromkeys(g).keys()
298 233
299 234 def sort(l):
300 235 if not isinstance(l, list):
301 236 l = list(l)
302 237 l.sort()
303 238 return l
304 239
305 240 def increasingchunks(source, min=1024, max=65536):
306 241 '''return no less than min bytes per chunk while data remains,
307 242 doubling min after each chunk until it reaches max'''
308 243 def log2(x):
309 244 if not x:
310 245 return 0
311 246 i = 0
312 247 while x:
313 248 x >>= 1
314 249 i += 1
315 250 return i - 1
316 251
317 252 buf = []
318 253 blen = 0
319 254 for chunk in source:
320 255 buf.append(chunk)
321 256 blen += len(chunk)
322 257 if blen >= min:
323 258 if min < max:
324 259 min = min << 1
325 260 nmin = 1 << log2(blen)
326 261 if nmin > min:
327 262 min = nmin
328 263 if min > max:
329 264 min = max
330 265 yield ''.join(buf)
331 266 blen = 0
332 267 buf = []
333 268 if buf:
334 269 yield ''.join(buf)
335 270
336 271 Abort = error.Abort
337 272
338 273 def always(fn): return True
339 274 def never(fn): return False
340 275
341 276 def patkind(name, default):
342 277 """Split a string into an optional pattern kind prefix and the
343 278 actual pattern."""
344 279 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
345 280 if name.startswith(prefix + ':'): return name.split(':', 1)
346 281 return default, name
347 282
348 283 def globre(pat, head='^', tail='$'):
349 284 "convert a glob pattern into a regexp"
350 285 i, n = 0, len(pat)
351 286 res = ''
352 287 group = 0
353 288 def peek(): return i < n and pat[i]
354 289 while i < n:
355 290 c = pat[i]
356 291 i = i+1
357 292 if c == '*':
358 293 if peek() == '*':
359 294 i += 1
360 295 res += '.*'
361 296 else:
362 297 res += '[^/]*'
363 298 elif c == '?':
364 299 res += '.'
365 300 elif c == '[':
366 301 j = i
367 302 if j < n and pat[j] in '!]':
368 303 j += 1
369 304 while j < n and pat[j] != ']':
370 305 j += 1
371 306 if j >= n:
372 307 res += '\\['
373 308 else:
374 309 stuff = pat[i:j].replace('\\','\\\\')
375 310 i = j + 1
376 311 if stuff[0] == '!':
377 312 stuff = '^' + stuff[1:]
378 313 elif stuff[0] == '^':
379 314 stuff = '\\' + stuff
380 315 res = '%s[%s]' % (res, stuff)
381 316 elif c == '{':
382 317 group += 1
383 318 res += '(?:'
384 319 elif c == '}' and group:
385 320 res += ')'
386 321 group -= 1
387 322 elif c == ',' and group:
388 323 res += '|'
389 324 elif c == '\\':
390 325 p = peek()
391 326 if p:
392 327 i += 1
393 328 res += re.escape(p)
394 329 else:
395 330 res += re.escape(c)
396 331 else:
397 332 res += re.escape(c)
398 333 return head + res + tail
399 334
400 335 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
401 336
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives: no relative path exists, fall back to absolute
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    comps1, comps2 = splitpath(n1), n2.split('/')
    # count the leading components the two paths share
    shared = 0
    limit = min(len(comps1), len(comps2))
    while shared < limit and comps1[shared] == comps2[shared]:
        shared += 1
    # climb out of what remains of n1, then descend into n2
    climb = ['..'] * (len(comps1) - shared)
    return os.sep.join(climb + comps2[shared:]) or '.'
426 361
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root

    Raises Abort when myname does not live under root."""
    # normalize root so it ends with exactly one os.sep
    if root == os.sep:
        rootsep = os.sep
    elif endswithsep(root):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        # relative names are interpreted with respect to root/cwd
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    audit_path = path_auditor(root)
    if name != rootsep and name.startswith(rootsep):
        # the easy case: name is textually inside root
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                # reached the top of the tree without meeting root
                break
            name = dirname

        raise Abort('%s not under root' % myname)
475 410
def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    dflt_pat - if a pattern in names has no explicit type, assume this one
    src - where these patterns came from (e.g. .hgignore)

    a pattern is one of:
    'glob:<glob>' - a glob relative to cwd
    're:<regexp>' - a regular expression
    'path:<path>' - a path relative to canonroot
    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
    'relpath:<path>' - a path relative to cwd
    'relre:<regexp>' - a regexp that doesn't have to match the start of a name
    '<something>' - one of the cases above, selected by the dflt_pat argument

    returns:
    a 3-tuple containing
    - list of roots (places where one should start a recursive walk of the fs);
      this often matches the explicit non-pattern names passed in, but also
      includes the initial part of glob: patterns that has no glob characters
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in
    """
    # NOTE(review): the [] defaults are shared across calls but are never
    # mutated here, so they are safe as used.

    # a common case: no patterns at all
    if not names and not inc and not exc:
        return [], always, False

    def contains_glob(name):
        # true when any glob metacharacter appears in name
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if not name:
            return ''
        if kind == 're':
            return name
        elif kind == 'path':
            # anchored path: match the path itself or anything below it
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            return globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return re.escape(name) + '(?:/|$)'
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        try:
            pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
            if len(pat) > 20000:
                raise OverflowError()
            return re.compile(pat).match
        except OverflowError:
            # We're using a Python with a tiny regex engine and we
            # made it explode, so we'll divide the pattern list in two
            # until it works
            l = len(pats)
            if l < 2:
                raise
            a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
            return lambda s: a(s) or b(s)
        except re.error:
            # recompile one pattern at a time to report the culprit
            for k, p in pats:
                try:
                    re.compile('(?:%s)' % regex(k, p, tail))
                except re.error:
                    if src:
                        raise Abort("%s: invalid pattern (%s): %s" %
                                    (src, k, p))
                    else:
                        raise Abort("invalid pattern (%s): %s" % (k, p))
            raise Abort("invalid pattern")

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split('/'):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root) or '.'

    def normalizepats(names, default):
        # split incoming patterns into (kind, name) pairs, canonicalize the
        # names, and collect walk roots plus an "any real patterns?" flag
        pats = []
        roots = []
        anypats = False
        for kind, name in [patkind(p, default) for p in names]:
            if kind in ('glob', 'relpath'):
                name = canonpath(canonroot, cwd, name)
            elif kind in ('relglob', 'path'):
                name = normpath(name)

            pats.append((kind, name))

            if kind in ('glob', 're', 'relglob', 'relre'):
                anypats = True

            if kind == 'glob':
                root = globprefix(name)
                roots.append(root)
            elif kind in ('relpath', 'path'):
                roots.append(name or '.')
            elif kind == 'relglob':
                roots.append('.')
        return roots, pats, anypats

    roots, pats, anypats = normalizepats(names, dflt_pat)

    patmatch = matchfn(pats, '$') or always
    incmatch = always
    if inc:
        dummy, inckinds, dummy = normalizepats(inc, 'glob')
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = never
    if exc:
        dummy, exckinds, dummy = normalizepats(exc, 'glob')
        excmatch = matchfn(exckinds, '(?:/|$)')

    if not names and inc and not exc:
        # common case: hgignore patterns
        match = incmatch
    else:
        match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)

    return (roots, match, (inc or exc or anypats) and True)
614 549
615 550 _hgexecutable = None
616 551
def main_is_frozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if hasattr(sys, "frozen"):      # new py2exe
        return True
    if hasattr(sys, "importers"):   # old py2exe
        return True
    return imp.is_frozen("__main__")  # tools/freeze
626 561
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        # first call: resolve and cache via set_hgexecutable()
        envpath = os.environ.get('HG')
        if envpath:
            set_hgexecutable(envpath)
        elif main_is_frozen():
            set_hgexecutable(sys.executable)
        else:
            set_hgexecutable(find_exe('hg') or 'hg')
    return _hgexecutable
641 576
def set_hgexecutable(path):
    """set location of the 'hg' executable (cached for hgexecutable())"""
    global _hgexecutable
    _hgexecutable = path
646 581
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    # remember the original values of every variable we override so the
    # finally block can restore them exactly
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        # quote the whole command line for cmd.exe
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        os.environ['HG'] = hgexecutable()
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if sys.platform == 'OpenVMS' and rc & 1:
            # on OpenVMS a set low bit in the status means success
            rc = 0
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                # a ui-like object: report and fall through to return rc
                onerr.warn(errmsg + '\n')
            except AttributeError:
                # otherwise treat onerr as an exception class to raise
                raise onerr(errmsg)
        return rc
    finally:
        # restore environment and working directory
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
696 631
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            tb = traceback.extract_tb(sys.exc_info()[2])
            if len(tb) == 1:
                # the TypeError came from the call itself, i.e. the
                # caller passed a bad argument list
                raise error.SignatureError
            # raised somewhere inside func: not our business
            raise

    return wrapped
708 643
# os.path.lexists is not available on python2.3
def lexists(filename):
    """test whether a file with this name exists. does not follow symlinks

    Returns False when the path cannot be stat'ed."""
    try:
        os.lstat(filename)
    except OSError:
        # narrowed from a bare except so KeyboardInterrupt and
        # programming errors (e.g. a bad argument type) propagate
        return False
    return True
717 652
def rename(src, dst):
    """forcibly rename a file, replacing dst if it exists"""
    try:
        os.rename(src, dst)
    except OSError, err: # FIXME: check err (EEXIST ?)
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we rename
        # destination to a temporary name, then delete that. then
        # rename is safe to do.
        temp = dst + "-force-rename"
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)
733 668
def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # prune parent directories left empty by the removal; failure
    # (directory still populated, permissions, ...) is not an error
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass
742 677
def copyfile(src, dest):
    "copy a file, preserving mode and atime/mtime"
    if os.path.islink(src):
        # recreate the symlink itself rather than copying its target's data
        try:
            os.unlink(dest)
        except:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            # copystat carries over mode and timestamps
            shutil.copystat(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))
757 692
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # default: hardlink only when src and dst share a device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            copyfiles(os.path.join(src, name), os.path.join(dst, name),
                      hardlink)
    elif hardlink:
        try:
            os_link(src, dst)
        except (IOError, OSError):
            # hardlinking failed; fall back to plain copies from here on
            hardlink = False
            shutil.copy(src, dst)
    else:
        shutil.copy(src, dst)
780 715
class path_auditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository'''

    def __init__(self, root):
        # caches of paths / directory prefixes already proven safe
        self.audited = set()
        self.auditeddir = set()
        self.root = root

    def __call__(self, path):
        if path in self.audited:
            return
        normpath = os.path.normcase(path)
        parts = splitpath(normpath)
        if (os.path.splitdrive(path)[0]
            or parts[0].lower() in ('.hg', '.hg.', '')
            or os.pardir in parts):
            raise Abort(_("path contains illegal component: %s") % path)
        if '.hg' in path.lower():
            # cheap substring test first; only then scan the components
            lparts = [p.lower() for p in parts]
            for p in '.hg', '.hg.':
                if p in lparts[1:]:
                    pos = lparts.index(p)
                    base = os.path.join(*parts[:pos])
                    raise Abort(_('path %r is inside repo %r') % (path, base))
        def check(prefix):
            # verify one ancestor directory of path
            curpath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(curpath)
            except OSError, err:
                # EINVAL can be raised as invalid path syntax under win32.
                # They must be ignored for patterns can be checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise Abort(_('path %r traverses symbolic link %r') %
                                (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(curpath, '.hg'))):
                    raise Abort(_('path %r is inside repo %r') %
                                (path, prefix))
        # walk the ancestor chain from deepest to shallowest, stopping at
        # the first prefix already known to be safe
        parts.pop()
        prefixes = []
        for n in range(len(parts)):
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()

        self.audited.add(path)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(prefixes)
843 778
if os.name == 'nt':
    # pull in the Windows implementations of platform-specific helpers
    from windows import *
    def expand_glob(pats):
        '''On Windows, expand the implicit globs in a list of patterns'''
        ret = []
        for p in pats:
            kind, name = patkind(p, None)
            if kind is None:
                # no explicit kind prefix: let the shell-style glob expand
                globbed = glob.glob(name)
                if globbed:
                    ret.extend(globbed)
                    continue
            # if we couldn't expand the glob, just keep it around
            ret.append(p)
        return ret
else:
    # POSIX: the shell has already expanded globs for us
    from posix import *
861 796
def makelock(info, pathname):
    """create a lock file at pathname whose content is info

    Preferred form is a symlink with info as its target; on platforms
    or errors where that is not possible, fall back to an exclusively
    created regular file containing info."""
    try:
        return os.symlink(info, pathname)
    except OSError, why:
        if why.errno == errno.EEXIST:
            # lock already held: report it to the caller
            raise
        # other symlink failures: fall through to the plain-file scheme
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
874 809
def readlock(pathname):
    """read the content of a lock file created by makelock"""
    try:
        return os.readlink(pathname)
    except OSError, why:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported -- in both
        # cases fall back to reading the lock as a regular file
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    return posixfile(pathname).read()
884 819
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    st = os.lstat(pathname)
    return st.st_nlink
888 823
if hasattr(os, 'link'):
    # use the platform's real hardlink call when available
    os_link = os.link
else:
    def os_link(src, dst):
        # no os.link on this platform: always fail so callers fall back
        raise OSError(0, _("Hardlinks not supported"))
894 829
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # file-like object without a real descriptor: stat by name
        return os.stat(fp.name)
    return os.fstat(fd)
901 836
902 837 # File system features
903 838
def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        # the name was already all upper-case; fold the other way
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
    except OSError:
        # the case-folded twin cannot be stat'ed: the two spellings are
        # distinct, so the filesystem is case-sensitive.
        # (narrowed from a bare except so real errors propagate)
        return True
    # identical stat data means both spellings reach the same file
    return s2 != s1
923 858
# cache of directory listings keyed by normalized directory path
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name is either relative to root, or it is an absolute path starting
    with root. Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    Returns None when name does not exist under root.
    '''
    # If name is absolute, make it relative
    if name.lower().startswith(root.lower()):
        l = len(root)
        # skip the separator after root; guard against name == root,
        # which previously raised IndexError here
        if l < len(name) and (name[l] == os.sep or name[l] == os.altsep):
            l = l + 1
        name = name[l:]

    if not os.path.exists(os.path.join(root, name)):
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # (bug fix: str.replace returns a new string; the result used to be
    # discarded, leaving the character classes below broken on Windows)
    seps = seps.replace('\\', '\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normcase(os.path.normpath(root))
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separator runs are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        # find the directory entry matching this component case-insensitively
        lpart = part.lower()
        for n in contents:
            if n.lower() == lpart:
                result.append(n)
                break
        else:
            # Cannot happen, as the file exists!
            result.append(part)
        dir = os.path.join(dir, lpart)

    return ''.join(result)
970 905
def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        fh, fn = tempfile.mkstemp("", "", path)
        try:
            os.close(fh)
            m = os.stat(fn).st_mode & 0777
            new_file_has_exec = m & EXECFLAGS
            # flip the exec bits and see whether the change actually sticks
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    return not (new_file_has_exec or exec_flags_cannot_flip)
997 932
def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    probe = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", probe)
        os.unlink(probe)
        return True
    except (OSError, AttributeError):
        # no symlink support (or os.symlink missing entirely)
        return False
1009 944
def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    # only Windows defaults to binary-mode patching
    return os.name == 'nt'
1013 948
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    # os.altsep is None on POSIX, so this stays falsy there
    return os.altsep and path.endswith(os.altsep)
1017 952
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)
1025 960
def gui():
    '''Are we running in a GUI?'''
    if os.name == "nt" or os.name == "mac":
        return True
    # on other platforms, a set DISPLAY means an X session is available
    return os.environ.get("DISPLAY")
1029 964
def lookup_reg(key, name=None, scope=None):
    # default registry lookup: nothing to look up here, so always
    # report the key as absent
    return None
1032 967
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        # the original does not exist: derive the mode from createmode
        # (or the process umask) restricted to non-exec permission bits
        st_mode = createmode
        if st_mode is None:
            st_mode = ~umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # nothing to copy; the empty temp file is the copy
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        # best effort: never leave a half-written temp file behind
        try: os.unlink(temp)
        except: pass
        raise
    return temp
1080 1015
class atomictempfile(posixfile):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file. When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode, createmode):
        # __name is the final destination (double underscore to avoid
        # clashing with attributes of the underlying file object)
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        posixfile.__init__(self, self.temp, mode)

    def rename(self):
        # close and move the temp copy over the real name, making the
        # accumulated writes visible in one step
        if not self.closed:
            posixfile.close(self)
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        # never renamed: discard the temp copy instead of publishing it
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            posixfile.close(self)
1105 1040
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT:
            raise
    # mkdir failed because a parent is missing: create the parents
    # first, then retry this directory
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)
1121 1056
class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            # auditing disabled: accept any path
            self.audit_path = always
        self.createmode = None

    def __getattr__(self, name):
        # the symlink-capability probe is relatively expensive, so it is
        # computed lazily on first access and then cached
        if name == '_can_symlink':
            self._can_symlink = checklink(self.base)
            return self._can_symlink
        raise AttributeError(name)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        # apply the configured creation mode, minus any exec bits
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ("r", "rb"):
            try:
                nlink = nlinks(f)
            except OSError:
                # file does not exist yet; make sure its directory does
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                # break up the hardlink (COW) before writing
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            # newly created file: give it the configured mode
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # no symlink support: store the link target in a plain file
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)
1195 1130
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                # the source ran dry; mark the iterator unusable (falsy)
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            # str() because self.buf may be a buffer object (see below)
            s, self.buf = str(self.buf), ''
        else:
            # keep the remainder as a zero-copy buffer slice
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
1229 1164
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        # nbytes == 0 short-circuits: never issue a zero-byte read
        chunk = nbytes and f.read(nbytes)
        if not chunk:
            break
        if limit:
            limit -= len(chunk)
        yield chunk
1246 1181
def makedate():
    """return the current time as a (unixtime, tz-offset seconds) pair"""
    now = time.localtime()
    # tm_isdst selects between the DST and standard-time zone offsets
    if now[8] == 1 and time.daylight:
        offset = time.altzone
    else:
        offset = time.timezone
    return time.mktime(now), offset
1254 1189
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        # %1 expands to the signed hour part of the numeric zone,
        # %2 to the minute part
        if tz > 0:
            sign = "-"
        else:
            sign = "+"
        minutes = abs(tz) / 60
        format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    return time.strftime(format, time.gmtime(float(t) - tz))
1268 1203
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8601 date."""
    return datestr(date, format='%Y-%m-%d')
1272 1207
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised.

    defaults maps strptime field groups (e.g. "HI") to fallback strings
    used when no field of the group appears in format; the empty default
    simply means no fallbacks.
    """
    def timezone(string):
        # return the zone offset in seconds (see NOTE below), or None
        # when the string carries no recognizable trailing zone token
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:  # was '!= None'; identity test is the idiom
        # drop the zone token before handing the rest to strptime
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # no explicit zone: assume the local timezone and derive offset
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1308 1243
def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    Raises Abort when the date matches none of the formats or falls
    outside the accepted range.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        # fast path: internal "unixtime offset" representation
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    # missing time-of-day fields default to zero
                    defaults[part] = "00"
                else:
                    # missing date fields default to today's values
                    defaults[part] = datestr(now, "%" + part[0])

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1354 1289
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    '-{days}' within the last given number of days

    '{date} to {date}' an inclusive date range
    """

    def lower(date):
        # resolve unspecified fields to the earliest possible moment
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # resolve unspecified fields to the latest possible moment;
        # months differ in length, so shrink day-of-month until it parses
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                # parsedate signals an unparseable date with Abort; a
                # shorter month may still work (was a bare except that
                # also swallowed KeyboardInterrupt/SystemExit)
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1403 1338
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop everything from the '@' on, then any 'Name <' prefix
    idx = user.find('@')
    if idx != -1:
        user = user[:idx]
    idx = user.find('<')
    if idx != -1:
        user = user[idx + 1:]
    # finally truncate at the first space or dot
    for sep in (' ', '.'):
        idx = user.find(sep)
        if idx != -1:
            user = user[:idx]
    return user
1419 1354
def email(author):
    '''get email of author.'''
    # slice between '<' and '>'; with no brackets, find('<')+1 == 0 and
    # end is None, so the whole string is returned unchanged
    end = author.find('>')
    if end == -1:
        end = None
    return author[author.find('<') + 1:end]
1425 1360
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) > maxlength:
        # reserve three characters for the trailing dots
        return text[:maxlength - 3] + "..."
    return text
1432 1367
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        # only abort the walk when the error is on the root itself
        if err.filename == path:
            raise err

    if followsym and hasattr(os.path, 'samestat'):
        def adddir(dirlst, dirname):
            # record dirname's stat if unseen; return True when it was new
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for seen in dirlst:
                if samestat(dirstat, seen):
                    return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect symlink cycles safely
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)

    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            keep = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk through the link, sharing the seen set
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        keep.append(d)
            dirs[:] = keep
1478 1413
1479 1414 _rcpath = None
1480 1415
def os_rcpath():
    '''return default os-specific hgrc search path'''
    # system-wide files first, then the per-user ones, all normalized
    rcs = system_rcpath()
    rcs.extend(user_rcpath())
    return [os.path.normpath(rc) for rc in rcs]
1487 1422
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        # computed once per process
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = os_rcpath()
        return _rcpath
    _rcpath = []
    for p in os.environ['HGRCPATH'].split(os.pathsep):
        if not p:
            continue
        if os.path.isdir(p):
            # expand a directory entry into all of its *.rc files
            for f, kind in osutil.listdir(p):
                if f.endswith('.rc'):
                    _rcpath.append(os.path.join(p, f))
        else:
            _rcpath.append(p)
    return _rcpath
1509 1444
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    # (threshold multiplier, divisor, format), largest unit first; the
    # multiplier picks how many decimals are shown for a given magnitude
    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for threshold, divisor, fmt in units:
        if nbytes >= divisor * threshold:
            return fmt % (nbytes / float(divisor))
    # fall through for negative counts: plain byte format
    return units[-1][2] % nbytes
1530 1465
def drop_scheme(scheme, path):
    """Strip a leading 'scheme:' (and optional '//') prefix from path."""
    prefix = scheme + ':'
    if path.startswith(prefix):
        path = path[len(prefix):]
        if path.startswith('//'):
            path = path[2:]
    return path
1538 1473
def uirepr(s):
    """repr() variant that collapses doubled backslashes (Windows paths)."""
    r = repr(s)
    return r.replace('\\\\', '\\')
1542 1477
def termwidth():
    """Return the terminal width in columns, defaulting to 80."""
    cols = os.environ.get('COLUMNS')
    if cols is not None:
        try:
            return int(cols)
        except ValueError:
            pass
    try:
        # not available on all platforms; fall back to 80 if missing
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                winsz = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', winsz)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80
1563 1498
def iterlines(iterator):
    """Yield the individual lines of every chunk in iterator."""
    for chunk in iterator:
        lines = chunk.splitlines()
        for ln in lines:
            yield ln
@@ -1,379 +1,379 b''
1 1 '''
2 2 win32.py - utility functions that use win32 API
3 3
4 4 Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8
9 9 Mark Hammond's win32all package allows better functionality on
10 10 Windows. this module overrides definitions in util.py. if not
11 11 available, import of this module will fail, and generic code will be
12 12 used.
13 13 '''
14 14
15 15 import win32api
16 16
17 17 import errno, os, sys, pywintypes, win32con, win32file, win32process
18 18 import cStringIO, winerror
19 import osutil
19 import osutil, encoding
20 20 import util
21 21 from win32com.shell import shell,shellcon
22 22
class WinError(Exception):
    """Base for exceptions that translate win32 errors to errno values."""

    # win32 error code -> closest POSIX errno
    winerror_map = {
        winerror.ERROR_ACCESS_DENIED: errno.EACCES,
        winerror.ERROR_ACCOUNT_DISABLED: errno.EACCES,
        winerror.ERROR_ACCOUNT_RESTRICTION: errno.EACCES,
        winerror.ERROR_ALREADY_ASSIGNED: errno.EBUSY,
        winerror.ERROR_ALREADY_EXISTS: errno.EEXIST,
        winerror.ERROR_ARITHMETIC_OVERFLOW: errno.ERANGE,
        winerror.ERROR_BAD_COMMAND: errno.EIO,
        winerror.ERROR_BAD_DEVICE: errno.ENODEV,
        winerror.ERROR_BAD_DRIVER_LEVEL: errno.ENXIO,
        winerror.ERROR_BAD_EXE_FORMAT: errno.ENOEXEC,
        winerror.ERROR_BAD_FORMAT: errno.ENOEXEC,
        winerror.ERROR_BAD_LENGTH: errno.EINVAL,
        winerror.ERROR_BAD_PATHNAME: errno.ENOENT,
        winerror.ERROR_BAD_PIPE: errno.EPIPE,
        winerror.ERROR_BAD_UNIT: errno.ENODEV,
        winerror.ERROR_BAD_USERNAME: errno.EINVAL,
        winerror.ERROR_BROKEN_PIPE: errno.EPIPE,
        winerror.ERROR_BUFFER_OVERFLOW: errno.ENAMETOOLONG,
        winerror.ERROR_BUSY: errno.EBUSY,
        winerror.ERROR_BUSY_DRIVE: errno.EBUSY,
        winerror.ERROR_CALL_NOT_IMPLEMENTED: errno.ENOSYS,
        winerror.ERROR_CANNOT_MAKE: errno.EACCES,
        winerror.ERROR_CANTOPEN: errno.EIO,
        winerror.ERROR_CANTREAD: errno.EIO,
        winerror.ERROR_CANTWRITE: errno.EIO,
        winerror.ERROR_CRC: errno.EIO,
        winerror.ERROR_CURRENT_DIRECTORY: errno.EACCES,
        winerror.ERROR_DEVICE_IN_USE: errno.EBUSY,
        winerror.ERROR_DEV_NOT_EXIST: errno.ENODEV,
        winerror.ERROR_DIRECTORY: errno.EINVAL,
        winerror.ERROR_DIR_NOT_EMPTY: errno.ENOTEMPTY,
        winerror.ERROR_DISK_CHANGE: errno.EIO,
        winerror.ERROR_DISK_FULL: errno.ENOSPC,
        winerror.ERROR_DRIVE_LOCKED: errno.EBUSY,
        winerror.ERROR_ENVVAR_NOT_FOUND: errno.EINVAL,
        winerror.ERROR_EXE_MARKED_INVALID: errno.ENOEXEC,
        winerror.ERROR_FILENAME_EXCED_RANGE: errno.ENAMETOOLONG,
        winerror.ERROR_FILE_EXISTS: errno.EEXIST,
        winerror.ERROR_FILE_INVALID: errno.ENODEV,
        winerror.ERROR_FILE_NOT_FOUND: errno.ENOENT,
        winerror.ERROR_GEN_FAILURE: errno.EIO,
        winerror.ERROR_HANDLE_DISK_FULL: errno.ENOSPC,
        winerror.ERROR_INSUFFICIENT_BUFFER: errno.ENOMEM,
        winerror.ERROR_INVALID_ACCESS: errno.EACCES,
        winerror.ERROR_INVALID_ADDRESS: errno.EFAULT,
        winerror.ERROR_INVALID_BLOCK: errno.EFAULT,
        winerror.ERROR_INVALID_DATA: errno.EINVAL,
        winerror.ERROR_INVALID_DRIVE: errno.ENODEV,
        winerror.ERROR_INVALID_EXE_SIGNATURE: errno.ENOEXEC,
        winerror.ERROR_INVALID_FLAGS: errno.EINVAL,
        winerror.ERROR_INVALID_FUNCTION: errno.ENOSYS,
        winerror.ERROR_INVALID_HANDLE: errno.EBADF,
        winerror.ERROR_INVALID_LOGON_HOURS: errno.EACCES,
        winerror.ERROR_INVALID_NAME: errno.EINVAL,
        winerror.ERROR_INVALID_OWNER: errno.EINVAL,
        winerror.ERROR_INVALID_PARAMETER: errno.EINVAL,
        winerror.ERROR_INVALID_PASSWORD: errno.EPERM,
        winerror.ERROR_INVALID_PRIMARY_GROUP: errno.EINVAL,
        winerror.ERROR_INVALID_SIGNAL_NUMBER: errno.EINVAL,
        winerror.ERROR_INVALID_TARGET_HANDLE: errno.EIO,
        winerror.ERROR_INVALID_WORKSTATION: errno.EACCES,
        winerror.ERROR_IO_DEVICE: errno.EIO,
        winerror.ERROR_IO_INCOMPLETE: errno.EINTR,
        winerror.ERROR_LOCKED: errno.EBUSY,
        winerror.ERROR_LOCK_VIOLATION: errno.EACCES,
        winerror.ERROR_LOGON_FAILURE: errno.EACCES,
        winerror.ERROR_MAPPED_ALIGNMENT: errno.EINVAL,
        winerror.ERROR_META_EXPANSION_TOO_LONG: errno.E2BIG,
        winerror.ERROR_MORE_DATA: errno.EPIPE,
        winerror.ERROR_NEGATIVE_SEEK: errno.ESPIPE,
        winerror.ERROR_NOACCESS: errno.EFAULT,
        winerror.ERROR_NONE_MAPPED: errno.EINVAL,
        winerror.ERROR_NOT_ENOUGH_MEMORY: errno.ENOMEM,
        winerror.ERROR_NOT_READY: errno.EAGAIN,
        winerror.ERROR_NOT_SAME_DEVICE: errno.EXDEV,
        winerror.ERROR_NO_DATA: errno.EPIPE,
        winerror.ERROR_NO_MORE_SEARCH_HANDLES: errno.EIO,
        winerror.ERROR_NO_PROC_SLOTS: errno.EAGAIN,
        winerror.ERROR_NO_SUCH_PRIVILEGE: errno.EACCES,
        winerror.ERROR_OPEN_FAILED: errno.EIO,
        winerror.ERROR_OPEN_FILES: errno.EBUSY,
        winerror.ERROR_OPERATION_ABORTED: errno.EINTR,
        winerror.ERROR_OUTOFMEMORY: errno.ENOMEM,
        winerror.ERROR_PASSWORD_EXPIRED: errno.EACCES,
        winerror.ERROR_PATH_BUSY: errno.EBUSY,
        winerror.ERROR_PATH_NOT_FOUND: errno.ENOENT,
        winerror.ERROR_PIPE_BUSY: errno.EBUSY,
        winerror.ERROR_PIPE_CONNECTED: errno.EPIPE,
        winerror.ERROR_PIPE_LISTENING: errno.EPIPE,
        winerror.ERROR_PIPE_NOT_CONNECTED: errno.EPIPE,
        winerror.ERROR_PRIVILEGE_NOT_HELD: errno.EACCES,
        winerror.ERROR_READ_FAULT: errno.EIO,
        winerror.ERROR_SEEK: errno.EIO,
        winerror.ERROR_SEEK_ON_DEVICE: errno.ESPIPE,
        winerror.ERROR_SHARING_BUFFER_EXCEEDED: errno.ENFILE,
        winerror.ERROR_SHARING_VIOLATION: errno.EACCES,
        winerror.ERROR_STACK_OVERFLOW: errno.ENOMEM,
        winerror.ERROR_SWAPERROR: errno.ENOENT,
        winerror.ERROR_TOO_MANY_MODULES: errno.EMFILE,
        winerror.ERROR_TOO_MANY_OPEN_FILES: errno.EMFILE,
        winerror.ERROR_UNRECOGNIZED_MEDIA: errno.ENXIO,
        winerror.ERROR_UNRECOGNIZED_VOLUME: errno.ENODEV,
        winerror.ERROR_WAIT_NO_CHILDREN: errno.ECHILD,
        winerror.ERROR_WRITE_FAULT: errno.EIO,
        winerror.ERROR_WRITE_PROTECT: errno.EROFS,
        }

    def __init__(self, err):
        # err is the (errno, function, message) tuple from pywintypes.error
        code, func, msg = err
        # drop the trailing period windows puts on its error messages
        if msg[-1:] == '.':
            msg = msg[:-1]
        self.win_errno = code
        self.win_function = func
        self.win_strerror = msg
136 136
class WinIOError(WinError, IOError):
    """IOError carrying translated win32 error details."""
    def __init__(self, err, filename=None):
        WinError.__init__(self, err)
        posix_errno = self.winerror_map.get(self.win_errno, 0)
        IOError.__init__(self, posix_errno, self.win_strerror)
        self.filename = filename
143 143
class WinOSError(WinError, OSError):
    """OSError carrying translated win32 error details."""
    def __init__(self, err):
        WinError.__init__(self, err)
        posix_errno = self.winerror_map.get(self.win_errno, 0)
        OSError.__init__(self, posix_errno, self.win_strerror)
149 149
150 150 def os_link(src, dst):
151 151 try:
152 152 win32file.CreateHardLink(dst, src)
153 153 # CreateHardLink sometimes succeeds on mapped drives but
154 154 # following nlinks() returns 1. Check it now and bail out.
155 155 if nlinks(src) < 2:
156 156 try:
157 157 win32file.DeleteFile(dst)
158 158 except:
159 159 pass
160 160 # Fake hardlinking error
161 161 raise WinOSError((18, 'CreateHardLink', 'The system cannot '
162 162 'move the file to a different disk drive'))
163 163 except pywintypes.error, details:
164 164 raise WinOSError(details)
165 165 except NotImplementedError: # Another fake error win Win98
166 166 raise WinOSError((18, 'CreateHardLink', 'Hardlinking not supported'))
167 167
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    try:
        fh = win32file.CreateFile(pathname,
            win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
            None, win32file.OPEN_EXISTING, 0, None)
        try:
            # nNumberOfLinks is field 7 of BY_HANDLE_FILE_INFORMATION
            res = win32file.GetFileInformationByHandle(fh)
        finally:
            # close the handle even if the query fails (previously it
            # leaked until garbage collection on that path)
            fh.Close()
        return res[7]
    except pywintypes.error:
        # fall back to a plain lstat when the win32 calls fail
        return os.lstat(pathname).st_nlink
179 179
180 180 def testpid(pid):
181 181 '''return True if pid is still running or unable to
182 182 determine, False otherwise'''
183 183 try:
184 184 handle = win32api.OpenProcess(
185 185 win32con.PROCESS_QUERY_INFORMATION, False, pid)
186 186 if handle:
187 187 status = win32process.GetExitCodeProcess(handle)
188 188 return status == win32con.STILL_ACTIVE
189 189 except pywintypes.error, details:
190 190 return details[0] != winerror.ERROR_INVALID_PARAMETER
191 191 return True
192 192
def lookup_reg(key, valname=None, scope=None):
    ''' Look up a key/value name in the Windows registry.

    valname: value name. If unspecified, the default value for the key
    is used.
    scope: optionally specify scope for registry lookup, this can be
    a sequence of scopes to look up in order. Default (CURRENT_USER,
    LOCAL_MACHINE).
    '''
    try:
        from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \
            QueryValueEx, OpenKey
    except ImportError:
        return None

    # normalize scope into a sequence of registry hives to search
    if scope is None:
        scopes = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE)
    elif isinstance(scope, (list, tuple)):
        scopes = scope
    else:
        scopes = (scope,)
    for hive in scopes:
        try:
            val = QueryValueEx(OpenKey(hive, key), valname)[0]
            # never let a Unicode string escape into the wild
            return encoding.tolocal(val.encode('UTF-8'))
        except EnvironmentError:
            pass
219 219
def system_rcpath_win32():
    '''return default os-specific hgrc search path'''
    proc = win32api.GetCurrentProcess()
    try:
        # This will fail on windows < NT
        filename = win32process.GetModuleFileNameEx(proc, 0)
    except pywintypes.error:
        # narrowed from a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit
        filename = win32api.GetModuleFileName(0)
    # Use mercurial.ini found in directory with hg.exe
    progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
    if os.path.isfile(progrc):
        return [progrc]
    # else look for a system rcpath in the registry
    try:
        value = win32api.RegQueryValue(
            win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial')
    except pywintypes.error:
        return []
    rcpath = []
    for p in value.split(os.pathsep):
        if p.lower().endswith('mercurial.ini'):
            rcpath.append(p)
        elif os.path.isdir(p):
            # a directory entry expands to all of its *.rc files
            for f, kind in osutil.listdir(p):
                if f.endswith('.rc'):
                    rcpath.append(os.path.join(p, f))
    return rcpath
247 247
def user_rcpath_win32():
    '''return os-specific hgrc search path to the user dir'''
    home = os.path.expanduser('~')
    if sys.getwindowsversion()[3] != 2 and home == '~':
        # We are on win < nt: fetch the APPDATA directory location and use
        # the parent directory as the user home dir.
        appdir = shell.SHGetPathFromIDList(
            shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA))
        home = os.path.dirname(appdir)
    return [os.path.join(home, 'mercurial.ini'),
            os.path.join(home, '.hgrc')]
259 259
260 260 class posixfile_nt(object):
261 261 '''file object with posix-like semantics. on windows, normal
262 262 files can not be deleted or renamed if they are open. must open
263 263 with win32file.FILE_SHARE_DELETE. this flag does not exist on
264 264 windows < nt, so do not use this class there.'''
265 265
266 266 # tried to use win32file._open_osfhandle to pass fd to os.fdopen,
267 267 # but does not work at all. wrap win32 file api instead.
268 268
269 269 def __init__(self, name, mode='rb'):
270 270 self.closed = False
271 271 self.name = name
272 272 self.mode = mode
273 273 access = 0
274 274 if 'r' in mode or '+' in mode:
275 275 access |= win32file.GENERIC_READ
276 276 if 'w' in mode or 'a' in mode or '+' in mode:
277 277 access |= win32file.GENERIC_WRITE
278 278 if 'r' in mode:
279 279 creation = win32file.OPEN_EXISTING
280 280 elif 'a' in mode:
281 281 creation = win32file.OPEN_ALWAYS
282 282 else:
283 283 creation = win32file.CREATE_ALWAYS
284 284 try:
285 285 self.handle = win32file.CreateFile(name,
286 286 access,
287 287 win32file.FILE_SHARE_READ |
288 288 win32file.FILE_SHARE_WRITE |
289 289 win32file.FILE_SHARE_DELETE,
290 290 None,
291 291 creation,
292 292 win32file.FILE_ATTRIBUTE_NORMAL,
293 293 0)
294 294 except pywintypes.error, err:
295 295 raise WinIOError(err, name)
296 296
297 297 def __iter__(self):
298 298 for line in self.readlines():
299 299 yield line
300 300
301 301 def read(self, count=-1):
302 302 try:
303 303 cs = cStringIO.StringIO()
304 304 while count:
305 305 wincount = int(count)
306 306 if wincount == -1:
307 307 wincount = 1048576
308 308 val, data = win32file.ReadFile(self.handle, wincount)
309 309 if not data: break
310 310 cs.write(data)
311 311 if count != -1:
312 312 count -= len(data)
313 313 return cs.getvalue()
314 314 except pywintypes.error, err:
315 315 raise WinIOError(err)
316 316
317 317 def readlines(self, sizehint=None):
318 318 # splitlines() splits on single '\r' while readlines()
319 319 # does not. cStringIO has a well behaving readlines() and is fast.
320 320 return cStringIO.StringIO(self.read()).readlines()
321 321
322 322 def write(self, data):
323 323 try:
324 324 if 'a' in self.mode:
325 325 win32file.SetFilePointer(self.handle, 0, win32file.FILE_END)
326 326 nwrit = 0
327 327 while nwrit < len(data):
328 328 val, nwrit = win32file.WriteFile(self.handle, data)
329 329 data = data[nwrit:]
330 330 except pywintypes.error, err:
331 331 raise WinIOError(err)
332 332
333 333 def writelines(self, sequence):
334 334 for s in sequence:
335 335 self.write(s)
336 336
337 337 def seek(self, pos, whence=0):
338 338 try:
339 339 win32file.SetFilePointer(self.handle, int(pos), whence)
340 340 except pywintypes.error, err:
341 341 raise WinIOError(err)
342 342
343 343 def tell(self):
344 344 try:
345 345 return win32file.SetFilePointer(self.handle, 0,
346 346 win32file.FILE_CURRENT)
347 347 except pywintypes.error, err:
348 348 raise WinIOError(err)
349 349
350 350 def close(self):
351 351 if not self.closed:
352 352 self.handle = None
353 353 self.closed = True
354 354
355 355 def flush(self):
356 356 # we have no application-level buffering
357 357 pass
358 358
359 359 def truncate(self, pos=0):
360 360 try:
361 361 win32file.SetFilePointer(self.handle, int(pos),
362 362 win32file.FILE_BEGIN)
363 363 win32file.SetEndOfFile(self.handle)
364 364 except pywintypes.error, err:
365 365 raise WinIOError(err)
366 366
def getuser():
    '''return name of current user (via the win32 API)'''
    return win32api.GetUserName()
370 370
def set_signal_handler_win32():
    """Register a termination handler for console events including
    CTRL+C. python signal handlers do not work well with socket
    operations.
    """
    def handler(event):
        # exit immediately on any console control event
        win32process.ExitProcess(1)
    win32api.SetConsoleCtrlHandler(handler)
379 379
General Comments 0
You need to be logged in to leave comments. Login now