##// END OF EJS Templates
Merge with crew
Matt Mackall -
r4965:4106dde1 merge default
parent child Browse files
Show More
@@ -1,178 +1,204 b''
1 1 # churn.py - create a graph showing who changed the most lines
2 2 #
3 3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 #
9 9 # Aliases map file format is simple one alias per line in the following
10 10 # format:
11 11 #
12 12 # <alias email> <actual email>
13 13
14 import sys
15 14 from mercurial.i18n import gettext as _
16 15 from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
16 import os, sys
17
18 def get_tty_width():
19 if 'COLUMNS' in os.environ:
20 try:
21 return int(os.environ['COLUMNS'])
22 except ValueError:
23 pass
24 try:
25 import termios, fcntl, struct
26 buf = 'abcd'
27 for dev in (sys.stdout, sys.stdin):
28 try:
29 if buf != 'abcd':
30 break
31 fd = dev.fileno()
32 if not os.isatty(fd):
33 continue
34 buf = fcntl.ioctl(fd, termios.TIOCGWINSZ, buf)
35 except ValueError:
36 pass
37 if buf != 'abcd':
38 return struct.unpack('hh', buf)[1]
39 except ImportError:
40 pass
41 return 80
17 42
18 43 def __gather(ui, repo, node1, node2):
19 44 def dirtywork(f, mmap1, mmap2):
20 45 lines = 0
21 46
22 47 to = mmap1 and repo.file(f).read(mmap1[f]) or None
23 48 tn = mmap2 and repo.file(f).read(mmap2[f]) or None
24 49
25 50 diff = mdiff.unidiff(to, "", tn, "", f).split("\n")
26 51
27 52 for line in diff:
28 53 if not line:
29 54 continue # skip EOF
30 55 if line.startswith(" "):
31 56 continue # context line
32 57 if line.startswith("--- ") or line.startswith("+++ "):
33 58 continue # begining of diff
34 59 if line.startswith("@@ "):
35 60 continue # info line
36 61
37 62 # changed lines
38 63 lines += 1
39 64
40 65 return lines
41 66
42 67 ##
43 68
44 69 lines = 0
45 70
46 71 changes = repo.status(node1, node2, None, util.always)[:5]
47 72
48 73 modified, added, removed, deleted, unknown = changes
49 74
50 75 who = repo.changelog.read(node2)[1]
51 76 who = templater.email(who) # get the email of the person
52 77
53 78 mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
54 79 mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
55 80 for f in modified:
56 81 lines += dirtywork(f, mmap1, mmap2)
57 82
58 83 for f in added:
59 84 lines += dirtywork(f, None, mmap2)
60 85
61 86 for f in removed:
62 87 lines += dirtywork(f, mmap1, None)
63 88
64 89 for f in deleted:
65 90 lines += dirtywork(f, mmap1, mmap2)
66 91
67 92 for f in unknown:
68 93 lines += dirtywork(f, mmap1, mmap2)
69 94
70 95 return (who, lines)
71 96
72 97 def gather_stats(ui, repo, amap, revs=None, progress=False):
73 98 stats = {}
74 99
75 100 cl = repo.changelog
76 101
77 102 if not revs:
78 103 revs = range(0, cl.count())
79 104
80 105 nr_revs = len(revs)
81 106 cur_rev = 0
82 107
83 108 for rev in revs:
84 109 cur_rev += 1 # next revision
85 110
86 111 node2 = cl.node(rev)
87 112 node1 = cl.parents(node2)[0]
88 113
89 114 if cl.parents(node2)[1] != node.nullid:
90 115 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
91 116 continue
92 117
93 118 who, lines = __gather(ui, repo, node1, node2)
94 119
95 120 # remap the owner if possible
96 121 if amap.has_key(who):
97 122 ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
98 123 who = amap[who]
99 124
100 125 if not stats.has_key(who):
101 126 stats[who] = 0
102 127 stats[who] += lines
103 128
104 129 ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))
105 130
106 131 if progress:
107 132 if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
108 133 ui.write("%d%%.." % (int(100.0*cur_rev/nr_revs),))
109 134 sys.stdout.flush()
110 135
111 136 if progress:
112 137 ui.write("done\n")
113 138 sys.stdout.flush()
114 139
115 140 return stats
116 141
117 142 def churn(ui, repo, **opts):
118 143 "Graphs the number of lines changed"
119 144
120 145 def pad(s, l):
121 146 if len(s) < l:
122 147 return s + " " * (l-len(s))
123 148 return s[0:l]
124 149
125 150 def graph(n, maximum, width, char):
126 151 n = int(n * width / float(maximum))
127 152
128 153 return char * (n)
129 154
130 155 def get_aliases(f):
131 156 aliases = {}
132 157
133 158 for l in f.readlines():
134 159 l = l.strip()
135 160 alias, actual = l.split(" ")
136 161 aliases[alias] = actual
137 162
138 163 return aliases
139 164
140 165 amap = {}
141 166 aliases = opts.get('aliases')
142 167 if aliases:
143 168 try:
144 169 f = open(aliases,"r")
145 170 except OSError, e:
146 171 print "Error: " + e
147 172 return
148 173
149 174 amap = get_aliases(f)
150 175 f.close()
151 176
152 177 revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
153 178 revs.sort()
154 179 stats = gather_stats(ui, repo, amap, revs, opts.get('progress'))
155 180
156 181 # make a list of tuples (name, lines) and sort it in descending order
157 182 ordered = stats.items()
158 183 ordered.sort(lambda x, y: cmp(y[1], x[1]))
159 184
160 185 maximum = ordered[0][1]
161 186
162 ui.note("Assuming 80 character terminal\n")
163 width = 80 - 1
187 width = get_tty_width()
188 ui.note(_("assuming %i character terminal\n") % width)
189 width -= 1
164 190
165 191 for i in ordered:
166 192 person = i[0]
167 193 lines = i[1]
168 194 print "%s %6d %s" % (pad(person, 20), lines,
169 195 graph(lines, maximum, width - 20 - 1 - 6 - 2 - 2, '*'))
170 196
171 197 cmdtable = {
172 198 "churn":
173 199 (churn,
174 200 [('r', 'rev', [], _('limit statistics to the specified revisions')),
175 201 ('', 'aliases', '', _('file with email aliases')),
176 202 ('', 'progress', None, _('show progress'))],
177 203 'hg churn [-r revision range] [-a file] [--progress]'),
178 204 }
@@ -1,77 +1,76 b''
1 1 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
2 2 # This file is published under the GNU GPL.
3 3
4 4 '''allow user-defined command aliases
5 5
6 6 To use, create entries in your hgrc of the form
7 7
8 8 [alias]
9 9 mycmd = cmd --args
10 10 '''
11 11
12 12 from mercurial.cmdutil import findcmd, UnknownCommand, AmbiguousCommand
13 13 from mercurial import commands
14 14
15 15 cmdtable = {}
16 16
17 17 class RecursiveCommand(Exception): pass
18 18
19 19 class lazycommand(object):
20 20 '''defer command lookup until needed, so that extensions loaded
21 21 after alias can be aliased'''
22 22 def __init__(self, ui, name, target):
23 23 self._ui = ui
24 24 self._name = name
25 25 self._target = target
26 26 self._cmd = None
27 27
28 28 def __len__(self):
29 29 self._resolve()
30 30 return len(self._cmd)
31 31
32 32 def __getitem__(self, key):
33 33 self._resolve()
34 34 return self._cmd[key]
35 35
36 36 def __iter__(self):
37 37 self._resolve()
38 38 return self._cmd.__iter__()
39 39
40 40 def _resolve(self):
41 41 if self._cmd is not None:
42 42 return
43 43
44 44 try:
45 45 self._cmd = findcmd(self._ui, self._target)[1]
46 46 if self._cmd == self:
47 47 raise RecursiveCommand()
48 48 if self._target in commands.norepo.split(' '):
49 49 commands.norepo += ' %s' % self._name
50 50 return
51 51 except UnknownCommand:
52 52 msg = '*** [alias] %s: command %s is unknown' % \
53 53 (self._name, self._target)
54 54 except AmbiguousCommand:
55 55 msg = '*** [alias] %s: command %s is ambiguous' % \
56 56 (self._name, self._target)
57 57 except RecursiveCommand:
58 58 msg = '*** [alias] %s: circular dependency on %s' % \
59 59 (self._name, self._target)
60 60 def nocmd(*args, **opts):
61 61 self._ui.warn(msg + '\n')
62 62 return 1
63 63 nocmd.__doc__ = msg
64 64 self._cmd = (nocmd, [], '')
65 65 commands.norepo += ' %s' % self._name
66 66
67 67 def uisetup(ui):
68 68 for cmd, target in ui.configitems('alias'):
69 69 if not target:
70 70 ui.warn('*** [alias] %s: no definition\n' % cmd)
71 71 continue
72 72 args = target.split(' ')
73 73 tcmd = args.pop(0)
74 74 if args:
75 pui = ui.parentui or ui
76 pui.setconfig('defaults', cmd, ' '.join(args))
75 ui.setconfig('defaults', cmd, ' '.join(args))
77 76 cmdtable[cmd] = lazycommand(ui, cmd, tcmd)
@@ -1,353 +1,353 b''
1 1 # convert.py Foreign SCM converter
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from common import NoRepo, converter_source, converter_sink
9 9 from cvs import convert_cvs
10 10 from git import convert_git
11 11 from hg import convert_mercurial
12 12 from subversion import convert_svn
13 13
14 14 import os, shutil
15 15 from mercurial import hg, ui, util, commands
16 16
17 17 commands.norepo += " convert"
18 18
19 19 converters = [convert_cvs, convert_git, convert_svn, convert_mercurial]
20 20
21 21 def convertsource(ui, path, **opts):
22 22 for c in converters:
23 23 if not hasattr(c, 'getcommit'):
24 24 continue
25 25 try:
26 26 return c(ui, path, **opts)
27 27 except NoRepo:
28 28 pass
29 29 raise util.Abort('%s: unknown repository type' % path)
30 30
31 31 def convertsink(ui, path):
32 32 if not os.path.isdir(path):
33 33 raise util.Abort("%s: not a directory" % path)
34 34 for c in converters:
35 35 if not hasattr(c, 'putcommit'):
36 36 continue
37 37 try:
38 38 return c(ui, path)
39 39 except NoRepo:
40 40 pass
41 41 raise util.Abort('%s: unknown repository type' % path)
42 42
43 43 class convert(object):
44 44 def __init__(self, ui, source, dest, mapfile, opts):
45 45
46 46 self.source = source
47 47 self.dest = dest
48 48 self.ui = ui
49 49 self.opts = opts
50 50 self.commitcache = {}
51 51 self.mapfile = mapfile
52 52 self.mapfilefd = None
53 53 self.authors = {}
54 54 self.authorfile = None
55 55
56 56 self.map = {}
57 57 try:
58 58 origmapfile = open(self.mapfile, 'r')
59 59 for l in origmapfile:
60 60 sv, dv = l[:-1].split()
61 61 self.map[sv] = dv
62 62 origmapfile.close()
63 63 except IOError:
64 64 pass
65 65
66 66 # Read first the dst author map if any
67 67 authorfile = self.dest.authorfile()
68 68 if authorfile and os.path.exists(authorfile):
69 69 self.readauthormap(authorfile)
70 70 # Extend/Override with new author map if necessary
71 71 if opts.get('authors'):
72 72 self.readauthormap(opts.get('authors'))
73 73 self.authorfile = self.dest.authorfile()
74 74
75 75 def walktree(self, heads):
76 76 '''Return a mapping that identifies the uncommitted parents of every
77 77 uncommitted changeset.'''
78 78 visit = heads
79 79 known = {}
80 80 parents = {}
81 81 while visit:
82 82 n = visit.pop(0)
83 83 if n in known or n in self.map: continue
84 84 known[n] = 1
85 85 self.commitcache[n] = self.source.getcommit(n)
86 86 cp = self.commitcache[n].parents
87 87 parents[n] = []
88 88 for p in cp:
89 89 parents[n].append(p)
90 90 visit.append(p)
91 91
92 92 return parents
93 93
94 94 def toposort(self, parents):
95 95 '''Return an ordering such that every uncommitted changeset is
96 96 preceeded by all its uncommitted ancestors.'''
97 97 visit = parents.keys()
98 98 seen = {}
99 99 children = {}
100 100
101 101 while visit:
102 102 n = visit.pop(0)
103 103 if n in seen: continue
104 104 seen[n] = 1
105 105 # Ensure that nodes without parents are present in the 'children'
106 106 # mapping.
107 107 children.setdefault(n, [])
108 108 for p in parents[n]:
109 109 if not p in self.map:
110 110 visit.append(p)
111 111 children.setdefault(p, []).append(n)
112 112
113 113 s = []
114 114 removed = {}
115 115 visit = children.keys()
116 116 while visit:
117 117 n = visit.pop(0)
118 118 if n in removed: continue
119 119 dep = 0
120 120 if n in parents:
121 121 for p in parents[n]:
122 122 if p in self.map: continue
123 123 if p not in removed:
124 124 # we're still dependent
125 125 visit.append(n)
126 126 dep = 1
127 127 break
128 128
129 129 if not dep:
130 130 # all n's parents are in the list
131 131 removed[n] = 1
132 132 if n not in self.map:
133 133 s.append(n)
134 134 if n in children:
135 135 for c in children[n]:
136 136 visit.insert(0, c)
137 137
138 138 if self.opts.get('datesort'):
139 139 depth = {}
140 140 for n in s:
141 141 depth[n] = 0
142 142 pl = [p for p in self.commitcache[n].parents
143 143 if p not in self.map]
144 144 if pl:
145 145 depth[n] = max([depth[p] for p in pl]) + 1
146 146
147 147 s = [(depth[n], self.commitcache[n].date, n) for n in s]
148 148 s.sort()
149 149 s = [e[2] for e in s]
150 150
151 151 return s
152 152
153 153 def mapentry(self, src, dst):
154 154 if self.mapfilefd is None:
155 155 try:
156 156 self.mapfilefd = open(self.mapfile, "a")
157 157 except IOError, (errno, strerror):
158 158 raise util.Abort("Could not open map file %s: %s, %s\n" % (self.mapfile, errno, strerror))
159 159 self.map[src] = dst
160 160 self.mapfilefd.write("%s %s\n" % (src, dst))
161 161 self.mapfilefd.flush()
162 162
163 163 def writeauthormap(self):
164 164 authorfile = self.authorfile
165 165 if authorfile:
166 166 self.ui.status('Writing author map file %s\n' % authorfile)
167 167 ofile = open(authorfile, 'w+')
168 168 for author in self.authors:
169 169 ofile.write("%s=%s\n" % (author, self.authors[author]))
170 170 ofile.close()
171 171
172 172 def readauthormap(self, authorfile):
173 173 afile = open(authorfile, 'r')
174 174 for line in afile:
175 175 try:
176 176 srcauthor = line.split('=')[0].strip()
177 177 dstauthor = line.split('=')[1].strip()
178 178 if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
179 179 self.ui.status(
180 180 'Overriding mapping for author %s, was %s, will be %s\n'
181 181 % (srcauthor, self.authors[srcauthor], dstauthor))
182 182 else:
183 183 self.ui.debug('Mapping author %s to %s\n'
184 184 % (srcauthor, dstauthor))
185 185 self.authors[srcauthor] = dstauthor
186 186 except IndexError:
187 187 self.ui.warn(
188 188 'Ignoring bad line in author file map %s: %s\n'
189 189 % (authorfile, line))
190 190 afile.close()
191 191
192 192 def copy(self, rev):
193 193 c = self.commitcache[rev]
194 194 files = self.source.getchanges(rev)
195
195
196 196 do_copies = (hasattr(c, 'copies') and hasattr(self.dest, 'copyfile'))
197 197
198 198 for f, v in files:
199 199 try:
200 200 data = self.source.getfile(f, v)
201 201 except IOError, inst:
202 202 self.dest.delfile(f)
203 203 else:
204 204 e = self.source.getmode(f, v)
205 205 self.dest.putfile(f, e, data)
206 206 if do_copies:
207 207 if f in c.copies:
208 208 # Merely marks that a copy happened.
209 209 self.dest.copyfile(c.copies[f], f)
210 210
211 211
212 212 r = [self.map[v] for v in c.parents]
213 213 f = [f for f, v in files]
214 214 newnode = self.dest.putcommit(f, r, c)
215 215 self.mapentry(rev, newnode)
216 216
217 217 def convert(self):
218 218 try:
219 219 self.source.setrevmap(self.map)
220 220 self.ui.status("scanning source...\n")
221 221 heads = self.source.getheads()
222 222 parents = self.walktree(heads)
223 223 self.ui.status("sorting...\n")
224 224 t = self.toposort(parents)
225 225 num = len(t)
226 226 c = None
227 227
228 228 self.ui.status("converting...\n")
229 229 for c in t:
230 230 num -= 1
231 231 desc = self.commitcache[c].desc
232 232 if "\n" in desc:
233 233 desc = desc.splitlines()[0]
234 234 author = self.commitcache[c].author
235 235 author = self.authors.get(author, author)
236 236 self.commitcache[c].author = author
237 237 self.ui.status("%d %s\n" % (num, desc))
238 238 self.copy(c)
239 239
240 240 tags = self.source.gettags()
241 241 ctags = {}
242 242 for k in tags:
243 243 v = tags[k]
244 244 if v in self.map:
245 245 ctags[k] = self.map[v]
246 246
247 247 if c and ctags:
248 248 nrev = self.dest.puttags(ctags)
249 249 # write another hash correspondence to override the previous
250 250 # one so we don't end up with extra tag heads
251 251 if nrev:
252 252 self.mapentry(c, nrev)
253 253
254 254 self.writeauthormap()
255 255 finally:
256 256 self.cleanup()
257 257
258 258 def cleanup(self):
259 259 if self.mapfilefd:
260 260 self.mapfilefd.close()
261 261
262 262 def _convert(ui, src, dest=None, mapfile=None, **opts):
263 '''Convert a foreign SCM repository to a Mercurial one.
263 """Convert a foreign SCM repository to a Mercurial one.
264 264
265 265 Accepted source formats:
266 266 - GIT
267 267 - CVS
268 268 - SVN
269 269
270 270 Accepted destination formats:
271 271 - Mercurial
272 272
273 273 If no revision is given, all revisions will be converted. Otherwise,
274 274 convert will only import up to the named revision (given in a format
275 275 understood by the source).
276 276
277 277 If no destination directory name is specified, it defaults to the
278 278 basename of the source with '-hg' appended. If the destination
279 279 repository doesn't exist, it will be created.
280 280
281 281 If <mapfile> isn't given, it will be put in a default location
282 282 (<dest>/.hg/shamap by default). The <mapfile> is a simple text
283 283 file that maps each source commit ID to the destination ID for
284 284 that revision, like so:
285 285 <source ID> <destination ID>
286 286
287 287 If the file doesn't exist, it's automatically created. It's updated
288 288 on each commit copied, so convert-repo can be interrupted and can
289 289 be run repeatedly to copy new commits.
290 290
291 291 The [username mapping] file is a simple text file that maps each source
292 292 commit author to a destination commit author. It is handy for source SCMs
293 293 that use unix logins to identify authors (eg: CVS). One line per author
294 294 mapping and the line format is:
295 295 srcauthor=whatever string you want
296 '''
296 """
297 297
298 298 util._encoding = 'UTF-8'
299 299
300 300 if not dest:
301 301 dest = hg.defaultdest(src) + "-hg"
302 302 ui.status("assuming destination %s\n" % dest)
303 303
304 304 # Try to be smart and initalize things when required
305 305 created = False
306 306 if os.path.isdir(dest):
307 307 if len(os.listdir(dest)) > 0:
308 308 try:
309 309 hg.repository(ui, dest)
310 310 ui.status("destination %s is a Mercurial repository\n" % dest)
311 311 except hg.RepoError:
312 312 raise util.Abort(
313 313 "destination directory %s is not empty.\n"
314 314 "Please specify an empty directory to be initialized\n"
315 315 "or an already initialized mercurial repository"
316 316 % dest)
317 317 else:
318 318 ui.status("initializing destination %s repository\n" % dest)
319 319 hg.repository(ui, dest, create=True)
320 320 created = True
321 321 elif os.path.exists(dest):
322 322 raise util.Abort("destination %s exists and is not a directory" % dest)
323 323 else:
324 324 ui.status("initializing destination %s repository\n" % dest)
325 325 hg.repository(ui, dest, create=True)
326 326 created = True
327 327
328 328 destc = convertsink(ui, dest)
329 329
330 330 try:
331 331 srcc = convertsource(ui, src, rev=opts.get('rev'))
332 332 except Exception:
333 333 if created:
334 334 shutil.rmtree(dest, True)
335 335 raise
336 336
337 337 if not mapfile:
338 338 try:
339 339 mapfile = destc.mapfile()
340 340 except:
341 341 mapfile = os.path.join(destc, "map")
342 342
343 343 c = convert(ui, srcc, destc, mapfile, opts)
344 344 c.convert()
345 345
346 346 cmdtable = {
347 347 "convert":
348 348 (_convert,
349 349 [('A', 'authors', '', 'username mapping filename'),
350 350 ('r', 'rev', '', 'import up to target revision REV'),
351 351 ('', 'datesort', None, 'try to sort changesets by date')],
352 352 'hg convert [OPTION]... SOURCE [DEST [MAPFILE]]'),
353 353 }
@@ -1,121 +1,121 b''
1 1 # common code for the convert extension
2 2
3 3 class NoRepo(Exception): pass
4 4
5 5 class commit(object):
6 6 def __init__(self, **parts):
7 7 self.rev = None
8 8 self.branch = None
9 9
10 10 for x in "author date desc parents".split():
11 11 if not x in parts:
12 12 raise util.Abort("commit missing field %s" % x)
13 13 self.__dict__.update(parts)
14 14 if not self.desc or self.desc.isspace():
15 15 self.desc = '*** empty log message ***'
16 16
17 17 class converter_source(object):
18 18 """Conversion source interface"""
19 19
20 20 def __init__(self, ui, path, rev=None):
21 21 """Initialize conversion source (or raise NoRepo("message")
22 22 exception if path is not a valid repository)"""
23 23 self.ui = ui
24 24 self.path = path
25 25 self.rev = rev
26 26
27 27 self.encoding = 'utf-8'
28 28
29 29 def setrevmap(self, revmap):
30 30 """set the map of already-converted revisions"""
31 31 pass
32 32
33 33 def getheads(self):
34 34 """Return a list of this repository's heads"""
35 35 raise NotImplementedError()
36 36
37 37 def getfile(self, name, rev):
38 38 """Return file contents as a string"""
39 39 raise NotImplementedError()
40 40
41 41 def getmode(self, name, rev):
42 42 """Return file mode, eg. '', 'x', or 'l'"""
43 43 raise NotImplementedError()
44 44
45 45 def getchanges(self, version):
46 46 """Return sorted list of (filename, id) tuples for all files changed in rev.
47 47
48 48 id just tells us which revision to return in getfile(), e.g. in
49 49 git it's an object hash."""
50 50 raise NotImplementedError()
51 51
52 52 def getcommit(self, version):
53 53 """Return the commit object for version"""
54 54 raise NotImplementedError()
55 55
56 56 def gettags(self):
57 57 """Return the tags as a dictionary of name: revision"""
58 58 raise NotImplementedError()
59 59
60 60 def recode(self, s, encoding=None):
61 61 if not encoding:
62 62 encoding = self.encoding or 'utf-8'
63
63
64 64 try:
65 65 return s.decode(encoding).encode("utf-8")
66 66 except:
67 67 try:
68 68 return s.decode("latin-1").encode("utf-8")
69 69 except:
70 70 return s.decode(encoding, "replace").encode("utf-8")
71 71
72 72 class converter_sink(object):
73 73 """Conversion sink (target) interface"""
74 74
75 75 def __init__(self, ui, path):
76 76 """Initialize conversion sink (or raise NoRepo("message")
77 77 exception if path is not a valid repository)"""
78 78 raise NotImplementedError()
79 79
80 80 def getheads(self):
81 81 """Return a list of this repository's heads"""
82 82 raise NotImplementedError()
83 83
84 84 def mapfile(self):
85 85 """Path to a file that will contain lines
86 86 source_rev_id sink_rev_id
87 87 mapping equivalent revision identifiers for each system."""
88 88 raise NotImplementedError()
89 89
90 90 def authorfile(self):
91 91 """Path to a file that will contain lines
92 92 srcauthor=dstauthor
93 93 mapping equivalent authors identifiers for each system."""
94 94 return None
95 95
96 96 def putfile(self, f, e, data):
97 97 """Put file for next putcommit().
98 98 f: path to file
99 99 e: '', 'x', or 'l' (regular file, executable, or symlink)
100 100 data: file contents"""
101 101 raise NotImplementedError()
102 102
103 103 def delfile(self, f):
104 104 """Delete file for next putcommit().
105 105 f: path to file"""
106 106 raise NotImplementedError()
107 107
108 108 def putcommit(self, files, parents, commit):
109 109 """Create a revision with all changed files listed in 'files'
110 110 and having listed parents. 'commit' is a commit object containing
111 111 at a minimum the author, date, and message for this changeset.
112 112 Called after putfile() and delfile() calls. Note that the sink
113 113 repository is not told to update itself to a particular revision
114 114 (or even what that revision would be) before it receives the
115 115 file data."""
116 116 raise NotImplementedError()
117 117
118 118 def puttags(self, tags):
119 119 """Put tags into sink.
120 120 tags: {tagname: sink_rev_id, ...}"""
121 121 raise NotImplementedError()
@@ -1,589 +1,667 b''
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 #
5 # Configuration options:
6 #
7 # convert.svn.trunk
8 # Relative path to the trunk (default: "trunk")
9 # convert.svn.branches
10 # Relative path to tree of branches (default: "branches")
11 #
12 # Set these in a hgrc, or on the command line as follows:
13 #
14 # hg convert --config convert.svn.trunk=wackoname [...]
4 15
5 import pprint
6 16 import locale
7
17 import os
18 import cPickle as pickle
8 19 from mercurial import util
9 20
10 21 # Subversion stuff. Works best with very recent Python SVN bindings
11 22 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
12 23 # these bindings.
13 24
14 25 from cStringIO import StringIO
15 26
16 27 from common import NoRepo, commit, converter_source
17 28
18 29 try:
19 30 from svn.core import SubversionException, Pool
20 31 import svn.core
21 32 import svn.ra
22 33 import svn.delta
23 34 import svn
24 35 import transport
25 36 except ImportError:
26 37 pass
27 38
28 39 class CompatibilityException(Exception): pass
29 40
41 class changedpath(object):
42 def __init__(self, p):
43 self.copyfrom_path = p.copyfrom_path
44 self.copyfrom_rev = p.copyfrom_rev
45 self.action = p.action
46
30 47 # SVN conversion code stolen from bzr-svn and tailor
31 48 class convert_svn(converter_source):
32 49 def __init__(self, ui, url, rev=None):
33 50 super(convert_svn, self).__init__(ui, url, rev=rev)
34 51
35 52 try:
36 53 SubversionException
37 54 except NameError:
38 55 msg = 'subversion python bindings could not be loaded\n'
39 56 ui.warn(msg)
40 57 raise NoRepo(msg)
41 58
42 59 self.encoding = locale.getpreferredencoding()
43 60 self.lastrevs = {}
44 61
45 62 latest = None
46 63 if rev:
47 64 try:
48 65 latest = int(rev)
49 66 except ValueError:
50 67 raise util.Abort('svn: revision %s is not an integer' % rev)
51 68 try:
52 69 # Support file://path@rev syntax. Useful e.g. to convert
53 70 # deleted branches.
54 url, latest = url.rsplit("@", 1)
55 latest = int(latest)
71 at = url.rfind('@')
72 if at >= 0:
73 latest = int(url[at+1:])
74 url = url[:at]
56 75 except ValueError, e:
57 76 pass
58 77 self.url = url
59 78 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
60 79 try:
61 self.transport = transport.SvnRaTransport(url = url)
80 self.transport = transport.SvnRaTransport(url=url)
62 81 self.ra = self.transport.ra
63 self.ctx = svn.client.create_context()
82 self.ctx = self.transport.client
64 83 self.base = svn.ra.get_repos_root(self.ra)
65 84 self.module = self.url[len(self.base):]
66 85 self.modulemap = {} # revision, module
67 86 self.commits = {}
68 87 self.files = {}
69 88 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
70 89 except SubversionException, e:
71 90 raise NoRepo("couldn't open SVN repo %s" % url)
72 91
73 92 try:
74 93 self.get_blacklist()
75 94 except IOError, e:
76 95 pass
77 96
78 97 self.last_changed = self.latest(self.module, latest)
79 98
80 99 self.head = self.revid(self.last_changed)
81 100
82 101 def setrevmap(self, revmap):
83 102 lastrevs = {}
84 103 for revid in revmap.keys():
85 104 uuid, module, revnum = self.revsplit(revid)
86 105 lastrevnum = lastrevs.setdefault(module, revnum)
87 106 if revnum > lastrevnum:
88 107 lastrevs[module] = revnum
89 108 self.lastrevs = lastrevs
90 109
110 def exists(self, path, optrev):
111 try:
112 return svn.client.ls(self.url.rstrip('/') + '/' + path,
113 optrev, False, self.ctx)
114 except SubversionException, err:
115 return []
116
91 117 def getheads(self):
92 118 # detect standard /branches, /tags, /trunk layout
93 119 optrev = svn.core.svn_opt_revision_t()
94 120 optrev.kind = svn.core.svn_opt_revision_number
95 121 optrev.value.number = self.last_changed
96 122 rpath = self.url.strip('/')
97 paths = svn.client.ls(rpath, optrev, False, self.ctx)
98 if 'branches' in paths and 'trunk' in paths:
99 self.module += '/trunk'
123 cfgtrunk = self.ui.config('convert', 'svn.trunk')
124 cfgbranches = self.ui.config('convert', 'svn.branches')
125 trunk = (cfgtrunk or 'trunk').strip('/')
126 branches = (cfgbranches or 'branches').strip('/')
127 if self.exists(trunk, optrev) and self.exists(branches, optrev):
128 self.ui.note('found trunk at %r and branches at %r\n' %
129 (trunk, branches))
130 oldmodule = self.module
131 self.module += '/' + trunk
100 132 lt = self.latest(self.module, self.last_changed)
101 133 self.head = self.revid(lt)
102 134 self.heads = [self.head]
103 branches = svn.client.ls(rpath + '/branches', optrev, False, self.ctx)
104 for branch in branches.keys():
105 module = '/branches/' + branch
135 branchnames = svn.client.ls(rpath + '/' + branches, optrev, False,
136 self.ctx)
137 for branch in branchnames.keys():
138 if oldmodule:
139 module = '/' + oldmodule + '/' + branches + '/' + branch
140 else:
141 module = '/' + branches + '/' + branch
106 142 brevnum = self.latest(module, self.last_changed)
107 143 brev = self.revid(brevnum, module)
108 144 self.ui.note('found branch %s at %d\n' % (branch, brevnum))
109 145 self.heads.append(brev)
146 elif cfgtrunk or cfgbranches:
147 raise util.Abort(_('trunk/branch layout expected, '
148 'but not found'))
110 149 else:
150 self.ui.note('working with one branch\n')
111 151 self.heads = [self.head]
112 152 return self.heads
113 153
114 154 def getfile(self, file, rev):
115 155 data, mode = self._getfile(file, rev)
116 156 self.modecache[(file, rev)] = mode
117 157 return data
118 158
119 def getmode(self, file, rev):
159 def getmode(self, file, rev):
120 160 return self.modecache[(file, rev)]
121 161
122 162 def getchanges(self, rev):
123 163 self.modecache = {}
124 164 files = self.files[rev]
125 165 cl = files
126 166 cl.sort()
127 167 # caller caches the result, so free it here to release memory
128 168 del self.files[rev]
129 169 return cl
130 170
131 171 def getcommit(self, rev):
132 172 if rev not in self.commits:
133 173 uuid, module, revnum = self.revsplit(rev)
134 174 self.module = module
135 175 self.reparent(module)
136 176 stop = self.lastrevs.get(module, 0)
137 177 self._fetch_revisions(from_revnum=revnum, to_revnum=stop)
138 178 commit = self.commits[rev]
139 179 # caller caches the result, so free it here to release memory
140 180 del self.commits[rev]
141 181 return commit
142 182
183 def get_log(self, paths, start, end, limit=0, discover_changed_paths=True,
184 strict_node_history=False):
185 '''wrapper for svn.ra.get_log.
186 on a large repository, svn.ra.get_log pins huge amounts of
187 memory that cannot be recovered. work around it by forking
188 and writing results over a pipe.'''
189
190 def child(fp):
191 protocol = -1
192 def receiver(orig_paths, revnum, author, date, message, pool):
193 if orig_paths is not None:
194 for k, v in orig_paths.iteritems():
195 orig_paths[k] = changedpath(v)
196 pickle.dump((orig_paths, revnum, author, date, message),
197 fp, protocol)
198
199 try:
200 # Use an ra of our own so that our parent can consume
201 # our results without confusing the server.
202 t = transport.SvnRaTransport(url=self.url)
203 svn.ra.get_log(t.ra, paths, start, end, limit,
204 discover_changed_paths,
205 strict_node_history,
206 receiver)
207 except SubversionException, (_, num):
208 self.ui.print_exc()
209 pickle.dump(num, fp, protocol)
210 else:
211 pickle.dump(None, fp, protocol)
212 fp.close()
213
214 def parent(fp):
215 while True:
216 entry = pickle.load(fp)
217 try:
218 orig_paths, revnum, author, date, message = entry
219 except:
220 if entry is None:
221 break
222 raise SubversionException("child raised exception", entry)
223 yield entry
224
225 rfd, wfd = os.pipe()
226 pid = os.fork()
227 if pid:
228 os.close(wfd)
229 for p in parent(os.fdopen(rfd, 'rb')):
230 yield p
231 ret = os.waitpid(pid, 0)[1]
232 if ret:
233 raise util.Abort(_('get_log %s') % util.explain_exit(ret))
234 else:
235 os.close(rfd)
236 child(os.fdopen(wfd, 'wb'))
237 os._exit(0)
238
143 239 def gettags(self):
144 240 tags = {}
145 def parselogentry(*arg, **args):
146 orig_paths, revnum, author, date, message, pool = arg
147 for path in orig_paths:
148 if not path.startswith('/tags/'):
149 continue
150 ent = orig_paths[path]
151 source = ent.copyfrom_path
152 rev = ent.copyfrom_rev
153 tag = path.split('/', 2)[2]
154 tags[tag] = self.revid(rev, module=source)
155
156 241 start = self.revnum(self.head)
157 242 try:
158 svn.ra.get_log(self.ra, ['/tags'], 0, start, 0, True, False,
159 parselogentry)
160 return tags
161 except SubversionException:
243 for entry in self.get_log(['/tags'], 0, start):
244 orig_paths, revnum, author, date, message = entry
245 for path in orig_paths:
246 if not path.startswith('/tags/'):
247 continue
248 ent = orig_paths[path]
249 source = ent.copyfrom_path
250 rev = ent.copyfrom_rev
251 tag = path.split('/', 2)[2]
252 tags[tag] = self.revid(rev, module=source)
253 except SubversionException, (_, num):
162 254 self.ui.note('no tags found at revision %d\n' % start)
163 return {}
255 return tags
164 256
165 257 # -- helper functions --
166 258
167 259 def revid(self, revnum, module=None):
168 260 if not module:
169 261 module = self.module
170 262 return (u"svn:%s%s@%s" % (self.uuid, module, revnum)).decode(self.encoding)
171 263
172 264 def revnum(self, rev):
173 265 return int(rev.split('@')[-1])
174 266
175 267 def revsplit(self, rev):
176 268 url, revnum = rev.encode(self.encoding).split('@', 1)
177 269 revnum = int(revnum)
178 270 parts = url.split('/', 1)
179 271 uuid = parts.pop(0)[4:]
180 272 mod = ''
181 273 if parts:
182 274 mod = '/' + parts[0]
183 275 return uuid, mod, revnum
184 276
185 277 def latest(self, path, stop=0):
186 278 'find the latest revision affecting path, up to stop'
187 279 if not stop:
188 280 stop = svn.ra.get_latest_revnum(self.ra)
189 281 try:
190 282 self.reparent('')
191 283 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
192 284 self.reparent(self.module)
193 285 except SubversionException:
194 286 dirent = None
195 287 if not dirent:
196 raise util.Abort('%s not found up to revision %d' \
197 % (path, stop))
288 print self.base, path
289 raise util.Abort('%s not found up to revision %d' % (path, stop))
198 290
199 291 return dirent.created_rev
200 292
201 293 def get_blacklist(self):
202 294 """Avoid certain revision numbers.
203 295 It is not uncommon for two nearby revisions to cancel each other
204 296 out, e.g. 'I copied trunk into a subdirectory of itself instead
205 297 of making a branch'. The converted repository is significantly
206 298 smaller if we ignore such revisions."""
207 299 self.blacklist = set()
208 300 blacklist = self.blacklist
209 301 for line in file("blacklist.txt", "r"):
210 302 if not line.startswith("#"):
211 303 try:
212 304 svn_rev = int(line.strip())
213 305 blacklist.add(svn_rev)
214 306 except ValueError, e:
215 307 pass # not an integer or a comment
216 308
217 309 def is_blacklisted(self, svn_rev):
218 310 return svn_rev in self.blacklist
219 311
220 312 def reparent(self, module):
221 313 svn_url = self.base + module
222 314 self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
223 315 svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
224 316
225 317 def _fetch_revisions(self, from_revnum = 0, to_revnum = 347):
226 318 def get_entry_from_path(path, module=self.module):
227 319 # Given the repository url of this wc, say
228 320 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
229 321 # extract the "entry" portion (a relative path) from what
230 322 # svn log --xml says, ie
231 323 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
232 324 # that is to say "tests/PloneTestCase.py"
233 325
234 326 if path.startswith(module):
235 327 relative = path[len(module):]
236 328 if relative.startswith('/'):
237 329 return relative[1:]
238 330 else:
239 331 return relative
240 332
241 333 # The path is outside our tracked tree...
242 334 self.ui.debug('Ignoring %r since it is not under %r\n' % (path, module))
243 335 return None
244 336
245 received = []
246 # svn.ra.get_log requires no other calls to the ra until it completes,
247 # so we just collect the log entries and parse them afterwards
248 def receivelog(*arg, **args):
249 received.append(arg)
250
251 337 self.child_cset = None
252 def parselogentry(*arg, **args):
253 orig_paths, revnum, author, date, message, pool = arg
254
255 if self.is_blacklisted(revnum):
256 self.ui.note('skipping blacklisted revision %d\n' % revnum)
257 return
258
259 self.ui.debug("parsing revision %d\n" % revnum)
260
261 if orig_paths is None:
262 self.ui.debug('revision %d has no entries\n' % revnum)
263 return
338 def parselogentry(orig_paths, revnum, author, date, message):
339 self.ui.debug("parsing revision %d (%d changes)\n" %
340 (revnum, len(orig_paths)))
264 341
265 342 if revnum in self.modulemap:
266 343 new_module = self.modulemap[revnum]
267 344 if new_module != self.module:
268 345 self.module = new_module
269 346 self.reparent(self.module)
270 347
271 348 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
272 349 copies = {}
273 350 entries = []
274 351 rev = self.revid(revnum)
275 352 parents = []
276 353
277 354 # branch log might return entries for a parent we already have
278 355 if (rev in self.commits or
279 356 (revnum < self.lastrevs.get(self.module, 0))):
280 357 return
281 358
282 359 try:
283 360 branch = self.module.split("/")[-1]
284 361 if branch == 'trunk':
285 362 branch = ''
286 363 except IndexError:
287 364 branch = None
288 365
289 paths = orig_paths.keys()
290 paths.sort()
291 for path in paths:
366 orig_paths = orig_paths.items()
367 orig_paths.sort()
368 for path, ent in orig_paths:
292 369 # self.ui.write("path %s\n" % path)
293 370 if path == self.module: # Follow branching back in history
294 ent = orig_paths[path]
295 371 if ent:
296 372 if ent.copyfrom_path:
297 373 # ent.copyfrom_rev may not be the actual last revision
298 374 prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
299 375 self.modulemap[prev] = ent.copyfrom_path
300 376 parents = [self.revid(prev, ent.copyfrom_path)]
301 377 self.ui.note('found parent of branch %s at %d: %s\n' % \
302 378 (self.module, prev, ent.copyfrom_path))
303 379 else:
304 380 self.ui.debug("No copyfrom path, don't know what to do.\n")
305 381 # Maybe it was added and there is no more history.
306 382 entrypath = get_entry_from_path(path, module=self.module)
307 383 # self.ui.write("entrypath %s\n" % entrypath)
308 384 if entrypath is None:
309 385 # Outside our area of interest
310 386 self.ui.debug("boring@%s: %s\n" % (revnum, path))
311 387 continue
312 388 entry = entrypath.decode(self.encoding)
313 ent = orig_paths[path]
314 389
315 390 kind = svn.ra.check_path(self.ra, entrypath, revnum)
316 391 if kind == svn.core.svn_node_file:
317 392 if ent.copyfrom_path:
318 393 copyfrom_path = get_entry_from_path(ent.copyfrom_path)
319 394 if copyfrom_path:
320 395 self.ui.debug("Copied to %s from %s@%s\n" % (entry, copyfrom_path, ent.copyfrom_rev))
321 396 # It's probably important for hg that the source
322 397 # exists in the revision's parent, not just the
323 398 # ent.copyfrom_rev
324 399 fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
325 400 if fromkind != 0:
326 401 copies[self.recode(entry)] = self.recode(copyfrom_path)
327 402 entries.append(self.recode(entry))
328 403 elif kind == 0: # gone, but had better be a deleted *file*
329 404 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
330 405
331 406 # if a branch is created but entries are removed in the same
332 407 # changeset, get the right fromrev
333 408 if parents:
334 409 uuid, old_module, fromrev = self.revsplit(parents[0])
335 410 else:
336 411 fromrev = revnum - 1
337 412 # might always need to be revnum - 1 in these 3 lines?
338 413 old_module = self.modulemap.get(fromrev, self.module)
339 414
340 415 basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
341 416 entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
342 417
343 418 def lookup_parts(p):
344 419 rc = None
345 420 parts = p.split("/")
346 421 for i in range(len(parts)):
347 422 part = "/".join(parts[:i])
348 423 info = part, copyfrom.get(part, None)
349 424 if info[1] is not None:
350 425 self.ui.debug("Found parent directory %s\n" % info[1])
351 426 rc = info
352 427 return rc
353 428
354 429 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
355 430
356 431 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
357 432
358 433 # need to remove fragment from lookup_parts and replace with copyfrom_path
359 434 if frompath is not None:
360 435 self.ui.debug("munge-o-matic\n")
361 436 self.ui.debug(entrypath + '\n')
362 437 self.ui.debug(entrypath[len(frompath):] + '\n')
363 438 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
364 439 fromrev = froment.copyfrom_rev
365 440 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
366 441
367 442 fromkind = svn.ra.check_path(self.ra, entrypath, fromrev)
368 443 if fromkind == svn.core.svn_node_file: # a deleted file
369 444 entries.append(self.recode(entry))
370 445 elif fromkind == svn.core.svn_node_dir:
371 446 # print "Deleted/moved non-file:", revnum, path, ent
372 447 # children = self._find_children(path, revnum - 1)
373 448 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
374 449 # Sometimes this is tricky. For example: in
375 450 # The Subversion Repository revision 6940 a dir
376 # was copied and one of its files was deleted
451 # was copied and one of its files was deleted
377 452 # from the new location in the same commit. This
378 453 # code can't deal with that yet.
379 454 if ent.action == 'C':
380 455 children = self._find_children(path, fromrev)
381 456 else:
382 457 oroot = entrypath.strip('/')
383 458 nroot = path.strip('/')
384 459 children = self._find_children(oroot, fromrev)
385 460 children = [s.replace(oroot,nroot) for s in children]
386 461 # Mark all [files, not directories] as deleted.
387 462 for child in children:
388 463 # Can we move a child directory and its
389 464 # parent in the same commit? (probably can). Could
390 # cause problems if instead of revnum -1,
465 # cause problems if instead of revnum -1,
391 466 # we have to look in (copyfrom_path, revnum - 1)
392 467 entrypath = get_entry_from_path("/" + child, module=old_module)
393 468 if entrypath:
394 469 entry = self.recode(entrypath.decode(self.encoding))
395 470 if entry in copies:
396 471 # deleted file within a copy
397 472 del copies[entry]
398 473 else:
399 474 entries.append(entry)
400 475 else:
401 476 self.ui.debug('unknown path in revision %d: %s\n' % \
402 477 (revnum, path))
403 478 elif kind == svn.core.svn_node_dir:
404 479 # Should probably synthesize normal file entries
405 480 # and handle as above to clean up copy/rename handling.
406 481
407 482 # If the directory just had a prop change,
408 483 # then we shouldn't need to look for its children.
409 484 # Also this could create duplicate entries. Not sure
410 485 # whether this will matter. Maybe should make entries a set.
411 486 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
412 487 # This will fail if a directory was copied
413 488 # from another branch and then some of its files
414 489 # were deleted in the same transaction.
415 490 children = self._find_children(path, revnum)
416 491 children.sort()
417 492 for child in children:
418 493 # Can we move a child directory and its
419 494 # parent in the same commit? (probably can). Could
420 # cause problems if instead of revnum -1,
495 # cause problems if instead of revnum -1,
421 496 # we have to look in (copyfrom_path, revnum - 1)
422 497 entrypath = get_entry_from_path("/" + child, module=self.module)
423 498 # print child, self.module, entrypath
424 499 if entrypath:
425 500 # Need to filter out directories here...
426 501 kind = svn.ra.check_path(self.ra, entrypath, revnum)
427 502 if kind != svn.core.svn_node_dir:
428 503 entries.append(self.recode(entrypath))
429 504
430 505 # Copies here (must copy all from source)
431 506 # Probably not a real problem for us if
432 507 # source does not exist
433 508
434 509 # Can do this with the copy command "hg copy"
435 510 # if ent.copyfrom_path:
436 511 # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
437 512 # module=self.module)
438 513 # copyto_entry = entrypath
439 514 #
440 515 # print "copy directory", copyfrom_entry, 'to', copyto_entry
441 516 #
442 517 # copies.append((copyfrom_entry, copyto_entry))
443 518
444 519 if ent.copyfrom_path:
445 520 copyfrom_path = ent.copyfrom_path.decode(self.encoding)
446 521 copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
447 522 if copyfrom_entry:
448 523 copyfrom[path] = ent
449 524 self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
450 525
451 526 # Good, /probably/ a regular copy. Really should check
452 527 # to see whether the parent revision actually contains
453 528 # the directory in question.
454 529 children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
455 530 children.sort()
456 531 for child in children:
457 532 entrypath = get_entry_from_path("/" + child, module=self.module)
458 533 if entrypath:
459 534 entry = entrypath.decode(self.encoding)
460 535 # print "COPY COPY From", copyfrom_entry, entry
461 536 copyto_path = path + entry[len(copyfrom_entry):]
462 537 copyto_entry = get_entry_from_path(copyto_path, module=self.module)
463 538 # print "COPY", entry, "COPY To", copyto_entry
464 539 copies[self.recode(copyto_entry)] = self.recode(entry)
465 540 # copy from quux splort/quuxfile
466 541
467 542 self.modulemap[revnum] = self.module # track backwards in time
468 543 # a list of (filename, id) where id lets us retrieve the file.
469 # eg in git, id is the object hash. for svn it'll be the
544 # eg in git, id is the object hash. for svn it'll be the
470 545 self.files[rev] = zip(entries, [rev] * len(entries))
471 546 if not entries:
472 547 return
473 548
474 549 # Example SVN datetime. Includes microseconds.
475 550 # ISO-8601 conformant
476 551 # '2007-01-04T17:35:00.902377Z'
477 552 date = util.parsedate(date[:18] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
478 553
479 554 log = message and self.recode(message)
480 555 author = author and self.recode(author) or ''
481 556
482 557 cset = commit(author=author,
483 date=util.datestr(date),
484 desc=log,
558 date=util.datestr(date),
559 desc=log,
485 560 parents=parents,
486 561 copies=copies,
487 562 branch=branch,
488 563 rev=rev.encode('utf-8'))
489 564
490 565 self.commits[rev] = cset
491 566 if self.child_cset and not self.child_cset.parents:
492 567 self.child_cset.parents = [rev]
493 568 self.child_cset = cset
494 569
495 self.ui.note('fetching revision log for "%s" from %d to %d\n' % \
570 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
496 571 (self.module, from_revnum, to_revnum))
497 572
498 573 try:
499 574 discover_changed_paths = True
500 575 strict_node_history = False
501 svn.ra.get_log(self.ra, [self.module], from_revnum, to_revnum, 0,
502 discover_changed_paths, strict_node_history,
503 receivelog)
504 for entry in received:
505 parselogentry(*entry)
576 for entry in self.get_log([self.module], from_revnum, to_revnum):
577 orig_paths, revnum, author, date, message = entry
578 if self.is_blacklisted(revnum):
579 self.ui.note('skipping blacklisted revision %d\n' % revnum)
580 continue
581 if orig_paths is None:
582 self.ui.debug('revision %d has no entries\n' % revnum)
583 continue
584 parselogentry(orig_paths, revnum, author, date, message)
506 585 except SubversionException, (_, num):
507 586 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
508 raise NoSuchRevision(branch=self,
587 raise NoSuchRevision(branch=self,
509 588 revision="Revision number %d" % to_revnum)
510 589 raise
511 590
512 591 def _getfile(self, file, rev):
513 592 io = StringIO()
514 593 # TODO: ra.get_file transmits the whole file instead of diffs.
515 594 mode = ''
516 595 try:
517 596 revnum = self.revnum(rev)
518 597 if self.module != self.modulemap[revnum]:
519 598 self.module = self.modulemap[revnum]
520 599 self.reparent(self.module)
521 600 info = svn.ra.get_file(self.ra, file, revnum, io)
522 601 if isinstance(info, list):
523 602 info = info[-1]
524 603 mode = ("svn:executable" in info) and 'x' or ''
525 604 mode = ("svn:special" in info) and 'l' or mode
526 605 except SubversionException, e:
527 606 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
528 607 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
529 608 if e.apr_err in notfound: # File not found
530 609 raise IOError()
531 610 raise
532 611 data = io.getvalue()
533 612 if mode == 'l':
534 613 link_prefix = "link "
535 614 if data.startswith(link_prefix):
536 615 data = data[len(link_prefix):]
537 616 return data, mode
538 617
539 618 def _find_children(self, path, revnum):
540 619 path = path.strip("/")
541 620
542 621 def _find_children_fallback(path, revnum):
543 622 # SWIG python bindings for getdir are broken up to at least 1.4.3
544 623 pool = Pool()
545 624 optrev = svn.core.svn_opt_revision_t()
546 625 optrev.kind = svn.core.svn_opt_revision_number
547 626 optrev.value.number = revnum
548 627 rpath = '/'.join([self.base, path]).strip('/')
549 628 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev, True, self.ctx, pool).keys()]
550 629
551 630 if hasattr(self, '_find_children_fallback'):
552 631 return _find_children_fallback(path, revnum)
553 632
554 633 self.reparent("/" + path)
555 634 pool = Pool()
556 635
557 636 children = []
558 637 def find_children_inner(children, path, revnum = revnum):
559 638 if hasattr(svn.ra, 'get_dir2'): # Since SVN 1.4
560 639 fields = 0xffffffff # Binding does not provide SVN_DIRENT_ALL
561 640 getdir = svn.ra.get_dir2(self.ra, path, revnum, fields, pool)
562 641 else:
563 642 getdir = svn.ra.get_dir(self.ra, path, revnum, pool)
564 643 if type(getdir) == dict:
565 644 # python binding for getdir is broken up to at least 1.4.3
566 645 raise CompatibilityException()
567 646 dirents = getdir[0]
568 647 if type(dirents) == int:
569 648 # got here once due to infinite recursion bug
570 # pprint.pprint(getdir)
571 649 return
572 650 c = dirents.keys()
573 651 c.sort()
574 652 for child in c:
575 653 dirent = dirents[child]
576 654 if dirent.kind == svn.core.svn_node_dir:
577 655 find_children_inner(children, (path + "/" + child).strip("/"))
578 656 else:
579 657 children.append((path + "/" + child).strip("/"))
580 658
581 659 try:
582 660 find_children_inner(children, "")
583 661 except CompatibilityException:
584 662 self._find_children_fallback = True
585 663 self.reparent(self.module)
586 664 return _find_children_fallback(path, revnum)
587 665
588 666 self.reparent(self.module)
589 667 return [path + "/" + c for c in children]
@@ -1,134 +1,125 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
4 4 # This is a stripped-down version of the original bzr-svn transport.py,
5 5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
6 6
7 7 # This program is free software; you can redistribute it and/or modify
8 8 # it under the terms of the GNU General Public License as published by
9 9 # the Free Software Foundation; either version 2 of the License, or
10 10 # (at your option) any later version.
11 11
12 12 # This program is distributed in the hope that it will be useful,
13 13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 15 # GNU General Public License for more details.
16 16
17 17 # You should have received a copy of the GNU General Public License
18 18 # along with this program; if not, write to the Free Software
19 19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 20
21 21 from cStringIO import StringIO
22 22 import os
23 23 from tempfile import mktemp
24 24
25 25 from svn.core import SubversionException, Pool
26 26 import svn.ra
27 import svn.client
27 28 import svn.core
28 29
29 # Some older versions of the Python bindings need to be
30 # Some older versions of the Python bindings need to be
30 31 # explicitly initialized. But what we want to do probably
31 32 # won't work worth a darn against those libraries anyway!
32 33 svn.ra.initialize()
33 34
34 35 svn_config = svn.core.svn_config_get_config(None)
35 36
36 37
37 38 def _create_auth_baton(pool):
38 39 """Create a Subversion authentication baton. """
39 40 import svn.client
40 41 # Give the client context baton a suite of authentication
41 42 # providers.h
42 43 providers = [
43 44 svn.client.get_simple_provider(pool),
44 45 svn.client.get_username_provider(pool),
45 46 svn.client.get_ssl_client_cert_file_provider(pool),
46 47 svn.client.get_ssl_client_cert_pw_file_provider(pool),
47 48 svn.client.get_ssl_server_trust_file_provider(pool),
48 49 ]
49 50 return svn.core.svn_auth_open(providers, pool)
50 51
51
52 # # The SVN libraries don't like trailing slashes...
53 # return url.rstrip('/')
54
55
56 class SvnRaCallbacks(svn.ra.callbacks2_t):
57 """Remote access callbacks implementation for bzr-svn."""
58 def __init__(self, pool):
59 svn.ra.callbacks2_t.__init__(self)
60 self.auth_baton = _create_auth_baton(pool)
61 self.pool = pool
62
63 def open_tmp_file(self, pool):
64 return mktemp(prefix='tailor-svn')
65
66 52 class NotBranchError(SubversionException):
67 53 pass
68 54
69 55 class SvnRaTransport(object):
70 56 """
71 57 Open an ra connection to a Subversion repository.
72 58 """
73 59 def __init__(self, url="", ra=None):
74 60 self.pool = Pool()
75 61 self.svn_url = url
62 self.username = ''
63 self.password = ''
76 64
77 65 # Only Subversion 1.4 has reparent()
78 66 if ra is None or not hasattr(svn.ra, 'reparent'):
79 self.callbacks = SvnRaCallbacks(self.pool)
67 self.client = svn.client.create_context(self.pool)
68 ab = _create_auth_baton(self.pool)
69 if False:
70 svn.core.svn_auth_set_parameter(
71 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
72 svn.core.svn_auth_set_parameter(
73 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
74 self.client.auth_baton = ab
75 self.client.config = svn_config
80 76 try:
81 ver = svn.ra.version()
82 try: # Older SVN bindings
83 self.ra = svn.ra.open2(self.svn_url.encode('utf8'), self.callbacks, None, svn_config, None)
84 except TypeError, e:
85 self.ra = svn.ra.open2(self.svn_url.encode('utf8'), self.callbacks, svn_config, None)
77 self.ra = svn.client.open_ra_session(
78 self.svn_url.encode('utf8'),
79 self.client, self.pool)
86 80 except SubversionException, (_, num):
87 if num == svn.core.SVN_ERR_RA_ILLEGAL_URL:
88 raise NotBranchError(url)
89 if num == svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED:
90 raise NotBranchError(url)
91 if num == svn.core.SVN_ERR_BAD_URL:
81 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
82 svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
83 svn.core.SVN_ERR_BAD_URL):
92 84 raise NotBranchError(url)
93 85 raise
94
95 86 else:
96 87 self.ra = ra
97 88 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
98 89
99 90 class Reporter:
100 91 def __init__(self, (reporter, report_baton)):
101 92 self._reporter = reporter
102 93 self._baton = report_baton
103 94
104 95 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
105 96 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
106 97 path, revnum, start_empty, lock_token, pool)
107 98
108 99 def delete_path(self, path, pool=None):
109 100 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
110 101 path, pool)
111 102
112 103 def link_path(self, path, url, revision, start_empty, lock_token,
113 104 pool=None):
114 105 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
115 106 path, url, revision, start_empty, lock_token,
116 107 pool)
117 108
118 109 def finish_report(self, pool=None):
119 110 svn.ra.reporter2_invoke_finish_report(self._reporter,
120 111 self._baton, pool)
121 112
122 113 def abort_report(self, pool=None):
123 114 svn.ra.reporter2_invoke_abort_report(self._reporter,
124 115 self._baton, pool)
125 116
126 117 def do_update(self, revnum, path, *args, **kwargs):
127 118 return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
128 119
129 120 def clone(self, offset=None):
130 121 """See Transport.clone()."""
131 122 if offset is None:
132 123 return self.__class__(self.base)
133 124
134 125 return SvnRaTransport(urlutils.join(self.base, offset), ra=self.ra)
@@ -1,168 +1,168 b''
1 1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 2 #
3 3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
4 4 # that removes files not known to mercurial
5 5 #
6 6 # This program was inspired by the "cvspurge" script contained in CVS utilities
7 7 # (http://www.red-bean.com/cvsutils/).
8 8 #
9 9 # To enable the "purge" extension put these lines in your ~/.hgrc:
10 10 # [extensions]
11 11 # hgext.purge =
12 12 #
13 13 # For help on the usage of "hg purge" use:
14 14 # hg help purge
15 15 #
16 16 # This program is free software; you can redistribute it and/or modify
17 17 # it under the terms of the GNU General Public License as published by
18 18 # the Free Software Foundation; either version 2 of the License, or
19 19 # (at your option) any later version.
20 20 #
21 21 # This program is distributed in the hope that it will be useful,
22 22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
23 23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 24 # GNU General Public License for more details.
25 25 #
26 26 # You should have received a copy of the GNU General Public License
27 27 # along with this program; if not, write to the Free Software
28 28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
29 29
30 30 from mercurial import hg, util
31 31 from mercurial.i18n import _
32 32 import os
33 33
34 def dopurge(ui, repo, dirs=None, act=True, ignored=False,
34 def dopurge(ui, repo, dirs=None, act=True, ignored=False,
35 35 abort_on_err=False, eol='\n',
36 36 force=False, include=None, exclude=None):
37 37 def error(msg):
38 38 if abort_on_err:
39 39 raise util.Abort(msg)
40 40 else:
41 41 ui.warn(_('warning: %s\n') % msg)
42 42
43 43 def remove(remove_func, name):
44 44 if act:
45 45 try:
46 46 remove_func(os.path.join(repo.root, name))
47 47 except OSError, e:
48 48 error(_('%s cannot be removed') % name)
49 49 else:
50 50 ui.write('%s%s' % (name, eol))
51 51
52 52 directories = []
53 53 files = []
54 54 missing = []
55 55 roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs,
56 56 include, exclude)
57 57 for src, f, st in repo.dirstate.statwalk(files=roots, match=match,
58 58 ignored=ignored, directories=True):
59 59 if src == 'd':
60 60 directories.append(f)
61 61 elif src == 'm':
62 62 missing.append(f)
63 63 elif src == 'f' and f not in repo.dirstate:
64 64 files.append(f)
65 65
66 66 _check_missing(ui, repo, missing, force)
67 67
68 68 directories.sort()
69 69
70 70 for f in files:
71 71 if f not in repo.dirstate:
72 72 ui.note(_('Removing file %s\n') % f)
73 73 remove(os.remove, f)
74 74
75 75 for f in directories[::-1]:
76 76 if match(f) and not os.listdir(repo.wjoin(f)):
77 77 ui.note(_('Removing directory %s\n') % f)
78 78 remove(os.rmdir, f)
79 79
80 80 def _check_missing(ui, repo, missing, force=False):
81 81 """Abort if there is the chance of having problems with name-mangling fs
82 82
83 83 In a name mangling filesystem (e.g. a case insensitive one)
84 84 dirstate.walk() can yield filenames different from the ones
85 85 stored in the dirstate. This already confuses the status and
86 86 add commands, but with purge this may cause data loss.
87 87
88 88 To prevent this, _check_missing will abort if there are missing
89 89 files. The force option will let the user skip the check if he
90 90 knows it is safe.
91 91
92 92 Even with the force option this function will check if any of the
93 93 missing files is still available in the working dir: if so there
94 94 may be some problem with the underlying filesystem, so it
95 95 aborts unconditionally."""
96 96
97 97 found = [f for f in missing if util.lexists(repo.wjoin(f))]
98 98
99 99 if found:
100 100 if not ui.quiet:
101 101 ui.warn(_("The following tracked files weren't listed by the "
102 102 "filesystem, but could still be found:\n"))
103 103 for f in found:
104 104 ui.warn("%s\n" % f)
105 105 if util.checkfolding(repo.path):
106 106 ui.warn(_("This is probably due to a case-insensitive "
107 107 "filesystem\n"))
108 108 raise util.Abort(_("purging on name mangling filesystems is not "
109 109 "yet fully supported"))
110 110
111 111 if missing and not force:
112 112 raise util.Abort(_("there are missing files in the working dir and "
113 113 "purge still has problems with them due to name "
114 114 "mangling filesystems. "
115 115 "Use --force if you know what you are doing"))
116 116
117 117
118 118 def purge(ui, repo, *dirs, **opts):
119 119 '''removes files not tracked by mercurial
120 120
121 121 Delete files not known to mercurial, this is useful to test local and
122 122 uncommitted changes in the otherwise clean source tree.
123 123
124 124 This means that purge will delete:
125 125 - Unknown files: files marked with "?" by "hg status"
126 126 - Ignored files: files usually ignored by Mercurial because they match
127 127 a pattern in a ".hgignore" file
128 128 - Empty directories: in fact Mercurial ignores directories unless they
129 129 contain files under source control managment
130 130 But it will leave untouched:
131 131 - Unmodified tracked files
132 132 - Modified tracked files
133 133 - New files added to the repository (with "hg add")
134 134
135 135 If directories are given on the command line, only files in these
136 136 directories are considered.
137 137
138 138 Be careful with purge, you could irreversibly delete some files you
139 139 forgot to add to the repository. If you only want to print the list of
140 140 files that this program would delete use the --print option.
141 141 '''
142 142 act = not opts['print']
143 143 ignored = bool(opts['all'])
144 144 abort_on_err = bool(opts['abort_on_err'])
145 145 eol = opts['print0'] and '\0' or '\n'
146 146 if eol == '\0':
147 147 # --print0 implies --print
148 148 act = False
149 149 force = bool(opts['force'])
150 150 include = opts['include']
151 151 exclude = opts['exclude']
152 152 dopurge(ui, repo, dirs, act, ignored, abort_on_err,
153 153 eol, force, include, exclude)
154 154
155 155
156 156 cmdtable = {
157 157 'purge|clean':
158 158 (purge,
159 159 [('a', 'abort-on-err', None, _('abort if an error occurs')),
160 160 ('', 'all', None, _('purge ignored files too')),
161 161 ('f', 'force', None, _('purge even when missing files are detected')),
162 162 ('p', 'print', None, _('print the file names instead of deleting them')),
163 163 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
164 164 ' (implies -p)')),
165 165 ('I', 'include', [], _('include names matching the given patterns')),
166 166 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
167 167 _('hg purge [OPTION]... [DIR]...'))
168 168 }
@@ -1,219 +1,220 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of
6 6 # the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 from node import *
10 10 import cStringIO, os, stat, tarfile, time, util, zipfile
11 11 import zlib, gzip
12 12
13 13 def tidyprefix(dest, prefix, suffixes):
14 14 '''choose prefix to use for names in archive. make sure prefix is
15 15 safe for consumers.'''
16 16
17 17 if prefix:
18 18 prefix = prefix.replace('\\', '/')
19 19 else:
20 20 if not isinstance(dest, str):
21 21 raise ValueError('dest must be string if no prefix')
22 22 prefix = os.path.basename(dest)
23 23 lower = prefix.lower()
24 24 for sfx in suffixes:
25 25 if lower.endswith(sfx):
26 26 prefix = prefix[:-len(sfx)]
27 27 break
28 28 lpfx = os.path.normpath(util.localpath(prefix))
29 29 prefix = util.pconvert(lpfx)
30 30 if not prefix.endswith('/'):
31 31 prefix += '/'
32 32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 33 raise util.Abort(_('archive prefix contains illegal components'))
34 34 return prefix
35 35
36 36 class tarit:
37 37 '''write archive to tar file or stream. can write uncompressed,
38 38 or compress with gzip or bzip2.'''
39 39
40 40 class GzipFileWithTime(gzip.GzipFile):
41 41
42 42 def __init__(self, *args, **kw):
43 43 timestamp = None
44 44 if 'timestamp' in kw:
45 45 timestamp = kw.pop('timestamp')
46 46 if timestamp == None:
47 47 self.timestamp = time.time()
48 48 else:
49 49 self.timestamp = timestamp
50 50 gzip.GzipFile.__init__(self, *args, **kw)
51 51
52 52 def _write_gzip_header(self):
53 53 self.fileobj.write('\037\213') # magic header
54 54 self.fileobj.write('\010') # compression method
55 55 fname = self.filename[:-3]
56 56 flags = 0
57 57 if fname:
58 58 flags = gzip.FNAME
59 59 self.fileobj.write(chr(flags))
60 60 gzip.write32u(self.fileobj, long(self.timestamp))
61 61 self.fileobj.write('\002')
62 62 self.fileobj.write('\377')
63 63 if fname:
64 64 self.fileobj.write(fname + '\000')
65 65
66 66 def __init__(self, dest, prefix, mtime, kind=''):
67 67 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
68 68 '.tgz', '.tbz2'])
69 69 self.mtime = mtime
70 70
71 71 def taropen(name, mode, fileobj=None):
72 72 if kind == 'gz':
73 73 mode = mode[0]
74 74 if not fileobj:
75 75 fileobj = open(name, mode + 'b')
76 76 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
77 77 zlib.Z_BEST_COMPRESSION,
78 78 fileobj, timestamp=mtime)
79 79 return tarfile.TarFile.taropen(name, mode, gzfileobj)
80 80 else:
81 81 return tarfile.open(name, mode + kind, fileobj)
82 82
83 83 if isinstance(dest, str):
84 84 self.z = taropen(dest, mode='w:')
85 85 else:
86 86 # Python 2.5-2.5.1 have a regression that requires a name arg
87 87 self.z = taropen(name='', mode='w|', fileobj=dest)
88 88
89 89 def addfile(self, name, mode, islink, data):
90 90 i = tarfile.TarInfo(self.prefix + name)
91 91 i.mtime = self.mtime
92 92 i.size = len(data)
93 93 if islink:
94 94 i.type = tarfile.SYMTYPE
95 95 i.mode = 0777
96 96 i.linkname = data
97 97 data = None
98 98 else:
99 99 i.mode = mode
100 100 data = cStringIO.StringIO(data)
101 101 self.z.addfile(i, data)
102 102
103 103 def done(self):
104 104 self.z.close()
105 105
106 106 class tellable:
107 107 '''provide tell method for zipfile.ZipFile when writing to http
108 108 response file object.'''
109 109
110 110 def __init__(self, fp):
111 111 self.fp = fp
112 112 self.offset = 0
113 113
114 114 def __getattr__(self, key):
115 115 return getattr(self.fp, key)
116 116
117 117 def write(self, s):
118 118 self.fp.write(s)
119 119 self.offset += len(s)
120 120
121 121 def tell(self):
122 122 return self.offset
123 123
124 124 class zipit:
125 125 '''write archive to zip file or stream. can write uncompressed,
126 126 or compressed with deflate.'''
127 127
128 128 def __init__(self, dest, prefix, mtime, compress=True):
129 129 self.prefix = tidyprefix(dest, prefix, ('.zip',))
130 130 if not isinstance(dest, str):
131 131 try:
132 132 dest.tell()
133 133 except (AttributeError, IOError):
134 134 dest = tellable(dest)
135 135 self.z = zipfile.ZipFile(dest, 'w',
136 136 compress and zipfile.ZIP_DEFLATED or
137 137 zipfile.ZIP_STORED)
138 138 self.date_time = time.gmtime(mtime)[:6]
139 139
140 140 def addfile(self, name, mode, islink, data):
141 141 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
142 142 i.compress_type = self.z.compression
143 143 # unzip will not honor unix file modes unless file creator is
144 144 # set to unix (id 3).
145 145 i.create_system = 3
146 146 ftype = stat.S_IFREG
147 147 if islink:
148 148 mode = 0777
149 149 ftype = stat.S_IFLNK
150 150 i.external_attr = (mode | ftype) << 16L
151 151 self.z.writestr(i, data)
152 152
153 153 def done(self):
154 154 self.z.close()
155 155
156 156 class fileit:
157 157 '''write archive as files in directory.'''
158 158
159 159 def __init__(self, name, prefix, mtime):
160 160 if prefix:
161 161 raise util.Abort(_('cannot give prefix when archiving to files'))
162 162 self.basedir = name
163 163 self.opener = util.opener(self.basedir)
164 164
165 165 def addfile(self, name, mode, islink, data):
166 166 if islink:
167 167 self.opener.symlink(data, name)
168 168 return
169 169 f = self.opener(name, "w", atomictemp=True)
170 170 f.write(data)
171 171 f.rename()
172 172 destfile = os.path.join(self.basedir, name)
173 173 os.chmod(destfile, mode)
174 174
175 175 def done(self):
176 176 pass
177 177
178 178 archivers = {
179 179 'files': fileit,
180 180 'tar': tarit,
181 181 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
182 182 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
183 183 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
184 184 'zip': zipit,
185 185 }
186 186
187 187 def archive(repo, dest, node, kind, decode=True, matchfn=None,
188 188 prefix=None, mtime=None):
189 189 '''create archive of repo as it was at node.
190 190
191 191 dest can be name of directory, name of archive file, or file
192 192 object to write archive to.
193 193
194 194 kind is type of archive to create.
195 195
196 196 decode tells whether to put files through decode filters from
197 197 hgrc.
198 198
199 199 matchfn is function to filter names of files to write to archive.
200 200
201 201 prefix is name of path to put before every archive member.'''
202 202
203 def write(name, mode, islink, data):
203 def write(name, mode, islink, getdata):
204 204 if matchfn and not matchfn(name): return
205 data = getdata()
205 206 if decode:
206 207 data = repo.wwritedata(name, data)
207 208 archiver.addfile(name, mode, islink, data)
208 209
209 210 ctx = repo.changectx(node)
210 211 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
211 212 m = ctx.manifest()
212 213 items = m.items()
213 214 items.sort()
214 215 write('.hg_archival.txt', 0644, False,
215 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
216 lambda: 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
216 217 for filename, filenode in items:
217 218 write(filename, m.execf(filename) and 0755 or 0644, m.linkf(filename),
218 repo.file(filename).read(filenode))
219 lambda: repo.file(filename).read(filenode))
219 220 archiver.done()
@@ -1,199 +1,198 b''
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from revlog import *
9 9 from i18n import _
10 10 import os, time, util
11 11
12 12 def _string_escape(text):
13 13 """
14 14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 16 >>> s
17 17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 18 >>> res = _string_escape(s)
19 19 >>> s == _string_unescape(res)
20 20 True
21 21 """
22 22 # subset of the string_escape codec
23 23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 24 return text.replace('\0', '\\0')
25 25
26 26 def _string_unescape(text):
27 27 return text.decode('string_escape')
28 28
29 29 class appender:
30 30 '''the changelog index must be update last on disk, so we use this class
31 31 to delay writes to it'''
32 32 def __init__(self, fp, buf):
33 33 self.data = buf
34 34 self.fp = fp
35 35 self.offset = fp.tell()
36 36 self.size = util.fstat(fp).st_size
37 37
38 38 def end(self):
39 39 return self.size + len("".join(self.data))
40 40 def tell(self):
41 41 return self.offset
42 42 def flush(self):
43 43 pass
44 44 def close(self):
45 close(self.fp)
45 self.fp.close()
46 46
47 47 def seek(self, offset, whence=0):
48 48 '''virtual file offset spans real file and data'''
49 49 if whence == 0:
50 50 self.offset = offset
51 51 elif whence == 1:
52 52 self.offset += offset
53 53 elif whence == 2:
54 54 self.offset = self.end() + offset
55 55 if self.offset < self.size:
56 56 self.fp.seek(self.offset)
57 57
58 58 def read(self, count=-1):
59 59 '''only trick here is reads that span real file and data'''
60 60 ret = ""
61 old_offset = self.offset
62 61 if self.offset < self.size:
63 62 s = self.fp.read(count)
64 63 ret = s
65 64 self.offset += len(s)
66 65 if count > 0:
67 66 count -= len(s)
68 67 if count != 0:
69 68 doff = self.offset - self.size
70 69 self.data.insert(0, "".join(self.data))
71 70 del self.data[1:]
72 71 s = self.data[0][doff:doff+count]
73 72 self.offset += len(s)
74 73 ret += s
75 74 return ret
76 75
77 76 def write(self, s):
78 77 self.data.append(s)
79 78 self.offset += len(s)
80 79
81 80 class changelog(revlog):
82 81 def __init__(self, opener):
83 82 revlog.__init__(self, opener, "00changelog.i")
84 83
85 84 def delayupdate(self):
86 85 "delay visibility of index updates to other readers"
87 86 self._realopener = self.opener
88 87 self.opener = self._delayopener
89 88 self._delaycount = self.count()
90 89 self._delaybuf = []
91 90 self._delayname = None
92 91
93 92 def finalize(self, tr):
94 93 "finalize index updates"
95 94 self.opener = self._realopener
96 95 # move redirected index data back into place
97 96 if self._delayname:
98 97 util.rename(self._delayname + ".a", self._delayname)
99 98 elif self._delaybuf:
100 99 fp = self.opener(self.indexfile, 'a')
101 100 fp.write("".join(self._delaybuf))
102 101 fp.close()
103 102 del self._delaybuf
104 103 # split when we're done
105 104 self.checkinlinesize(tr)
106 105
107 106 def _delayopener(self, name, mode='r'):
108 107 fp = self._realopener(name, mode)
109 108 # only divert the index
110 109 if not name == self.indexfile:
111 110 return fp
112 111 # if we're doing an initial clone, divert to another file
113 112 if self._delaycount == 0:
114 113 self._delayname = fp.name
115 114 return self._realopener(name + ".a", mode)
116 115 # otherwise, divert to memory
117 116 return appender(fp, self._delaybuf)
118 117
119 118 def checkinlinesize(self, tr, fp=None):
120 119 if self.opener == self._delayopener:
121 120 return
122 121 return revlog.checkinlinesize(self, tr, fp)
123 122
124 123 def decode_extra(self, text):
125 124 extra = {}
126 125 for l in text.split('\0'):
127 126 if not l:
128 127 continue
129 128 k, v = _string_unescape(l).split(':', 1)
130 129 extra[k] = v
131 130 return extra
132 131
133 132 def encode_extra(self, d):
134 133 # keys must be sorted to produce a deterministic changelog entry
135 134 keys = d.keys()
136 135 keys.sort()
137 136 items = [_string_escape('%s:%s' % (k, d[k])) for k in keys]
138 137 return "\0".join(items)
139 138
140 139 def extract(self, text):
141 140 """
142 141 format used:
143 142 nodeid\n : manifest node in ascii
144 143 user\n : user, no \n or \r allowed
145 144 time tz extra\n : date (time is int or float, timezone is int)
146 145 : extra is metadatas, encoded and separated by '\0'
147 146 : older versions ignore it
148 147 files\n\n : files modified by the cset, no \n or \r allowed
149 148 (.*) : comment (free text, ideally utf-8)
150 149
151 150 changelog v0 doesn't use extra
152 151 """
153 152 if not text:
154 153 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
155 154 last = text.index("\n\n")
156 155 desc = util.tolocal(text[last + 2:])
157 156 l = text[:last].split('\n')
158 157 manifest = bin(l[0])
159 158 user = util.tolocal(l[1])
160 159
161 160 extra_data = l[2].split(' ', 2)
162 161 if len(extra_data) != 3:
163 162 time = float(extra_data.pop(0))
164 163 try:
165 164 # various tools did silly things with the time zone field.
166 165 timezone = int(extra_data[0])
167 166 except:
168 167 timezone = 0
169 168 extra = {}
170 169 else:
171 170 time, timezone, extra = extra_data
172 171 time, timezone = float(time), int(timezone)
173 172 extra = self.decode_extra(extra)
174 173 if not extra.get('branch'):
175 174 extra['branch'] = 'default'
176 175 files = l[3:]
177 176 return (manifest, user, (time, timezone), files, desc, extra)
178 177
179 178 def read(self, node):
180 179 return self.extract(self.revision(node))
181 180
182 181 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
183 182 user=None, date=None, extra={}):
184 183
185 184 user, desc = util.fromlocal(user), util.fromlocal(desc)
186 185
187 186 if date:
188 187 parseddate = "%d %d" % util.parsedate(date)
189 188 else:
190 189 parseddate = "%d %d" % util.makedate()
191 190 if extra and extra.get("branch") in ("default", ""):
192 191 del extra["branch"]
193 192 if extra:
194 193 extra = self.encode_extra(extra)
195 194 parseddate = "%s %s" % (parseddate, extra)
196 195 list.sort()
197 196 l = [hex(manifest), user, parseddate] + list + ["", desc]
198 197 text = "\n".join(l)
199 198 return self.addrevision(text, transaction, self.count(), p1, p2)
@@ -1,1277 +1,1277 b''
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
11 11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
12 12 import fancyopts, revlog, version, extensions, hook
13 13
14 14 revrangesep = ':'
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18 class AmbiguousCommand(Exception):
19 19 """Exception raised if command shortcut matches more than one command."""
20 20 class ParseError(Exception):
21 21 """Exception raised on errors in parsing the command line."""
22 22
23 23 def runcatch(ui, args, argv0=None):
24 24 def catchterm(*args):
25 25 raise util.SignalInterrupt
26 26
27 27 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
28 28 num = getattr(signal, name, None)
29 29 if num: signal.signal(num, catchterm)
30 30
31 31 try:
32 32 try:
33 33 # enter the debugger before command execution
34 34 if '--debugger' in args:
35 35 pdb.set_trace()
36 36 try:
37 37 return dispatch(ui, args, argv0=argv0)
38 38 finally:
39 39 ui.flush()
40 40 except:
41 41 # enter the debugger when we hit an exception
42 42 if '--debugger' in args:
43 43 pdb.post_mortem(sys.exc_info()[2])
44 44 ui.print_exc()
45 45 raise
46 46
47 47 except ParseError, inst:
48 48 if inst.args[0]:
49 49 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
50 50 commands.help_(ui, inst.args[0])
51 51 else:
52 52 ui.warn(_("hg: %s\n") % inst.args[1])
53 53 commands.help_(ui, 'shortlist')
54 54 except AmbiguousCommand, inst:
55 55 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
56 56 (inst.args[0], " ".join(inst.args[1])))
57 57 except UnknownCommand, inst:
58 58 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
59 59 commands.help_(ui, 'shortlist')
60 60 except hg.RepoError, inst:
61 61 ui.warn(_("abort: %s!\n") % inst)
62 62 except lock.LockHeld, inst:
63 63 if inst.errno == errno.ETIMEDOUT:
64 64 reason = _('timed out waiting for lock held by %s') % inst.locker
65 65 else:
66 66 reason = _('lock held by %s') % inst.locker
67 67 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
68 68 except lock.LockUnavailable, inst:
69 69 ui.warn(_("abort: could not lock %s: %s\n") %
70 70 (inst.desc or inst.filename, inst.strerror))
71 71 except revlog.RevlogError, inst:
72 72 ui.warn(_("abort: %s!\n") % inst)
73 73 except util.SignalInterrupt:
74 74 ui.warn(_("killed!\n"))
75 75 except KeyboardInterrupt:
76 76 try:
77 77 ui.warn(_("interrupted!\n"))
78 78 except IOError, inst:
79 79 if inst.errno == errno.EPIPE:
80 80 if ui.debugflag:
81 81 ui.warn(_("\nbroken pipe\n"))
82 82 else:
83 83 raise
84 84 except socket.error, inst:
85 85 ui.warn(_("abort: %s\n") % inst[1])
86 86 except IOError, inst:
87 87 if hasattr(inst, "code"):
88 88 ui.warn(_("abort: %s\n") % inst)
89 89 elif hasattr(inst, "reason"):
90 90 try: # usually it is in the form (errno, strerror)
91 91 reason = inst.reason.args[1]
92 92 except: # it might be anything, for example a string
93 93 reason = inst.reason
94 94 ui.warn(_("abort: error: %s\n") % reason)
95 95 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
96 96 if ui.debugflag:
97 97 ui.warn(_("broken pipe\n"))
98 98 elif getattr(inst, "strerror", None):
99 99 if getattr(inst, "filename", None):
100 100 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
101 101 else:
102 102 ui.warn(_("abort: %s\n") % inst.strerror)
103 103 else:
104 104 raise
105 105 except OSError, inst:
106 106 if getattr(inst, "filename", None):
107 107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
108 108 else:
109 109 ui.warn(_("abort: %s\n") % inst.strerror)
110 110 except util.UnexpectedOutput, inst:
111 111 ui.warn(_("abort: %s") % inst[0])
112 112 if not isinstance(inst[1], basestring):
113 113 ui.warn(" %r\n" % (inst[1],))
114 114 elif not inst[1]:
115 115 ui.warn(_(" empty string\n"))
116 116 else:
117 117 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
118 118 except ImportError, inst:
119 119 m = str(inst).split()[-1]
120 120 ui.warn(_("abort: could not import module %s!\n" % m))
121 121 if m in "mpatch bdiff".split():
122 122 ui.warn(_("(did you forget to compile extensions?)\n"))
123 123 elif m in "zlib".split():
124 124 ui.warn(_("(is your Python install correct?)\n"))
125 125
126 126 except util.Abort, inst:
127 127 ui.warn(_("abort: %s\n") % inst)
128 128 except SystemExit, inst:
129 129 # Commands shouldn't sys.exit directly, but give a return code.
130 130 # Just in case catch this and and pass exit code to caller.
131 131 return inst.code
132 132 except:
133 133 ui.warn(_("** unknown exception encountered, details follow\n"))
134 134 ui.warn(_("** report bug details to "
135 135 "http://www.selenic.com/mercurial/bts\n"))
136 136 ui.warn(_("** or mercurial@selenic.com\n"))
137 137 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
138 138 % version.get_version())
139 139 raise
140 140
141 141 return -1
142 142
143 143 def findpossible(ui, cmd):
144 144 """
145 145 Return cmd -> (aliases, command table entry)
146 146 for each matching command.
147 147 Return debug commands (or their aliases) only if no normal command matches.
148 148 """
149 149 choice = {}
150 150 debugchoice = {}
151 151 for e in commands.table.keys():
152 152 aliases = e.lstrip("^").split("|")
153 153 found = None
154 154 if cmd in aliases:
155 155 found = cmd
156 156 elif not ui.config("ui", "strict"):
157 157 for a in aliases:
158 158 if a.startswith(cmd):
159 159 found = a
160 160 break
161 161 if found is not None:
162 162 if aliases[0].startswith("debug") or found.startswith("debug"):
163 163 debugchoice[found] = (aliases, commands.table[e])
164 164 else:
165 165 choice[found] = (aliases, commands.table[e])
166 166
167 167 if not choice and debugchoice:
168 168 choice = debugchoice
169 169
170 170 return choice
171 171
172 172 def findcmd(ui, cmd):
173 173 """Return (aliases, command table entry) for command string."""
174 174 choice = findpossible(ui, cmd)
175 175
176 176 if choice.has_key(cmd):
177 177 return choice[cmd]
178 178
179 179 if len(choice) > 1:
180 180 clist = choice.keys()
181 181 clist.sort()
182 182 raise AmbiguousCommand(cmd, clist)
183 183
184 184 if choice:
185 185 return choice.values()[0]
186 186
187 187 raise UnknownCommand(cmd)
188 188
189 189 def findrepo():
190 190 p = os.getcwd()
191 191 while not os.path.isdir(os.path.join(p, ".hg")):
192 192 oldp, p = p, os.path.dirname(p)
193 193 if p == oldp:
194 194 return None
195 195
196 196 return p
197 197
198 198 def parse(ui, args):
199 199 options = {}
200 200 cmdoptions = {}
201 201
202 202 try:
203 203 args = fancyopts.fancyopts(args, commands.globalopts, options)
204 204 except fancyopts.getopt.GetoptError, inst:
205 205 raise ParseError(None, inst)
206 206
207 207 if args:
208 208 cmd, args = args[0], args[1:]
209 209 aliases, i = findcmd(ui, cmd)
210 210 cmd = aliases[0]
211 211 defaults = ui.config("defaults", cmd)
212 212 if defaults:
213 213 args = shlex.split(defaults) + args
214 214 c = list(i[1])
215 215 else:
216 216 cmd = None
217 217 c = []
218 218
219 219 # combine global options into local
220 220 for o in commands.globalopts:
221 221 c.append((o[0], o[1], options[o[1]], o[3]))
222 222
223 223 try:
224 224 args = fancyopts.fancyopts(args, c, cmdoptions)
225 225 except fancyopts.getopt.GetoptError, inst:
226 226 raise ParseError(cmd, inst)
227 227
228 228 # separate global options back out
229 229 for o in commands.globalopts:
230 230 n = o[1]
231 231 options[n] = cmdoptions[n]
232 232 del cmdoptions[n]
233 233
234 234 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
235 235
236 236 def parseconfig(config):
237 237 """parse the --config options from the command line"""
238 238 parsed = []
239 239 for cfg in config:
240 240 try:
241 241 name, value = cfg.split('=', 1)
242 242 section, name = name.split('.', 1)
243 243 if not section or not name:
244 244 raise IndexError
245 245 parsed.append((section, name, value))
246 246 except (IndexError, ValueError):
247 247 raise util.Abort(_('malformed --config option: %s') % cfg)
248 248 return parsed
249 249
250 250 def earlygetopt(aliases, args):
251 251 """Return list of values for an option (or aliases).
252 252
253 253 The values are listed in the order they appear in args.
254 254 The options and values are removed from args.
255 255 """
256 256 try:
257 257 argcount = args.index("--")
258 258 except ValueError:
259 259 argcount = len(args)
260 260 shortopts = [opt for opt in aliases if len(opt) == 2]
261 261 values = []
262 262 pos = 0
263 263 while pos < argcount:
264 264 if args[pos] in aliases:
265 265 if pos + 1 >= argcount:
266 266 # ignore and let getopt report an error if there is no value
267 267 break
268 268 del args[pos]
269 269 values.append(args.pop(pos))
270 270 argcount -= 2
271 271 elif args[pos][:2] in shortopts:
272 272 # short option can have no following space, e.g. hg log -Rfoo
273 273 values.append(args.pop(pos)[2:])
274 274 argcount -= 1
275 275 else:
276 276 pos += 1
277 277 return values
278 278
279 279 def dispatch(ui, args, argv0=None):
280 280 # remember how to call 'hg' before changing the working dir
281 281 util.set_hgexecutable(argv0)
282 282
283 283 # read --config before doing anything else
284 284 # (e.g. to change trust settings for reading .hg/hgrc)
285 285 config = earlygetopt(['--config'], args)
286 286 if config:
287 287 ui.updateopts(config=parseconfig(config))
288 288
289 289 # check for cwd
290 290 cwd = earlygetopt(['--cwd'], args)
291 291 if cwd:
292 292 os.chdir(cwd[-1])
293 293
294 294 # read the local repository .hgrc into a local ui object
295 295 path = findrepo() or ""
296 296 if not path:
297 297 lui = ui
298 298 if path:
299 299 try:
300 300 lui = commands.ui.ui(parentui=ui)
301 301 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
302 302 except IOError:
303 303 pass
304 304
305 305 # now we can expand paths, even ones in .hg/hgrc
306 306 rpath = earlygetopt(["-R", "--repository", "--repo"], args)
307 307 if rpath:
308 308 path = lui.expandpath(rpath[-1])
309 309 lui = commands.ui.ui(parentui=ui)
310 310 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
311 311
312 312 extensions.loadall(lui)
313 313 # check for fallback encoding
314 314 fallback = lui.config('ui', 'fallbackencoding')
315 315 if fallback:
316 316 util._fallbackencoding = fallback
317 317
318 318 fullargs = args
319 cmd, func, args, options, cmdoptions = parse(ui, args)
319 cmd, func, args, options, cmdoptions = parse(lui, args)
320 320
321 321 if options["config"]:
322 322 raise util.Abort(_("Option --config may not be abbreviated!"))
323 323 if options["cwd"]:
324 324 raise util.Abort(_("Option --cwd may not be abbreviated!"))
325 325 if options["repository"]:
326 326 raise util.Abort(_(
327 327 "Option -R has to be separated from other options (i.e. not -qR) "
328 328 "and --repository may only be abbreviated as --repo!"))
329 329
330 330 if options["encoding"]:
331 331 util._encoding = options["encoding"]
332 332 if options["encodingmode"]:
333 333 util._encodingmode = options["encodingmode"]
334 334 if options["time"]:
335 335 def get_times():
336 336 t = os.times()
337 337 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
338 338 t = (t[0], t[1], t[2], t[3], time.clock())
339 339 return t
340 340 s = get_times()
341 341 def print_time():
342 342 t = get_times()
343 343 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
344 344 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
345 345 atexit.register(print_time)
346 346
347 347 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
348 348 not options["noninteractive"], options["traceback"])
349 349
350 350 if options['help']:
351 351 return commands.help_(ui, cmd, options['version'])
352 352 elif options['version']:
353 353 return commands.version_(ui)
354 354 elif not cmd:
355 355 return commands.help_(ui, 'shortlist')
356 356
357 357 repo = None
358 358 if cmd not in commands.norepo.split():
359 359 try:
360 360 repo = hg.repository(ui, path=path)
361 361 ui = repo.ui
362 362 if not repo.local():
363 363 raise util.Abort(_("repository '%s' is not local") % path)
364 364 except hg.RepoError:
365 365 if cmd not in commands.optionalrepo.split():
366 366 if not path:
367 367 raise hg.RepoError(_("There is no Mercurial repository here"
368 368 " (.hg not found)"))
369 369 raise
370 370 d = lambda: func(ui, repo, *args, **cmdoptions)
371 371 else:
372 372 d = lambda: func(ui, *args, **cmdoptions)
373 373
374 374 # run pre-hook, and abort if it fails
375 375 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
376 376 if ret:
377 377 return ret
378 378 ret = runcommand(ui, options, cmd, d)
379 379 # run post-hook, passing command result
380 380 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
381 381 result = ret)
382 382 return ret
383 383
384 384 def runcommand(ui, options, cmd, cmdfunc):
385 385 def checkargs():
386 386 try:
387 387 return cmdfunc()
388 388 except TypeError, inst:
389 389 # was this an argument error?
390 390 tb = traceback.extract_tb(sys.exc_info()[2])
391 391 if len(tb) != 2: # no
392 392 raise
393 393 raise ParseError(cmd, _("invalid arguments"))
394 394
395 395 if options['profile']:
396 396 import hotshot, hotshot.stats
397 397 prof = hotshot.Profile("hg.prof")
398 398 try:
399 399 try:
400 400 return prof.runcall(checkargs)
401 401 except:
402 402 try:
403 403 ui.warn(_('exception raised - generating '
404 404 'profile anyway\n'))
405 405 except:
406 406 pass
407 407 raise
408 408 finally:
409 409 prof.close()
410 410 stats = hotshot.stats.load("hg.prof")
411 411 stats.strip_dirs()
412 412 stats.sort_stats('time', 'calls')
413 413 stats.print_stats(40)
414 414 elif options['lsprof']:
415 415 try:
416 416 from mercurial import lsprof
417 417 except ImportError:
418 418 raise util.Abort(_(
419 419 'lsprof not available - install from '
420 420 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
421 421 p = lsprof.Profiler()
422 422 p.enable(subcalls=True)
423 423 try:
424 424 return checkargs()
425 425 finally:
426 426 p.disable()
427 427 stats = lsprof.Stats(p.getstats())
428 428 stats.sort()
429 429 stats.pprint(top=10, file=sys.stderr, climit=5)
430 430 else:
431 431 return checkargs()
432 432
433 433 def bail_if_changed(repo):
434 434 modified, added, removed, deleted = repo.status()[:4]
435 435 if modified or added or removed or deleted:
436 436 raise util.Abort(_("outstanding uncommitted changes"))
437 437
438 438 def logmessage(opts):
439 439 """ get the log message according to -m and -l option """
440 440 message = opts['message']
441 441 logfile = opts['logfile']
442 442
443 443 if message and logfile:
444 444 raise util.Abort(_('options --message and --logfile are mutually '
445 445 'exclusive'))
446 446 if not message and logfile:
447 447 try:
448 448 if logfile == '-':
449 449 message = sys.stdin.read()
450 450 else:
451 451 message = open(logfile).read()
452 452 except IOError, inst:
453 453 raise util.Abort(_("can't read commit message '%s': %s") %
454 454 (logfile, inst.strerror))
455 455 return message
456 456
457 457 def setremoteconfig(ui, opts):
458 458 "copy remote options to ui tree"
459 459 if opts.get('ssh'):
460 460 ui.setconfig("ui", "ssh", opts['ssh'])
461 461 if opts.get('remotecmd'):
462 462 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
463 463
464 464 def parseurl(url, revs):
465 465 '''parse url#branch, returning url, branch + revs'''
466 466
467 467 if '#' not in url:
468 468 return url, (revs or None)
469 469
470 470 url, rev = url.split('#', 1)
471 471 return url, revs + [rev]
472 472
473 473 def revpair(repo, revs):
474 474 '''return pair of nodes, given list of revisions. second item can
475 475 be None, meaning use working dir.'''
476 476
477 477 def revfix(repo, val, defval):
478 478 if not val and val != 0 and defval is not None:
479 479 val = defval
480 480 return repo.lookup(val)
481 481
482 482 if not revs:
483 483 return repo.dirstate.parents()[0], None
484 484 end = None
485 485 if len(revs) == 1:
486 486 if revrangesep in revs[0]:
487 487 start, end = revs[0].split(revrangesep, 1)
488 488 start = revfix(repo, start, 0)
489 489 end = revfix(repo, end, repo.changelog.count() - 1)
490 490 else:
491 491 start = revfix(repo, revs[0], None)
492 492 elif len(revs) == 2:
493 493 if revrangesep in revs[0] or revrangesep in revs[1]:
494 494 raise util.Abort(_('too many revisions specified'))
495 495 start = revfix(repo, revs[0], None)
496 496 end = revfix(repo, revs[1], None)
497 497 else:
498 498 raise util.Abort(_('too many revisions specified'))
499 499 return start, end
500 500
501 501 def revrange(repo, revs):
502 502 """Yield revision as strings from a list of revision specifications."""
503 503
504 504 def revfix(repo, val, defval):
505 505 if not val and val != 0 and defval is not None:
506 506 return defval
507 507 return repo.changelog.rev(repo.lookup(val))
508 508
509 509 seen, l = {}, []
510 510 for spec in revs:
511 511 if revrangesep in spec:
512 512 start, end = spec.split(revrangesep, 1)
513 513 start = revfix(repo, start, 0)
514 514 end = revfix(repo, end, repo.changelog.count() - 1)
515 515 step = start > end and -1 or 1
516 516 for rev in xrange(start, end+step, step):
517 517 if rev in seen:
518 518 continue
519 519 seen[rev] = 1
520 520 l.append(rev)
521 521 else:
522 522 rev = revfix(repo, spec, None)
523 523 if rev in seen:
524 524 continue
525 525 seen[rev] = 1
526 526 l.append(rev)
527 527
528 528 return l
529 529
530 530 def make_filename(repo, pat, node,
531 531 total=None, seqno=None, revwidth=None, pathname=None):
532 532 node_expander = {
533 533 'H': lambda: hex(node),
534 534 'R': lambda: str(repo.changelog.rev(node)),
535 535 'h': lambda: short(node),
536 536 }
537 537 expander = {
538 538 '%': lambda: '%',
539 539 'b': lambda: os.path.basename(repo.root),
540 540 }
541 541
542 542 try:
543 543 if node:
544 544 expander.update(node_expander)
545 545 if node:
546 546 expander['r'] = (lambda:
547 547 str(repo.changelog.rev(node)).zfill(revwidth or 0))
548 548 if total is not None:
549 549 expander['N'] = lambda: str(total)
550 550 if seqno is not None:
551 551 expander['n'] = lambda: str(seqno)
552 552 if total is not None and seqno is not None:
553 553 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
554 554 if pathname is not None:
555 555 expander['s'] = lambda: os.path.basename(pathname)
556 556 expander['d'] = lambda: os.path.dirname(pathname) or '.'
557 557 expander['p'] = lambda: pathname
558 558
559 559 newname = []
560 560 patlen = len(pat)
561 561 i = 0
562 562 while i < patlen:
563 563 c = pat[i]
564 564 if c == '%':
565 565 i += 1
566 566 c = pat[i]
567 567 c = expander[c]()
568 568 newname.append(c)
569 569 i += 1
570 570 return ''.join(newname)
571 571 except KeyError, inst:
572 572 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
573 573 inst.args[0])
574 574
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    '''return an open file object for the pattern pat.

    An empty pattern or '-' selects stdout (when writing) or stdin
    (when reading); a file-like object already usable in the requested
    mode is returned unchanged; otherwise the expanded pattern is
    opened as a file name.'''
    writing = 'w' in mode
    if not pat or pat == '-':
        # '-' is the conventional name for the standard streams
        return writing and sys.stdout or sys.stdin
    if writing and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    fname = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(fname, mode)
586 586
def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
    '''build a matcher triple for the given patterns and options.'''
    cwd = repo.getcwd()
    incl, excl = opts.get('include'), opts.get('exclude')
    return util.cmdmatcher(repo.root, cwd, pats or [], incl, excl,
                           globbed=globbed, default=default)
592 592
def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
         default=None):
    '''yield (src, filename, relative path, exact match?) tuples for
    files matching the given patterns.'''
    files, match, anypats = matchpats(repo, pats, opts, globbed=globbed,
                                      default=default)
    # files listed explicitly on the command line count as exact matches
    exact = dict.fromkeys(files)
    cwd = repo.getcwd()
    for src, fn in repo.walk(node=node, files=files, match=match,
                             badmatch=badmatch):
        yield src, fn, repo.pathto(fn, cwd), fn in exact
602 602
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples

    Compare each added file against every removed file and report the
    best-scoring removed file whose similarity (fraction of matching
    bytes, in 0..1) reaches the threshold.'''
    if added is None or removed is None:
        # default to the working directory status (added, removed)
        added, removed = repo.status()[1:3]
    ctx = repo.changectx()
    for a in added:
        aa = repo.wread(a)
        # seed the best score with the threshold so only candidates at
        # or above it ever qualify; '>=' below means a later removed
        # file wins ties
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            alines = mdiff.splitnewlines(aa)
            matches = bdiff.blocks(aa, rr)
            for x1,x2,y1,y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            lengths = len(aa) + len(rr)
            if lengths:
                # 2 * matching bytes / total bytes of both versions
                myscore = equal*2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
630 630
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''schedule new files for addition and missing files for removal;
    optionally record similar add/remove pairs as renames.

    dry_run and similarity override the corresponding entries in opts
    when given explicitly.'''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    # remember (rel, exact) per file for the rename messages below
    mapping = {}
    for src, abs, rel, exact in walk(repo, pats, opts):
        target = repo.wjoin(abs)
        # present in the filesystem but unknown to the dirstate: add it
        if src == 'f' and abs not in repo.dirstate:
            add.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        # tracked (and not already scheduled for removal) but gone from
        # the filesystem: remove it
        if repo.dirstate[abs] != 'r' and not util.lexists(target):
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.add(add)
        repo.remove(remove)
    if similarity > 0:
        # pair up adds and removes that look like renames
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)
663 663
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.

    With --daemon (and no --daemon-pipefds yet), respawn ourselves as a
    detached child and block in the parent until the child signals that
    its setup is complete over a pipe; then run parentfn or exit.  The
    child path runs initfn, writes the pid file, completes the pipe
    handshake, detaches from the terminal, and finally calls runfn.'''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side: re-exec with --daemon-pipefds so the child knows
        # which pipe to signal readiness on
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        # block until the child writes its ready byte (or exits)
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child side of the daemon handshake
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            # detach from the controlling terminal; not available on
            # all platforms
            os.setsid()
        except AttributeError:
            pass
        # tell the waiting parent that setup is done
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        # redirect stdin/stdout/stderr to the null device
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()
707 707
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        self.ui = ui
        self.repo = repo
        # when buffered, show() collects output per rev for flush()
        self.buffered = buffered
        # patch is a match function (or false) selecting files to diff
        self.patch = patch
        self.header = {}    # rev -> buffered header text
        self.hunk = {}      # rev -> buffered changeset text
        self.lastheader = None

    def flush(self, rev):
        '''write buffered output for rev; returns 1 if a hunk was
        written, 0 otherwise.  A header equal to the previously
        written one is suppressed.'''
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        '''show one changeset, buffering the output when requested.'''
        if self.buffered:
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # fill in whichever of rev/changenode was omitted
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        # full hashes in debug mode, short ones otherwise
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            # debug mode: list modified/added/removed files separately
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            extraitems = extra.items()
            extraitems.sort()
            for key, value in extraitems:
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                # non-verbose: only the first line of the description
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        '''write the diff against the first parent if patch output was
        requested.'''
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
833 833
834 834
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        filters = templater.common_filters.copy()
        # full node hashes in debug mode, 12-char ones otherwise
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # fill in whichever of rev/changenode was omitted
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                # merge v into the template arguments whether it is a
                # dict, a sequence of pairs, or a plain value
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            # the default branch name is never shown
            branch = changes[5].get("branch")
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            extras = changes[5].items()
            extras.sort()
            for key, value in extras:
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        if self.ui.debugflag:
            # debug mode: split files into modified/added/removed and
            # expose the manifest
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            def showfiles(**args):
                return showlist('file', files[0], **args)
            def showadds(**args):
                return showlist('file_add', files[1], **args)
            def showdels(**args):
                return showlist('file_del', files[2], **args)
            def showmanifest(**args):
                args = args.copy()
                args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                                 node=hex(changes[0])))
                return self.t('manifest', **args)
        else:
            def showfiles(**args):
                return showlist('file', changes[3], **args)
            showadds = ''
            showdels = ''
            showmanifest = ''

        # default template keywords; caller-supplied props are
        # overridden by these
        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        props = props.copy()
        props.update(defprops)

        try:
            # pick the most specific header template available for the
            # current verbosity level
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            # same selection for the changeset body; 'changeset' always
            # exists (seeded by use_template or the map file)
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
1021 1021
def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    patch = False
    if opts.get('patch'):
        # diff output: restrict to matchfn if given, else all files
        patch = matchfn or util.always

    tmpl = opts.get('template')
    mapfile = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        mapfile = opts.get('style')
        # ui settings
        if not mapfile:
            tmpl = ui.config('ui', 'logtemplate')
            if tmpl:
                tmpl = templater.parsestring(tmpl)
            else:
                mapfile = ui.config('ui', 'style')

    if tmpl or mapfile:
        if mapfile:
            # a bare style name is looked up on the template path,
            # first with the 'map-cmdline.' prefix
            if not os.path.split(mapfile)[0]:
                mapname = (templater.templatepath('map-cmdline.' + mapfile)
                           or templater.templatepath(mapfile))
                if mapname: mapfile = mapname
        try:
            t = changeset_templater(ui, repo, patch, mapfile, buffered)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo, patch, buffered)
1065 1065
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    df = util.matchdate(date + " to " + date)
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev': None})
    results = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            # gathering pass: remember revisions whose date matches
            d = get(rev)[2]
            if df(d[0]):
                results[rev] = d
        elif st == 'iter' and rev in results:
            # display pass: the first match in iteration order wins
            ui.status("Found revision %s from %s\n" %
                      (rev, util.datestr(results[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))
1084 1084
def walkchangerevs(ui, repo, pats, change, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in.  Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, matchfn) tuple. The iterator
    yields 3-tuples. They will be of one of the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        # yield (position, size) pairs covering start..end, doubling
        # the window size up to sizelimit; works in either direction
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

    files, matchfn, anypats = matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if repo.changelog.count() == 0:
        return [], matchfn

    if follow:
        defrange = '%s:0' % repo.changectx().rev()
    else:
        defrange = 'tip:0'
    revs = revrange(repo, opts['rev'] or [defrange])
    wanted = {}           # revs to show, used as a set
    # patterns (or --removed) force scanning every changeset
    slowpath = anypats or opts.get('removed')
    fncache = {}          # rev -> file names changed in that rev

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict.fromkeys(revs)
    copies = []
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog, node):
            # yield (linkrev, copied) newest-first for the filelog
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, nullrev):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            # explicit files first, then rename sources discovered
            # while following (copies grows during iteration)
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1,
                                                nullrev):
                for j in xrange(i - window, i + 1):
                    yield j, change(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    class followfilter:
        '''stateful test for ancestor/descendant relationship to the
        first revision it is fed; feed revisions in monotonic order.'''
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                # first revision seen becomes the reference point
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x) and x in wanted:
                del wanted[x]

    def iterate():
        if follow and not files:
            # following with no files: filter to ancestors/descendants
            # of the starting revision on the fly
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            # "add" events go out in ascending order, "iter" events in
            # the caller-requested order
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev)
                if not fns:
                    def fns_generator():
                        for f in change(rev)[3]:
                            if matchfn(f):
                                yield f
                    fns = fns_generator()
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), matchfn
@@ -1,3179 +1,3174 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 import bisect, os, re, sys, urllib, shlex, stat
11 import bisect, os, re, sys, urllib, stat
12 12 import ui, hg, util, revlog, bundlerepo, extensions
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import errno, version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 # Commands start here, listed alphabetically
18 18
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    to_add = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        # pattern (non-exact) matches skip files that are already tracked;
        # exact matches are always scheduled
        if not exact and abs in repo.dirstate:
            continue
        # exact matches are only announced in verbose mode
        if not exact or ui.verbose:
            ui.status(_('adding %s\n') % rel)
        to_add.append(abs)
    if not opts.get('dry_run'):
        repo.add(to_add)
41 41
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # --similarity arrives as a string (or None); normalize to a float
    similarity = float(opts.get('similarity') or 0)
    if similarity < 0 or similarity > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil.addremove expects a 0..1 ratio, not a percentage
    return cmdutil.addremove(repo, pats, opts, similarity=similarity / 100.0)
60 60
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # cache date rendering per changectx: many lines share the same revision
    getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # option name -> column formatter; each formatter receives the
    # (context, lineno) pair produced by fctx.annotate() below
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    # with no column options at all, default to showing revision numbers
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts['changeset']) and (not opts['number'])):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # -l: append ":lineno" to the output of the last selected column
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        # skip binary files unless -a/--text was given
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # build one column per formatter, right-aligned to the width of
        # its widest entry so the columns line up
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                m = max(map(len, l))
                pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            # zip(*pieces) transposes columns back into per-line tuples;
            # l[1] is the raw line text (it already ends with a newline)
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
120 120
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo.changectx(opts['rev'])
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    # expand %-escapes (e.g. %h) in the destination name
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    archtype = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # '-' means archive to stdout; a plain directory cannot be streamed
        if archtype == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, archtype, not opts['no_decode'],
                     matchfn, prefix)
163 163
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''
    # the revision may arrive positionally (node) or via -r (rev),
    # but not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    # refuse to run with local modifications or an uncommitted merge
    cmdutil.bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge: the user must say which parent to keep
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    # check out the target changeset, revert it to its chosen parent,
    # then commit the result as the backout changeset (order matters)
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
    commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        # "rev:shorthash" display helper
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    # op1 is the pre-backout working-directory parent saved above
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
236 236
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.
    """

    if not label:
        # no argument: just report the working directory's branch
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return
    if not opts.get('force') and label in repo.branchtags():
        # shadowing an existing branch is fine only when the working
        # directory is already on that branch
        parent_branches = [p.branch() for p in repo.workingctx().parents()]
        if label not in parent_branches:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
257 257
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains unmerged heads.
    """
    branchtags = repo.branchtags()
    headset = dict.fromkeys(repo.heads(), 1)
    # sort (is-head, rev, node, tag) descending so active branches and
    # newer revisions come first
    entries = [((node in headset), repo.changelog.rev(node), node, tag)
               for tag, node in branchtags.items()]
    entries.sort()
    entries.reverse()
    for ishead, rev, node, tag in entries:
        if active and not ishead:
            # with --active, the first inactive entry ends the listing
            # (all remaining entries sort after it and are inactive too)
            break
        hexfunc = ui.debugflag and hex or short
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            pad = " " * (30 - util.locallen(tag))
            note = ((not ishead) and " (inactive)") or ''
            ui.write("%s%s %s:%s%s\n" % (tag, pad, rev, hexfunc(node), note))
286 286
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    # resolve --rev arguments to binary nodes up front
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        # --base mode: no remote is contacted; compute the outgoing set
        # locally against the changesets reachable from the base nodes
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        # breadth-first walk from the requested heads toward the roots;
        # a node all of whose parents are reachable from --base becomes
        # a root of the changegroup (prepended to keep o oldest-first)
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # normal mode: ask the destination repository what it is missing
        cmdutil.setremoteconfig(ui, opts)
        dest, revs = cmdutil.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    # HG10BZ = bundle format v1, bzip2-compressed
    changegroup.writebundle(cg, fname, "HG10BZ")
346 346
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    matched_any = False
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        # honor %-formatting in --output; defaults to stdout
        out = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        out.write(ctx.filectx(abs).data())
        matched_any = True
    # exit status 1 when nothing matched, 0 otherwise
    if matched_any:
        return 0
    return 1
370 370
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd settings before contacting any remote
    cmdutil.setremoteconfig(ui, opts)
    update_after = not opts['noupdate']
    hg.clone(ui, source, dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=update_after)
416 416
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = cmdutil.logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        # explicit file arguments: validate each name that status did
        # not report as changed, so typos abort instead of being ignored
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None   # lazily built sorted copy of files, for bisect
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rf)
                if stat.S_ISDIR(mode):
                    # a directory argument is fine only if some changed
                    # file lives under it; binary-search the sorted list
                    # for any entry with the "dir/" prefix
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rf)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rf)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rf)
    else:
        # no patterns: an empty list tells repo.commit to use all changes
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        # e.g. a bad --date string; surface it as a clean abort
        raise util.Abort(str(inst))
468 468
469 469 def docopy(ui, repo, pats, opts):
470 470 # called with the repo lock held
471 471 #
472 472 # hgsep => pathname that uses "/" to separate directories
473 473 # ossep => pathname that uses os.sep to separate directories
474 474 cwd = repo.getcwd()
475 475 errors = 0
476 476 copied = []
477 477 targets = {}
478 478
479 479 # abs: hgsep
480 480 # rel: ossep
481 481 # return: hgsep
482 482 def okaytocopy(abs, rel, exact):
483 483 reasons = {'?': _('is not managed'),
484 484 'r': _('has been marked for remove')}
485 485 state = repo.dirstate[abs]
486 486 reason = reasons.get(state)
487 487 if reason:
488 488 if exact:
489 489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 490 else:
491 491 if state == 'a':
492 492 origsrc = repo.dirstate.copied(abs)
493 493 if origsrc is not None:
494 494 return origsrc
495 495 return abs
496 496
497 497 # origsrc: hgsep
498 498 # abssrc: hgsep
499 499 # relsrc: ossep
500 500 # otarget: ossep
501 501 def copy(origsrc, abssrc, relsrc, otarget, exact):
502 502 abstarget = util.canonpath(repo.root, cwd, otarget)
503 503 reltarget = repo.pathto(abstarget, cwd)
504 504 prevsrc = targets.get(abstarget)
505 505 src = repo.wjoin(abssrc)
506 506 target = repo.wjoin(abstarget)
507 507 if prevsrc is not None:
508 508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
509 509 (reltarget, repo.pathto(abssrc, cwd),
510 510 repo.pathto(prevsrc, cwd)))
511 511 return
512 512 if (not opts['after'] and os.path.exists(target) or
513 513 opts['after'] and repo.dirstate[abstarget] in 'mn'):
514 514 if not opts['force']:
515 515 ui.warn(_('%s: not overwriting - file exists\n') %
516 516 reltarget)
517 517 return
518 518 if not opts['after'] and not opts.get('dry_run'):
519 519 os.unlink(target)
520 520 if opts['after']:
521 521 if not os.path.exists(target):
522 522 return
523 523 else:
524 524 targetdir = os.path.dirname(target) or '.'
525 525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
526 526 os.makedirs(targetdir)
527 527 try:
528 528 restore = repo.dirstate[abstarget] == 'r'
529 529 if restore and not opts.get('dry_run'):
530 530 repo.undelete([abstarget])
531 531 try:
532 532 if not opts.get('dry_run'):
533 533 util.copyfile(src, target)
534 534 restore = False
535 535 finally:
536 536 if restore:
537 537 repo.remove([abstarget])
538 538 except IOError, inst:
539 539 if inst.errno == errno.ENOENT:
540 540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
541 541 else:
542 542 ui.warn(_('%s: cannot copy - %s\n') %
543 543 (relsrc, inst.strerror))
544 544 errors += 1
545 545 return
546 546 if ui.verbose or not exact:
547 547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
548 548 targets[abstarget] = abssrc
549 549 if abstarget != origsrc:
550 550 if repo.dirstate[origsrc] == 'a':
551 551 if not ui.quiet:
552 552 ui.warn(_("%s has not been committed yet, so no copy "
553 553 "data will be stored for %s.\n")
554 554 % (repo.pathto(origsrc, cwd), reltarget))
555 555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
556 556 repo.add([abstarget])
557 557 elif not opts.get('dry_run'):
558 558 repo.copy(origsrc, abstarget)
559 559 copied.append((abssrc, relsrc, exact))
560 560
561 561 # pat: ossep
562 562 # dest ossep
563 563 # srcs: list of (hgsep, hgsep, ossep, bool)
564 564 # return: function that takes hgsep and returns ossep
565 565 def targetpathfn(pat, dest, srcs):
566 566 if os.path.isdir(pat):
567 567 abspfx = util.canonpath(repo.root, cwd, pat)
568 568 abspfx = util.localpath(abspfx)
569 569 if destdirexists:
570 570 striplen = len(os.path.split(abspfx)[0])
571 571 else:
572 572 striplen = len(abspfx)
573 573 if striplen:
574 574 striplen += len(os.sep)
575 575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
576 576 elif destdirexists:
577 577 res = lambda p: os.path.join(dest,
578 578 os.path.basename(util.localpath(p)))
579 579 else:
580 580 res = lambda p: dest
581 581 return res
582 582
583 583 # pat: ossep
584 584 # dest ossep
585 585 # srcs: list of (hgsep, hgsep, ossep, bool)
586 586 # return: function that takes hgsep and returns ossep
587 587 def targetpathafterfn(pat, dest, srcs):
588 588 if util.patkind(pat, None)[0]:
589 589 # a mercurial pattern
590 590 res = lambda p: os.path.join(dest,
591 591 os.path.basename(util.localpath(p)))
592 592 else:
593 593 abspfx = util.canonpath(repo.root, cwd, pat)
594 594 if len(abspfx) < len(srcs[0][0]):
595 595 # A directory. Either the target path contains the last
596 596 # component of the source path or it does not.
597 597 def evalpath(striplen):
598 598 score = 0
599 599 for s in srcs:
600 600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
601 601 if os.path.exists(t):
602 602 score += 1
603 603 return score
604 604
605 605 abspfx = util.localpath(abspfx)
606 606 striplen = len(abspfx)
607 607 if striplen:
608 608 striplen += len(os.sep)
609 609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
610 610 score = evalpath(striplen)
611 611 striplen1 = len(os.path.split(abspfx)[0])
612 612 if striplen1:
613 613 striplen1 += len(os.sep)
614 614 if evalpath(striplen1) > score:
615 615 striplen = striplen1
616 616 res = lambda p: os.path.join(dest,
617 617 util.localpath(p)[striplen:])
618 618 else:
619 619 # a file
620 620 if destdirexists:
621 621 res = lambda p: os.path.join(dest,
622 622 os.path.basename(util.localpath(p)))
623 623 else:
624 624 res = lambda p: dest
625 625 return res
626 626
627 627
628 628 pats = util.expand_glob(pats)
629 629 if not pats:
630 630 raise util.Abort(_('no source or destination specified'))
631 631 if len(pats) == 1:
632 632 raise util.Abort(_('no destination specified'))
633 633 dest = pats.pop()
634 634 destdirexists = os.path.isdir(dest)
635 635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
636 636 raise util.Abort(_('with multiple sources, destination must be an '
637 637 'existing directory'))
638 638 if opts['after']:
639 639 tfn = targetpathafterfn
640 640 else:
641 641 tfn = targetpathfn
642 642 copylist = []
643 643 for pat in pats:
644 644 srcs = []
645 645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
646 646 globbed=True):
647 647 origsrc = okaytocopy(abssrc, relsrc, exact)
648 648 if origsrc:
649 649 srcs.append((origsrc, abssrc, relsrc, exact))
650 650 if not srcs:
651 651 continue
652 652 copylist.append((tfn(pat, dest, srcs), srcs))
653 653 if not copylist:
654 654 raise util.Abort(_('no files to copy'))
655 655
656 656 for targetpath, srcs in copylist:
657 657 for origsrc, abssrc, relsrc, exact in srcs:
658 658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
659 659
660 660 if errors:
661 661 ui.warn(_('(consider using --after)\n'))
662 662 return errors, copied
663 663
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    wlock = repo.wlock(False)
    try:
        errcount, copied = docopy(ui, repo, pats, opts)
    finally:
        # release the working-directory lock by dropping its reference
        del wlock
    return errcount
684 684
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
    anc = rlog.ancestor(rlog.lookup(rev1), rlog.lookup(rev2))
    # print as "rev:fullhex"
    ui.write("%d:%s\n" % (rlog.rev(anc), hex(anc)))
690 690
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # complete option flags: global options, plus the command's own
        # options when a command was named
        tables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(ui, cmd)
            tables.append(entry[1])
        flags = []
        for table in tables:
            for opt in table:
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    # otherwise complete command names
    names = cmdutil.findpossible(ui, cmd).keys()
    names.sort()
    ui.write("%s\n" % "\n".join(names))
711 711
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    # default to tip when no revision was given
    if rev == "":
        rev = repo.changelog.tip()
    target = repo.changectx(rev)
    manifest = target.manifest()
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(rev, manifest)
    finally:
        del wlock
723 723
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo.changectx(parent1).manifest()
    m2 = repo.changectx(parent2).manifest()
    errors = 0
    # forward check: every dirstate entry must be consistent with the
    # parent manifests (states: n=normal, r=removed, a=added, m=merged)
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # reverse check: everything in the first parent's manifest must be
    # tracked in an appropriate state
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
750 750
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one argument may name a specific item
        dotted = [v for v in values if '.' in v]
        if len(dotted) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        fullname = section + '.' + name
        if not values:
            ui.write('%s=%s\n' % (fullname, value))
            continue
        for v in values:
            if v == section:
                # section match: print name=value for each item in it
                ui.write('%s=%s\n' % (fullname, value))
            elif v == fullname:
                # exact item match: print the bare value
                ui.write(value, '\n')
776 776
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # a missing second parent means the null revision
    rev2 = rev2 or hex(nullid)

    wlock = repo.wlock()
    try:
        p1 = repo.lookup(rev1)
        p2 = repo.lookup(rev2)
        repo.dirstate.setparents(p1, p2)
    finally:
        del wlock
792 792
def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    # raw dirstate map; each entry looks like (state, mode, size, mtime)
    # judging by how the fields are formatted below — TODO confirm
    dc = repo.dirstate._map
    k = dc.keys()
    k.sort()
    for file_ in k:
        if dc[file_][3] == -1:
            # mtime of -1 means "unset"
            # Pad or slice to locale representation
            locale_len = len(time.strftime("%x %X", time.localtime(0)))
            timestr = 'unset'
            timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
        else:
            timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
        # state char, permission bits, size, time, filename
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
811 811
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # map "foo.d" to its index file "foo.i"
    index_name = file_[:-2] + ".i"
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), index_name)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
819 819
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    # parsed is a (timestamp, tz-offset) pair
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(parsed[0]))
831 831
def debugindex(ui, file_):
    """dump the contents of an index file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    for rev in xrange(rlog.count()):
        node = rlog.node(rev)
        parents = rlog.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                rev, rlog.start(rev), rlog.length(rev), rlog.base(rev),
                rlog.linkrev(node),
                short(node), short(parents[0]), short(parents[1])))
843 843
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in xrange(rlog.count()):
        p1, p2 = rlog.parents(rlog.node(rev))
        # emit one edge per real parent; the null parent is never drawn
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
855 855
def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        # write contents to a fresh temp file and return its path;
        # callers are responsible for unlinking it
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # count of detected problems; returned at the end
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    ui.status(_("Checking patch...\n"))
    # prefer the configured patcher, then gpatch, then patch
    patcher = ui.config('ui', 'patch')
    patcher = ((patcher and util.find_exe(patcher)) or
               util.find_exe('gpatch') or
               util.find_exe('patch'))
    if not patcher:
        ui.write(_(" Can't find patch or gpatch in PATH\n"))
        ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
        problems += 1
    else:
        # actually attempt a patch here
        a = "1\n2\n3\n4\n"
        b = "1\n2\n3\ninsert\n4\n"
        fa = writetemp(a)
        d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
        fd = writetemp(d)

        files = {}
        try:
            patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
        except util.Abort, e:
            ui.write(_(" patch call failed:\n"))
            ui.write(" " + str(e) + "\n")
            problems += 1
        else:
            # verify the patch touched exactly the expected file and
            # produced exactly the expected contents
            if list(files) != [os.path.basename(fa)]:
                ui.write(_(" unexpected patch output!"))
                ui.write(_(" (you may have an incompatible version of patch)\n"))
                problems += 1
            a = file(fa).read()
            if a != b:
                ui.write(_(" patch test failed!"))
                ui.write(_(" (you may have an incompatible version of patch)\n"))
                problems += 1

        os.unlink(fa)
        os.unlink(fd)

    # merge helper
    ui.status(_("Checking merge helper...\n"))
    cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
           or "hgmerge")
    # try the full command string first, then just its first word
    cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
    if not cmdpath:
        if cmd == 'hgmerge':
            ui.write(_(" No merge helper set and can't find default"
                       " hgmerge script in PATH\n"))
            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
        problems += 1
    else:
        # actually attempt a patch here
        # run the helper on (local, ancestor, remote) temp files and
        # check the merged local file
        fa = writetemp("1\n2\n3\n4\n")
        fl = writetemp("1\n2\n3\ninsert\n4\n")
        fr = writetemp("begin\n1\n2\n3\n4\n")
        r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
        if r:
            ui.write(_(" Got unexpected merge error %d!\n") % r)
            problems += 1
        m = file(fl).read()
        if m != "begin\n1\n2\n3\ninsert\n4\n":
            ui.write(_(" Got unexpected merge results!\n"))
            ui.write(_(" (your merge helper may have the"
                       " wrong argument order)\n"))
            ui.write(_(" Result: %r\n") % m)
            problems += 1
        os.unlink(fa)
        os.unlink(fl)
        os.unlink(fr)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = (os.environ.get("HGEDITOR") or
              ui.config("ui", "editor") or
              os.environ.get("EDITOR", "vi"))
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        # ui.username() will print the actual warning/abort; note that a
        # missing username is deliberately not counted in `problems`
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
1004 1004
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    files = (file1,) + pats
    for src, abs, rel, exact in cmdutil.walk(repo, files, opts, ctx.node()):
        # renamed() yields (source path, source node) or a false value
        renamed = ctx.filectx(abs).renamed()
        if not renamed:
            ui.write(_("%s not renamed\n") % rel)
        else:
            ui.write(_("%s renamed from %s:%s\n") % (rel, renamed[0],
                                                     hex(renamed[1])))
1016 1016
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # size the columns to the widest absolute and relative paths
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        flag = exact and 'exact' or ''
        ui.write("%s\n" % (fmt % (src, abs, rel, flag)).rstrip())
1028 1028
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve -r arguments into the two nodes to compare
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    # turn patterns/include/exclude options into a match function
    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, fns, match=matchfn, opts=diffopts)
1056 1056
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # pluralize the progress note depending on how many revs we export
    msg = (len(revs) > 1 and _('exporting patches:\n')
           or _('exporting patch:\n'))
    ui.note(msg)
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1097 1097
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
        return None
    # with --print0, both field and record separators become NUL
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache of filelog objects keyed by filename
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (line number, match start col, match end col, line text)
        # for every regexp match in body
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # a single matching line; equality compares only the text so that
        # difflinestates can diff match sets across revisions
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    # matches: rev -> {filename: [linestate, ...]}, cleared per window
    matches = {}
    # copies: rev -> {filename: copy source}, filled when --follow is set
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('+'/'-', linestate) for match-status changes between two
        # revisions' match lists
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    # prev: filename -> most recently displayed revision for that file
    prev = {}
    def display(fn, rev, states, prevstates):
        # print matches for one file; returns True if anything was shown
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts['files_with_matches']:
                # only one output line per (file, rev) pair
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}
    skip = {}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs drives the search: 'add' collects matches for a rev,
    # 'iter' displays them in order, 'window' resets the per-window cache
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            mf = repo.changectx(rev).manifest()
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    # file not in this rev's manifest
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        # without --all, stop after the first matching rev
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # flush any remaining per-file state after the walk ends
    fstate = fstate.items()
    fstate.sort()
    for fn, state in fstate:
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    # shell-style exit status: 0 if something matched, 1 otherwise
    return (not found and 1) or 0
1261 1261
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    # -r limits the search to heads reachable from this revision
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        # each argument names a branch (directly, or via a revision on
        # that branch); dedupe so a branch is processed only once
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                # distinguish "named a rev on the branch" from "named the
                # branch itself" in the warning message
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1309 1309
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # (title, options) pairs accumulated by the helpers below and
    # rendered at the very end
    option_lists = []

    def addglobalopts(aliases):
        # append the global-options table (verbose) or a one-line hint
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # help for a single command; raises cmdutil.UnknownCommand via
        # findcmd if the name does not resolve
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = cmdutil.findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        # print `header` followed by a short-help line for every command
        # in `table` accepted by the optional `select` filter
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            # a leading "^" marks a basic command; strip it for display
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # help for a named topic from help.helptable
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # help for an extension: its docstring plus the commands it adds
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            # extension defines no commands; helplist will report that
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        # try command, then topic, then extension; re-raise the last
        # UnknownCommand only if all three fail
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1489 1490
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    hexfunc = ui.debugflag and hex or short
    # "default" output is used when no specific field was requested
    default = not (num or id or branch or tags)
    output = []

    if source:
        # remote lookup: only the changeset id can be queried
        source, revs = cmdutil.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # working directory: may have one or two parents, and a trailing
        # "+" when there are uncommitted changes
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # non-default branch name in parentheses
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1555 1556
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    # hold both the working-dir and repo locks for the whole import
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                data = patch.extract(ui, file(pf, 'rb'))

            # patch.extract returns the patch body in a temp file plus
            # metadata parsed from the message/patch headers
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    # --exact: update the working dir to the patch's
                    # recorded first parent before applying
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # patch records a second parent: adopt it if the
                    # first parent matches the working dir
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # record added/removed files even if patching failed
                    files = patch.updatedir(ui, repo, files)
                n = repo.commit(files, message, user, date)
                if opts.get('exact'):
                    # the committed changeset must reproduce the recorded
                    # id exactly, otherwise roll it back
                    if hex(n) != nodeid:
                        repo.rollback()
                        raise util.Abort(_('patch is damaged' +
                                           ' or loses information'))
            finally:
                os.unlink(tmpname)
    finally:
        del wlock, lock
1659 1660
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    if revs:
        # limiting by rev requires the remote to support lookup
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # nothing new: remove any stale bundle file the user asked for
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    # cleanup holds the path of a temporary bundle to delete on exit
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1726 1727
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd settings first: dest may be a remote URL
    cmdutil.setremoteconfig(ui, opts)
    # create=1 makes repository() initialize rather than open
    hg.repository(ui, dest, create=1)
1741 1742
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 terminates names with NUL for safe xargs piping
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'

    rev = opts['rev']
    node = None
    if rev:
        node = repo.lookup(rev)

    found = False
    walker = cmdutil.walk(repo, pats, opts, node=node,
                          badmatch=util.always, default='relglob')
    for src, abs, rel, exact in walker:
        # 'b' sources come from badmatch and are not real results
        if src == 'b':
            continue
        # with no revision, only report files tracked by the dirstate
        if not node and abs not in repo.dirstate:
            continue
        if opts['fullpath']:
            name = os.path.join(repo.root, abs)
        elif pats and rel:
            name = rel
        else:
            name = abs
        ui.write(name, end)
        found = True

    # exit status 0 if anything matched, 1 otherwise
    if found:
        return 0
    return 1
1782 1783
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoized accessor for a revision's changeset tuple
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        # no --limit: effectively unbounded
        limit = sys.maxint
    count = 0

    # endrev bounds how far the rename caches below are filled
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    # rcache: fn -> {changerev: rename info}
    rcache = {}
    # ncache: fn -> {filelog node: rename info}
    ncache = {}
    # dcache: one-slot cache of [manifest text, readdelta(manifest)]
    dcache = []
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        # linkrev != changerev: fall back to manifest lookup
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    # optional date-match predicate built from --date
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # every -k keyword must match user, description or files
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            # 'iter' marks the point where the revision is actually emitted
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1904 1905
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    m = repo.changectx(rev).manifest()
    for f in sorted(m.keys()):
        if ui.debugflag:
            # --debug: prepend the file revision hash
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            # -v: prepend a unix-style permission string
            if m.execf(f):
                perms = "755"
            else:
                perms = "644"
            ui.write("%3s " % perms)
        ui.write("%s\n" % f)
1929 1930
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # --rev is an alias for the positional revision argument
    node = node or rev

    if not node:
        # no revision given: pick the single other head, if unambiguous
        heads = repo.heads()
        nheads = len(heads)
        if nheads > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             nheads)
        if nheads == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        if parent == heads[0]:
            node = heads[-1]
        else:
            node = heads[0]
    return hg.merge(repo, node, force=force)
1965 1966
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    base = repo.findoutgoing(other, force=opts['force'])
    if not base:
        ui.status(_("no changes found\n"))
        return 1

    nodes = repo.changelog.nodesbetween(base, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in nodes:
        if opts['no_merges']:
            # a merge has two non-null parents
            parents = [p for p in repo.changelog.parents(n) if p != nullid]
            if len(parents) == 2:
                continue
        displayer.show(changenode=n)
1996 1997
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if file_:
        # a file argument must name exactly one file, no patterns
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        ctx = repo.filectx(files[0], changeid=rev)
    elif rev:
        ctx = repo.changectx(rev)
    else:
        ctx = repo.workingctx()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for pnode in [cp.node() for cp in ctx.parents()]:
        if pnode == nullid:
            continue
        displayer.show(changenode=pnode)
2022 2023
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    entries = ui.configitems("paths")
    if not search:
        # no argument: list every configured path
        for name, path in entries:
            ui.write("%s = %s\n" % (name, path))
        return
    for name, path in entries:
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
2042 2043
def postincoming(ui, repo, modheads, optupdate):
    # Finish a pull/unbundle: optionally update the working directory and
    # tell the user what to do next.
    #
    # This span contained both the pre-merge (wasempty) and post-merge
    # variants of the function; resolved to the merged version.
    #
    # modheads - number of heads added/changed by the operation
    # optupdate - value of the -u/--update flag
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            # a single head: updating to it is unambiguous
            return hg.update(repo, None)
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2057 2056
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    # This span contained both the pre-merge (wasempty) and post-merge
    # lines; resolved to the merged version, which no longer tracks
    # whether the repository was empty before the pull.
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    if revs:
        # -r only works if the remote side can resolve names to nodes
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
2114 2112
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    # wrap the status message in _() for translation, matching the
    # sibling commands (pull uses _('pulling from %s\n'))
    ui.status(_('pushing to %s\n') % dest)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # exit status 0 only when something was actually pushed
    return r == 0
2155 2153
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = cmdutil.logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    # --files names a file containing extra paths, one per line
    if opts['files']:
        files += open(opts['files']).read().splitlines()

    # --parent may be given more than once; resolve each to a node
    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))
2183 2181
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # only verify when the recovery actually rolled something back
    if not repo.recover():
        return 1
    return hg.verify(repo)
2195 2193
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    # (removed stray dead assignment `names = []` left over from the merge;
    # the variable was never used)
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    # note: the loop variable `exact` (per-file exact-match flag) shadows
    # the dict above, mirroring the original control flow
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs not in repo.dirstate:
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            # --after without exact names: skip files still on disk
            continue
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2246 2243
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(False)
    try:
        # first record the copies, then schedule the sources for removal
        errs, copied = docopy(ui, repo, pats, opts)
        removals = []
        for abs, rel, exact in copied:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            removals.append(abs)
        if not opts.get('dry_run'):
            repo.remove(removals, True)
        return errs
    finally:
        del wlock
2274 2271
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        # translate --date into the newest matching revision
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    # pmf: parent manifest, read lazily unless target == parent
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.
        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                                 badmatch=mf.has_key):
            names[abs] = (rel, exact)
            if src == 'b':
                target_only[abs] = True

        # walk target manifest.

        def badmatch(path):
            # accept paths already seen, or directories containing them
            if path in names:
                return True
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return True
            return False

        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                                 badmatch=badmatch):
            if abs in names or src == 'b':
                continue
            names[abs] = (rel, exact)
            target_only[abs] = True

        changes = repo.status(match=names.has_key)[:5]
        modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

        # each action is (list of files, status message template)
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], _('removing %s\n'))
        forget = ([], _('forgetting %s\n'))
        undelete = ([], _('undeleting %s\n'))
        update = {}

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, forget, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            (unknown, add, None, True, False),
            (target_only, add, None, False, False),
            )

        entries = names.items()
        entries.sort()

        for abs, (rel, exact) in entries:
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # record the action and optionally back up the file
                xlist[0].append(abs)
                update[abs] = 1
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    ui.status(xlist[1] % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                else:
                    if exact: ui.warn(_('file not managed: %s\n') % rel)
                    break
            else:
                # for/else: no table matched -
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo.changectx(parent).manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if pmf[abs] != mfentry:
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            for f in forget[0]:
                repo.dirstate.forget(f)
            r = hg.revert(repo, node, update.has_key)
            for f in add[0]:
                repo.dirstate.add(f)
            for f in undelete[0]:
                repo.dirstate.normal(f)
            for f in remove[0]:
                repo.dirstate.remove(f)
            return r
    finally:
        del wlock
2444 2441
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, which
    may lose subsequent dirstate changes.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the work (and the safety checks) live in localrepo.rollback()
    repo.rollback()
2475 2472
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2482 2479
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: run the ssh wire protocol on stdin/stdout instead of HTTP
    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # copy relevant command-line options into the [web] config section
    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if repo.ui != parentui:
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    # small adapter so cmdutil.service() can daemonize around the server
    class service:
        def init(self):
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            # only mention the port when it is not the HTTP default
            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/\n') %
                          (self.httpd.addr, self.httpd.port))
            else:
                ui.status(_('listening at http://%s/\n') % self.httpd.addr)

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2534 2531
2535 2532 def status(ui, repo, *pats, **opts):
2536 2533 """show changed files in the working directory
2537 2534
2538 2535 Show status of files in the repository. If names are given, only
2539 2536 files that match are shown. Files that are clean or ignored, are
2540 2537 not listed unless -c (clean), -i (ignored) or -A is given.
2541 2538
2542 2539 NOTE: status may appear to disagree with diff if permissions have
2543 2540 changed or a merge has occurred. The standard diff format does not
2544 2541 report permission changes and diff only reports changes relative
2545 2542 to one merge parent.
2546 2543
2547 2544 If one revision is given, it is used as the base revision.
2548 2545 If two revisions are given, the difference between them is shown.
2549 2546
2550 2547 The codes used to show the status of files are:
2551 2548 M = modified
2552 2549 A = added
2553 2550 R = removed
2554 2551 C = clean
2555 2552 ! = deleted, but still tracked
2556 2553 ? = not tracked
2557 2554 I = ignored (not shown by default)
2558 2555 = the previous added file was copied from here
2559 2556 """
2560 2557
2561 2558 all = opts['all']
2562 2559 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2563 2560
2564 2561 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2565 2562 cwd = (pats and repo.getcwd()) or ''
2566 2563 modified, added, removed, deleted, unknown, ignored, clean = [
2567 2564 n for n in repo.status(node1=node1, node2=node2, files=files,
2568 2565 match=matchfn,
2569 2566 list_ignored=all or opts['ignored'],
2570 2567 list_clean=all or opts['clean'])]
2571 2568
2572 2569 changetypes = (('modified', 'M', modified),
2573 2570 ('added', 'A', added),
2574 2571 ('removed', 'R', removed),
2575 2572 ('deleted', '!', deleted),
2576 2573 ('unknown', '?', unknown),
2577 2574 ('ignored', 'I', ignored))
2578 2575
2579 2576 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2580 2577
2581 2578 end = opts['print0'] and '\0' or '\n'
2582 2579
2583 2580 for opt, char, changes in ([ct for ct in explicit_changetypes
2584 2581 if all or opts[ct[0]]]
2585 2582 or changetypes):
2586 2583 if opts['no_status']:
2587 2584 format = "%%s%s" % end
2588 2585 else:
2589 2586 format = "%s %%s%s" % (char, end)
2590 2587
2591 2588 for f in changes:
2592 2589 ui.write(format % repo.pathto(f, cwd))
2593 2590 if ((all or opts.get('copies')) and not opts.get('no_status')):
2594 2591 copied = repo.dirstate.copied(f)
2595 2592 if copied:
2596 2593 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2597 2594
2598 2595 def tag(ui, repo, name, rev_=None, **opts):
2599 2596 """add a tag for the current or given revision
2600 2597
2601 2598 Name a particular revision using <name>.
2602 2599
2603 2600 Tags are used to name particular revisions of the repository and are
2604 2601 very useful to compare different revision, to go back to significant
2605 2602 earlier versions or to mark branch points as releases, etc.
2606 2603
2607 2604 If no revision is given, the parent of the working directory is used,
2608 2605 or tip if no revision is checked out.
2609 2606
2610 2607 To facilitate version control, distribution, and merging of tags,
2611 2608 they are stored as a file named ".hgtags" which is managed
2612 2609 similarly to other project files and can be hand-edited if
2613 2610 necessary. The file '.hg/localtags' is used for local tags (not
2614 2611 shared among repositories).
2615 2612 """
2616 2613 if name in ['tip', '.', 'null']:
2617 2614 raise util.Abort(_("the name '%s' is reserved") % name)
2618 2615 if rev_ is not None:
2619 2616 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2620 2617 "please use 'hg tag [-r REV] NAME' instead\n"))
2621 2618 if opts['rev']:
2622 2619 raise util.Abort(_("use only one form to specify the revision"))
2623 2620 if opts['rev'] and opts['remove']:
2624 2621 raise util.Abort(_("--rev and --remove are incompatible"))
2625 2622 if opts['rev']:
2626 2623 rev_ = opts['rev']
2627 2624 message = opts['message']
2628 2625 if opts['remove']:
2629 2626 if not name in repo.tags():
2630 2627 raise util.Abort(_('tag %s does not exist') % name)
2631 2628 rev_ = nullid
2632 2629 if not message:
2633 2630 message = _('Removed tag %s') % name
2634 2631 elif name in repo.tags() and not opts['force']:
2635 2632 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2636 2633 % name)
2637 2634 if not rev_ and repo.dirstate.parents()[1] != nullid:
2638 2635 raise util.Abort(_('uncommitted merge - please provide a '
2639 2636 'specific revision'))
2640 2637 r = repo.changectx(rev_).node()
2641 2638
2642 2639 if not message:
2643 2640 message = _('Added tag %s for changeset %s') % (name, short(r))
2644 2641
2645 2642 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2646 2643
2647 2644 def tags(ui, repo):
2648 2645 """list repository tags
2649 2646
2650 2647 List the repository tags.
2651 2648
2652 2649 This lists both regular and local tags.
2653 2650 """
2654 2651
2655 2652 l = repo.tagslist()
2656 2653 l.reverse()
2657 2654 hexfunc = ui.debugflag and hex or short
2658 2655 for t, n in l:
2659 2656 try:
2660 2657 hn = hexfunc(n)
2661 2658 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2662 2659 except revlog.LookupError:
2663 2660 r = " ?:%s" % hn
2664 2661 if ui.quiet:
2665 2662 ui.write("%s\n" % t)
2666 2663 else:
2667 2664 spaces = " " * (30 - util.locallen(t))
2668 2665 ui.write("%s%s %s\n" % (t, spaces, r))
2669 2666
2670 2667 def tip(ui, repo, **opts):
2671 2668 """show the tip revision
2672 2669
2673 2670 Show the tip revision.
2674 2671 """
2675 2672 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2676 2673
2677 2674 def unbundle(ui, repo, fname1, *fnames, **opts):
2678 2675 """apply one or more changegroup files
2679 2676
2680 2677 Apply one or more compressed changegroup files generated by the
2681 2678 bundle command.
2682 2679 """
2683 2680 fnames = (fname1,) + fnames
2684 result = None
2685 wasempty = repo.changelog.count() == 0
2686 2681 for fname in fnames:
2687 2682 if os.path.exists(fname):
2688 2683 f = open(fname, "rb")
2689 2684 else:
2690 2685 f = urllib.urlopen(fname)
2691 2686 gen = changegroup.readbundle(f, fname)
2692 2687 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2693 2688
2694 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2689 return postincoming(ui, repo, modheads, opts['update'])
2695 2690
2696 2691 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2697 2692 """update working directory
2698 2693
2699 2694 Update the working directory to the specified revision, or the
2700 2695 tip of the current branch if none is specified.
2701 2696
2702 2697 If there are no outstanding changes in the working directory and
2703 2698 there is a linear relationship between the current version and the
2704 2699 requested version, the result is the requested version.
2705 2700
2706 2701 To merge the working directory with another revision, use the
2707 2702 merge command.
2708 2703
2709 2704 By default, update will refuse to run if doing so would require
2710 2705 discarding local changes.
2711 2706 """
2712 2707 if rev and node:
2713 2708 raise util.Abort(_("please specify just one revision"))
2714 2709
2715 2710 if not rev:
2716 2711 rev = node
2717 2712
2718 2713 if date:
2719 2714 if rev:
2720 2715 raise util.Abort(_("you can't specify a revision and a date"))
2721 2716 rev = cmdutil.finddate(ui, repo, date)
2722 2717
2723 2718 if clean:
2724 2719 return hg.clean(repo, rev)
2725 2720 else:
2726 2721 return hg.update(repo, rev)
2727 2722
2728 2723 def verify(ui, repo):
2729 2724 """verify the integrity of the repository
2730 2725
2731 2726 Verify the integrity of the current repository.
2732 2727
2733 2728 This will perform an extensive check of the repository's
2734 2729 integrity, validating the hashes and checksums of each entry in
2735 2730 the changelog, manifest, and tracked files, as well as the
2736 2731 integrity of their crosslinks and indices.
2737 2732 """
2738 2733 return hg.verify(repo)
2739 2734
2740 2735 def version_(ui):
2741 2736 """output version and copyright information"""
2742 2737 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2743 2738 % version.get_version())
2744 2739 ui.status(_(
2745 2740 "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
2746 2741 "This is free software; see the source for copying conditions. "
2747 2742 "There is NO\nwarranty; "
2748 2743 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2749 2744 ))
2750 2745
2751 2746 # Command options and aliases are listed here, alphabetically
2752 2747
2753 2748 globalopts = [
2754 2749 ('R', 'repository', '',
2755 2750 _('repository root directory or symbolic path name')),
2756 2751 ('', 'cwd', '', _('change working directory')),
2757 2752 ('y', 'noninteractive', None,
2758 2753 _('do not prompt, assume \'yes\' for any required answers')),
2759 2754 ('q', 'quiet', None, _('suppress output')),
2760 2755 ('v', 'verbose', None, _('enable additional output')),
2761 2756 ('', 'config', [], _('set/override config option')),
2762 2757 ('', 'debug', None, _('enable debugging output')),
2763 2758 ('', 'debugger', None, _('start debugger')),
2764 2759 ('', 'encoding', util._encoding, _('set the charset encoding')),
2765 2760 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2766 2761 ('', 'lsprof', None, _('print improved command execution profile')),
2767 2762 ('', 'traceback', None, _('print traceback on exception')),
2768 2763 ('', 'time', None, _('time how long the command takes')),
2769 2764 ('', 'profile', None, _('print command execution profile')),
2770 2765 ('', 'version', None, _('output version information and exit')),
2771 2766 ('h', 'help', None, _('display help and exit')),
2772 2767 ]
2773 2768
2774 2769 dryrunopts = [('n', 'dry-run', None,
2775 2770 _('do not perform actions, just print output'))]
2776 2771
2777 2772 remoteopts = [
2778 2773 ('e', 'ssh', '', _('specify ssh command to use')),
2779 2774 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2780 2775 ]
2781 2776
2782 2777 walkopts = [
2783 2778 ('I', 'include', [], _('include names matching the given patterns')),
2784 2779 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2785 2780 ]
2786 2781
2787 2782 commitopts = [
2788 2783 ('m', 'message', '', _('use <text> as commit message')),
2789 2784 ('l', 'logfile', '', _('read commit message from <file>')),
2790 2785 ]
2791 2786
2792 2787 table = {
2793 2788 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2794 2789 "addremove":
2795 2790 (addremove,
2796 2791 [('s', 'similarity', '',
2797 2792 _('guess renamed files by similarity (0<=s<=100)')),
2798 2793 ] + walkopts + dryrunopts,
2799 2794 _('hg addremove [OPTION]... [FILE]...')),
2800 2795 "^annotate":
2801 2796 (annotate,
2802 2797 [('r', 'rev', '', _('annotate the specified revision')),
2803 2798 ('f', 'follow', None, _('follow file copies and renames')),
2804 2799 ('a', 'text', None, _('treat all files as text')),
2805 2800 ('u', 'user', None, _('list the author')),
2806 2801 ('d', 'date', None, _('list the date')),
2807 2802 ('n', 'number', None, _('list the revision number (default)')),
2808 2803 ('c', 'changeset', None, _('list the changeset')),
2809 2804 ('l', 'line-number', None,
2810 2805 _('show line number at the first appearance'))
2811 2806 ] + walkopts,
2812 2807 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2813 2808 "archive":
2814 2809 (archive,
2815 2810 [('', 'no-decode', None, _('do not pass files through decoders')),
2816 2811 ('p', 'prefix', '', _('directory prefix for files in archive')),
2817 2812 ('r', 'rev', '', _('revision to distribute')),
2818 2813 ('t', 'type', '', _('type of distribution to create')),
2819 2814 ] + walkopts,
2820 2815 _('hg archive [OPTION]... DEST')),
2821 2816 "backout":
2822 2817 (backout,
2823 2818 [('', 'merge', None,
2824 2819 _('merge with old dirstate parent after backout')),
2825 2820 ('d', 'date', '', _('record datecode as commit date')),
2826 2821 ('', 'parent', '', _('parent to choose when backing out merge')),
2827 2822 ('u', 'user', '', _('record user as committer')),
2828 2823 ('r', 'rev', '', _('revision to backout')),
2829 2824 ] + walkopts + commitopts,
2830 2825 _('hg backout [OPTION]... [-r] REV')),
2831 2826 "branch":
2832 2827 (branch,
2833 2828 [('f', 'force', None,
2834 2829 _('set branch name even if it shadows an existing branch'))],
2835 2830 _('hg branch [NAME]')),
2836 2831 "branches":
2837 2832 (branches,
2838 2833 [('a', 'active', False,
2839 2834 _('show only branches that have unmerged heads'))],
2840 2835 _('hg branches [-a]')),
2841 2836 "bundle":
2842 2837 (bundle,
2843 2838 [('f', 'force', None,
2844 2839 _('run even when remote repository is unrelated')),
2845 2840 ('r', 'rev', [],
2846 2841 _('a changeset you would like to bundle')),
2847 2842 ('', 'base', [],
2848 2843 _('a base changeset to specify instead of a destination')),
2849 2844 ] + remoteopts,
2850 2845 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2851 2846 "cat":
2852 2847 (cat,
2853 2848 [('o', 'output', '', _('print output to file with formatted name')),
2854 2849 ('r', 'rev', '', _('print the given revision')),
2855 2850 ] + walkopts,
2856 2851 _('hg cat [OPTION]... FILE...')),
2857 2852 "^clone":
2858 2853 (clone,
2859 2854 [('U', 'noupdate', None, _('do not update the new working directory')),
2860 2855 ('r', 'rev', [],
2861 2856 _('a changeset you would like to have after cloning')),
2862 2857 ('', 'pull', None, _('use pull protocol to copy metadata')),
2863 2858 ('', 'uncompressed', None,
2864 2859 _('use uncompressed transfer (fast over LAN)')),
2865 2860 ] + remoteopts,
2866 2861 _('hg clone [OPTION]... SOURCE [DEST]')),
2867 2862 "^commit|ci":
2868 2863 (commit,
2869 2864 [('A', 'addremove', None,
2870 2865 _('mark new/missing files as added/removed before committing')),
2871 2866 ('d', 'date', '', _('record datecode as commit date')),
2872 2867 ('u', 'user', '', _('record user as commiter')),
2873 2868 ] + walkopts + commitopts,
2874 2869 _('hg commit [OPTION]... [FILE]...')),
2875 2870 "copy|cp":
2876 2871 (copy,
2877 2872 [('A', 'after', None, _('record a copy that has already occurred')),
2878 2873 ('f', 'force', None,
2879 2874 _('forcibly copy over an existing managed file')),
2880 2875 ] + walkopts + dryrunopts,
2881 2876 _('hg copy [OPTION]... [SOURCE]... DEST')),
2882 2877 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2883 2878 "debugcomplete":
2884 2879 (debugcomplete,
2885 2880 [('o', 'options', None, _('show the command options'))],
2886 2881 _('debugcomplete [-o] CMD')),
2887 2882 "debuginstall": (debuginstall, [], _('debuginstall')),
2888 2883 "debugrebuildstate":
2889 2884 (debugrebuildstate,
2890 2885 [('r', 'rev', '', _('revision to rebuild to'))],
2891 2886 _('debugrebuildstate [-r REV] [REV]')),
2892 2887 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2893 2888 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2894 2889 "debugstate": (debugstate, [], _('debugstate')),
2895 2890 "debugdate":
2896 2891 (debugdate,
2897 2892 [('e', 'extended', None, _('try extended date formats'))],
2898 2893 _('debugdate [-e] DATE [RANGE]')),
2899 2894 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2900 2895 "debugindex": (debugindex, [], _('debugindex FILE')),
2901 2896 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2902 2897 "debugrename":
2903 2898 (debugrename,
2904 2899 [('r', 'rev', '', _('revision to debug'))],
2905 2900 _('debugrename [-r REV] FILE')),
2906 2901 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2907 2902 "^diff":
2908 2903 (diff,
2909 2904 [('r', 'rev', [], _('revision')),
2910 2905 ('a', 'text', None, _('treat all files as text')),
2911 2906 ('p', 'show-function', None,
2912 2907 _('show which function each change is in')),
2913 2908 ('g', 'git', None, _('use git extended diff format')),
2914 2909 ('', 'nodates', None, _("don't include dates in diff headers")),
2915 2910 ('w', 'ignore-all-space', None,
2916 2911 _('ignore white space when comparing lines')),
2917 2912 ('b', 'ignore-space-change', None,
2918 2913 _('ignore changes in the amount of white space')),
2919 2914 ('B', 'ignore-blank-lines', None,
2920 2915 _('ignore changes whose lines are all blank')),
2921 2916 ] + walkopts,
2922 2917 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2923 2918 "^export":
2924 2919 (export,
2925 2920 [('o', 'output', '', _('print output to file with formatted name')),
2926 2921 ('a', 'text', None, _('treat all files as text')),
2927 2922 ('g', 'git', None, _('use git extended diff format')),
2928 2923 ('', 'nodates', None, _("don't include dates in diff headers")),
2929 2924 ('', 'switch-parent', None, _('diff against the second parent'))],
2930 2925 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2931 2926 "grep":
2932 2927 (grep,
2933 2928 [('0', 'print0', None, _('end fields with NUL')),
2934 2929 ('', 'all', None, _('print all revisions that match')),
2935 2930 ('f', 'follow', None,
2936 2931 _('follow changeset history, or file history across copies and renames')),
2937 2932 ('i', 'ignore-case', None, _('ignore case when matching')),
2938 2933 ('l', 'files-with-matches', None,
2939 2934 _('print only filenames and revs that match')),
2940 2935 ('n', 'line-number', None, _('print matching line numbers')),
2941 2936 ('r', 'rev', [], _('search in given revision range')),
2942 2937 ('u', 'user', None, _('print user who committed change')),
2943 2938 ] + walkopts,
2944 2939 _('hg grep [OPTION]... PATTERN [FILE]...')),
2945 2940 "heads":
2946 2941 (heads,
2947 2942 [('', 'style', '', _('display using template map file')),
2948 2943 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2949 2944 ('', 'template', '', _('display with template'))],
2950 2945 _('hg heads [-r REV] [REV]...')),
2951 2946 "help": (help_, [], _('hg help [COMMAND]')),
2952 2947 "identify|id":
2953 2948 (identify,
2954 2949 [('r', 'rev', '', _('identify the specified rev')),
2955 2950 ('n', 'num', None, _('show local revision number')),
2956 2951 ('i', 'id', None, _('show global revision id')),
2957 2952 ('b', 'branch', None, _('show branch')),
2958 2953 ('t', 'tags', None, _('show tags'))],
2959 2954 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2960 2955 "import|patch":
2961 2956 (import_,
2962 2957 [('p', 'strip', 1,
2963 2958 _('directory strip option for patch. This has the same\n'
2964 2959 'meaning as the corresponding patch option')),
2965 2960 ('b', 'base', '', _('base path')),
2966 2961 ('f', 'force', None,
2967 2962 _('skip check for outstanding uncommitted changes')),
2968 2963 ('', 'exact', None,
2969 2964 _('apply patch to the nodes from which it was generated')),
2970 2965 ('', 'import-branch', None,
2971 2966 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2972 2967 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2973 2968 "incoming|in": (incoming,
2974 2969 [('M', 'no-merges', None, _('do not show merges')),
2975 2970 ('f', 'force', None,
2976 2971 _('run even when remote repository is unrelated')),
2977 2972 ('', 'style', '', _('display using template map file')),
2978 2973 ('n', 'newest-first', None, _('show newest record first')),
2979 2974 ('', 'bundle', '', _('file to store the bundles into')),
2980 2975 ('p', 'patch', None, _('show patch')),
2981 2976 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2982 2977 ('', 'template', '', _('display with template')),
2983 2978 ] + remoteopts,
2984 2979 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2985 2980 ' [--bundle FILENAME] [SOURCE]')),
2986 2981 "^init":
2987 2982 (init,
2988 2983 remoteopts,
2989 2984 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2990 2985 "locate":
2991 2986 (locate,
2992 2987 [('r', 'rev', '', _('search the repository as it stood at rev')),
2993 2988 ('0', 'print0', None,
2994 2989 _('end filenames with NUL, for use with xargs')),
2995 2990 ('f', 'fullpath', None,
2996 2991 _('print complete paths from the filesystem root')),
2997 2992 ] + walkopts,
2998 2993 _('hg locate [OPTION]... [PATTERN]...')),
2999 2994 "^log|history":
3000 2995 (log,
3001 2996 [('f', 'follow', None,
3002 2997 _('follow changeset history, or file history across copies and renames')),
3003 2998 ('', 'follow-first', None,
3004 2999 _('only follow the first parent of merge changesets')),
3005 3000 ('d', 'date', '', _('show revs matching date spec')),
3006 3001 ('C', 'copies', None, _('show copied files')),
3007 3002 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3008 3003 ('l', 'limit', '', _('limit number of changes displayed')),
3009 3004 ('r', 'rev', [], _('show the specified revision or range')),
3010 3005 ('', 'removed', None, _('include revs where files were removed')),
3011 3006 ('M', 'no-merges', None, _('do not show merges')),
3012 3007 ('', 'style', '', _('display using template map file')),
3013 3008 ('m', 'only-merges', None, _('show only merges')),
3014 3009 ('p', 'patch', None, _('show patch')),
3015 3010 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3016 3011 ('', 'template', '', _('display with template')),
3017 3012 ] + walkopts,
3018 3013 _('hg log [OPTION]... [FILE]')),
3019 3014 "manifest": (manifest, [], _('hg manifest [REV]')),
3020 3015 "^merge":
3021 3016 (merge,
3022 3017 [('f', 'force', None, _('force a merge with outstanding changes')),
3023 3018 ('r', 'rev', '', _('revision to merge')),
3024 3019 ],
3025 3020 _('hg merge [-f] [[-r] REV]')),
3026 3021 "outgoing|out": (outgoing,
3027 3022 [('M', 'no-merges', None, _('do not show merges')),
3028 3023 ('f', 'force', None,
3029 3024 _('run even when remote repository is unrelated')),
3030 3025 ('p', 'patch', None, _('show patch')),
3031 3026 ('', 'style', '', _('display using template map file')),
3032 3027 ('r', 'rev', [], _('a specific revision you would like to push')),
3033 3028 ('n', 'newest-first', None, _('show newest record first')),
3034 3029 ('', 'template', '', _('display with template')),
3035 3030 ] + remoteopts,
3036 3031 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3037 3032 "^parents":
3038 3033 (parents,
3039 3034 [('r', 'rev', '', _('show parents from the specified rev')),
3040 3035 ('', 'style', '', _('display using template map file')),
3041 3036 ('', 'template', '', _('display with template'))],
3042 3037 _('hg parents [-r REV] [FILE]')),
3043 3038 "paths": (paths, [], _('hg paths [NAME]')),
3044 3039 "^pull":
3045 3040 (pull,
3046 3041 [('u', 'update', None,
3047 3042 _('update to new tip if changesets were pulled')),
3048 3043 ('f', 'force', None,
3049 3044 _('run even when remote repository is unrelated')),
3050 3045 ('r', 'rev', [],
3051 3046 _('a specific revision up to which you would like to pull')),
3052 3047 ] + remoteopts,
3053 3048 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3054 3049 "^push":
3055 3050 (push,
3056 3051 [('f', 'force', None, _('force push')),
3057 3052 ('r', 'rev', [], _('a specific revision you would like to push')),
3058 3053 ] + remoteopts,
3059 3054 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3060 3055 "debugrawcommit|rawcommit":
3061 3056 (rawcommit,
3062 3057 [('p', 'parent', [], _('parent')),
3063 3058 ('d', 'date', '', _('date code')),
3064 3059 ('u', 'user', '', _('user')),
3065 3060 ('F', 'files', '', _('file list'))
3066 3061 ] + commitopts,
3067 3062 _('hg debugrawcommit [OPTION]... [FILE]...')),
3068 3063 "recover": (recover, [], _('hg recover')),
3069 3064 "^remove|rm":
3070 3065 (remove,
3071 3066 [('A', 'after', None, _('record remove that has already occurred')),
3072 3067 ('f', 'force', None, _('remove file even if modified')),
3073 3068 ] + walkopts,
3074 3069 _('hg remove [OPTION]... FILE...')),
3075 3070 "rename|mv":
3076 3071 (rename,
3077 3072 [('A', 'after', None, _('record a rename that has already occurred')),
3078 3073 ('f', 'force', None,
3079 3074 _('forcibly copy over an existing managed file')),
3080 3075 ] + walkopts + dryrunopts,
3081 3076 _('hg rename [OPTION]... SOURCE... DEST')),
3082 3077 "^revert":
3083 3078 (revert,
3084 3079 [('a', 'all', None, _('revert all changes when no arguments given')),
3085 3080 ('d', 'date', '', _('tipmost revision matching date')),
3086 3081 ('r', 'rev', '', _('revision to revert to')),
3087 3082 ('', 'no-backup', None, _('do not save backup copies of files')),
3088 3083 ] + walkopts + dryrunopts,
3089 3084 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3090 3085 "rollback": (rollback, [], _('hg rollback')),
3091 3086 "root": (root, [], _('hg root')),
3092 3087 "showconfig|debugconfig":
3093 3088 (showconfig,
3094 3089 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3095 3090 _('showconfig [-u] [NAME]...')),
3096 3091 "^serve":
3097 3092 (serve,
3098 3093 [('A', 'accesslog', '', _('name of access log file to write to')),
3099 3094 ('d', 'daemon', None, _('run server in background')),
3100 3095 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3101 3096 ('E', 'errorlog', '', _('name of error log file to write to')),
3102 3097 ('p', 'port', 0, _('port to use (default: 8000)')),
3103 3098 ('a', 'address', '', _('address to use')),
3104 3099 ('n', 'name', '',
3105 3100 _('name to show in web pages (default: working dir)')),
3106 3101 ('', 'webdir-conf', '', _('name of the webdir config file'
3107 3102 ' (serve more than one repo)')),
3108 3103 ('', 'pid-file', '', _('name of file to write process ID to')),
3109 3104 ('', 'stdio', None, _('for remote clients')),
3110 3105 ('t', 'templates', '', _('web templates to use')),
3111 3106 ('', 'style', '', _('template style to use')),
3112 3107 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3113 3108 ('', 'certificate', '', _('SSL certificate file'))],
3114 3109 _('hg serve [OPTION]...')),
3115 3110 "^status|st":
3116 3111 (status,
3117 3112 [('A', 'all', None, _('show status of all files')),
3118 3113 ('m', 'modified', None, _('show only modified files')),
3119 3114 ('a', 'added', None, _('show only added files')),
3120 3115 ('r', 'removed', None, _('show only removed files')),
3121 3116 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3122 3117 ('c', 'clean', None, _('show only files without changes')),
3123 3118 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3124 3119 ('i', 'ignored', None, _('show only ignored files')),
3125 3120 ('n', 'no-status', None, _('hide status prefix')),
3126 3121 ('C', 'copies', None, _('show source of copied files')),
3127 3122 ('0', 'print0', None,
3128 3123 _('end filenames with NUL, for use with xargs')),
3129 3124 ('', 'rev', [], _('show difference from revision')),
3130 3125 ] + walkopts,
3131 3126 _('hg status [OPTION]... [FILE]...')),
3132 3127 "tag":
3133 3128 (tag,
3134 3129 [('f', 'force', None, _('replace existing tag')),
3135 3130 ('l', 'local', None, _('make the tag local')),
3136 3131 ('m', 'message', '', _('message for tag commit log entry')),
3137 3132 ('d', 'date', '', _('record datecode as commit date')),
3138 3133 ('u', 'user', '', _('record user as commiter')),
3139 3134 ('r', 'rev', '', _('revision to tag')),
3140 3135 ('', 'remove', None, _('remove a tag'))],
3141 3136 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3142 3137 "tags": (tags, [], _('hg tags')),
3143 3138 "tip":
3144 3139 (tip,
3145 3140 [('', 'style', '', _('display using template map file')),
3146 3141 ('p', 'patch', None, _('show patch')),
3147 3142 ('', 'template', '', _('display with template'))],
3148 3143 _('hg tip [-p]')),
3149 3144 "unbundle":
3150 3145 (unbundle,
3151 3146 [('u', 'update', None,
3152 3147 _('update to new tip if changesets were unbundled'))],
3153 3148 _('hg unbundle [-u] FILE...')),
3154 3149 "^update|up|checkout|co":
3155 3150 (update,
3156 3151 [('C', 'clean', None, _('overwrite locally modified files')),
3157 3152 ('d', 'date', '', _('tipmost revision matching date')),
3158 3153 ('r', 'rev', '', _('revision'))],
3159 3154 _('hg update [-C] [-d DATE] [[-r] REV]')),
3160 3155 "verify": (verify, [], _('hg verify')),
3161 3156 "version": (version_, [], _('hg version')),
3162 3157 }
3163 3158
3164 3159 extensions.commandtable = table
3165 3160
3166 3161 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3167 3162 " debugindex debugindexdot debugdate debuginstall")
3168 3163 optionalrepo = ("paths serve showconfig")
3169 3164
3170 3165 def dispatch(args, argv0=None):
3171 3166 try:
3172 3167 u = ui.ui(traceback='--traceback' in args)
3173 3168 except util.Abort, inst:
3174 3169 sys.stderr.write(_("abort: %s\n") % inst)
3175 3170 return -1
3176 3171 return cmdutil.runcatch(u, args, argv0=argv0)
3177 3172
3178 3173 def run():
3179 3174 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
@@ -1,500 +1,503 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 from node import *
11 11 from i18n import _
12 12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 13 import cStringIO
14 14
15 15 _unknown = ('?', 0, 0, 0)
16 16 _format = ">cllll"
17 17
18 18 class dirstate(object):
19 19
20 20 def __init__(self, opener, ui, root):
21 21 self._opener = opener
22 22 self._root = root
23 23 self._dirty = False
24 self._dirtypl = False
24 25 self._ui = ui
25 26
26 27 def __getattr__(self, name):
27 28 if name == '_map':
28 29 self._read()
29 30 return self._map
30 31 elif name == '_copymap':
31 32 self._read()
32 33 return self._copymap
33 34 elif name == '_branch':
34 35 try:
35 36 self._branch = (self._opener("branch").read().strip()
36 37 or "default")
37 38 except IOError:
38 39 self._branch = "default"
39 40 return self._branch
40 41 elif name == '_pl':
41 42 self._pl = [nullid, nullid]
42 43 try:
43 44 st = self._opener("dirstate").read(40)
44 45 if len(st) == 40:
45 46 self._pl = st[:20], st[20:40]
46 47 except IOError, err:
47 48 if err.errno != errno.ENOENT: raise
48 49 return self._pl
49 50 elif name == '_dirs':
50 51 self._dirs = {}
51 52 for f in self._map:
52 53 self._incpath(f)
53 54 return self._dirs
54 55 elif name == '_ignore':
55 56 files = [self._join('.hgignore')]
56 57 for name, path in self._ui.configitems("ui"):
57 58 if name == 'ignore' or name.startswith('ignore.'):
58 59 files.append(os.path.expanduser(path))
59 60 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
60 61 return self._ignore
61 62 elif name == '_slash':
62 63 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
63 64 return self._slash
64 65 else:
65 66 raise AttributeError, name
66 67
67 68 def _join(self, f):
68 69 return os.path.join(self._root, f)
69 70
70 71 def getcwd(self):
71 72 cwd = os.getcwd()
72 73 if cwd == self._root: return ''
73 74 # self._root ends with a path separator if self._root is '/' or 'C:\'
74 75 rootsep = self._root
75 76 if not rootsep.endswith(os.sep):
76 77 rootsep += os.sep
77 78 if cwd.startswith(rootsep):
78 79 return cwd[len(rootsep):]
79 80 else:
80 81 # we're outside the repo. return an absolute path.
81 82 return cwd
82 83
83 84 def pathto(self, f, cwd=None):
84 85 if cwd is None:
85 86 cwd = self.getcwd()
86 87 path = util.pathto(self._root, cwd, f)
87 88 if self._slash:
88 89 return path.replace(os.sep, '/')
89 90 return path
90 91
91 92 def __getitem__(self, key):
92 93 ''' current states:
93 94 n normal
94 95 m needs merging
95 96 r marked for removal
96 97 a marked for addition
97 98 ? not tracked'''
98 99 return self._map.get(key, ("?",))[0]
99 100
100 101 def __contains__(self, key):
101 102 return key in self._map
102 103
103 104 def __iter__(self):
104 105 a = self._map.keys()
105 106 a.sort()
106 107 for x in a:
107 108 yield x
108 109
109 110 def parents(self):
110 111 return self._pl
111 112
112 113 def branch(self):
113 114 return self._branch
114 115
115 116 def setparents(self, p1, p2=nullid):
116 self._dirty = True
117 self._dirty = self._dirtypl = True
117 118 self._pl = p1, p2
118 119
119 120 def setbranch(self, branch):
120 121 self._branch = branch
121 122 self._opener("branch", "w").write(branch + '\n')
122 123
123 124 def _read(self):
124 125 self._map = {}
125 126 self._copymap = {}
126 self._pl = [nullid, nullid]
127 if not self._dirtypl:
128 self._pl = [nullid, nullid]
127 129 try:
128 130 st = self._opener("dirstate").read()
129 131 except IOError, err:
130 132 if err.errno != errno.ENOENT: raise
131 133 return
132 134 if not st:
133 135 return
134 136
135 self._pl = [st[:20], st[20: 40]]
137 if not self._dirtypl:
138 self._pl = [st[:20], st[20: 40]]
136 139
137 140 # deref fields so they will be local in loop
138 141 dmap = self._map
139 142 copymap = self._copymap
140 143 unpack = struct.unpack
141 144
142 145 pos = 40
143 146 e_size = struct.calcsize(_format)
144 147
145 148 while pos < len(st):
146 149 newpos = pos + e_size
147 150 e = unpack(_format, st[pos:newpos])
148 151 l = e[4]
149 152 pos = newpos
150 153 newpos = pos + l
151 154 f = st[pos:newpos]
152 155 if '\0' in f:
153 156 f, c = f.split('\0')
154 157 copymap[f] = c
155 158 dmap[f] = e[:4]
156 159 pos = newpos
157 160
158 161 def invalidate(self):
159 162 for a in "_map _copymap _branch _pl _dirs _ignore".split():
160 if hasattr(self, a):
161 self.__delattr__(a)
163 if a in self.__dict__:
164 delattr(self, a)
162 165 self._dirty = False
163 166
164 167 def copy(self, source, dest):
165 168 self._dirty = True
166 169 self._copymap[dest] = source
167 170
168 171 def copied(self, file):
169 172 return self._copymap.get(file, None)
170 173
171 174 def copies(self):
172 175 return self._copymap
173 176
174 177 def _incpath(self, path):
175 178 for c in strutil.findall(path, '/'):
176 179 pc = path[:c]
177 180 self._dirs.setdefault(pc, 0)
178 181 self._dirs[pc] += 1
179 182
180 183 def _decpath(self, path):
181 184 for c in strutil.findall(path, '/'):
182 185 pc = path[:c]
183 186 self._dirs.setdefault(pc, 0)
184 187 self._dirs[pc] -= 1
185 188
186 189 def _incpathcheck(self, f):
187 190 if '\r' in f or '\n' in f:
188 191 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
189 192 # shadows
190 193 if f in self._dirs:
191 194 raise util.Abort(_('directory named %r already in dirstate') % f)
192 195 for c in strutil.rfindall(f, '/'):
193 196 d = f[:c]
194 197 if d in self._dirs:
195 198 break
196 199 if d in self._map:
197 200 raise util.Abort(_('file named %r already in dirstate') % d)
198 201 self._incpath(f)
199 202
200 203 def normal(self, f):
201 204 'mark a file normal'
202 205 self._dirty = True
203 206 s = os.lstat(self._join(f))
204 207 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
205 208 if self._copymap.has_key(f):
206 209 del self._copymap[f]
207 210
208 211 def normaldirty(self, f):
209 212 'mark a file normal, but possibly dirty'
210 213 self._dirty = True
211 214 s = os.lstat(self._join(f))
212 215 self._map[f] = ('n', s.st_mode, -1, -1)
213 216 if f in self._copymap:
214 217 del self._copymap[f]
215 218
216 219 def add(self, f):
217 220 'mark a file added'
218 221 self._dirty = True
219 222 self._incpathcheck(f)
220 223 self._map[f] = ('a', 0, -1, -1)
221 224 if f in self._copymap:
222 225 del self._copymap[f]
223 226
224 227 def remove(self, f):
225 228 'mark a file removed'
226 229 self._dirty = True
227 230 self._map[f] = ('r', 0, 0, 0)
228 231 self._decpath(f)
229 232 if f in self._copymap:
230 233 del self._copymap[f]
231 234
232 235 def merge(self, f):
233 236 'mark a file merged'
234 237 self._dirty = True
235 238 s = os.lstat(self._join(f))
236 239 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
237 240 if f in self._copymap:
238 241 del self._copymap[f]
239 242
240 243 def forget(self, f):
241 244 'forget a file'
242 245 self._dirty = True
243 246 try:
244 247 del self._map[f]
245 248 self._decpath(f)
246 249 except KeyError:
247 250 self._ui.warn(_("not in dirstate: %s!\n") % f)
248 251
249 252 def rebuild(self, parent, files):
250 253 self.invalidate()
251 254 for f in files:
252 255 if files.execf(f):
253 256 self._map[f] = ('n', 0777, -1, 0)
254 257 else:
255 258 self._map[f] = ('n', 0666, -1, 0)
256 259 self._pl = (parent, nullid)
257 260 self._dirty = True
258 261
259 262 def write(self):
260 263 if not self._dirty:
261 264 return
262 265 cs = cStringIO.StringIO()
263 266 cs.write("".join(self._pl))
264 267 for f, e in self._map.iteritems():
265 268 c = self.copied(f)
266 269 if c:
267 270 f = f + "\0" + c
268 271 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
269 272 cs.write(e)
270 273 cs.write(f)
271 274 st = self._opener("dirstate", "w", atomictemp=True)
272 275 st.write(cs.getvalue())
273 276 st.rename()
274 self._dirty = False
277 self._dirty = self._dirtypl = False
275 278
276 279 def _filter(self, files):
277 280 ret = {}
278 281 unknown = []
279 282
280 283 for x in files:
281 284 if x == '.':
282 285 return self._map.copy()
283 286 if x not in self._map:
284 287 unknown.append(x)
285 288 else:
286 289 ret[x] = self._map[x]
287 290
288 291 if not unknown:
289 292 return ret
290 293
291 294 b = self._map.keys()
292 295 b.sort()
293 296 blen = len(b)
294 297
295 298 for x in unknown:
296 299 bs = bisect.bisect(b, "%s%s" % (x, '/'))
297 300 while bs < blen:
298 301 s = b[bs]
299 302 if len(s) > len(x) and s.startswith(x):
300 303 ret[s] = self._map[s]
301 304 else:
302 305 break
303 306 bs += 1
304 307 return ret
305 308
306 309 def _supported(self, f, st, verbose=False):
307 310 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
308 311 return True
309 312 if verbose:
310 313 kind = 'unknown'
311 314 if stat.S_ISCHR(st.st_mode): kind = _('character device')
312 315 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
313 316 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
314 317 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
315 318 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
316 319 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
317 320 % (self.pathto(f), kind))
318 321 return False
319 322
320 323 def walk(self, files=None, match=util.always, badmatch=None):
321 324 # filter out the stat
322 325 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
323 326 yield src, f
324 327
325 328 def statwalk(self, files=None, match=util.always, ignored=False,
326 329 badmatch=None, directories=False):
327 330 '''
328 331 walk recursively through the directory tree, finding all files
329 332 matched by the match function
330 333
331 334 results are yielded in a tuple (src, filename, st), where src
332 335 is one of:
333 336 'f' the file was found in the directory tree
334 337 'd' the file is a directory of the tree
335 338 'm' the file was only in the dirstate and not in the tree
336 339 'b' file was not found and matched badmatch
337 340
338 341 and st is the stat result if the file was found in the directory.
339 342 '''
340 343
341 344 # walk all files by default
342 345 if not files:
343 346 files = ['.']
344 347 dc = self._map.copy()
345 348 else:
346 349 files = util.unique(files)
347 350 dc = self._filter(files)
348 351
349 352 def imatch(file_):
350 353 if file_ not in dc and self._ignore(file_):
351 354 return False
352 355 return match(file_)
353 356
354 357 ignore = self._ignore
355 358 if ignored:
356 359 imatch = match
357 360 ignore = util.never
358 361
359 362 # self._root may end with a path separator when self._root == '/'
360 363 common_prefix_len = len(self._root)
361 364 if not self._root.endswith(os.sep):
362 365 common_prefix_len += 1
363 366 # recursion free walker, faster than os.walk.
364 367 def findfiles(s):
365 368 work = [s]
366 369 if directories:
367 370 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
368 371 while work:
369 372 top = work.pop()
370 373 names = os.listdir(top)
371 374 names.sort()
372 375 # nd is the top of the repository dir tree
373 376 nd = util.normpath(top[common_prefix_len:])
374 377 if nd == '.':
375 378 nd = ''
376 379 else:
377 380 # do not recurse into a repo contained in this
378 381 # one. use bisect to find .hg directory so speed
379 382 # is good on big directory.
380 383 hg = bisect.bisect_left(names, '.hg')
381 384 if hg < len(names) and names[hg] == '.hg':
382 385 if os.path.isdir(os.path.join(top, '.hg')):
383 386 continue
384 387 for f in names:
385 388 np = util.pconvert(os.path.join(nd, f))
386 389 if seen(np):
387 390 continue
388 391 p = os.path.join(top, f)
389 392 # don't trip over symlinks
390 393 st = os.lstat(p)
391 394 if stat.S_ISDIR(st.st_mode):
392 395 if not ignore(np):
393 396 work.append(p)
394 397 if directories:
395 398 yield 'd', np, st
396 399 if imatch(np) and np in dc:
397 400 yield 'm', np, st
398 401 elif imatch(np):
399 402 if self._supported(np, st):
400 403 yield 'f', np, st
401 404 elif np in dc:
402 405 yield 'm', np, st
403 406
404 407 known = {'.hg': 1}
405 408 def seen(fn):
406 409 if fn in known: return True
407 410 known[fn] = 1
408 411
409 412 # step one, find all files that match our criteria
410 413 files.sort()
411 414 for ff in files:
412 415 nf = util.normpath(ff)
413 416 f = self._join(ff)
414 417 try:
415 418 st = os.lstat(f)
416 419 except OSError, inst:
417 420 found = False
418 421 for fn in dc:
419 422 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
420 423 found = True
421 424 break
422 425 if not found:
423 426 if inst.errno != errno.ENOENT or not badmatch:
424 427 self._ui.warn('%s: %s\n' %
425 428 (self.pathto(ff), inst.strerror))
426 429 elif badmatch and badmatch(ff) and imatch(nf):
427 430 yield 'b', ff, None
428 431 continue
429 432 if stat.S_ISDIR(st.st_mode):
430 433 cmp1 = (lambda x, y: cmp(x[1], y[1]))
431 434 sorted_ = [ x for x in findfiles(f) ]
432 435 sorted_.sort(cmp1)
433 436 for e in sorted_:
434 437 yield e
435 438 else:
436 439 if not seen(nf) and match(nf):
437 440 if self._supported(ff, st, verbose=True):
438 441 yield 'f', nf, st
439 442 elif ff in dc:
440 443 yield 'm', nf, st
441 444
442 445 # step two run through anything left in the dc hash and yield
443 446 # if we haven't already seen it
444 447 ks = dc.keys()
445 448 ks.sort()
446 449 for k in ks:
447 450 if not seen(k) and imatch(k):
448 451 yield 'm', k, None
449 452
450 453 def status(self, files, match, list_ignored, list_clean):
451 454 lookup, modified, added, unknown, ignored = [], [], [], [], []
452 455 removed, deleted, clean = [], [], []
453 456
454 457 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
455 458 try:
456 459 type_, mode, size, time = self._map[fn]
457 460 except KeyError:
458 461 if list_ignored and self._ignore(fn):
459 462 ignored.append(fn)
460 463 else:
461 464 unknown.append(fn)
462 465 continue
463 466 if src == 'm':
464 467 nonexistent = True
465 468 if not st:
466 469 try:
467 470 st = os.lstat(self._join(fn))
468 471 except OSError, inst:
469 472 if inst.errno != errno.ENOENT:
470 473 raise
471 474 st = None
472 475 # We need to re-check that it is a valid file
473 476 if st and self._supported(fn, st):
474 477 nonexistent = False
475 478 # XXX: what to do with file no longer present in the fs
476 479 # who are not removed in the dirstate ?
477 480 if nonexistent and type_ in "nm":
478 481 deleted.append(fn)
479 482 continue
480 483 # check the common case first
481 484 if type_ == 'n':
482 485 if not st:
483 486 st = os.lstat(self._join(fn))
484 487 if (size >= 0 and (size != st.st_size
485 488 or (mode ^ st.st_mode) & 0100)
486 489 or fn in self._copymap):
487 490 modified.append(fn)
488 491 elif time != int(st.st_mtime):
489 492 lookup.append(fn)
490 493 elif list_clean:
491 494 clean.append(fn)
492 495 elif type_ == 'm':
493 496 modified.append(fn)
494 497 elif type_ == 'a':
495 498 added.append(fn)
496 499 elif type_ == 'r':
497 500 removed.append(fn)
498 501
499 502 return (lookup, modified, added, removed, deleted, unknown, ignored,
500 503 clean)
@@ -1,258 +1,258 b''
1 1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from mercurial import demandimport; demandimport.enable()
10 10 import os, mimetools, cStringIO
11 11 from mercurial.i18n import gettext as _
12 12 from mercurial import ui, hg, util, templater
13 13 from common import get_mtime, staticfile, style_map, paritygen
14 14 from hgweb_mod import hgweb
15 15
16 16 # This is a stopgap
17 17 class hgwebdir(object):
18 18 def __init__(self, config, parentui=None):
19 19 def cleannames(items):
20 20 return [(name.strip(os.sep), path) for name, path in items]
21 21
22 22 self.parentui = parentui
23 23 self.motd = None
24 24 self.style = None
25 25 self.stripecount = None
26 26 self.repos_sorted = ('name', False)
27 27 if isinstance(config, (list, tuple)):
28 28 self.repos = cleannames(config)
29 29 self.repos_sorted = ('', False)
30 30 elif isinstance(config, dict):
31 31 self.repos = cleannames(config.items())
32 32 self.repos.sort()
33 33 else:
34 34 if isinstance(config, util.configparser):
35 35 cp = config
36 36 else:
37 37 cp = util.configparser()
38 38 cp.read(config)
39 39 self.repos = []
40 40 if cp.has_section('web'):
41 41 if cp.has_option('web', 'motd'):
42 42 self.motd = cp.get('web', 'motd')
43 43 if cp.has_option('web', 'style'):
44 44 self.style = cp.get('web', 'style')
45 45 if cp.has_option('web', 'stripes'):
46 46 self.stripecount = int(cp.get('web', 'stripes'))
47 47 if cp.has_section('paths'):
48 48 self.repos.extend(cleannames(cp.items('paths')))
49 49 if cp.has_section('collections'):
50 50 for prefix, root in cp.items('collections'):
51 51 for path in util.walkrepos(root):
52 52 repo = os.path.normpath(path)
53 53 name = repo
54 54 if name.startswith(prefix):
55 55 name = name[len(prefix):]
56 56 self.repos.append((name.lstrip(os.sep), repo))
57 57 self.repos.sort()
58 58
59 59 def run(self):
60 60 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
61 61 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
62 62 import mercurial.hgweb.wsgicgi as wsgicgi
63 63 from request import wsgiapplication
64 64 def make_web_app():
65 65 return self
66 66 wsgicgi.launch(wsgiapplication(make_web_app))
67 67
68 68 def run_wsgi(self, req):
69 69 def header(**map):
70 70 header_file = cStringIO.StringIO(
71 71 ''.join(tmpl("header", encoding=util._encoding, **map)))
72 72 msg = mimetools.Message(header_file, 0)
73 73 req.header(msg.items())
74 74 yield header_file.read()
75 75
76 76 def footer(**map):
77 77 yield tmpl("footer", **map)
78 78
79 79 def motd(**map):
80 80 if self.motd is not None:
81 81 yield self.motd
82 82 else:
83 83 yield config('web', 'motd', '')
84 84
85 85 parentui = self.parentui or ui.ui(report_untrusted=False)
86 86
87 87 def config(section, name, default=None, untrusted=True):
88 88 return parentui.config(section, name, default, untrusted)
89 89
90 90 url = req.env['REQUEST_URI'].split('?')[0]
91 91 if not url.endswith('/'):
92 92 url += '/'
93 93 pathinfo = req.env.get('PATH_INFO', '').strip('/') + '/'
94 94 base = url[:len(url) - len(pathinfo)]
95 95 if not base.endswith('/'):
96 96 base += '/'
97 97
98 98 staticurl = config('web', 'staticurl') or base + 'static/'
99 99 if not staticurl.endswith('/'):
100 100 staticurl += '/'
101 101
102 102 style = self.style
103 103 if style is None:
104 104 style = config('web', 'style', '')
105 105 if req.form.has_key('style'):
106 106 style = req.form['style'][0]
107 107 if self.stripecount is None:
108 108 self.stripecount = int(config('web', 'stripes', 1))
109 109 mapfile = style_map(templater.templatepath(), style)
110 110 tmpl = templater.templater(mapfile, templater.common_filters,
111 111 defaults={"header": header,
112 112 "footer": footer,
113 113 "motd": motd,
114 114 "url": url,
115 115 "staticurl": staticurl})
116 116
117 117 def archivelist(ui, nodeid, url):
118 118 allowed = ui.configlist("web", "allow_archive", untrusted=True)
119 119 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
120 120 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
121 121 untrusted=True):
122 122 yield {"type" : i[0], "extension": i[1],
123 123 "node": nodeid, "url": url}
124 124
125 125 def entries(sortcolumn="", descending=False, subdir="", **map):
126 126 def sessionvars(**map):
127 127 fields = []
128 128 if req.form.has_key('style'):
129 129 style = req.form['style'][0]
130 130 if style != get('web', 'style', ''):
131 131 fields.append(('style', style))
132 132
133 133 separator = url[-1] == '?' and ';' or '?'
134 134 for name, value in fields:
135 135 yield dict(name=name, value=value, separator=separator)
136 136 separator = ';'
137 137
138 138 rows = []
139 139 parity = paritygen(self.stripecount)
140 140 for name, path in self.repos:
141 141 if not name.startswith(subdir):
142 142 continue
143 143 name = name[len(subdir):]
144 144
145 145 u = ui.ui(parentui=parentui)
146 146 try:
147 147 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
148 148 except IOError:
149 149 pass
150 150 def get(section, name, default=None):
151 151 return u.config(section, name, default, untrusted=True)
152 152
153 153 if u.configbool("web", "hidden", untrusted=True):
154 154 continue
155 155
156 156 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
157 157 .replace("//", "/")) + '/'
158 158
159 159 # update time with local timezone
160 160 try:
161 161 d = (get_mtime(path), util.makedate()[1])
162 162 except OSError:
163 163 continue
164 164
165 165 contact = (get("ui", "username") or # preferred
166 166 get("web", "contact") or # deprecated
167 167 get("web", "author", "")) # also
168 168 description = get("web", "description", "")
169 169 name = get("web", "name", name)
170 170 row = dict(contact=contact or "unknown",
171 171 contact_sort=contact.upper() or "unknown",
172 172 name=name,
173 173 name_sort=name,
174 174 url=url,
175 175 description=description or "unknown",
176 176 description_sort=description.upper() or "unknown",
177 177 lastchange=d,
178 178 lastchange_sort=d[1]-d[0],
179 179 sessionvars=sessionvars,
180 180 archives=archivelist(u, "tip", url))
181 181 if (not sortcolumn
182 182 or (sortcolumn, descending) == self.repos_sorted):
183 183 # fast path for unsorted output
184 184 row['parity'] = parity.next()
185 185 yield row
186 186 else:
187 187 rows.append((row["%s_sort" % sortcolumn], row))
188 188 if rows:
189 189 rows.sort()
190 190 if descending:
191 191 rows.reverse()
192 192 for key, row in rows:
193 193 row['parity'] = parity.next()
194 194 yield row
195 195
196 196 def makeindex(req, subdir=""):
197 197 sortable = ["name", "description", "contact", "lastchange"]
198 198 sortcolumn, descending = self.repos_sorted
199 199 if req.form.has_key('sort'):
200 200 sortcolumn = req.form['sort'][0]
201 201 descending = sortcolumn.startswith('-')
202 202 if descending:
203 203 sortcolumn = sortcolumn[1:]
204 204 if sortcolumn not in sortable:
205 205 sortcolumn = ""
206 206
207 207 sort = [("sort_%s" % column,
208 208 "%s%s" % ((not descending and column == sortcolumn)
209 209 and "-" or "", column))
210 210 for column in sortable]
211 211 req.write(tmpl("index", entries=entries, subdir=subdir,
212 212 sortcolumn=sortcolumn, descending=descending,
213 213 **dict(sort)))
214 214
215 215 try:
216 216 virtual = req.env.get("PATH_INFO", "").strip('/')
217 217 if virtual.startswith('static/'):
218 218 static = os.path.join(templater.templatepath(), 'static')
219 219 fname = virtual[7:]
220 220 req.write(staticfile(static, fname, req) or
221 221 tmpl('error', error='%r not found' % fname))
222 222 elif virtual:
223 223 repos = dict(self.repos)
224 224 while virtual:
225 225 real = repos.get(virtual)
226 226 if real:
227 227 req.env['REPO_NAME'] = virtual
228 228 try:
229 229 repo = hg.repository(parentui, real)
230 230 hgweb(repo).run_wsgi(req)
231 231 except IOError, inst:
232 232 req.write(tmpl("error", error=inst.strerror))
233 233 except hg.RepoError, inst:
234 234 req.write(tmpl("error", error=str(inst)))
235 235 return
236 236
237 237 # browse subdirectories
238 238 subdir = virtual + '/'
239 239 if [r for r in repos if r.startswith(subdir)]:
240 240 makeindex(req, subdir)
241 241 return
242 242
243 243 up = virtual.rfind('/')
244 244 if up < 0:
245 245 break
246 246 virtual = virtual[:up]
247
247
248 248 req.write(tmpl("notfound", repo=virtual))
249 249 else:
250 250 if req.form.has_key('static'):
251 251 static = os.path.join(templater.templatepath(), "static")
252 252 fname = req.form['static'][0]
253 253 req.write(staticfile(static, fname, req)
254 254 or tmpl("error", error="%r not found" % fname))
255 255 else:
256 256 makeindex(req)
257 257 finally:
258 258 tmpl = None
@@ -1,289 +1,289 b''
1 1 # hgweb/server.py - The standalone hg web server.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
10 10 from mercurial import ui, hg, util, templater
11 11 from hgweb_mod import hgweb
12 12 from hgwebdir_mod import hgwebdir
13 13 from request import wsgiapplication
14 14 from mercurial.i18n import gettext as _
15 15
16 16 def _splitURI(uri):
17 17 """ Return path and query splited from uri
18 18
19 19 Just like CGI environment, the path is unquoted, the query is
20 20 not.
21 21 """
22 22 if '?' in uri:
23 23 path, query = uri.split('?', 1)
24 24 else:
25 25 path, query = uri, ''
26 26 return urllib.unquote(path), query
27 27
28 28 class _error_logger(object):
29 29 def __init__(self, handler):
30 30 self.handler = handler
31 31 def flush(self):
32 32 pass
33 33 def write(self, str):
34 34 self.writelines(str.split('\n'))
35 35 def writelines(self, seq):
36 36 for msg in seq:
37 37 self.handler.log_error("HG error: %s", msg)
38 38
39 39 class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
40 40
41 41 url_scheme = 'http'
42
42
43 43 def __init__(self, *args, **kargs):
44 44 self.protocol_version = 'HTTP/1.1'
45 45 BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
46 46
47 47 def log_error(self, format, *args):
48 48 errorlog = self.server.errorlog
49 49 errorlog.write("%s - - [%s] %s\n" % (self.client_address[0],
50 50 self.log_date_time_string(),
51 51 format % args))
52 52
53 53 def log_message(self, format, *args):
54 54 accesslog = self.server.accesslog
55 55 accesslog.write("%s - - [%s] %s\n" % (self.client_address[0],
56 56 self.log_date_time_string(),
57 57 format % args))
58 58
59 59 def do_write(self):
60 60 try:
61 61 self.do_hgweb()
62 62 except socket.error, inst:
63 63 if inst[0] != errno.EPIPE:
64 64 raise
65 65
66 66 def do_POST(self):
67 67 try:
68 68 self.do_write()
69 69 except StandardError, inst:
70 70 self._start_response("500 Internal Server Error", [])
71 71 self._write("Internal Server Error")
72 72 tb = "".join(traceback.format_exception(*sys.exc_info()))
73 73 self.log_error("Exception happened during processing request '%s':\n%s",
74 74 self.path, tb)
75 75
76 76 def do_GET(self):
77 77 self.do_POST()
78 78
79 79 def do_hgweb(self):
80 80 path_info, query = _splitURI(self.path)
81 81
82 82 env = {}
83 83 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
84 84 env['REQUEST_METHOD'] = self.command
85 85 env['SERVER_NAME'] = self.server.server_name
86 86 env['SERVER_PORT'] = str(self.server.server_port)
87 87 env['REQUEST_URI'] = self.path
88 88 env['PATH_INFO'] = path_info
89 89 env['REMOTE_HOST'] = self.client_address[0]
90 90 env['REMOTE_ADDR'] = self.client_address[0]
91 91 if query:
92 92 env['QUERY_STRING'] = query
93 93
94 94 if self.headers.typeheader is None:
95 95 env['CONTENT_TYPE'] = self.headers.type
96 96 else:
97 97 env['CONTENT_TYPE'] = self.headers.typeheader
98 98 length = self.headers.getheader('content-length')
99 99 if length:
100 100 env['CONTENT_LENGTH'] = length
101 101 for header in [h for h in self.headers.keys()
102 102 if h not in ('content-type', 'content-length')]:
103 103 hkey = 'HTTP_' + header.replace('-', '_').upper()
104 104 hval = self.headers.getheader(header)
105 105 hval = hval.replace('\n', '').strip()
106 106 if hval:
107 107 env[hkey] = hval
108 108 env['SERVER_PROTOCOL'] = self.request_version
109 109 env['wsgi.version'] = (1, 0)
110 110 env['wsgi.url_scheme'] = self.url_scheme
111 111 env['wsgi.input'] = self.rfile
112 112 env['wsgi.errors'] = _error_logger(self)
113 113 env['wsgi.multithread'] = isinstance(self.server,
114 114 SocketServer.ThreadingMixIn)
115 115 env['wsgi.multiprocess'] = isinstance(self.server,
116 116 SocketServer.ForkingMixIn)
117 117 env['wsgi.run_once'] = 0
118 118
119 119 self.close_connection = True
120 120 self.saved_status = None
121 121 self.saved_headers = []
122 122 self.sent_headers = False
123 123 self.length = None
124 124 req = self.server.reqmaker(env, self._start_response)
125 125 for data in req:
126 126 if data:
127 127 self._write(data)
128 128
129 129 def send_headers(self):
130 130 if not self.saved_status:
131 131 raise AssertionError("Sending headers before start_response() called")
132 132 saved_status = self.saved_status.split(None, 1)
133 133 saved_status[0] = int(saved_status[0])
134 134 self.send_response(*saved_status)
135 135 should_close = True
136 136 for h in self.saved_headers:
137 137 self.send_header(*h)
138 138 if h[0].lower() == 'content-length':
139 139 should_close = False
140 140 self.length = int(h[1])
141 141 # The value of the Connection header is a list of case-insensitive
142 142 # tokens separated by commas and optional whitespace.
143 143 if 'close' in [token.strip().lower() for token in
144 144 self.headers.get('connection', '').split(',')]:
145 145 should_close = True
146 146 if should_close:
147 147 self.send_header('Connection', 'close')
148 148 self.close_connection = should_close
149 149 self.end_headers()
150 150 self.sent_headers = True
151 151
152 152 def _start_response(self, http_status, headers, exc_info=None):
153 153 code, msg = http_status.split(None, 1)
154 154 code = int(code)
155 155 self.saved_status = http_status
156 156 bad_headers = ('connection', 'transfer-encoding')
157 157 self.saved_headers = [h for h in headers
158 158 if h[0].lower() not in bad_headers]
159 159 return self._write
160 160
161 161 def _write(self, data):
162 162 if not self.saved_status:
163 163 raise AssertionError("data written before start_response() called")
164 164 elif not self.sent_headers:
165 165 self.send_headers()
166 166 if self.length is not None:
167 167 if len(data) > self.length:
168 168 raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
169 169 self.length = self.length - len(data)
170 170 self.wfile.write(data)
171 171 self.wfile.flush()
172 172
173 173 class _shgwebhandler(_hgwebhandler):
174 174
175 175 url_scheme = 'https'
176
176
177 177 def setup(self):
178 178 self.connection = self.request
179 179 self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
180 180 self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
181 181
182 182 def do_write(self):
183 183 from OpenSSL.SSL import SysCallError
184 184 try:
185 185 super(_shgwebhandler, self).do_write()
186 186 except SysCallError, inst:
187 187 if inst.args[0] != errno.EPIPE:
188 188 raise
189 189
190 190 def handle_one_request(self):
191 191 from OpenSSL.SSL import SysCallError, ZeroReturnError
192 192 try:
193 193 super(_shgwebhandler, self).handle_one_request()
194 194 except (SysCallError, ZeroReturnError):
195 195 self.close_connection = True
196 196 pass
197 197
198 198 def create_server(ui, repo):
199 199 use_threads = True
200 200
201 201 def openlog(opt, default):
202 202 if opt and opt != '-':
203 203 return open(opt, 'w')
204 204 return default
205 205
206 206 address = repo.ui.config("web", "address", "")
207 207 port = int(repo.ui.config("web", "port", 8000))
208 208 use_ipv6 = repo.ui.configbool("web", "ipv6")
209 209 webdir_conf = repo.ui.config("web", "webdir_conf")
210 210 ssl_cert = repo.ui.config("web", "certificate")
211 211 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
212 212 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
213 213
214 214 if use_threads:
215 215 try:
216 216 from threading import activeCount
217 217 except ImportError:
218 218 use_threads = False
219 219
220 220 if use_threads:
221 221 _mixin = SocketServer.ThreadingMixIn
222 222 else:
223 223 if hasattr(os, "fork"):
224 224 _mixin = SocketServer.ForkingMixIn
225 225 else:
226 226 class _mixin:
227 227 pass
228 228
229 229 class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
230 230
231 231 # SO_REUSEADDR has broken semantics on windows
232 232 if os.name == 'nt':
233 233 allow_reuse_address = 0
234 234
235 235 def __init__(self, *args, **kargs):
236 236 BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
237 237 self.accesslog = accesslog
238 238 self.errorlog = errorlog
239 239 self.daemon_threads = True
240 240 def make_handler():
241 241 if webdir_conf:
242 242 hgwebobj = hgwebdir(webdir_conf, ui)
243 243 elif repo is not None:
244 244 hgwebobj = hgweb(hg.repository(repo.ui, repo.root))
245 245 else:
246 246 raise hg.RepoError(_("There is no Mercurial repository here"
247 247 " (.hg not found)"))
248 248 return hgwebobj
249 249 self.reqmaker = wsgiapplication(make_handler)
250 250
251 251 addr = address
252 252 if addr in ('', '::'):
253 253 addr = socket.gethostname()
254 254
255 255 self.addr, self.port = addr, port
256 256
257 257 if ssl_cert:
258 258 try:
259 259 from OpenSSL import SSL
260 260 ctx = SSL.Context(SSL.SSLv23_METHOD)
261 261 except ImportError:
262 262 raise util.Abort("SSL support is unavailable")
263 263 ctx.use_privatekey_file(ssl_cert)
264 264 ctx.use_certificate_file(ssl_cert)
265 265 sock = socket.socket(self.address_family, self.socket_type)
266 266 self.socket = SSL.Connection(ctx, sock)
267 267 self.server_bind()
268 268 self.server_activate()
269 269
270 270 class IPv6HTTPServer(MercurialHTTPServer):
271 271 address_family = getattr(socket, 'AF_INET6', None)
272 272
273 273 def __init__(self, *args, **kwargs):
274 274 if self.address_family is None:
275 275 raise hg.RepoError(_('IPv6 not available on this system'))
276 276 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
277 277
278 278 if ssl_cert:
279 279 handler = _shgwebhandler
280 280 else:
281 281 handler = _hgwebhandler
282 282
283 283 try:
284 284 if use_ipv6:
285 285 return IPv6HTTPServer((address, port), handler)
286 286 else:
287 287 return MercurialHTTPServer((address, port), handler)
288 288 except socket.error, inst:
289 289 raise util.Abort(_('cannot start server: %s') % inst.args[1])
@@ -1,122 +1,120 b''
1 1 # lock.py - simple locking scheme for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import errno, os, socket, time, util
9 9
class LockException(IOError):
    """Base class for locking errors.

    Extends IOError with ``desc``, a human-readable description of the
    resource being locked.
    """
    def __init__(self, errno, strerror, filename, desc):
        IOError.__init__(self, errno, strerror, filename)
        self.desc = desc
14 14
class LockHeld(LockException):
    """Raised when the lock is already held; ``locker`` identifies who."""
    def __init__(self, errno, filename, desc, locker):
        LockException.__init__(self, errno, 'Lock held', filename, desc)
        self.locker = locker
19 19
class LockUnavailable(LockException):
    """Raised when the lock file cannot be created for a non-EEXIST reason."""
22 22
23 23 class lock(object):
24 24 # lock is symlink on platforms that support it, file on others.
25 25
26 26 # symlink is used because create of directory entry and contents
27 27 # are atomic even over nfs.
28 28
29 29 # old-style lock: symlink to pid
30 30 # new-style lock: symlink to hostname:pid
31 31
32 _host = None
33
32 34 def __init__(self, file, timeout=-1, releasefn=None, desc=None):
33 35 self.f = file
34 36 self.held = 0
35 37 self.timeout = timeout
36 38 self.releasefn = releasefn
37 self.id = None
38 self.host = None
39 self.pid = None
40 39 self.desc = desc
41 40 self.lock()
42 41
43 42 def __del__(self):
44 43 self.release()
45 44
46 45 def lock(self):
47 46 timeout = self.timeout
48 47 while 1:
49 48 try:
50 49 self.trylock()
51 50 return 1
52 51 except LockHeld, inst:
53 52 if timeout != 0:
54 53 time.sleep(1)
55 54 if timeout > 0:
56 55 timeout -= 1
57 56 continue
58 57 raise LockHeld(errno.ETIMEDOUT, inst.filename, self.desc,
59 58 inst.locker)
60 59
61 60 def trylock(self):
62 if self.id is None:
63 self.host = socket.gethostname()
64 self.pid = os.getpid()
65 self.id = '%s:%s' % (self.host, self.pid)
61 if lock._host is None:
62 lock._host = socket.gethostname()
63 lockname = '%s:%s' % (lock._host, os.getpid())
66 64 while not self.held:
67 65 try:
68 util.makelock(self.id, self.f)
66 util.makelock(lockname, self.f)
69 67 self.held = 1
70 68 except (OSError, IOError), why:
71 69 if why.errno == errno.EEXIST:
72 70 locker = self.testlock()
73 71 if locker is not None:
74 72 raise LockHeld(errno.EAGAIN, self.f, self.desc,
75 73 locker)
76 74 else:
77 75 raise LockUnavailable(why.errno, why.strerror,
78 76 why.filename, self.desc)
79 77
80 78 def testlock(self):
81 79 """return id of locker if lock is valid, else None.
82 80
83 81 If old-style lock, we cannot tell what machine locker is on.
84 82 with new-style lock, if locker is on this machine, we can
85 83 see if locker is alive. If locker is on this machine but
86 84 not alive, we can safely break lock.
87 85
88 86 The lock file is only deleted when None is returned.
89 87
90 88 """
91 89 locker = util.readlock(self.f)
92 90 try:
93 91 host, pid = locker.split(":", 1)
94 92 except ValueError:
95 93 return locker
96 if host != self.host:
94 if host != lock._host:
97 95 return locker
98 96 try:
99 97 pid = int(pid)
100 98 except:
101 99 return locker
102 100 if util.testpid(pid):
103 101 return locker
104 102 # if locker dead, break lock. must do this with another lock
105 103 # held, or can race and break valid lock.
106 104 try:
107 105 l = lock(self.f + '.break')
108 106 l.trylock()
109 107 os.unlink(self.f)
110 108 l.release()
111 109 except (LockHeld, LockUnavailable):
112 110 return locker
113 111
114 112 def release(self):
115 113 if self.held:
116 114 self.held = 0
117 115 if self.releasefn:
118 116 self.releasefn()
119 117 try:
120 118 os.unlink(self.f)
121 119 except: pass
122 120
@@ -1,1319 +1,1319 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import *
11 11 import base85, cmdutil, mdiff, util, context, revlog, diffhelpers
12 12 import cStringIO, email.Parser, os, popen2, re, sha
13 13 import sys, tempfile, zlib
14 14
class PatchError(Exception):
    """Base error for patch parsing/application failures."""
17 17
class NoHunks(PatchError):
    """Raised when a patch contains no applicable hunks."""
20 20
21 21 # helper functions
22 22
def copyfile(src, dst, basedir=None):
    """Copy src to dst relative to basedir, creating parent directories.

    Aborts if the destination already exists.
    """
    if not basedir:
        basedir = os.getcwd()

    abssrc = os.path.join(basedir, src)
    absdst = os.path.join(basedir, dst)
    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") %
                         dst)

    targetdir = os.path.dirname(absdst)
    if not os.path.isdir(targetdir):
        os.makedirs(targetdir)

    util.copyfile(abssrc, absdst)
37 37
38 38 # public functions
39 39
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, branch, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''
    # NOTE: docstring previously omitted 'branch'; the function returns
    # an 8-tuple, with branch in the fifth position.

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])', re.MULTILINE)

    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        msg = email.Parser.Parser().parse(fileobj)

        subject = msg['Subject']
        user = msg['From']
        # should try to parse msg['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if subject:
            # strip a leading [PATCH n/m] tag and unfold the header
            if subject.startswith('[PATCH'):
                pend = subject.find(']')
                if pend >= 0:
                    subject = subject[pend+1:].lstrip()
            subject = subject.replace('\n\t', ' ')
            ui.debug('Subject: %s\n' % subject)
        if user:
            ui.debug('From: %s\n' % user)
        diffs_seen = 0
        ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
        message = ''
        for part in msg.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in ok_types:
                continue
            payload = part.get_payload(decode=True)
            m = diffre.search(payload)
            if m:
                hgpatch = False
                ignoretext = False

                ui.debug(_('found patch at byte %d\n') % m.start(0))
                diffs_seen += 1
                cfp = cStringIO.StringIO()
                # everything before the diff is (candidate) commit message
                for line in payload[:m.start(0)].splitlines():
                    if line.startswith('# HG changeset patch'):
                        ui.debug(_('patch generated by hg export\n'))
                        hgpatch = True
                        # drop earlier commit message content
                        cfp.seek(0)
                        cfp.truncate()
                        subject = None
                    elif hgpatch:
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[10:])
                    elif line == '---' and 'git-send-email' in msg['X-Mailer']:
                        # git-send-email puts patch stats after a '---' line
                        ignoretext = True
                    if not line.startswith('# ') and not ignoretext:
                        cfp.write(line)
                        cfp.write('\n')
                message = cfp.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                message += '\n' + payload
    except:
        # clean up the temp file on any failure, then re-raise
        tmpfp.close()
        os.unlink(tmpname)
        raise

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    tmpfp.close()
    if not diffs_seen:
        os.unlink(tmpname)
        return None, message, user, date, branch, None, None, None
    p1 = parents and parents.pop(0) or None
    p2 = parents and parents.pop(0) or None
    return tmpname, message, user, date, branch, nodeid, p1, p2
140 140
# requirement flags OR-ed together by readgitpatch() to describe what
# processing a git-style patch needs
GP_PATCH  = 1 << 0 # we have to run patch
GP_FILTER = 1 << 1 # there's some copy/rename operation
GP_BINARY = 1 << 2 # there's a binary patch
144 144
def readgitpatch(fp, firstline):
    """extract git-style metadata about patches from <patchname>

    Returns (dopatch, gitpatches) where dopatch is a mask of GP_* flags
    and gitpatches is a list of gitpatch metadata records.
    """
    class gitpatch:
        "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
        def __init__(self, path):
            self.path = path
            self.oldpath = None
            self.mode = None
            self.op = 'MODIFY'
            self.copymod = False
            self.lineno = 0
            self.binary = False

    def reader(fp, firstline):
        # re-join the already-consumed first line with the rest of fp
        yield firstline
        for line in fp:
            yield line

    # Filter patch for git information
    gitre = re.compile('diff --git a/(.*) b/(.*)')
    gp = None
    gitpatches = []
    # Can have a git patch with only metadata, causing patch to complain
    dopatch = 0

    lineno = 0
    for line in reader(fp, firstline):
        lineno += 1
        if line.startswith('diff --git'):
            m = gitre.match(line)
            if m:
                if gp:
                    gitpatches.append(gp)
                src, dst = m.group(1, 2)
                gp = gitpatch(dst)
                gp.lineno = lineno
        elif gp:
            if line.startswith('--- '):
                # start of the hunk body: finalize the current record
                if gp.op in ('COPY', 'RENAME'):
                    gp.copymod = True
                    dopatch |= GP_FILTER
                gitpatches.append(gp)
                gp = None
                dopatch |= GP_PATCH
                continue
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:].rstrip()
            elif line.startswith('rename to '):
                gp.path = line[10:].rstrip()
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:].rstrip()
            elif line.startswith('copy to '):
                gp.path = line[8:].rstrip()
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                gp.mode = int(line.rstrip()[-3:], 8)
            elif line.startswith('new mode '):
                gp.mode = int(line.rstrip()[-3:], 8)
            elif line.startswith('GIT binary patch'):
                dopatch |= GP_BINARY
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    if not gitpatches:
        dopatch = GP_PATCH

    return (dopatch, gitpatches)
217 217
218 218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 219 """apply <patchname> to the working directory.
220 220 returns whether patch was applied with fuzz factor."""
221 221 patcher = ui.config('ui', 'patch')
222 222 args = []
223 223 try:
224 224 if patcher:
225 225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 226 files)
227 227 else:
228 228 try:
229 229 return internalpatch(patchname, ui, strip, cwd, files)
230 230 except NoHunks:
231 231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 233 patcher)
234 234 if util.needbinarypatch():
235 235 args.append('--binary')
236 236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 237 files)
238 238 except PatchError, err:
239 239 s = str(err)
240 240 if s:
241 241 raise util.Abort(s)
242 242 else:
243 243 raise util.Abort(_('patch failed to apply'))
244 244
def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""

    fuzz = False
    # initialize before the loop: patch(1) normally emits 'patching file'
    # before any fuzz/FAILED message, but if it does not, 'pf' and
    # 'printed_file' were previously referenced before assignment
    pf = patchname
    printed_file = False
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                       util.shellquote(patchname)))

    for line in fp:
        line = line.rstrip()
        ui.note(line + '\n')
        if line.startswith('patching file '):
            pf = util.parse_patch_output(line)
            printed_file = False
            files.setdefault(pf, (None, None))
        elif line.find('with fuzz') >= 0:
            fuzz = True
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
        elif line.find('saving rejects to file') >= 0:
            ui.warn(line + '\n')
        elif line.find('FAILED') >= 0:
            if not printed_file:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explain_exit(code)[0])
    return fuzz
280 280
def internalpatch(patchname, ui, strip, cwd, files):
    """use builtin patch to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""
    fp = file(patchname, 'rb')
    if cwd:
        curdir = os.getcwd()
        os.chdir(cwd)
    try:
        ret = applydiff(ui, fp, files, strip=strip)
    finally:
        # always restore the original working directory
        if cwd:
            os.chdir(curdir)
    if ret < 0:
        raise PatchError
    return ret > 0
296 296
# hunk header parsers:
# unified:   @@ -start,len +start,len @@  (or @@ -start +start @@ if len is 1)
# context:   --- start,end ----  /  *** start,end ****
unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
300 300
class patchfile:
    """One target file of a patch: its lines, plus hunk application state."""

    def __init__(self, ui, fname):
        self.fname = fname
        self.ui = ui
        try:
            fp = file(fname, 'rb')
            self.lines = fp.readlines()
            self.exists = True
        except IOError:
            # file does not exist yet: create missing parent directories
            dirname = os.path.dirname(fname)
            if dirname and not os.path.isdir(dirname):
                dirs = dirname.split(os.path.sep)
                d = ""
                for x in dirs:
                    d = os.path.join(d, x)
                    if not os.path.isdir(d):
                        os.mkdir(d)
            self.lines = []
            self.exists = False

        self.hash = {}          # line text -> list of line numbers
        self.dirty = 0
        self.offset = 0         # cumulative line drift from applied hunks
        self.rej = []           # hunks that failed to apply
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def printfile(self, warn):
        # print the file name once, as a warning or a note
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines.  The
        # result is a list of line numbers sorted based on distance
        # from linenum
        def sorter(a, b):
            vala = abs(a - linenum)
            valb = abs(b - linenum)
            return cmp(vala, valb)

        try:
            cand = self.hash[l]
        except:
            return []

        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(cmp=sorter)
        return cand

    def hashlines(self):
        # index every line of the file by its text for fuzzy matching
        self.hash = {}
        for x in xrange(len(self.lines)):
            s = self.lines[x]
            self.hash.setdefault(s, []).append(x)

    def write_rej(self):
        # our rejects are a little different from patch(1).  This always
        # creates rejects in the same form as the original patch.  A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.

        if not self.rej:
            return
        if self.hunks != 1:
            hunkstr = "s"
        else:
            hunkstr = ""

        fname = self.fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
            (len(self.rej), self.hunks, hunkstr, fname))
        try: os.unlink(fname)
        except:
            pass
        fp = file(fname, 'wb')
        base = os.path.basename(self.fname)
        fp.write("--- %s\n+++ %s\n" % (base, base))
        for x in self.rej:
            for l in x.hunk:
                fp.write(l)
                if l[-1] != '\n':
                    fp.write("\n\ No newline at end of file\n")

    def write(self, dest=None):
        if self.dirty:
            if not dest:
                dest = self.fname
            st = None
            try:
                # break hard links before rewriting, preserving the mode
                st = os.lstat(dest)
                if st.st_nlink > 1:
                    os.unlink(dest)
            except: pass
            fp = file(dest, 'wb')
            if st:
                os.chmod(dest, st.st_mode)
            fp.writelines(self.lines)
            fp.close()

    def close(self):
        self.write()
        self.write_rej()

    def apply(self, h, reverse):
        """Apply hunk h; return 0 on clean apply, fuzz level on fuzzy
        apply, -1 on rejection."""
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                             (h.number, h.desc, len(h.a), h.lena, len(h.b),
                              h.lenb))

        self.hunks += 1
        if reverse:
            h.reverse()

        if self.exists and h.createfile():
            self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # binary hunks replace the whole file (or delete it)
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[:] = h.new()
                self.offset += len(h.new())
                self.dirty = 1
            return 0

        # fast case first, no offsets, no fuzz
        old = h.old()
        # patch starts counting at 1 unless we are adding the file
        if h.starta == 0:
            start = 0
        else:
            start = h.starta + self.offset - 1
        orig_start = start
        if diffhelpers.testhunk(old, self.lines, start) == 0:
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[start : start + h.lena] = h.new()
                self.offset += h.lenb - h.lena
                self.dirty = 1
            return 0

        # ok, we couldn't match the hunk.  Lets look for offsets and fuzz it
        self.hashlines()
        if h.hunk[-1][0] != ' ':
            # if the hunk tried to put something at the bottom of the file
            # override the start line and use eof here
            search_start = len(self.lines)
        else:
            search_start = orig_start

        for fuzzlen in xrange(3):
            for toponly in [ True, False ]:
                old = h.old(fuzzlen, toponly)

                cand = self.findlines(old[0][1:], search_start)
                for l in cand:
                    if diffhelpers.testhunk(old, self.lines, l) == 0:
                        newlines = h.new(fuzzlen, toponly)
                        self.lines[l : l + len(old)] = newlines
                        self.offset += len(newlines) - len(old)
                        self.dirty = 1
                        if fuzzlen:
                            fuzzstr = "with fuzz %d " % fuzzlen
                            f = self.ui.warn
                            self.printfile(True)
                        else:
                            fuzzstr = ""
                            f = self.ui.note
                        offset = l - orig_start - fuzzlen
                        if offset == 1:
                            linestr = "line"
                        else:
                            linestr = "lines"
                        f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
                          (h.number, l+1, fuzzstr, offset, linestr))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(h)
        return -1
495 495
class hunk:
    """One hunk of a text patch, parsed from unified or context format.

    self.a holds the old-side lines (with '-'/' ' prefixes), self.b the
    new-side lines (without prefixes), self.hunk the raw hunk text.
    """

    def __init__(self, desc, num, lr, context):
        self.number = num
        self.desc = desc
        self.hunk = [ desc ]
        self.a = []
        self.b = []
        if context:
            self.read_context_hunk(lr)
        else:
            self.read_unified_hunk(lr)

    def read_unified_hunk(self, lr):
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
        # a missing length in the header means 1
        if self.lena == None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb == None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length.  Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1

    def read_context_hunk(self, lr):
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.starta, foo2, aend, foo3 = m.groups()
        self.starta = int(self.starta)
        if aend == None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # read the old-side lines
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith(' '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': drop the trailing newline
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.startb, foo2, bend, foo3 = m.groups()
        self.startb = int(self.startb)
        if bend == None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        # read the new-side lines, merging them into self.hunk in order
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki-1] = s
                continue
            if not l:
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith(' '):
                u = ' ' + s
            elif len(self.b) == 0:
                # this can happen when the hunk does not add any lines
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki-1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc

    def reverse(self):
        """Swap the old and new sides of the hunk in place."""
        origlena = self.lena
        origstarta = self.starta
        self.lena = self.lenb
        self.starta = self.startb
        self.lenb = origlena
        self.startb = origstarta
        self.a = []
        self.b = []
        # self.hunk[0] is the @@ description
        for x in xrange(1, len(self.hunk)):
            o = self.hunk[x]
            if o.startswith('-'):
                n = '+' + o[1:]
                self.b.append(o[1:])
            elif o.startswith('+'):
                n = '-' + o[1:]
                self.a.append(n)
            else:
                n = o
                self.b.append(o[1:])
                self.a.append(o)
            # NOTE(review): this stores the original line 'o', not the
            # sign-flipped 'n', so self.hunk keeps unreversed +/- markers
            # (only affects reject output) -- confirm intent
            self.hunk[x] = o

    def fix_newline(self):
        diffhelpers.fix_newline(self.hunk, self.a, self.b)

    def complete(self):
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def createfile(self):
        # a hunk starting at 0 with no old lines creates the file
        return self.starta == 0 and self.lena == 0

    def rmfile(self):
        # a hunk ending with no new lines removes the file
        return self.startb == 0 and self.lenb == 0

    def fuzzit(self, l, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'.  It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(l)-1)
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen-1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x+1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen-1):
                    if self.hunk[hlen-bot-1][0] == ' ':
                        bot += 1
                    else:
                        break

            # top and bot now count context in the hunk
            # adjust them if either one is short
            context = max(top, bot, 3)
            if bot < context:
                bot = max(0, fuzz - (context - bot))
            else:
                bot = min(fuzz, bot)
            if top < context:
                top = max(0, fuzz - (context - top))
            else:
                top = min(fuzz, top)

            return l[top:len(l)-bot]
        return l

    def old(self, fuzz=0, toponly=False):
        return self.fuzzit(self.a, fuzz, toponly)

    def newctrl(self):
        # new-side lines with their control characters still attached
        res = []
        for x in self.hunk:
            c = x[0]
            if c == ' ' or c == '+':
                res.append(x)
        return res

    def new(self, fuzz=0, toponly=False):
        return self.fuzzit(self.b, fuzz, toponly)
716 716
class binhunk:
    'A binary patch file. Only understands literals so far.'
    def __init__(self, gitpatch):
        self.gitpatch = gitpatch
        self.text = None
        self.hunk = ['GIT binary patch\n']

    def createfile(self):
        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')

    def rmfile(self):
        return self.gitpatch.op == 'DELETE'

    def complete(self):
        return self.text is not None

    def new(self):
        return [self.text]

    def extract(self, fp):
        """Read a 'literal' binary hunk from fp, decode and store it."""
        line = fp.readline()
        self.hunk.append(line)
        while line and not line.startswith('literal '):
            line = fp.readline()
            self.hunk.append(line)
        if not line:
            raise PatchError(_('could not extract binary patch'))
        size = int(line[8:].rstrip())
        dec = []
        line = fp.readline()
        self.hunk.append(line)
        while len(line) > 1:
            # first char encodes the decoded length of the line:
            # A-Z -> 1..26, a-z -> 27..52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:-1])[:l])
            line = fp.readline()
            self.hunk.append(line)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            # BUG FIX: the format args must be a tuple; previously this
            # was '% len(text), size', which applied % to one value and
            # passed size as a second PatchError argument, raising
            # TypeError ('not enough arguments for format string')
            # instead of the intended message
            raise PatchError(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        self.text = text
762 762
def parsefilename(str):
    """Extract the file name from a '--- filename \\t|space stuff' line."""
    s = str[4:]
    # the name ends at the first tab, or failing that the first space
    for sep in ('\t', ' '):
        i = s.find(sep)
        if i >= 0:
            return s[:i]
    return s
772 772
def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
    """Choose which on-disk file a hunk should be applied to.

    Strips 'strip' leading path components from both names, then picks
    the existing (or creatable) candidate, preferring the old name when
    it is a suffix match of the new one.
    """
    def pathstrip(path, count=1):
        # drop 'count' leading '/'-separated components from path
        pathlen = len(path)
        i = 0
        if count == 0:
            return path.rstrip()
        while count > 0:
            i = path.find('/', i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
            while i < pathlen - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[i:].rstrip()

    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    afile = pathstrip(afile_orig, strip)
    gooda = os.path.exists(afile) and not nulla
    bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = os.path.exists(bfile) and not nullb
    # when reversing, a hunk that removes the file creates it instead
    createfunc = hunk.createfile
    if reverse:
        createfunc = hunk.rmfile
    if not goodb and not gooda and not createfunc():
        raise PatchError(_("unable to find %s or %s for patching") %
                         (afile, bfile))
    if gooda and goodb:
        fname = bfile
        if afile in bfile:
            fname = afile
    elif gooda:
        fname = afile
    elif not nullb:
        fname = bfile
        if afile in bfile:
            fname = afile
    elif not nulla:
        fname = afile
    return fname
819 819
class linereader:
    # simple class to allow pushing lines back into the input stream
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        # pushed lines are re-read in push order, before the stream
        self.buf.append(line)

    def readline(self):
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()
835 835
836 836 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
837 837 rejmerge=None, updatedir=None):
838 838 """reads a patch from fp and tries to apply it. The dict 'changed' is
839 839 filled in with all of the filenames changed by the patch. Returns 0
840 840 for a clean patch, -1 if any rejects were found and 1 if there was
841 841 any fuzz."""
842 842
843 843 def scangitpatch(fp, firstline, cwd=None):
844 844 '''git patches can modify a file, then copy that file to
845 845 a new file, but expect the source to be the unmodified form.
846 846 So we scan the patch looking for that case so we can do
847 847 the copies ahead of time.'''
848 848
849 849 pos = 0
850 850 try:
851 851 pos = fp.tell()
852 852 except IOError:
853 853 fp = cStringIO.StringIO(fp.read())
854 854
855 855 (dopatch, gitpatches) = readgitpatch(fp, firstline)
856 856 for gp in gitpatches:
857 857 if gp.copymod:
858 858 copyfile(gp.oldpath, gp.path, basedir=cwd)
859 859
860 860 fp.seek(pos)
861 861
862 862 return fp, dopatch, gitpatches
863 863
864 864 current_hunk = None
865 865 current_file = None
866 866 afile = ""
867 867 bfile = ""
868 868 state = None
869 869 hunknum = 0
870 870 rejects = 0
871 871
872 872 git = False
873 873 gitre = re.compile('diff --git (a/.*) (b/.*)')
874 874
875 875 # our states
876 876 BFILE = 1
877 877 err = 0
878 878 context = None
879 879 lr = linereader(fp)
880 880 dopatch = True
881 881 gitworkdone = False
882 882
883 883 while True:
884 884 newfile = False
885 885 x = lr.readline()
886 886 if not x:
887 887 break
888 888 if current_hunk:
889 889 if x.startswith('\ '):
890 890 current_hunk.fix_newline()
891 891 ret = current_file.apply(current_hunk, reverse)
892 892 if ret >= 0:
893 893 changed.setdefault(current_file.fname, (None, None))
894 894 if ret > 0:
895 895 err = 1
896 896 current_hunk = None
897 897 gitworkdone = False
898 898 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
899 899 ((context or context == None) and x.startswith('***************')))):
900 900 try:
901 901 if context == None and x.startswith('***************'):
902 902 context = True
903 903 current_hunk = hunk(x, hunknum + 1, lr, context)
904 904 except PatchError, err:
905 905 ui.debug(err)
906 906 current_hunk = None
907 907 continue
908 908 hunknum += 1
909 909 if not current_file:
910 910 if sourcefile:
911 911 current_file = patchfile(ui, sourcefile)
912 912 else:
913 913 current_file = selectfile(afile, bfile, current_hunk,
914 914 strip, reverse)
915 915 current_file = patchfile(ui, current_file)
916 916 elif state == BFILE and x.startswith('GIT binary patch'):
917 917 current_hunk = binhunk(changed[bfile[2:]][1])
918 918 if not current_file:
919 919 if sourcefile:
920 920 current_file = patchfile(ui, sourcefile)
921 921 else:
922 922 current_file = selectfile(afile, bfile, current_hunk,
923 923 strip, reverse)
924 924 current_file = patchfile(ui, current_file)
925 925 hunknum += 1
926 926 current_hunk.extract(fp)
927 927 elif x.startswith('diff --git'):
928 928 # check for git diff, scanning the whole patch file if needed
929 929 m = gitre.match(x)
930 930 if m:
931 931 afile, bfile = m.group(1, 2)
932 932 if not git:
933 933 git = True
934 934 fp, dopatch, gitpatches = scangitpatch(fp, x)
935 935 for gp in gitpatches:
936 936 changed[gp.path] = (gp.op, gp)
937 937 # else error?
938 938 # copy/rename + modify should modify target, not source
939 939 if changed.get(bfile[2:], (None, None))[0] in ('COPY',
940 940 'RENAME'):
941 941 afile = bfile
942 942 gitworkdone = True
943 943 newfile = True
944 944 elif x.startswith('---'):
945 945 # check for a unified diff
946 946 l2 = lr.readline()
947 947 if not l2.startswith('+++'):
948 948 lr.push(l2)
949 949 continue
950 950 newfile = True
951 951 context = False
952 952 afile = parsefilename(x)
953 953 bfile = parsefilename(l2)
954 954 elif x.startswith('***'):
955 955 # check for a context diff
956 956 l2 = lr.readline()
957 957 if not l2.startswith('---'):
958 958 lr.push(l2)
959 959 continue
960 960 l3 = lr.readline()
961 961 lr.push(l3)
962 962 if not l3.startswith("***************"):
963 963 lr.push(l2)
964 964 continue
965 965 newfile = True
966 966 context = True
967 967 afile = parsefilename(x)
968 968 bfile = parsefilename(l2)
969 969
970 970 if newfile:
971 971 if current_file:
972 972 current_file.close()
973 973 if rejmerge:
974 974 rejmerge(current_file)
975 975 rejects += len(current_file.rej)
976 976 state = BFILE
977 977 current_file = None
978 978 hunknum = 0
979 979 if current_hunk:
980 980 if current_hunk.complete():
981 981 ret = current_file.apply(current_hunk, reverse)
982 982 if ret >= 0:
983 983 changed.setdefault(current_file.fname, (None, None))
984 984 if ret > 0:
985 985 err = 1
986 986 else:
987 987 fname = current_file and current_file.fname or None
988 988 raise PatchError(_("malformed patch %s %s") % (fname,
989 989 current_hunk.desc))
990 990 if current_file:
991 991 current_file.close()
992 992 if rejmerge:
993 993 rejmerge(current_file)
994 994 rejects += len(current_file.rej)
995 995 if updatedir and git:
996 996 updatedir(gitpatches)
997 997 if rejects:
998 998 return -1
999 999 if hunknum == 0 and dopatch and not gitworkdone:
1000 1000 raise NoHunks
1001 1001 return err
1002 1002
def diffopts(ui, opts=None, untrusted=False):
    """Build an mdiff.diffopts from command options and [diff] config.

    Each boolean flag is taken from the command options dict when set
    there, otherwise from the corresponding entry in the ui's [diff]
    configuration section.  opts defaults to an empty dict (a None
    sentinel is used instead of a mutable default argument).
    """
    if opts is None:
        opts = {}
    def get(key, name=None):
        # command-line option wins; fall back to the [diff] config entry
        return (opts.get(key) or
                ui.configbool('diff', name or key, None, untrusted=untrusted))
    return mdiff.diffopts(
        text=opts.get('text'),
        git=get('git'),
        nodates=get('nodates'),
        showfunc=get('show_function', 'showfunc'),
        ignorews=get('ignore_all_space', 'ignorews'),
        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'))
1015 1015
def updatedir(ui, repo, patches):
    '''Update dirstate after patch application according to metadata

    patches maps a path to a (change-type, gitpatch-or-None) pair.
    Records copies/renames/deletes in the dirstate, applies execute-bit
    changes, and returns the sorted list of all touched files.
    '''
    if not patches:
        return
    copies = []
    removes = {}
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        # repo.status/addremove expect paths relative to cwd
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    for f in patches:
        ctype, gp = patches[f]
        if ctype == 'RENAME':
            copies.append((gp.oldpath, gp.path, gp.copymod))
            removes[gp.oldpath] = 1
        elif ctype == 'COPY':
            copies.append((gp.oldpath, gp.path, gp.copymod))
        elif ctype == 'DELETE':
            removes[gp.path] = 1
    for src, dst, after in copies:
        # 'after' means the copy was already done before patching
        if not after:
            copyfile(src, dst, repo.root)
        repo.copy(src, dst)
    removes = removes.keys()
    if removes:
        removes.sort()
        repo.remove(removes, True)
    for f in patches:
        ctype, gp = patches[f]
        if gp and gp.mode:
            # 0100 is the owner-execute bit of the git mode field
            x = gp.mode & 0100 != 0
            dst = os.path.join(repo.root, gp.path)
            # patch won't create empty files
            if ctype == 'ADD' and not os.path.exists(dst):
                repo.wwrite(gp.path, '', x and 'x' or '')
            else:
                util.set_exec(dst, x)
    cmdutil.addremove(repo, cfiles)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    files.sort()

    return files
1059 1059
def b85diff(fp, to, tn):
    '''print base85-encoded binary diff'''
    def gitindex(text):
        # git blob id: sha1 of "blob <len>\0<data>"; all zeros when empty
        if not text:
            return '0' * 40
        s = sha.new('blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # git encodes each chunk's length as a letter:
        # 'A'-'Z' for 1-26 bytes, 'a'-'z' for 27-52
        size = len(line)
        if size <= 26:
            marker = chr(ord('A') + size - 1)
        else:
            marker = chr(size - 26 + ord('a') - 1)
        return '%c%s\n' % (marker, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # slice text into csize-byte pieces
        for start in xrange(0, len(text), csize):
            yield text[start:start + csize]

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    # identical blobs: nothing to emit
    if tohash == tnhash:
        return ""

    # TODO: deltas
    pieces = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
              (tohash, tnhash, len(tn))]
    for piece in chunk(zlib.compress(tn)):
        pieces.append(fmtline(piece))
    pieces.append('\n')
    return ''.join(pieces)
1097 1097
def diff(repo, node1=None, node2=None, files=None, match=util.always,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # changectx cache, keyed by revision number
    ccache = {}
    def getctx(r):
        if r not in ccache:
            ccache[r] = context.changectx(repo, r)
        return ccache[r]

    # filelog cache so repeated lookups of the same file reuse the revlog
    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    # reading the data for node1 early allows it to play nicely
    # with repo.status and the revlog cache.
    ctx1 = context.changectx(repo, node1)
    # force manifest reading
    man1 = ctx1.manifest()
    date1 = util.datestr(ctx1.date())

    if not changes:
        changes = repo.status(node1, node2, files, match=match)[:5]
    modified, added, removed, deleted, unknown = changes

    if not modified and not added and not removed:
        return

    if node2:
        ctx2 = context.changectx(repo, node2)
        execf2 = ctx2.manifest().execf
    else:
        # comparing against the working directory
        ctx2 = context.workingctx(repo)
        execf2 = util.execfunc(repo.root, None)
        if execf2 is None:
            execf2 = ctx2.parents()[0].manifest().copy().execf

    # returns False if there was no rename between ctx1 and ctx2
    # returns None if the file was created between ctx1 and ctx2
    # returns the (file, node) present in ctx1 that was renamed to f in ctx2
    def renamed(f):
        startrev = ctx1.rev()
        c = ctx2
        crev = c.rev()
        if crev is None:
            # working context: walk back from the changelog tip
            crev = repo.changelog.count()
        orig = f
        while crev > startrev:
            if f in c.files():
                try:
                    src = getfilectx(f, c).renamed()
                except revlog.LookupError:
                    return None
                if src:
                    f = src[0]
            crev = c.parents()[0].rev()
            # try to reuse
            c = getctx(crev)
        if f not in man1:
            return None
        if f == orig:
            return False
        return f

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # find copy/rename sources for every added file up front
        copied = {}
        for f in added:
            src = renamed(f)
            if src:
                copied[f] = src
        srcs = [x[1] for x in copied.items()]

    all = modified + added + removed
    all.sort()
    gone = {}

    for f in all:
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        if opts.git:
            def gitmode(x):
                return x and '100755' or '100644'
            def addmodehdr(header, omode, nmode):
                if omode != nmode:
                    header.append('old mode %s\n' % omode)
                    header.append('new mode %s\n' % nmode)

            a, b = f, f
            if f in added:
                mode = gitmode(execf2(f))
                if f in copied:
                    a = copied[f]
                    omode = gitmode(man1.execf(a))
                    addmodehdr(header, omode, mode)
                    # first copy of a removed source becomes a rename
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                if f in srcs:
                    # the removal is shown as part of a rename elsewhere
                    dodiff = False
                else:
                    mode = gitmode(man1.execf(f))
                    header.append('deleted file mode %s\n' % mode)
            else:
                omode = gitmode(man1.execf(f))
                nmode = gitmode(execf2(f))
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            # git diffs never carry the revision annotation
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(fp, to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                    # ctx2 date may be dynamic
                                    tn, util.datestr(ctx2.date()),
                                    f, r, opts=opts)
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)
1256 1256
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    # pad sequence numbers to the width of the widest revision number
    revwidth = max([len(str(rev)) for rev in revs])

    def dumprev(rev, seqno, fp):
        # emit one changeset: header lines, description, then the diff
        ctx = repo.changectx(rev)
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            # diff against the second parent instead of the first
            parents.reverse()
        if parents:
            prev = parents[0]
        else:
            prev = nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth)
            if fp != sys.stdout and hasattr(fp, 'name'):
                repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and (branch != 'default'):
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        diff(repo, prev, node, fp=fp, opts=opts)
        if fp not in (sys.stdout, repo.ui):
            fp.close()

    for seqno, rev in enumerate(revs):
        dumprev(rev, seqno + 1, fp)
1297 1297
def diffstat(patchlines):
    # Run the external 'diffstat' tool over the given patch lines and
    # return its summary text with the totals line moved to the front,
    # or None when diffstat is unavailable or produces no output.
    if not util.find_exe('diffstat'):
        return
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    try:
        # diffstat writes its report into the temporary file
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        try:
            for line in patchlines: print >> p.tochild, line
            p.tochild.close()
            if p.wait(): return
            fp = os.fdopen(fd, 'r')
            stat = []
            for line in fp: stat.append(line.lstrip())
            # move the trailing "N files changed ..." summary to the top
            last = stat.pop()
            stat.insert(0, last)
            stat = ''.join(stat)
            if stat.startswith('0 files'): raise ValueError
            return stat
        except: raise
    finally:
        # best-effort removal of the temporary file
        try: os.unlink(name)
        except: pass
@@ -1,1614 +1,1618 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specfic implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
17 17 import os, threading, time, calendar, ConfigParser, locale, glob
18 18
# Python 2.3 compatibility: use the builtin set types when available,
# otherwise fall back to the deprecated 'sets' module equivalents.
try:
    set = set
    frozenset = frozenset
except NameError:
    from sets import Set as set, ImmutableSet as frozenset

# Determine the local character encoding: HGENCODING overrides,
# then the locale, then ASCII as a last resort.
try:
    _encoding = os.environ.get("HGENCODING")
    if sys.platform == 'darwin' and not _encoding:
        # On darwin, getpreferredencoding ignores the locale environment and
        # always returns mac-roman. We override this if the environment is
        # not C (has been customized by the user).
        locale.setlocale(locale.LC_CTYPE, '')
        _encoding = locale.getlocale()[1]
    if not _encoding:
        _encoding = locale.getpreferredencoding() or 'ascii'
except locale.Error:
    _encoding = 'ascii'
# how decode errors are handled when converting from the local encoding
_encodingmode = os.environ.get("HGENCODINGMODE", "strict")
_fallbackencoding = 'ISO-8859-1'
39 39
40 40 def tolocal(s):
41 41 """
42 42 Convert a string from internal UTF-8 to local encoding
43 43
44 44 All internal strings should be UTF-8 but some repos before the
45 45 implementation of locale support may contain latin1 or possibly
46 46 other character sets. We attempt to decode everything strictly
47 47 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
48 48 replace unknown characters.
49 49 """
50 50 for e in ('UTF-8', _fallbackencoding):
51 51 try:
52 52 u = s.decode(e) # attempt strict decoding
53 53 return u.encode(_encoding, "replace")
54 54 except LookupError, k:
55 55 raise Abort(_("%s, please check your locale settings") % k)
56 56 except UnicodeDecodeError:
57 57 pass
58 58 u = s.decode("utf-8", "replace") # last ditch
59 59 return u.encode(_encoding, "replace")
60 60
61 61 def fromlocal(s):
62 62 """
63 63 Convert a string from the local character encoding to UTF-8
64 64
65 65 We attempt to decode strings using the encoding mode set by
66 66 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
67 67 characters will cause an error message. Other modes include
68 68 'replace', which replaces unknown characters with a special
69 69 Unicode character, and 'ignore', which drops the character.
70 70 """
71 71 try:
72 72 return s.decode(_encoding, _encodingmode).encode("utf-8")
73 73 except UnicodeDecodeError, inst:
74 74 sub = s[max(0, inst.start-10):inst.start+10]
75 75 raise Abort("decoding near '%s': %s!" % (sub, inst))
76 76 except LookupError, k:
77 77 raise Abort(_("%s, please check your locale settings") % k)
78 78
def locallen(s):
    """Find the length in characters of a local string"""
    decoded = s.decode(_encoding, "replace")
    return len(decoded)
82 82
83 83 def localsub(s, a, b=None):
84 84 try:
85 85 u = s.decode(_encoding, _encodingmode)
86 86 if b is not None:
87 87 u = u[a:b]
88 88 else:
89 89 u = u[:a]
90 90 return u.encode(_encoding, _encodingmode)
91 91 except UnicodeDecodeError, inst:
92 92 sub = s[max(0, inst.start-10), inst.start+10]
93 93 raise Abort(_("decoding near '%s': %s!") % (sub, inst))
94 94
# used by parsedate
# Formats are tried in order; 24-hour variants come before their
# 12-hour '%p' (AM/PM) counterparts.
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S',  #GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',  # fixed: was '%I:%M:%SP', a typo for the %p AM/PM marker
    '%H:%M',
    '%I:%M%p',
    )

# broader formats accepted where an imprecise date is acceptable
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
129 129
class SignalInterrupt(Exception):
    """Exception raised on SIGTERM and SIGHUP."""
    # distinct from KeyboardInterrupt so signal-driven termination can be
    # handled separately from Ctrl-C
132 132
# differences from SafeConfigParser:
# - case-sensitive keys
# - allows values that are not strings (this means that you may not
#   be able to save the configuration to a file)
class configparser(ConfigParser.SafeConfigParser):
    # identity transform: keep option names case-sensitive instead of
    # the default lower-casing
    def optionxform(self, optionstr):
        return optionstr

    # call the plain ConfigParser.set to skip SafeConfigParser's
    # string-only value check
    def set(self, section, option, value):
        return ConfigParser.ConfigParser.set(self, section, option, value)

    # non-string values cannot be interpolated; pass them through untouched
    def _interpolate(self, section, option, rawval, vars):
        if not isinstance(rawval, basestring):
            return rawval
        return ConfigParser.SafeConfigParser._interpolate(self, section,
                                                          option, rawval, vars)
149 149
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    memo = {}
    if func.func_code.co_argcount == 1:
        # single-argument functions skip the tuple packing/unpacking
        def cached(arg):
            try:
                return memo[arg]
            except KeyError:
                memo[arg] = func(arg)
                return memo[arg]
    else:
        def cached(*args):
            try:
                return memo[args]
            except KeyError:
                memo[args] = func(*args)
                return memo[args]

    return cached
168 168
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    (pin, pout) = os.popen2(cmd, 'b')
    def writer():
        # feed the input from a helper thread so the main thread can
        # drain the command's output concurrently
        try:
            pin.write(s)
            pin.close()
        except IOError, inst:
            # the command may exit without consuming all input; only a
            # non-EPIPE error is a real failure
            if inst.errno != errno.EPIPE:
                raise

    # we should use select instead on UNIX, but this will work on most
    # systems, including Windows
    w = threading.Thread(target=writer)
    w.start()
    f = pout.read()
    pout.close()
    w.join()
    return f
188 188
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # on OpenVMS the low status bit set actually means success
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code: raise Abort(_("command '%s' failed: %s") %
                             (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        # best-effort cleanup; ignore failures removing the temp files
        try:
            if inname: os.unlink(inname)
        except: pass
        try:
            if outname: os.unlink(outname)
        except: pass
217 217
# maps a filter-command prefix to the function implementing it;
# consulted by filter() below
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
222 222
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for prefix, fn in filtertable.iteritems():
        if cmd.startswith(prefix):
            rest = cmd[len(prefix):].lstrip()
            return fn(s, rest)
    # no recognized prefix: treat the whole command as a shell pipe
    return pipefilter(s, cmd)
229 229
def binary(s):
    """return true if a string is binary data using diff's heuristic"""
    # diff's rule: a NUL byte within the first 4k marks the data as binary
    return bool(s) and '\0' in s[:4096]
235 235
def unique(g):
    """return the uniq elements of iterable g"""
    seen = {}
    out = []
    for item in g:
        # keep only the first occurrence, preserving order
        if item in seen:
            continue
        seen[item] = 1
        out.append(item)
    return out
245 245
class Abort(Exception):
    """Raised if a command needs to print an error and exit."""
    # the exception argument is the user-facing message
248 248
class UnexpectedOutput(Abort):
    """Raised to print an error with part of output and exit."""
    # subclass of Abort so generic abort handling also catches it
251 251
def always(fn):
    "trivial match predicate: accept every filename"
    return True
def never(fn):
    "trivial match predicate: reject every filename"
    return False
254 254
def expand_glob(pats):
    '''On Windows, expand the implicit globs in a list of patterns'''
    # non-Windows shells expand globs themselves; return a fresh copy
    if os.name != 'nt':
        return list(pats)
    ret = []
    for p in pats:
        kind, name = patkind(p, None)
        if kind is None:
            matches = glob.glob(name)
            if matches:
                ret.extend(matches)
                continue
        # keep typed patterns and globs that matched nothing as-is
        ret.append(p)
    return ret
270 270
def patkind(name, dflt_pat='glob'):
    """Split a string into an optional pattern kind prefix and the
    actual pattern."""
    known = ('re', 'glob', 'path', 'relglob', 'relpath', 'relre')
    for kind in known:
        # NB: a recognized prefix yields a [kind, pattern] list,
        # the fallback below yields a (kind, pattern) tuple
        if name.startswith(kind + ':'):
            return name.split(':', 1)
    return dflt_pat, name
277 277
def globre(pat, head='^', tail='$'):
    "convert a glob pattern into a regexp"
    # i is the scan position; peek() reads the current i at call time,
    # so it tracks the index as the loop advances it
    i, n = 0, len(pat)
    res = ''
    group = False
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                # '**' crosses directory boundaries, '*' does not
                i += 1
                res += '.*'
            else:
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # find the closing bracket; a leading '!' or ']' is literal
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat '[' as a literal
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # glob negation becomes regexp class negation
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            # '{a,b}' alternation maps to a non-capturing group
            group = True
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group = False
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            # backslash escapes the next character, if any
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            res += re.escape(c)
    return head + res + tail
329 329
# characters that introduce a glob construct; used to find a pattern's
# literal (non-glob) prefix
_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
331 331
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives: no relative path exists, fall back to absolute
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    comps1 = n1.split(os.sep)
    comps2 = n2.split('/')
    # count the shared leading components
    shared = 0
    limit = min(len(comps1), len(comps2))
    while shared < limit and comps1[shared] == comps2[shared]:
        shared += 1
    # climb out of what remains of n1, then descend into n2
    climb = ['..'] * (len(comps1) - shared)
    return os.sep.join(climb + comps2[shared:])
356 356
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root

    The result is a '/'-separated path relative to root; raises Abort
    when myname lies outside the repository root.
    """
    # normalize root so it always carries a trailing separator
    if root == os.sep:
        rootsep = os.sep
    elif root.endswith(os.sep):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if name != rootsep and name.startswith(rootsep):
        # fast path: lexically inside the root
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                # reached the filesystem root without meeting repo root
                break
            name = dirname

        raise Abort('%s not under root' % myname)
404 404
def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
    # convenience wrapper: build a matcher with 'glob' as the default
    # pattern kind
    args = (canonroot, cwd, names, inc, exc, 'glob', src)
    return _matcher(*args)
407 407
def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
               globbed=False, default=None):
    # command-line matcher: bare names default to 'relpath' and, unless
    # the shell already expanded them, get implicit glob expansion
    kind = default or 'relpath'
    if kind == 'relpath' and not globbed:
        names = expand_glob(names)
    return _matcher(canonroot, cwd, names, inc, exc, kind, src)
414 414
def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    dflt_pat - if a pattern in names has no explicit type, assume this one
    src - where these patterns came from (e.g. .hgignore)

    a pattern is one of:
    'glob:<glob>' - a glob relative to cwd
    're:<regexp>' - a regular expression
    'path:<path>' - a path relative to canonroot
    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
    'relpath:<path>' - a path relative to cwd
    'relre:<regexp>' - a regexp that doesn't have to match the start of a name
    '<something>' - one of the cases above, selected by the dflt_pat argument

    returns:
    a 3-tuple containing
    - list of roots (places where one should start a recursive walk of the fs);
      this often matches the explicit non-pattern names passed in, but also
      includes the initial part of glob: patterns that has no glob characters
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in
    """

    # a common case: no patterns at all
    if not names and not inc and not exc:
        return [], always, False

    def contains_glob(name):
        # true when any glob metacharacter appears in name
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if not name:
            return ''
        if kind == 're':
            return name
        elif kind == 'path':
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            return globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return re.escape(name) + '(?:/|$)'
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        try:
            # join everything into one big alternation for a single compile
            pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
            return re.compile(pat).match
        except re.error:
            # recompile one by one to pinpoint which pattern is broken
            for k, p in pats:
                try:
                    re.compile('(?:%s)' % regex(k, p, tail))
                except re.error:
                    if src:
                        raise Abort("%s: invalid pattern (%s): %s" %
                                    (src, k, p))
                    else:
                        raise Abort("invalid pattern (%s): %s" % (k, p))
            raise Abort("invalid pattern")

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split('/'):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root) or '.'

    def normalizepats(names, default):
        # canonicalize each pattern and collect walk roots
        pats = []
        roots = []
        anypats = False
        for kind, name in [patkind(p, default) for p in names]:
            if kind in ('glob', 'relpath'):
                name = canonpath(canonroot, cwd, name)
            elif kind in ('relglob', 'path'):
                name = normpath(name)

            pats.append((kind, name))

            if kind in ('glob', 're', 'relglob', 'relre'):
                anypats = True

            if kind == 'glob':
                root = globprefix(name)
                roots.append(root)
            elif kind in ('relpath', 'path'):
                roots.append(name or '.')
            elif kind == 'relglob':
                roots.append('.')
        return roots, pats, anypats

    roots, pats, anypats = normalizepats(names, dflt_pat)

    patmatch = matchfn(pats, '$') or always
    incmatch = always
    if inc:
        dummy, inckinds, dummy = normalizepats(inc, 'glob')
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        dummy, exckinds, dummy = normalizepats(exc, 'glob')
        excmatch = matchfn(exckinds, '(?:/|$)')

    if not names and inc and not exc:
        # common case: hgignore patterns
        match = incmatch
    else:
        match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)

    return (roots, match, (inc or exc or anypats) and True)
542 542
# path of the 'hg' executable, updated by set_hgexecutable()
_hgexecutable = 'hg'
544 544
def set_hgexecutable(path):
    """remember location of the 'hg' executable if easily possible

    path might be None or empty if hg was loaded as a module,
    fall back to 'hg' in this case.
    """
    global _hgexecutable
    if not path:
        # keep the default 'hg'
        return
    _hgexecutable = os.path.abspath(path)
554 554
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status.  if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    # NOTE(review): the mutable default for `environ` is shared across
    # calls; it is only read here, never mutated, so this is safe.
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    # remember current values of every variable we are about to override
    # so the finally block below can restore them
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        # cmd.exe strips one level of quotes from the command line
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        if 'HG' not in os.environ:
            # let hooks and child commands locate the running hg binary
            os.environ['HG'] = _hgexecutable
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if sys.platform == 'OpenVMS' and rc & 1:
            # on OpenVMS an odd status value means success
            rc = 0
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                # duck-typing: a ui-like object has a warn() method
                onerr.warn(errmsg + '\n')
            except AttributeError:
                # otherwise treat onerr as an exception class/factory
                raise onerr(errmsg)
        return rc
    finally:
        # restore environment and working directory even on error
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
605 605
# os.path.lexists is not available on python2.3
def lexists(filename):
    "test whether a file with this name exists. does not follow symlinks"
    try:
        os.lstat(filename)
    except OSError:
        # narrowed from a bare except: only a failed stat means "absent";
        # unrelated exceptions (e.g. KeyboardInterrupt) must propagate
        return False
    return True
614 614
def rename(src, dst):
    """forcibly rename a file, replacing dst if it exists"""
    try:
        os.rename(src, dst)
    except OSError, err: # FIXME: check err (EEXIST ?)
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we create
        # temporary file, delete it, rename destination to that name,
        # then delete that. then rename is safe to do.
        fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
        os.close(fd)
        # mkstemp created the file; remove it so the name is free
        os.unlink(temp)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)
632 632
def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # removedirs() deletes empty ancestor directories one by one and
    # raises OSError as soon as it reaches a non-empty one - which is
    # exactly the point where we want to stop pruning
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass
641 641
def copyfile(src, dest):
    "copy a file, preserving mode"
    if os.path.islink(src):
        # recreate the symlink at dest rather than copying its target
        try:
            os.unlink(dest)
        except:
            # best-effort removal of a pre-existing dest; symlink below
            # will raise if dest still cannot be created
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            # copymode preserves the permission bits of src
            shutil.copymode(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))
656 656
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # default: hardlink only when src and dst share a device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if not os.path.isdir(src):
        # single file: try a hardlink first, fall back to a real copy
        if not hardlink:
            shutil.copy(src, dst)
            return
        try:
            os_link(src, dst)
        except (IOError, OSError):
            hardlink = False
            shutil.copy(src, dst)
        return

    # directory: recreate it and recurse into every entry, propagating
    # the (possibly downgraded) hardlink decision
    os.mkdir(dst)
    for name in os.listdir(src):
        copyfiles(os.path.join(src, name), os.path.join(dst, name), hardlink)
679 679
def audit_path(path):
    """Abort if path contains dangerous components"""
    components = os.path.normcase(path).split(os.sep)
    # reject absolute paths (empty first component or a drive letter),
    # anything rooted at .hg, and any '..' traversal
    unsafe = (os.path.splitdrive(path)[0]
              or components[0] in ('.hg', '')
              or os.pardir in components)
    if unsafe:
        raise Abort(_("path contains illegal component: %s") % path)
686 686
687 687 def _makelock_file(info, pathname):
688 688 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
689 689 os.write(ld, info)
690 690 os.close(ld)
691 691
def _readlock_file(pathname):
    """Return the raw contents of the lock file at pathname."""
    fp = posixfile(pathname)
    return fp.read()
694 694
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    # lstat: count links of the entry itself, do not follow symlinks
    st = os.lstat(pathname)
    return st.st_nlink
698 698
# use the real os.link where the platform provides one; otherwise expose
# a stub with the same signature that always fails
if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        # errno 0: there is no standard errno for "unsupported operation"
        raise OSError(0, _("Hardlinks not supported"))
704 704
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # file-like object without a descriptor: stat it by name instead
        return os.stat(fp.name)
    return os.fstat(fd)
711 711
712 712 posixfile = file
713 713
def is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    winver = getattr(sys, 'getwindowsversion', None)
    if winver is not None:
        # platform field 1 == VER_PLATFORM_WIN32_WINDOWS (the 9x family)
        return winver()[3] == 1
    # non-Windows pythons lack getwindowsversion; fall back to sniffing
    # the shell name (command.com on 9x)
    return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
720 720
# hook assigned by platform code (win32) when getpass cannot work; it is
# called with no arguments and should return the user name
getuser_fallback = None

def getuser():
    '''return name of current user'''
    try:
        return getpass.getuser()
    except ImportError:
        # import of pwd will fail on windows - try fallback
        if getuser_fallback:
            return getuser_fallback()
        # raised if win32api not available
        raise Abort(_('user name not available - set USERNAME '
                      'environment variable'))
734 734
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    try:
        import pwd
    except ImportError:
        # no pwd module (e.g. windows): names cannot be resolved
        return None
    if uid is None:
        uid = os.getuid()
    try:
        return pwd.getpwuid(uid)[0]
    except KeyError:
        # unknown uid: fall back to the numeric id as a string
        return str(uid)
749 749
def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    try:
        import grp
    except ImportError:
        # no grp module (e.g. windows): names cannot be resolved
        return None
    if gid is None:
        gid = os.getgid()
    try:
        return grp.getgrgid(gid)[0]
    except KeyError:
        # unknown gid: fall back to the numeric id as a string
        return str(gid)
764 764
765 765 # File system features
766 766
def checkfolding(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.  Returns True when the filesystem is
    case-sensitive.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        # name was already all upper-case; probe the lower-case variant
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
    except OSError:
        # narrowed from a bare except: a failed stat means the
        # case-swapped sibling is absent, i.e. case-sensitive
        return True
    # identical stat => both spellings name one file => case-insensitive
    return s2 != s1
786 786
def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """
    # create a scratch file, flip its exec bits and see whether the
    # change sticks; filesystems like FAT silently drop the bits
    fh, fn = tempfile.mkstemp("", "", path)
    os.close(fh)
    m = os.stat(fn).st_mode
    os.chmod(fn, m ^ 0111)
    r = (os.stat(fn).st_mode != m)
    os.unlink(fn)
    return r
800 800
def execfunc(path, fallback):
    '''return an is_exec() function with default to fallback'''
    if not checkexec(path):
        # filesystem has no usable exec bits: use the caller's fallback
        return fallback
    return lambda x: is_exec(os.path.join(path, x))
806 806
def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    name = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", name)
    except (OSError, AttributeError):
        # no os.symlink on this platform, or the filesystem refused
        return False
    os.unlink(name)
    return True
818 818
def linkfunc(path, fallback):
    '''return an is_link() function with default to fallback'''
    if not checklink(path):
        # symlinks unsupported here: use the caller-provided fallback
        return fallback
    return lambda x: os.path.islink(os.path.join(path, x))
824 824
825 825 _umask = os.umask(0)
826 826 os.umask(_umask)
827 827
def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    # windows tooling mangles line endings, so default to binary there
    return os.name == 'nt'
831 831
832 832 # Platform specific variants
833 833 if os.name == 'nt':
834 834 import msvcrt
835 835 nulldev = 'NUL:'
836 836
    class winstdout:
        '''stdout on windows misbehaves if sent through a pipe'''

        def __init__(self, fp):
            # fp: the real stdout object being wrapped
            self.fp = fp

        def __getattr__(self, key):
            # delegate everything not overridden to the real file
            return getattr(self.fp, key)

        def close(self):
            try:
                self.fp.close()
            except: pass

        def write(self, s):
            try:
                return self.fp.write(s)
            except IOError, inst:
                # a broken pipe surfaces here as errno 0; re-raise it as
                # a conventional EPIPE after closing the stream
                if inst.errno != 0: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')

        def flush(self):
            try:
                return self.fp.flush()
            except IOError, inst:
                # same translation for the EINVAL flush failure mode
                if inst.errno != errno.EINVAL: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')
866 866
867 867 sys.stdout = winstdout(sys.stdout)
868 868
    def system_rcpath():
        try:
            # provided by util_win32 when the win32 extensions are present
            return system_rcpath_win32()
        except:
            # bare except is deliberate best-effort: any failure of the
            # win32 lookup falls back to the legacy fixed location
            return [r'c:\mercurial\mercurial.ini']
874 874
    def user_rcpath():
        '''return os-specific hgrc search path to the user dir'''
        try:
            # provided by util_win32 when the win32 extensions are present
            userrc = user_rcpath_win32()
        except:
            # best-effort fallback to the home directory
            userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
        path = [userrc]
        userprofile = os.environ.get('USERPROFILE')
        if userprofile:
            path.append(os.path.join(userprofile, 'mercurial.ini'))
        return path
886 886
887 887 def parse_patch_output(output_line):
888 888 """parses the output produced by patch and returns the file name"""
889 889 pf = output_line[14:]
890 890 if pf[0] == '`':
891 891 pf = pf[1:-1] # Remove the quotes
892 892 return pf
893 893
    def testpid(pid):
        '''return False if pid dead, True if running or not known'''
        # stub: liveness cannot be probed without the win32 APIs
        return True
897 897
    def set_exec(f, mode):
        # no-op: windows filesystems have no exec bit
        pass
900 900
    def set_link(f, mode):
        # no-op: symlinks are not managed on windows here
        pass
903 903
    def set_binary(fd):
        # stop msvcrt translating \n <-> \r\n on this descriptor
        msvcrt.setmode(fd.fileno(), os.O_BINARY)
906 906
    def pconvert(path):
        # normalize to forward slashes for internal use
        return path.replace("\\", "/")
909 909
    def localpath(path):
        # convert an internal '/' path to the native '\' form
        return path.replace('/', '\\')
912 912
    def normpath(path):
        # normalize, then convert separators back to '/'
        return pconvert(os.path.normpath(path))
915 915
916 916 makelock = _makelock_file
917 917 readlock = _readlock_file
918 918
    def samestat(s1, s2):
        # windows stat has no usable inode numbers; never report sameness
        return False
921 921
    # A sequence of backslashes is special iff it precedes a double quote:
    # - if there's an even number of backslashes, the double quote is not
    #   quoted (i.e. it ends the quoted region)
    # - if there's an odd number of backslashes, the double quote is quoted
    # - in both cases, every pair of backslashes is unquoted into a single
    #   backslash
    # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
    # So, to quote a string, we must surround it in double quotes, double
    # the number of backslashes that preceed double quotes and add another
    # backslash before every double quote (being careful with the double
    # quote we've appended to the end)
    _quotere = None
    def shellquote(s):
        """Quote s for safe use on a cmd.exe command line."""
        global _quotere
        if _quotere is None:
            # compiled lazily so module import stays cheap
            _quotere = re.compile(r'(\\*)("|\\$)')
        return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
939 939
    def explain_exit(code):
        # windows has no wait-status encoding; code is the exit status
        return _("exited with status %d") % code, code
942 942
    # if you change this stub into a real check, please try to implement the
    # username and groupname functions above, too.
    def isowner(fp, st=None):
        # always claim ownership; windows ACLs are not inspected
        return True
947 947
948 948 def find_in_path(name, path, default=None):
949 949 '''find name in search path. path can be string (will be split
950 950 with os.pathsep), or iterable thing that returns strings. if name
951 951 found, return path to name. else return default. name is looked up
952 952 using cmd.exe rules, using PATHEXT.'''
953 953 if isinstance(path, str):
954 954 path = path.split(os.pathsep)
955 955
956 956 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
957 957 pathext = pathext.lower().split(os.pathsep)
958 958 isexec = os.path.splitext(name)[1].lower() in pathext
959 959
960 960 for p in path:
961 961 p_name = os.path.join(p, name)
962 962
963 963 if isexec and os.path.exists(p_name):
964 964 return p_name
965 965
966 966 for ext in pathext:
967 967 p_name_ext = p_name + ext
968 968 if os.path.exists(p_name_ext):
969 969 return p_name_ext
970 970 return default
971 971
    def set_signal_handler():
        try:
            # provided by util_win32 when the win32 extensions are present
            set_signal_handler_win32()
        except NameError:
            # util_win32 not importable: nothing to install
            pass
977 977
978 978 try:
979 979 # override functions with win32 versions if possible
980 980 from util_win32 import *
981 981 if not is_win_9x():
982 982 posixfile = posixfile_nt
983 983 except ImportError:
984 984 pass
985 985
986 986 else:
987 987 nulldev = '/dev/null'
988 988
989 989 def rcfiles(path):
990 990 rcs = [os.path.join(path, 'hgrc')]
991 991 rcdir = os.path.join(path, 'hgrc.d')
992 992 try:
993 993 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
994 994 if f.endswith(".rc")])
995 995 except OSError:
996 996 pass
997 997 return rcs
998 998
999 999 def system_rcpath():
1000 1000 path = []
1001 1001 # old mod_python does not set sys.argv
1002 1002 if len(getattr(sys, 'argv', [])) > 0:
1003 1003 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1004 1004 '/../etc/mercurial'))
1005 1005 path.extend(rcfiles('/etc/mercurial'))
1006 1006 return path
1007 1007
    def user_rcpath():
        # per-user configuration lives in ~/.hgrc on posix
        return [os.path.expanduser('~/.hgrc')]
1010 1010
1011 1011 def parse_patch_output(output_line):
1012 1012 """parses the output produced by patch and returns the file name"""
1013 1013 pf = output_line[14:]
1014 1014 if os.sys.platform == 'OpenVMS':
1015 1015 if pf[0] == '`':
1016 1016 pf = pf[1:-1] # Remove the quotes
1017 1017 else:
1018 1018 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1019 1019 pf = pf[1:-1] # Remove the quotes
1020 1020 return pf
1021 1021
    def is_exec(f):
        """check whether a file is executable"""
        # only the owner exec bit (0100) is consulted; lstat does not
        # follow symlinks
        return (os.lstat(f).st_mode & 0100 != 0)
1025 1025
    def set_exec(f, mode):
        # set or clear the exec bits of f to match `mode` (a boolean)
        s = os.lstat(f).st_mode
        if (s & 0100 != 0) == mode:
            # already in the requested state
            return
        if mode:
            # Turn on +x for every +r bit when making a file executable
            # and obey umask.
            os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
        else:
            os.chmod(f, s & 0666)
1036 1036
    def set_link(f, mode):
        """make a file a symbolic link/regular file

        if a file is changed to a link, its contents become the link data
        if a link is changed to a file, its link data become its contents
        """

        m = os.path.islink(f)
        if m == bool(mode):
            # already the requested kind of entry
            return

        if mode: # switch file to link
            data = file(f).read()
            os.unlink(f)
            os.symlink(data, f)
        else:
            # switch link to file
            data = os.readlink(f)
            os.unlink(f)
            file(f, "w").write(data)
1056 1056
    def set_binary(fd):
        # no-op: posix makes no text/binary distinction
        pass
1059 1059
    def pconvert(path):
        # posix paths already use '/'
        return path
1062 1062
    def localpath(path):
        # internal form is already the native posix form
        return path
1065 1065
1066 1066 normpath = os.path.normpath
1067 1067 samestat = os.path.samestat
1068 1068
    def makelock(info, pathname):
        try:
            # a symlink is the cheapest atomic lock primitive on posix
            os.symlink(info, pathname)
        except OSError, why:
            if why.errno == errno.EEXIST:
                # lock already held: propagate so callers can retry
                raise
            else:
                # symlinks unsupported on this fs: fall back to a file
                _makelock_file(info, pathname)
1077 1077
    def readlock(pathname):
        try:
            return os.readlink(pathname)
        except OSError, why:
            if why.errno in (errno.EINVAL, errno.ENOSYS):
                # not a symlink (EINVAL) or symlinks unsupported
                # (ENOSYS): the lock must be a plain file
                return _readlock_file(pathname)
            else:
                raise
1086 1086
1087 1087 def shellquote(s):
1088 1088 if os.sys.platform == 'OpenVMS':
1089 1089 return '"%s"' % s
1090 1090 else:
1091 1091 return "'%s'" % s.replace("'", "'\\''")
1092 1092
    def testpid(pid):
        '''return False if pid dead, True if running or not sure'''
        if os.sys.platform == 'OpenVMS':
            # no reliable probe on VMS; assume alive
            return True
        try:
            # signal 0 performs error checking only; nothing is delivered
            os.kill(pid, 0)
            return True
        except OSError, inst:
            # EPERM means the process exists but belongs to someone else
            return inst.errno != errno.ESRCH
1102 1102
1103 1103 def explain_exit(code):
1104 1104 """return a 2-tuple (desc, code) describing a process's status"""
1105 1105 if os.WIFEXITED(code):
1106 1106 val = os.WEXITSTATUS(code)
1107 1107 return _("exited with status %d") % val, val
1108 1108 elif os.WIFSIGNALED(code):
1109 1109 val = os.WTERMSIG(code)
1110 1110 return _("killed by signal %d") % val, val
1111 1111 elif os.WIFSTOPPED(code):
1112 1112 val = os.WSTOPSIG(code)
1113 1113 return _("stopped by signal %d") % val, val
1114 1114 raise ValueError(_("invalid exit code"))
1115 1115
1116 1116 def isowner(fp, st=None):
1117 1117 """Return True if the file object f belongs to the current user.
1118 1118
1119 1119 The return value of a util.fstat(f) may be passed as the st argument.
1120 1120 """
1121 1121 if st is None:
1122 1122 st = fstat(fp)
1123 1123 return st.st_uid == os.getuid()
1124 1124
1125 1125 def find_in_path(name, path, default=None):
1126 1126 '''find name in search path. path can be string (will be split
1127 1127 with os.pathsep), or iterable thing that returns strings. if name
1128 1128 found, return path to name. else return default.'''
1129 1129 if isinstance(path, str):
1130 1130 path = path.split(os.pathsep)
1131 1131 for p in path:
1132 1132 p_name = os.path.join(p, name)
1133 1133 if os.path.exists(p_name):
1134 1134 return p_name
1135 1135 return default
1136 1136
    def set_signal_handler():
        # no-op: default signal handling is fine on posix
        pass
1139 1139
def find_exe(name, default=None):
    '''find path of an executable.
    if name contains a path component, return it as is. otherwise,
    use normal executable search path.'''

    if os.sep in name or sys.platform == 'OpenVMS':
        # don't check the executable bit. if the file isn't
        # executable, whoever tries to actually run it will give a
        # much more useful error message.
        return name
    search_path = os.environ.get('PATH', '')
    return find_in_path(name, search_path, default=default)
1151 1151
def _buildencodefun():
    """Build the (encodefilename, decodefilename) function pair.

    Builds a character map that escapes control/high bytes and the
    windows-reserved characters as ~XX, and folds upper-case letters to
    '_' + lower-case so names survive case-insensitive filesystems.
    """
    # '_' is the escape character for folded upper-case letters
    e = '_'
    win_reserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
    for x in (range(32) + range(126, 256) + win_reserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    # dmap is the exact inverse of cmap, keyed by the encoded sequences
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            # encoded sequences are 1-3 characters long; try each length
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i+l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                # no valid sequence at position i: the input is corrupt
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in s]),
            lambda s: "".join(list(decode(s))))

encodefilename, decodefilename = _buildencodefun()
1179 1179
def encodedopener(openerfn, fn):
    """Wrap openerfn so every path is passed through fn first."""
    def wrapped(path, *args, **kw):
        return openerfn(fn(path), *args, **kw)
    return wrapped
1184 1184
def mktempcopy(name, emptyok=False):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    # the temp file lives in the same directory so a later rename over
    # `name` stays on one filesystem
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want.  If the original file already exists, just copy
    # its mode.  Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = 0666 & ~_umask
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # nothing to copy: the empty temp file is the result
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        # best-effort cleanup of the partial temp file, then re-raise
        try: os.unlink(temp)
        except: pass
        raise
    return temp
1229 1229
class atomictempfile(posixfile):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file.  When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode):
        # __name: final destination; temp: working copy receiving writes
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode))
        posixfile.__init__(self, self.temp, mode)

    def rename(self):
        # commit: close the temp file and move it over the target
        if not self.closed:
            posixfile.close(self)
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        # abort path: rename() was never called, so drop the temp file
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            posixfile.close(self)
1253 1253
1254 1254 class opener(object):
1255 1255 """Open files relative to a base directory
1256 1256
1257 1257 This class is used to hide the details of COW semantics and
1258 1258 remote file access from higher level code.
1259 1259 """
1260 1260 def __init__(self, base, audit=True):
1261 1261 self.base = base
1262 1262 self.audit = audit
1263 1263
1264 1264 def __getattr__(self, name):
1265 1265 if name == '_can_symlink':
1266 1266 self._can_symlink = checklink(self.base)
1267 1267 return self._can_symlink
1268 1268 raise AttributeError(name)
1269 1269
1270 1270 def __call__(self, path, mode="r", text=False, atomictemp=False):
1271 1271 if self.audit:
1272 1272 audit_path(path)
1273 1273 f = os.path.join(self.base, path)
1274 1274
1275 1275 if not text and "b" not in mode:
1276 1276 mode += "b" # for that other OS
1277 1277
1278 1278 if mode[0] != "r":
1279 1279 try:
1280 1280 nlink = nlinks(f)
1281 1281 except OSError:
1282 1282 nlink = 0
1283 1283 d = os.path.dirname(f)
1284 1284 if not os.path.isdir(d):
1285 1285 os.makedirs(d)
1286 1286 if atomictemp:
1287 1287 return atomictempfile(f, mode)
1288 1288 if nlink > 1:
1289 1289 rename(mktempcopy(f), f)
1290 1290 return posixfile(f, mode)
1291 1291
1292 1292 def symlink(self, src, dst):
1293 1293 if self.audit:
1294 1294 audit_path(dst)
1295 1295 linkname = os.path.join(self.base, dst)
1296 1296 try:
1297 1297 os.unlink(linkname)
1298 1298 except OSError:
1299 1299 pass
1300 1300
1301 1301 dirname = os.path.dirname(linkname)
1302 1302 if not os.path.exists(dirname):
1303 1303 os.makedirs(dirname)
1304 1304
1305 1305 if self._can_symlink:
1306 os.symlink(src, linkname)
1306 try:
1307 os.symlink(src, linkname)
1308 except OSError, err:
1309 raise OSError(err.errno, _('could not symlink to %r: %s') %
1310 (src, err.strerror), linkname)
1307 1311 else:
1308 1312 f = self(self, dst, "w")
1309 1313 f.write(src)
1310 1314 f.close()
1311 1315
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        # set once the underlying iterator has been exhausted
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if not self.iterempty:
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            for ch in self.in_iter:
                collector.write(ch)
            self.buf = collector.getvalue()
            self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iterempty = True
            self.buf = collector.getvalue()
        # buffer() keeps the unread tail without copying it
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
1356 1360
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        # nbytes == 0 short-circuits: never call read(0)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1373 1377
def makedate():
    """Return (unixtime, tzoffset) for the current local time."""
    now = time.localtime()
    if now[8] == 1 and time.daylight:
        # DST currently in effect: use the alternate zone offset
        offset = time.altzone
    else:
        offset = time.timezone
    return time.mktime(now), offset
1381 1385
def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    # shift epoch seconds by the offset so strftime renders local time
    result = time.strftime(format, time.gmtime(float(t) - tz))
    if timezone:
        result += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
    return result
1392 1396
def strdate(string, format, defaults):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        # return offset seconds for a trailing +HHMM/-HHMM, GMT or UTC
        # marker, or None when the string carries no recognizable zone
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            tz = int(tz)
            offset = - 3600 * (tz / 100) - 60 * (tz % 100)
            return offset
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        # strip the recognized zone token before strptime parsing
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    # timegm treats the tuple as UTC, giving the "local-as-UTC" time
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1427 1431
def parsedate(string, formats=None, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    The date may be a "unixtime offset" string or in one of the specified
    formats.  Raises Abort for unparseable or out-of-range dates."""
    if not string:
        return 0, 0
    if not formats:
        formats = defaultdateformats
    string = string.strip()
    try:
        # fast path: internal "unixtime offset" representation
        when, offset = map(int, string.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        # each entry like "mb" lists alternative strptime codes for one
        # field; default only when none of them appears in the format
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                elif part[0] in "dm":
                    defaults[part] = "1"
                else:
                    defaults[part] = datestr(now, "%" + part[0], False)

        for format in formats:
            try:
                when, offset = strdate(string, format, defaults)
            except ValueError:
                pass
            else:
                break
        else:
            # no format matched
            raise Abort(_('invalid date: %r ') % string)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1471 1475
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        # earliest timestamp the (possibly partial) date could denote
        return parsedate(date, extendeddateformats)[0]

    def upper(date):
        # latest timestamp: pad missing fields with their maxima, trying
        # progressively shorter month lengths for the day-of-month field
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                # NOTE(review): bare except also hides non-date errors;
                # parsedate raises Abort/ValueError - consider narrowing
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # "-N": everything in the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # inclusive range between two dates
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # bare date: match anywhere within its span of accuracy
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1519 1523
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop the domain part of an email address
    user = user.split('@', 1)[0]
    # keep only what follows '<' (the address in "Name <addr>")
    idx = user.find('<')
    if idx >= 0:
        user = user[idx + 1:]
    # truncate at the first space, then at the first dot
    user = user.split(' ', 1)[0]
    user = user.split('.', 1)[0]
    return user
1535 1539
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) > maxlength:
        # reserve three characters for the trailing "..."
        return text[:maxlength - 3] + "..."
    return text
1542 1546
def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        # only errors on the root path itself are fatal
        if err.filename == path:
            raise err

    for root, dirs, files in os.walk(path, onerror=errhandler):
        if '.hg' in dirs:
            yield root
            # do not descend into a repository's own subdirectories
            dirs[:] = []
1555 1559
1556 1560 _rcpath = None
1557 1561
def os_rcpath():
    '''return default os-specific hgrc search path'''
    # system-wide configuration files first, then the per-user ones
    rcs = system_rcpath()
    rcs.extend(user_rcpath())
    return [os.path.normpath(f) for f in rcs]
1564 1568
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        # computed once and cached for the lifetime of the process
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = os_rcpath()
        return _rcpath
    _rcpath = []
    for p in os.environ['HGRCPATH'].split(os.pathsep):
        if not p:
            continue
        if os.path.isdir(p):
            _rcpath.extend([os.path.join(p, f) for f in os.listdir(p)
                            if f.endswith('.rc')])
        else:
            _rcpath.append(p)
    return _rcpath
1586 1590
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    # (threshold multiplier, divisor, format) from largest to smallest:
    # the first unit where nbytes >= multiplier * divisor wins, giving
    # 3 significant digits of precision
    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, fmt in units:
        if nbytes >= divisor * multiplier:
            return fmt % (nbytes / float(divisor))
    # below every threshold (i.e. zero): plain byte count
    return units[-1][2] % nbytes
1607 1611
def drop_scheme(scheme, path):
    """Strip a leading 'scheme:' (and optional '//') from path."""
    prefix = scheme + ':'
    if path.startswith(prefix):
        path = path[len(prefix):]
        if path.startswith('//'):
            path = path[2:]
    return path
@@ -1,310 +1,310 b''
1 1 # util_win32.py - utility functions that use win32 API
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of
7 7 # the GNU General Public License, incorporated herein by reference.
8 8
9 9 # Mark Hammond's win32all package allows better functionality on
10 10 # Windows. this module overrides definitions in util.py. if not
11 11 # available, import of this module will fail, and generic code will be
12 12 # used.
13 13
14 14 import win32api
15 15
16 16 from i18n import _
17 17 import errno, os, pywintypes, win32con, win32file, win32process
18 18 import cStringIO, winerror
19 19 from win32com.shell import shell,shellcon
20 20
class WinError:
    # Mixin that unpacks a pywintypes.error tuple into win_errno,
    # win_function and win_strerror, and maps the windows error number
    # to the nearest posix errno.  Combined with IOError/OSError by the
    # WinIOError/WinOSError subclasses below.

    # windows error number -> closest posix errno value; unknown
    # numbers fall back to 0 via winerror_map.get(..., 0) in subclasses
    winerror_map = {
        winerror.ERROR_ACCESS_DENIED: errno.EACCES,
        winerror.ERROR_ACCOUNT_DISABLED: errno.EACCES,
        winerror.ERROR_ACCOUNT_RESTRICTION: errno.EACCES,
        winerror.ERROR_ALREADY_ASSIGNED: errno.EBUSY,
        winerror.ERROR_ALREADY_EXISTS: errno.EEXIST,
        winerror.ERROR_ARITHMETIC_OVERFLOW: errno.ERANGE,
        winerror.ERROR_BAD_COMMAND: errno.EIO,
        winerror.ERROR_BAD_DEVICE: errno.ENODEV,
        winerror.ERROR_BAD_DRIVER_LEVEL: errno.ENXIO,
        winerror.ERROR_BAD_EXE_FORMAT: errno.ENOEXEC,
        winerror.ERROR_BAD_FORMAT: errno.ENOEXEC,
        winerror.ERROR_BAD_LENGTH: errno.EINVAL,
        winerror.ERROR_BAD_PATHNAME: errno.ENOENT,
        winerror.ERROR_BAD_PIPE: errno.EPIPE,
        winerror.ERROR_BAD_UNIT: errno.ENODEV,
        winerror.ERROR_BAD_USERNAME: errno.EINVAL,
        winerror.ERROR_BROKEN_PIPE: errno.EPIPE,
        winerror.ERROR_BUFFER_OVERFLOW: errno.ENAMETOOLONG,
        winerror.ERROR_BUSY: errno.EBUSY,
        winerror.ERROR_BUSY_DRIVE: errno.EBUSY,
        winerror.ERROR_CALL_NOT_IMPLEMENTED: errno.ENOSYS,
        winerror.ERROR_CANNOT_MAKE: errno.EACCES,
        winerror.ERROR_CANTOPEN: errno.EIO,
        winerror.ERROR_CANTREAD: errno.EIO,
        winerror.ERROR_CANTWRITE: errno.EIO,
        winerror.ERROR_CRC: errno.EIO,
        winerror.ERROR_CURRENT_DIRECTORY: errno.EACCES,
        winerror.ERROR_DEVICE_IN_USE: errno.EBUSY,
        winerror.ERROR_DEV_NOT_EXIST: errno.ENODEV,
        winerror.ERROR_DIRECTORY: errno.EINVAL,
        winerror.ERROR_DIR_NOT_EMPTY: errno.ENOTEMPTY,
        winerror.ERROR_DISK_CHANGE: errno.EIO,
        winerror.ERROR_DISK_FULL: errno.ENOSPC,
        winerror.ERROR_DRIVE_LOCKED: errno.EBUSY,
        winerror.ERROR_ENVVAR_NOT_FOUND: errno.EINVAL,
        winerror.ERROR_EXE_MARKED_INVALID: errno.ENOEXEC,
        winerror.ERROR_FILENAME_EXCED_RANGE: errno.ENAMETOOLONG,
        winerror.ERROR_FILE_EXISTS: errno.EEXIST,
        winerror.ERROR_FILE_INVALID: errno.ENODEV,
        winerror.ERROR_FILE_NOT_FOUND: errno.ENOENT,
        winerror.ERROR_GEN_FAILURE: errno.EIO,
        winerror.ERROR_HANDLE_DISK_FULL: errno.ENOSPC,
        winerror.ERROR_INSUFFICIENT_BUFFER: errno.ENOMEM,
        winerror.ERROR_INVALID_ACCESS: errno.EACCES,
        winerror.ERROR_INVALID_ADDRESS: errno.EFAULT,
        winerror.ERROR_INVALID_BLOCK: errno.EFAULT,
        winerror.ERROR_INVALID_DATA: errno.EINVAL,
        winerror.ERROR_INVALID_DRIVE: errno.ENODEV,
        winerror.ERROR_INVALID_EXE_SIGNATURE: errno.ENOEXEC,
        winerror.ERROR_INVALID_FLAGS: errno.EINVAL,
        winerror.ERROR_INVALID_FUNCTION: errno.ENOSYS,
        winerror.ERROR_INVALID_HANDLE: errno.EBADF,
        winerror.ERROR_INVALID_LOGON_HOURS: errno.EACCES,
        winerror.ERROR_INVALID_NAME: errno.EINVAL,
        winerror.ERROR_INVALID_OWNER: errno.EINVAL,
        winerror.ERROR_INVALID_PARAMETER: errno.EINVAL,
        winerror.ERROR_INVALID_PASSWORD: errno.EPERM,
        winerror.ERROR_INVALID_PRIMARY_GROUP: errno.EINVAL,
        winerror.ERROR_INVALID_SIGNAL_NUMBER: errno.EINVAL,
        winerror.ERROR_INVALID_TARGET_HANDLE: errno.EIO,
        winerror.ERROR_INVALID_WORKSTATION: errno.EACCES,
        winerror.ERROR_IO_DEVICE: errno.EIO,
        winerror.ERROR_IO_INCOMPLETE: errno.EINTR,
        winerror.ERROR_LOCKED: errno.EBUSY,
        winerror.ERROR_LOCK_VIOLATION: errno.EACCES,
        winerror.ERROR_LOGON_FAILURE: errno.EACCES,
        winerror.ERROR_MAPPED_ALIGNMENT: errno.EINVAL,
        winerror.ERROR_META_EXPANSION_TOO_LONG: errno.E2BIG,
        winerror.ERROR_MORE_DATA: errno.EPIPE,
        winerror.ERROR_NEGATIVE_SEEK: errno.ESPIPE,
        winerror.ERROR_NOACCESS: errno.EFAULT,
        winerror.ERROR_NONE_MAPPED: errno.EINVAL,
        winerror.ERROR_NOT_ENOUGH_MEMORY: errno.ENOMEM,
        winerror.ERROR_NOT_READY: errno.EAGAIN,
        winerror.ERROR_NOT_SAME_DEVICE: errno.EXDEV,
        winerror.ERROR_NO_DATA: errno.EPIPE,
        winerror.ERROR_NO_MORE_SEARCH_HANDLES: errno.EIO,
        winerror.ERROR_NO_PROC_SLOTS: errno.EAGAIN,
        winerror.ERROR_NO_SUCH_PRIVILEGE: errno.EACCES,
        winerror.ERROR_OPEN_FAILED: errno.EIO,
        winerror.ERROR_OPEN_FILES: errno.EBUSY,
        winerror.ERROR_OPERATION_ABORTED: errno.EINTR,
        winerror.ERROR_OUTOFMEMORY: errno.ENOMEM,
        winerror.ERROR_PASSWORD_EXPIRED: errno.EACCES,
        winerror.ERROR_PATH_BUSY: errno.EBUSY,
        winerror.ERROR_PATH_NOT_FOUND: errno.ENOENT,
        winerror.ERROR_PIPE_BUSY: errno.EBUSY,
        winerror.ERROR_PIPE_CONNECTED: errno.EPIPE,
        winerror.ERROR_PIPE_LISTENING: errno.EPIPE,
        winerror.ERROR_PIPE_NOT_CONNECTED: errno.EPIPE,
        winerror.ERROR_PRIVILEGE_NOT_HELD: errno.EACCES,
        winerror.ERROR_READ_FAULT: errno.EIO,
        winerror.ERROR_SEEK: errno.EIO,
        winerror.ERROR_SEEK_ON_DEVICE: errno.ESPIPE,
        winerror.ERROR_SHARING_BUFFER_EXCEEDED: errno.ENFILE,
        winerror.ERROR_SHARING_VIOLATION: errno.EACCES,
        winerror.ERROR_STACK_OVERFLOW: errno.ENOMEM,
        winerror.ERROR_SWAPERROR: errno.ENOENT,
        winerror.ERROR_TOO_MANY_MODULES: errno.EMFILE,
        winerror.ERROR_TOO_MANY_OPEN_FILES: errno.EMFILE,
        winerror.ERROR_UNRECOGNIZED_MEDIA: errno.ENXIO,
        winerror.ERROR_UNRECOGNIZED_VOLUME: errno.ENODEV,
        winerror.ERROR_WAIT_NO_CHILDREN: errno.ECHILD,
        winerror.ERROR_WRITE_FAULT: errno.EIO,
        winerror.ERROR_WRITE_PROTECT: errno.EROFS,
        }

    def __init__(self, err):
        # err is the (number, function, message) tuple carried by
        # pywintypes.error
        self.win_errno, self.win_function, self.win_strerror = err
        # windows messages end with a period; strip it to match the
        # posix strerror style
        if self.win_strerror.endswith('.'):
            self.win_strerror = self.win_strerror[:-1]
134 134
135 135 class WinIOError(WinError, IOError):
136 136 def __init__(self, err, filename=None):
137 137 WinError.__init__(self, err)
138 138 IOError.__init__(self, self.winerror_map.get(self.win_errno, 0),
139 139 self.win_strerror)
140 140 self.filename = filename
141 141
class WinOSError(WinError, OSError):
    # OSError flavoured wrapper around a pywintypes.error tuple
    def __init__(self, err):
        WinError.__init__(self, err)
        posix_errno = self.winerror_map.get(self.win_errno, 0)
        OSError.__init__(self, posix_errno, self.win_strerror)
147 147
def os_link(src, dst):
    """create a hard link dst pointing at src; raises WinOSError on
    failure"""
    # NB will only succeed on NTFS
    try:
        win32file.CreateHardLink(dst, src)
    except pywintypes.error, details:
        # re-raise as a posix-flavoured OSError so callers can test
        # e.errno as usual
        raise WinOSError(details)
154 154
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    try:
        handle = win32file.CreateFile(
            pathname, win32file.GENERIC_READ,
            win32file.FILE_SHARE_READ, None,
            win32file.OPEN_EXISTING, 0, None)
        # BY_HANDLE_FILE_INFORMATION: index 7 is nNumberOfLinks
        info = win32file.GetFileInformationByHandle(handle)
        handle.Close()
        return info[7]
    except pywintypes.error:
        # fall back to stat if the win32 query fails
        return os.lstat(pathname).st_nlink
166 166
def testpid(pid):
    '''return True if pid is still running or unable to
    determine, False otherwise'''
    try:
        handle = win32api.OpenProcess(
            win32con.PROCESS_QUERY_INFORMATION, False, pid)
        if handle:
            status = win32process.GetExitCodeProcess(handle)
            return status == win32con.STILL_ACTIVE
    except pywintypes.error, details:
        # ERROR_INVALID_PARAMETER means no such pid; any other failure
        # leaves us unable to tell, so report "still running"
        return details[0] != winerror.ERROR_INVALID_PARAMETER
    # OpenProcess returned a false handle without raising: cannot
    # determine, err on the side of "running"
    return True
179 179
def system_rcpath_win32():
    '''return default os-specific hgrc search path'''
    proc = win32api.GetCurrentProcess()
    try:
        # This will fail on windows < NT
        exepath = win32process.GetModuleFileNameEx(proc, 0)
    except:
        exepath = win32api.GetModuleFileName(0)
    inidir = os.path.dirname(exepath)
    return [os.path.join(inidir, 'mercurial.ini')]
189 189
def user_rcpath_win32():
    '''return os-specific hgrc search path to the user dir'''
    home = os.path.expanduser('~')
    if home == '~':
        # expanduser failed (win < nt): use the parent of the APPDATA
        # shell folder as the user home directory instead
        appdir = shell.SHGetPathFromIDList(
            shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA))
        home = os.path.dirname(appdir)
    return os.path.join(home, 'mercurial.ini')
200 200
class posixfile_nt(object):
    '''file object with posix-like semantics. on windows, normal
    files can not be deleted or renamed if they are open. must open
    with win32file.FILE_SHARE_DELETE. this flag does not exist on
    windows < nt, so do not use this class there.'''

    # tried to use win32file._open_osfhandle to pass fd to os.fdopen,
    # but does not work at all. wrap win32 file api instead.

    def __init__(self, name, mode='rb'):
        # translate the posix-style mode string into win32 access and
        # creation disposition flags; '+' adds write access
        access = 0
        if 'r' in mode:
            access |= win32file.GENERIC_READ
        if 'w' in mode or 'a' in mode or '+' in mode:
            access |= win32file.GENERIC_WRITE
        if 'r' in mode:
            creation = win32file.OPEN_EXISTING
        elif 'a' in mode:
            creation = win32file.OPEN_ALWAYS
        else:
            creation = win32file.CREATE_ALWAYS
        try:
            # FILE_SHARE_DELETE is the whole point of this class: it
            # lets other processes rename/delete the file while open
            self.handle = win32file.CreateFile(name,
                                               access,
                                               win32file.FILE_SHARE_READ |
                                               win32file.FILE_SHARE_WRITE |
                                               win32file.FILE_SHARE_DELETE,
                                               None,
                                               creation,
                                               win32file.FILE_ATTRIBUTE_NORMAL,
                                               0)
        except pywintypes.error, err:
            raise WinIOError(err, name)
        self.closed = False
        self.name = name
        self.mode = mode

    def __iter__(self):
        # reads the whole file up front, then yields line by line
        for line in self.read().splitlines(True):
            yield line

    def read(self, count=-1):
        """read count bytes (the whole file if count == -1)"""
        try:
            cs = cStringIO.StringIO()
            # count == -1 keeps requesting 1MB chunks until EOF;
            # otherwise decrement count until satisfied
            while count:
                wincount = int(count)
                if wincount == -1:
                    wincount = 1048576
                val, data = win32file.ReadFile(self.handle, wincount)
                if not data: break
                cs.write(data)
                if count != -1:
                    count -= len(data)
            return cs.getvalue()
        except pywintypes.error, err:
            raise WinIOError(err)

    def write(self, data):
        """write data, appending at EOF when opened in 'a' mode"""
        try:
            if 'a' in self.mode:
                win32file.SetFilePointer(self.handle, 0, win32file.FILE_END)
            nwrit = 0
            # WriteFile may write fewer bytes than asked; loop until
            # everything is out
            while nwrit < len(data):
                val, nwrit = win32file.WriteFile(self.handle, data)
                data = data[nwrit:]
        except pywintypes.error, err:
            raise WinIOError(err)

    def seek(self, pos, whence=0):
        # whence follows the posix convention (0=begin, 1=cur, 2=end),
        # which matches the win32 FILE_BEGIN/FILE_CURRENT/FILE_END values
        try:
            win32file.SetFilePointer(self.handle, int(pos), whence)
        except pywintypes.error, err:
            raise WinIOError(err)

    def tell(self):
        # a zero-byte relative move returns the current offset
        try:
            return win32file.SetFilePointer(self.handle, 0,
                                            win32file.FILE_CURRENT)
        except pywintypes.error, err:
            raise WinIOError(err)

    def close(self):
        # NOTE(review): only drops the reference; presumably the
        # PyHANDLE closes itself on deallocation - confirm
        if not self.closed:
            self.handle = None
            self.closed = True

    def flush(self):
        try:
            win32file.FlushFileBuffers(self.handle)
        except pywintypes.error, err:
            raise WinIOError(err)

    def truncate(self, pos=0):
        # move to pos, then cut the file off at the new pointer
        try:
            win32file.SetFilePointer(self.handle, int(pos),
                                     win32file.FILE_BEGIN)
            win32file.SetEndOfFile(self.handle)
        except pywintypes.error, err:
            raise WinIOError(err)
300 300
# used by util.getuser() when the posix getpass path is unavailable
getuser_fallback = win32api.GetUserName
302 302
def set_signal_handler_win32():
    """Register a termination handler for console events including
    CTRL+C. python signal handlers do not work well with socket
    operations.
    """
    def _console_handler(event):
        # terminate immediately on any console control event
        win32process.ExitProcess(1)
    win32api.SetConsoleCtrlHandler(_console_handler)
@@ -1,66 +1,81 b''
1 1 #!/bin/sh
2 2 # Test basic extension support
3 3
4 4 cat > foobar.py <<EOF
5 5 import os
6 6 from mercurial import commands
7 7
8 8 def uisetup(ui):
9 9 ui.write("uisetup called\\n")
10 10 ui.write("ui.parentui is%s None\\n" % (ui.parentui is not None
11 11 and "not" or ""))
12 12
13 13 def reposetup(ui, repo):
14 14 ui.write("reposetup called for %s\\n" % os.path.basename(repo.root))
15 15 ui.write("ui %s= repo.ui\\n" % (ui == repo.ui and "=" or "!"))
16 16
17 17 def foo(ui, *args, **kwargs):
18 18 ui.write("Foo\\n")
19 19
20 20 def bar(ui, *args, **kwargs):
21 21 ui.write("Bar\\n")
22 22
23 23 cmdtable = {
24 24 "foo": (foo, [], "hg foo"),
25 25 "bar": (bar, [], "hg bar"),
26 26 }
27 27
28 28 commands.norepo += ' bar'
29 29 EOF
30 30 abspath=`pwd`/foobar.py
31 31
32 32 mkdir barfoo
33 33 cp foobar.py barfoo/__init__.py
34 34 barfoopath=`pwd`/barfoo
35 35
36 36 hg init a
37 37 cd a
38 38 echo foo > file
39 39 hg add file
40 40 hg commit -m 'add file'
41 41
42 42 echo '[extensions]' >> $HGRCPATH
43 43 echo "foobar = $abspath" >> $HGRCPATH
44 44 hg foo
45 45
46 46 cd ..
47 47 hg clone a b
48 48
49 49 hg bar
50 50
51 51 echo '% module/__init__.py-style'
52 52 echo '[extensions]' > $HGRCPATH
53 53 echo "barfoo = $barfoopath" >> $HGRCPATH
54 54 cd a
55 55 hg foo
56 56
57 57 cd ..
58 58 cat > empty.py <<EOF
59 59 '''empty cmdtable
60 60 '''
61 61 cmdtable = {}
62 62 EOF
63 63 emptypath=`pwd`/empty.py
64 64 echo '[extensions]' > $HGRCPATH
65 65 echo "empty = $emptypath" >> $HGRCPATH
66 66 hg help empty
67
68 cat > debugextension.py <<EOF
69 '''only debugcommands
70 '''
71 def debugfoobar(ui, repo, *args, **opts):
72 "yet another debug command"
73 pass
74
75 cmdtable = {"debugfoobar": (debugfoobar, (), "hg debugfoobar")}
76 EOF
77 debugpath=`pwd`/debugextension.py
78 echo '[extensions]' > $HGRCPATH
79 echo "debugextension = $debugpath" >> $HGRCPATH
80 hg help debugextension
81 hg --debug help debugextension
@@ -1,24 +1,51 b''
1 1 uisetup called
2 2 ui.parentui isnot None
3 3 reposetup called for a
4 4 ui == repo.ui
5 5 Foo
6 6 uisetup called
7 7 ui.parentui is None
8 8 reposetup called for a
9 9 ui == repo.ui
10 10 reposetup called for b
11 11 ui == repo.ui
12 12 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
13 13 uisetup called
14 14 ui.parentui is None
15 15 Bar
16 16 % module/__init__.py-style
17 17 uisetup called
18 18 ui.parentui isnot None
19 19 reposetup called for a
20 20 ui == repo.ui
21 21 Foo
22 22 empty extension - empty cmdtable
23 23
24 24 no commands defined
25 debugextension extension - only debugcommands
26
27 no commands defined
28 debugextension extension - only debugcommands
29
30 list of commands:
31
32 debugfoobar:
33 yet another debug command
34
35 global options:
36 -R --repository repository root directory or symbolic path name
37 --cwd change working directory
38 -y --noninteractive do not prompt, assume 'yes' for any required answers
39 -q --quiet suppress output
40 -v --verbose enable additional output
41 --config set/override config option
42 --debug enable debugging output
43 --debugger start debugger
44 --encoding set the charset encoding (default: ascii)
45 --encodingmode set the charset encoding mode (default: strict)
46 --lsprof print improved command execution profile
47 --traceback print traceback on exception
48 --time time how long the command takes
49 --profile print command execution profile
50 --version output version information and exit
51 -h --help display help and exit
@@ -1,43 +1,47 b''
1 1 #!/bin/sh
2 2
3 3 hg init
4 4 echo a > a
5 5 hg add a
6 6 hg commit -m "test" -d "1000000 0"
7 7 hg history
8 8 hg tag -d "1000000 0" "bleah"
9 9 hg history
10 10
11 11 echo foo >> .hgtags
12 12 hg tag -d "1000000 0" "bleah2" || echo "failed"
13 13 hg tag -d "1000000 0" -r 0 "bleah2" 1 || echo "failed"
14 14
15 15 hg revert .hgtags
16 16 hg tag -d "1000000 0" -r 0 "bleah0"
17 17 hg tag -l -d "1000000 0" "bleah1" 1
18 18
19 19 cat .hgtags
20 20 cat .hg/localtags
21 21
22 22 hg update 0
23 23 hg tag -d "1000000 0" "foobar"
24 24 cat .hgtags
25 25 cat .hg/localtags
26 26
27 27 hg tag -l 'xx
28 28 newline'
29 29 hg tag -l 'xx:xx'
30 30
31 31 echo % issue 601
32 mv .hg/localtags .hg/ltags
33 head -1 .hg/ltags | tr -d '\n' > .hg/localtags
32 python << EOF
33 f = file('.hg/localtags'); last = f.readlines()[-1][:-1]; f.close()
34 f = file('.hg/localtags', 'w'); f.write(last); f.close()
35 EOF
34 36 cat .hg/localtags
35 37 hg tag -l localnewline
36 38 cat .hg/localtags
37 39
38 mv .hgtags hgtags
39 head -1 hgtags | tr -d '\n' > .hgtags
40 python << EOF
41 f = file('.hgtags'); last = f.readlines()[-1][:-1]; f.close()
42 f = file('.hgtags', 'w'); f.write(last); f.close()
43 EOF
40 44 hg ci -d '1000000 0' -m'broken manual edit of .hgtags'
41 45 cat .hgtags
42 46 hg tag -d '1000000 0' newline
43 47 cat .hgtags
General Comments 0
You need to be logged in to leave comments. Login now