##// END OF EJS Templates
i18n part2: use '_' for all strings that are part of the user interface
Benoit Boissinot -
r1402:9d2c2e6b default
parent child Browse files
Show More
@@ -1,57 +1,57 b''
1 1 # changelog.py - changelog class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from revlog import *
9 9 from i18n import gettext as _
10 10 from demandload import demandload
11 11 demandload(globals(), "os time util")
12 12
13 13 class changelog(revlog):
14 14 def __init__(self, opener):
15 15 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
16 16
17 17 def extract(self, text):
18 18 if not text:
19 19 return (nullid, "", (0, 0), [], "")
20 20 last = text.index("\n\n")
21 21 desc = text[last + 2:]
22 22 l = text[:last].splitlines()
23 23 manifest = bin(l[0])
24 24 user = l[1]
25 25 date = l[2].split(' ')
26 26 time = float(date.pop(0))
27 27 try:
28 28 # various tools did silly things with the time zone field.
29 29 timezone = int(date[0])
30 30 except:
31 31 timezone = 0
32 32 files = l[3:]
33 33 return (manifest, user, (time, timezone), files, desc)
34 34
35 35 def read(self, node):
36 36 return self.extract(self.revision(node))
37 37
38 38 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
39 39 user=None, date=None):
40 40 if date:
41 41 # validate explicit (probably user-specified) date and
42 42 # time zone offset. values must fit in signed 32 bits for
43 43 # current 32-bit linux runtimes.
44 44 try:
45 45 when, offset = map(int, date.split(' '))
46 46 except ValueError:
47 raise ValueError('invalid date: %r' % date)
47 raise ValueError(_('invalid date: %r') % date)
48 48 if abs(when) > 0x7fffffff:
49 raise ValueError('date exceeds 32 bits: %d' % when)
49 raise ValueError(_('date exceeds 32 bits: %d') % when)
50 50 if abs(offset) >= 43200:
51 raise ValueError('impossible time zone offset: %d' % offset)
51 raise ValueError(_('impossible time zone offset: %d') % offset)
52 52 else:
53 53 date = "%d %d" % util.makedate()
54 54 list.sort()
55 55 l = [hex(manifest), user, date] + list + ["", desc]
56 56 text = "\n".join(l)
57 57 return self.addrevision(text, transaction, self.count(), p1, p2)
This diff has been collapsed as it changes many lines, (614 lines changed) Show them Hide them
@@ -1,2242 +1,2242 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 14 demandload(globals(), "errno socket version struct atexit sets bz2")
15 15
16 16 class UnknownCommand(Exception):
17 17 """Exception raised if command is not in the command table."""
18 18
19 19 def filterfiles(filters, files):
20 20 l = [x for x in files if x in filters]
21 21
22 22 for t in filters:
23 23 if t and t[-1] != "/":
24 24 t += "/"
25 25 l += [x for x in files if x.startswith(t)]
26 26 return l
27 27
28 28 def relpath(repo, args):
29 29 cwd = repo.getcwd()
30 30 if cwd:
31 31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 32 return args
33 33
34 34 def matchpats(repo, cwd, pats=[], opts={}, head=''):
35 35 return util.matcher(repo.root, cwd, pats or ['.'], opts.get('include'),
36 36 opts.get('exclude'), head)
37 37
38 38 def makewalk(repo, pats, opts, head=''):
39 39 cwd = repo.getcwd()
40 40 files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
41 41 exact = dict(zip(files, files))
42 42 def walk():
43 43 for src, fn in repo.walk(files=files, match=matchfn):
44 44 yield src, fn, util.pathto(cwd, fn), fn in exact
45 45 return files, matchfn, walk()
46 46
47 47 def walk(repo, pats, opts, head=''):
48 48 files, matchfn, results = makewalk(repo, pats, opts, head)
49 49 for r in results:
50 50 yield r
51 51
52 52 def walkchangerevs(ui, repo, cwd, pats, opts):
53 53 '''Iterate over files and the revs they changed in.
54 54
55 55 Callers most commonly need to iterate backwards over the history
56 56 it is interested in. Doing so has awful (quadratic-looking)
57 57 performance, so we use iterators in a "windowed" way.
58 58
59 59 We walk a window of revisions in the desired order. Within the
60 60 window, we first walk forwards to gather data, then in the desired
61 61 order (usually backwards) to display it.
62 62
63 63 This function returns an (iterator, getchange) pair. The
64 64 getchange function returns the changelog entry for a numeric
65 65 revision. The iterator yields 3-tuples. They will be of one of
66 66 the following forms:
67 67
68 68 "window", incrementing, lastrev: stepping through a window,
69 69 positive if walking forwards through revs, last rev in the
70 70 sequence iterated over - use to reset state for the current window
71 71
72 72 "add", rev, fns: out-of-order traversal of the given file names
73 73 fns, which changed during revision rev - use to gather data for
74 74 possible display
75 75
76 76 "iter", rev, None: in-order traversal of the revs earlier iterated
77 77 over with "add" - use to display data'''
78 78
79 79 if repo.changelog.count() == 0:
80 80 return [], False
81 81
82 82 cwd = repo.getcwd()
83 83 if not pats and cwd:
84 84 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
85 85 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
86 86 files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
87 87 pats, opts)
88 88 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
89 89 wanted = {}
90 90 slowpath = anypats
91 91 window = 300
92 92 fncache = {}
93 93
94 94 chcache = {}
95 95 def getchange(rev):
96 96 ch = chcache.get(rev)
97 97 if ch is None:
98 98 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
99 99 return ch
100 100
101 101 if not slowpath and not files:
102 102 # No files, no patterns. Display all revs.
103 103 wanted = dict(zip(revs, revs))
104 104 if not slowpath:
105 105 # Only files, no patterns. Check the history of each file.
106 106 def filerevgen(filelog):
107 107 for i in xrange(filelog.count() - 1, -1, -window):
108 108 revs = []
109 109 for j in xrange(max(0, i - window), i + 1):
110 110 revs.append(filelog.linkrev(filelog.node(j)))
111 111 revs.reverse()
112 112 for rev in revs:
113 113 yield rev
114 114
115 115 minrev, maxrev = min(revs), max(revs)
116 116 for file in files:
117 117 filelog = repo.file(file)
118 118 # A zero count may be a directory or deleted file, so
119 119 # try to find matching entries on the slow path.
120 120 if filelog.count() == 0:
121 121 slowpath = True
122 122 break
123 123 for rev in filerevgen(filelog):
124 124 if rev <= maxrev:
125 125 if rev < minrev:
126 126 break
127 127 fncache.setdefault(rev, [])
128 128 fncache[rev].append(file)
129 129 wanted[rev] = 1
130 130 if slowpath:
131 131 # The slow path checks files modified in every changeset.
132 132 def changerevgen():
133 133 for i in xrange(repo.changelog.count() - 1, -1, -window):
134 134 for j in xrange(max(0, i - window), i + 1):
135 135 yield j, getchange(j)[3]
136 136
137 137 for rev, changefiles in changerevgen():
138 138 matches = filter(matchfn, changefiles)
139 139 if matches:
140 140 fncache[rev] = matches
141 141 wanted[rev] = 1
142 142
143 143 def iterate():
144 144 for i in xrange(0, len(revs), window):
145 145 yield 'window', revs[0] < revs[-1], revs[-1]
146 146 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
147 147 if rev in wanted]
148 148 srevs = list(nrevs)
149 149 srevs.sort()
150 150 for rev in srevs:
151 151 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
152 152 yield 'add', rev, fns
153 153 for rev in nrevs:
154 154 yield 'iter', rev, None
155 155 return iterate(), getchange
156 156
157 157 revrangesep = ':'
158 158
159 159 def revrange(ui, repo, revs, revlog=None):
160 160 """Yield revision as strings from a list of revision specifications."""
161 161 if revlog is None:
162 162 revlog = repo.changelog
163 163 revcount = revlog.count()
164 164 def fix(val, defval):
165 165 if not val:
166 166 return defval
167 167 try:
168 168 num = int(val)
169 169 if str(num) != val:
170 170 raise ValueError
171 171 if num < 0: num += revcount
172 172 if num < 0: num = 0
173 173 elif num >= revcount:
174 174 raise ValueError
175 175 except ValueError:
176 176 try:
177 177 num = repo.changelog.rev(repo.lookup(val))
178 178 except KeyError:
179 179 try:
180 180 num = revlog.rev(revlog.lookup(val))
181 181 except KeyError:
182 raise util.Abort('invalid revision identifier %s', val)
182 raise util.Abort(_('invalid revision identifier %s'), val)
183 183 return num
184 184 seen = {}
185 185 for spec in revs:
186 186 if spec.find(revrangesep) >= 0:
187 187 start, end = spec.split(revrangesep, 1)
188 188 start = fix(start, 0)
189 189 end = fix(end, revcount - 1)
190 190 step = start > end and -1 or 1
191 191 for rev in xrange(start, end+step, step):
192 192 if rev in seen: continue
193 193 seen[rev] = 1
194 194 yield str(rev)
195 195 else:
196 196 rev = fix(spec, None)
197 197 if rev in seen: continue
198 198 seen[rev] = 1
199 199 yield str(rev)
200 200
201 201 def make_filename(repo, r, pat, node=None,
202 202 total=None, seqno=None, revwidth=None, pathname=None):
203 203 node_expander = {
204 204 'H': lambda: hex(node),
205 205 'R': lambda: str(r.rev(node)),
206 206 'h': lambda: short(node),
207 207 }
208 208 expander = {
209 209 '%': lambda: '%',
210 210 'b': lambda: os.path.basename(repo.root),
211 211 }
212 212
213 213 try:
214 214 if node:
215 215 expander.update(node_expander)
216 216 if node and revwidth is not None:
217 217 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
218 218 if total is not None:
219 219 expander['N'] = lambda: str(total)
220 220 if seqno is not None:
221 221 expander['n'] = lambda: str(seqno)
222 222 if total is not None and seqno is not None:
223 223 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
224 224 if pathname is not None:
225 225 expander['s'] = lambda: os.path.basename(pathname)
226 226 expander['d'] = lambda: os.path.dirname(pathname) or '.'
227 227 expander['p'] = lambda: pathname
228 228
229 229 newname = []
230 230 patlen = len(pat)
231 231 i = 0
232 232 while i < patlen:
233 233 c = pat[i]
234 234 if c == '%':
235 235 i += 1
236 236 c = pat[i]
237 237 c = expander[c]()
238 238 newname.append(c)
239 239 i += 1
240 240 return ''.join(newname)
241 241 except KeyError, inst:
242 raise util.Abort("invalid format spec '%%%s' in output file name",
242 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
243 243 inst.args[0])
244 244
245 245 def make_file(repo, r, pat, node=None,
246 246 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
247 247 if not pat or pat == '-':
248 248 return 'w' in mode and sys.stdout or sys.stdin
249 249 if hasattr(pat, 'write') and 'w' in mode:
250 250 return pat
251 251 if hasattr(pat, 'read') and 'r' in mode:
252 252 return pat
253 253 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
254 254 pathname),
255 255 mode)
256 256
257 257 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
258 258 changes=None, text=False):
259 259 if not changes:
260 260 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
261 261 else:
262 262 (c, a, d, u) = changes
263 263 if files:
264 264 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
265 265
266 266 if not c and not a and not d:
267 267 return
268 268
269 269 if node2:
270 270 change = repo.changelog.read(node2)
271 271 mmap2 = repo.manifest.read(change[0])
272 272 date2 = util.datestr(change[2])
273 273 def read(f):
274 274 return repo.file(f).read(mmap2[f])
275 275 else:
276 276 date2 = util.datestr()
277 277 if not node1:
278 278 node1 = repo.dirstate.parents()[0]
279 279 def read(f):
280 280 return repo.wfile(f).read()
281 281
282 282 if ui.quiet:
283 283 r = None
284 284 else:
285 285 hexfunc = ui.verbose and hex or short
286 286 r = [hexfunc(node) for node in [node1, node2] if node]
287 287
288 288 change = repo.changelog.read(node1)
289 289 mmap = repo.manifest.read(change[0])
290 290 date1 = util.datestr(change[2])
291 291
292 292 for f in c:
293 293 to = None
294 294 if f in mmap:
295 295 to = repo.file(f).read(mmap[f])
296 296 tn = read(f)
297 297 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
298 298 for f in a:
299 299 to = None
300 300 tn = read(f)
301 301 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
302 302 for f in d:
303 303 to = repo.file(f).read(mmap[f])
304 304 tn = None
305 305 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
306 306
307 307 def trimuser(ui, name, rev, revcache):
308 308 """trim the name of the user who committed a change"""
309 309 user = revcache.get(rev)
310 310 if user is None:
311 311 user = revcache[rev] = ui.shortuser(name)
312 312 return user
313 313
314 314 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
315 315 """show a single changeset or file revision"""
316 316 log = repo.changelog
317 317 if changenode is None:
318 318 changenode = log.node(rev)
319 319 elif not rev:
320 320 rev = log.rev(changenode)
321 321
322 322 if ui.quiet:
323 323 ui.write("%d:%s\n" % (rev, short(changenode)))
324 324 return
325 325
326 326 changes = log.read(changenode)
327 327 date = util.datestr(changes[2])
328 328
329 329 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
330 330 for p in log.parents(changenode)
331 331 if ui.debugflag or p != nullid]
332 332 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
333 333 parents = []
334 334
335 335 if ui.verbose:
336 ui.write("changeset: %d:%s\n" % (rev, hex(changenode)))
336 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
337 337 else:
338 ui.write("changeset: %d:%s\n" % (rev, short(changenode)))
338 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
339 339
340 340 for tag in repo.nodetags(changenode):
341 ui.status("tag: %s\n" % tag)
341 ui.status(_("tag: %s\n") % tag)
342 342 for parent in parents:
343 ui.write("parent: %d:%s\n" % parent)
343 ui.write(_("parent: %d:%s\n") % parent)
344 344
345 345 if brinfo and changenode in brinfo:
346 346 br = brinfo[changenode]
347 ui.write("branch: %s\n" % " ".join(br))
347 ui.write(_("branch: %s\n") % " ".join(br))
348 348
349 ui.debug("manifest: %d:%s\n" % (repo.manifest.rev(changes[0]),
349 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
350 350 hex(changes[0])))
351 ui.status("user: %s\n" % changes[1])
352 ui.status("date: %s\n" % date)
351 ui.status(_("user: %s\n") % changes[1])
352 ui.status(_("date: %s\n") % date)
353 353
354 354 if ui.debugflag:
355 355 files = repo.changes(log.parents(changenode)[0], changenode)
356 for key, value in zip(["files:", "files+:", "files-:"], files):
356 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
357 357 if value:
358 358 ui.note("%-12s %s\n" % (key, " ".join(value)))
359 359 else:
360 ui.note("files: %s\n" % " ".join(changes[3]))
360 ui.note(_("files: %s\n") % " ".join(changes[3]))
361 361
362 362 description = changes[4].strip()
363 363 if description:
364 364 if ui.verbose:
365 ui.status("description:\n")
365 ui.status(_("description:\n"))
366 366 ui.status(description)
367 367 ui.status("\n\n")
368 368 else:
369 ui.status("summary: %s\n" % description.splitlines()[0])
369 ui.status(_("summary: %s\n") % description.splitlines()[0])
370 370 ui.status("\n")
371 371
372 372 def show_version(ui):
373 373 """output version and copyright information"""
374 ui.write("Mercurial Distributed SCM (version %s)\n"
374 ui.write(_("Mercurial Distributed SCM (version %s)\n")
375 375 % version.get_version())
376 ui.status(
376 ui.status(_(
377 377 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
378 378 "This is free software; see the source for copying conditions. "
379 379 "There is NO\nwarranty; "
380 380 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
381 )
381 ))
382 382
383 383 def help_(ui, cmd=None, with_version=False):
384 384 """show help for a given command or all commands"""
385 385 option_lists = []
386 386 if cmd and cmd != 'shortlist':
387 387 if with_version:
388 388 show_version(ui)
389 389 ui.write('\n')
390 390 key, i = find(cmd)
391 391 # synopsis
392 392 ui.write("%s\n\n" % i[2])
393 393
394 394 # description
395 395 doc = i[0].__doc__
396 396 if ui.quiet:
397 397 doc = doc.splitlines(0)[0]
398 398 ui.write("%s\n" % doc.rstrip())
399 399
400 400 if not ui.quiet:
401 401 # aliases
402 402 aliases = ', '.join(key.split('|')[1:])
403 403 if aliases:
404 ui.write("\naliases: %s\n" % aliases)
404 ui.write(_("\naliases: %s\n") % aliases)
405 405
406 406 # options
407 407 if i[1]:
408 408 option_lists.append(("options", i[1]))
409 409
410 410 else:
411 411 # program name
412 412 if ui.verbose or with_version:
413 413 show_version(ui)
414 414 else:
415 ui.status("Mercurial Distributed SCM\n")
415 ui.status(_("Mercurial Distributed SCM\n"))
416 416 ui.status('\n')
417 417
418 418 # list of commands
419 419 if cmd == "shortlist":
420 ui.status('basic commands (use "hg help" '
421 'for the full list or option "-v" for details):\n\n')
420 ui.status(_('basic commands (use "hg help" '
421 'for the full list or option "-v" for details):\n\n'))
422 422 elif ui.verbose:
423 ui.status('list of commands:\n\n')
423 ui.status(_('list of commands:\n\n'))
424 424 else:
425 ui.status('list of commands (use "hg help -v" '
426 'to show aliases and global options):\n\n')
425 ui.status(_('list of commands (use "hg help -v" '
426 'to show aliases and global options):\n\n'))
427 427
428 428 h = {}
429 429 cmds = {}
430 430 for c, e in table.items():
431 431 f = c.split("|")[0]
432 432 if cmd == "shortlist" and not f.startswith("^"):
433 433 continue
434 434 f = f.lstrip("^")
435 435 if not ui.debugflag and f.startswith("debug"):
436 436 continue
437 437 d = ""
438 438 if e[0].__doc__:
439 439 d = e[0].__doc__.splitlines(0)[0].rstrip()
440 440 h[f] = d
441 441 cmds[f]=c.lstrip("^")
442 442
443 443 fns = h.keys()
444 444 fns.sort()
445 445 m = max(map(len, fns))
446 446 for f in fns:
447 447 if ui.verbose:
448 448 commands = cmds[f].replace("|",", ")
449 449 ui.write(" %s:\n %s\n"%(commands,h[f]))
450 450 else:
451 451 ui.write(' %-*s %s\n' % (m, f, h[f]))
452 452
453 453 # global options
454 454 if ui.verbose:
455 455 option_lists.append(("global options", globalopts))
456 456
457 457 # list all option lists
458 458 opt_output = []
459 459 for title, options in option_lists:
460 460 opt_output.append(("\n%s:\n" % title, None))
461 461 for shortopt, longopt, default, desc in options:
462 462 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
463 463 longopt and " --%s" % longopt),
464 464 "%s%s" % (desc,
465 default and " (default: %s)" % default
465 default and _(" (default: %s)") % default
466 466 or "")))
467 467
468 468 if opt_output:
469 469 opts_len = max([len(line[0]) for line in opt_output if line[1]])
470 470 for first, second in opt_output:
471 471 if second:
472 472 ui.write(" %-*s %s\n" % (opts_len, first, second))
473 473 else:
474 474 ui.write("%s\n" % first)
475 475
476 476 # Commands start here, listed alphabetically
477 477
478 478 def add(ui, repo, *pats, **opts):
479 479 '''add the specified files on the next commit'''
480 480 names = []
481 481 for src, abs, rel, exact in walk(repo, pats, opts):
482 482 if exact:
483 if ui.verbose: ui.status('adding %s\n' % rel)
483 if ui.verbose: ui.status(_('adding %s\n') % rel)
484 484 names.append(abs)
485 485 elif repo.dirstate.state(abs) == '?':
486 ui.status('adding %s\n' % rel)
486 ui.status(_('adding %s\n') % rel)
487 487 names.append(abs)
488 488 repo.add(names)
489 489
490 490 def addremove(ui, repo, *pats, **opts):
491 491 """add all new files, delete all missing files"""
492 492 add, remove = [], []
493 493 for src, abs, rel, exact in walk(repo, pats, opts):
494 494 if src == 'f' and repo.dirstate.state(abs) == '?':
495 495 add.append(abs)
496 496 if ui.verbose or not exact:
497 ui.status('adding ', rel, '\n')
497 ui.status(_('adding %s\n') % rel)
498 498 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
499 499 remove.append(abs)
500 500 if ui.verbose or not exact:
501 ui.status('removing ', rel, '\n')
501 ui.status(_('removing %s\n') % rel)
502 502 repo.add(add)
503 503 repo.remove(remove)
504 504
505 505 def annotate(ui, repo, *pats, **opts):
506 506 """show changeset information per file line"""
507 507 def getnode(rev):
508 508 return short(repo.changelog.node(rev))
509 509
510 510 ucache = {}
511 511 def getname(rev):
512 512 cl = repo.changelog.read(repo.changelog.node(rev))
513 513 return trimuser(ui, cl[1], rev, ucache)
514 514
515 515 if not pats:
516 raise util.Abort('at least one file name or pattern required')
516 raise util.Abort(_('at least one file name or pattern required'))
517 517
518 518 opmap = [['user', getname], ['number', str], ['changeset', getnode]]
519 519 if not opts['user'] and not opts['changeset']:
520 520 opts['number'] = 1
521 521
522 522 if opts['rev']:
523 523 node = repo.changelog.lookup(opts['rev'])
524 524 else:
525 525 node = repo.dirstate.parents()[0]
526 526 change = repo.changelog.read(node)
527 527 mmap = repo.manifest.read(change[0])
528 528
529 529 for src, abs, rel, exact in walk(repo, pats, opts):
530 530 if abs not in mmap:
531 ui.warn("warning: %s is not in the repository!\n" % rel)
531 ui.warn(_("warning: %s is not in the repository!\n") % rel)
532 532 continue
533 533
534 534 f = repo.file(abs)
535 535 if not opts['text'] and util.binary(f.read(mmap[abs])):
536 ui.write("%s: binary file\n" % rel)
536 ui.write(_("%s: binary file\n") % rel)
537 537 continue
538 538
539 539 lines = f.annotate(mmap[abs])
540 540 pieces = []
541 541
542 542 for o, f in opmap:
543 543 if opts[o]:
544 544 l = [f(n) for n, dummy in lines]
545 545 if l:
546 546 m = max(map(len, l))
547 547 pieces.append(["%*s" % (m, x) for x in l])
548 548
549 549 if pieces:
550 550 for p, l in zip(zip(*pieces), lines):
551 551 ui.write("%s: %s" % (" ".join(p), l[1]))
552 552
553 553 def bundle(ui, repo, fname, dest="default-push", **opts):
554 554 """create a changegroup file"""
555 555 f = open(fname, "wb")
556 556 dest = ui.expandpath(dest)
557 557 other = hg.repository(ui, dest)
558 558 o = repo.findoutgoing(other)
559 559 cg = repo.changegroup(o)
560 560
561 561 try:
562 562 f.write("HG10")
563 563 z = bz2.BZ2Compressor(9)
564 564 while 1:
565 565 chunk = cg.read(4096)
566 566 if not chunk:
567 567 break
568 568 f.write(z.compress(chunk))
569 569 f.write(z.flush())
570 570 except:
571 571 os.unlink(fname)
572 572 raise
573 573
574 574 def cat(ui, repo, file1, *pats, **opts):
575 575 """output the latest or given revisions of files"""
576 576 mf = {}
577 577 if opts['rev']:
578 578 change = repo.changelog.read(repo.lookup(opts['rev']))
579 579 mf = repo.manifest.read(change[0])
580 580 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
581 581 r = repo.file(abs)
582 582 if opts['rev']:
583 583 try:
584 584 n = mf[abs]
585 585 except (hg.RepoError, KeyError):
586 586 try:
587 587 n = r.lookup(rev)
588 588 except KeyError, inst:
589 raise util.Abort('cannot find file %s in rev %s', rel, rev)
589 raise util.Abort(_('cannot find file %s in rev %s'), rel, rev)
590 590 else:
591 591 n = r.tip()
592 592 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
593 593 fp.write(r.read(n))
594 594
595 595 def clone(ui, source, dest=None, **opts):
596 596 """make a copy of an existing repository"""
597 597 if dest is None:
598 598 dest = os.path.basename(os.path.normpath(source))
599 599
600 600 if os.path.exists(dest):
601 raise util.Abort("destination '%s' already exists", dest)
601 raise util.Abort(_("destination '%s' already exists"), dest)
602 602
603 603 dest = os.path.realpath(dest)
604 604
605 605 class Dircleanup:
606 606 def __init__(self, dir_):
607 607 self.rmtree = shutil.rmtree
608 608 self.dir_ = dir_
609 609 os.mkdir(dir_)
610 610 def close(self):
611 611 self.dir_ = None
612 612 def __del__(self):
613 613 if self.dir_:
614 614 self.rmtree(self.dir_, True)
615 615
616 616 if opts['ssh']:
617 617 ui.setconfig("ui", "ssh", opts['ssh'])
618 618 if opts['remotecmd']:
619 619 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
620 620
621 621 if not os.path.exists(source):
622 622 source = ui.expandpath(source)
623 623
624 624 d = Dircleanup(dest)
625 625 abspath = source
626 626 other = hg.repository(ui, source)
627 627
628 628 copy = False
629 629 if other.dev() != -1:
630 630 abspath = os.path.abspath(source)
631 631 if not opts['pull']:
632 632 copy = True
633 633
634 634 if copy:
635 635 try:
636 636 # we use a lock here because if we race with commit, we
637 637 # can end up with extra data in the cloned revlogs that's
638 638 # not pointed to by changesets, thus causing verify to
639 639 # fail
640 640 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
641 641 except OSError:
642 642 copy = False
643 643
644 644 if copy:
645 645 # we lock here to avoid premature writing to the target
646 646 os.mkdir(os.path.join(dest, ".hg"))
647 647 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
648 648
649 649 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
650 650 for f in files.split():
651 651 src = os.path.join(source, ".hg", f)
652 652 dst = os.path.join(dest, ".hg", f)
653 653 util.copyfiles(src, dst)
654 654
655 655 repo = hg.repository(ui, dest)
656 656
657 657 else:
658 658 repo = hg.repository(ui, dest, create=1)
659 659 repo.pull(other)
660 660
661 661 f = repo.opener("hgrc", "w", text=True)
662 662 f.write("[paths]\n")
663 663 f.write("default = %s\n" % abspath)
664 664
665 665 if not opts['noupdate']:
666 666 update(ui, repo)
667 667
668 668 d.close()
669 669
670 670 def commit(ui, repo, *pats, **opts):
671 671 """commit the specified files or all outstanding changes"""
672 672 if opts['text']:
673 ui.warn("Warning: -t and --text is deprecated,"
674 " please use -m or --message instead.\n")
673 ui.warn(_("Warning: -t and --text is deprecated,"
674 " please use -m or --message instead.\n"))
675 675 message = opts['message'] or opts['text']
676 676 logfile = opts['logfile']
677 677
678 678 if message and logfile:
679 raise util.Abort('options --message and --logfile are mutually '
680 'exclusive')
679 raise util.Abort(_('options --message and --logfile are mutually '
680 'exclusive'))
681 681 if not message and logfile:
682 682 try:
683 683 if logfile == '-':
684 684 message = sys.stdin.read()
685 685 else:
686 686 message = open(logfile).read()
687 687 except IOError, inst:
688 raise util.Abort("can't read commit message '%s': %s" %
688 raise util.Abort(_("can't read commit message '%s': %s") %
689 689 (logfile, inst.strerror))
690 690
691 691 if opts['addremove']:
692 692 addremove(ui, repo, *pats, **opts)
693 693 cwd = repo.getcwd()
694 694 if not pats and cwd:
695 695 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
696 696 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
697 697 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
698 698 pats, opts)
699 699 if pats:
700 700 c, a, d, u = repo.changes(files=fns, match=match)
701 701 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
702 702 else:
703 703 files = []
704 704 try:
705 705 repo.commit(files, message, opts['user'], opts['date'], match)
706 706 except ValueError, inst:
707 707 raise util.Abort(str(inst))
708 708
709 709 def docopy(ui, repo, pats, opts):
710 710 if not pats:
711 raise util.Abort('no source or destination specified')
711 raise util.Abort(_('no source or destination specified'))
712 712 elif len(pats) == 1:
713 raise util.Abort('no destination specified')
713 raise util.Abort(_('no destination specified'))
714 714 pats = list(pats)
715 715 dest = pats.pop()
716 716 sources = []
717 717
718 718 def okaytocopy(abs, rel, exact):
719 reasons = {'?': 'is not managed',
720 'a': 'has been marked for add'}
719 reasons = {'?': _('is not managed'),
720 'a': _('has been marked for add')}
721 721 reason = reasons.get(repo.dirstate.state(abs))
722 722 if reason:
723 if exact: ui.warn('%s: not copying - file %s\n' % (rel, reason))
723 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
724 724 else:
725 725 return True
726 726
727 727 for src, abs, rel, exact in walk(repo, pats, opts):
728 728 if okaytocopy(abs, rel, exact):
729 729 sources.append((abs, rel, exact))
730 730 if not sources:
731 raise util.Abort('no files to copy')
731 raise util.Abort(_('no files to copy'))
732 732
733 733 cwd = repo.getcwd()
734 734 absdest = util.canonpath(repo.root, cwd, dest)
735 735 reldest = util.pathto(cwd, absdest)
736 736 if os.path.exists(reldest):
737 737 destisfile = not os.path.isdir(reldest)
738 738 else:
739 739 destisfile = len(sources) == 1 or repo.dirstate.state(absdest) != '?'
740 740
741 741 if destisfile:
742 742 if opts['parents']:
743 raise util.Abort('with --parents, destination must be a directory')
743 raise util.Abort(_('with --parents, destination must be a directory'))
744 744 elif len(sources) > 1:
745 raise util.Abort('with multiple sources, destination must be a '
746 'directory')
745 raise util.Abort(_('with multiple sources, destination must be a '
746 'directory'))
747 747 errs, copied = 0, []
748 748 for abs, rel, exact in sources:
749 749 if opts['parents']:
750 750 mydest = os.path.join(dest, rel)
751 751 elif destisfile:
752 752 mydest = reldest
753 753 else:
754 754 mydest = os.path.join(dest, os.path.basename(rel))
755 755 myabsdest = util.canonpath(repo.root, cwd, mydest)
756 756 myreldest = util.pathto(cwd, myabsdest)
757 757 if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?':
758 ui.warn('%s: not overwriting - file already managed\n' % myreldest)
758 ui.warn(_('%s: not overwriting - file already managed\n') % myreldest)
759 759 continue
760 760 mydestdir = os.path.dirname(myreldest) or '.'
761 761 if not opts['after']:
762 762 try:
763 763 if opts['parents']: os.makedirs(mydestdir)
764 764 elif not destisfile: os.mkdir(mydestdir)
765 765 except OSError, inst:
766 766 if inst.errno != errno.EEXIST: raise
767 767 if ui.verbose or not exact:
768 ui.status('copying %s to %s\n' % (rel, myreldest))
768 ui.status(_('copying %s to %s\n') % (rel, myreldest))
769 769 if not opts['after']:
770 770 try:
771 771 shutil.copyfile(rel, myreldest)
772 772 shutil.copymode(rel, myreldest)
773 773 except shutil.Error, inst:
774 774 raise util.Abort(str(inst))
775 775 except IOError, inst:
776 776 if inst.errno == errno.ENOENT:
777 ui.warn('%s: deleted in working copy\n' % rel)
777 ui.warn(_('%s: deleted in working copy\n') % rel)
778 778 else:
779 ui.warn('%s: cannot copy - %s\n' % (rel, inst.strerror))
779 ui.warn(_('%s: cannot copy - %s\n') % (rel, inst.strerror))
780 780 errs += 1
781 781 continue
782 782 repo.copy(abs, myabsdest)
783 783 copied.append((abs, rel, exact))
784 784 if errs:
785 ui.warn('(consider using --after)\n')
785 ui.warn(_('(consider using --after)\n'))
786 786 return errs, copied
787 787
788 788 def copy(ui, repo, *pats, **opts):
789 789 """mark files as copied for the next commit"""
790 790 errs, copied = docopy(ui, repo, pats, opts)
791 791 return errs
792 792
793 793 def debugancestor(ui, index, rev1, rev2):
794 794 """find the ancestor revision of two revisions in a given index"""
795 795 r = revlog.revlog(file, index, "")
796 796 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
797 797 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
798 798
799 799 def debugcheckstate(ui, repo):
800 800 """validate the correctness of the current dirstate"""
801 801 parent1, parent2 = repo.dirstate.parents()
802 802 repo.dirstate.read()
803 803 dc = repo.dirstate.map
804 804 keys = dc.keys()
805 805 keys.sort()
806 806 m1n = repo.changelog.read(parent1)[0]
807 807 m2n = repo.changelog.read(parent2)[0]
808 808 m1 = repo.manifest.read(m1n)
809 809 m2 = repo.manifest.read(m2n)
810 810 errors = 0
811 811 for f in dc:
812 812 state = repo.dirstate.state(f)
813 813 if state in "nr" and f not in m1:
814 ui.warn("%s in state %s, but not in manifest1\n" % (f, state))
814 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
815 815 errors += 1
816 816 if state in "a" and f in m1:
817 ui.warn("%s in state %s, but also in manifest1\n" % (f, state))
817 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
818 818 errors += 1
819 819 if state in "m" and f not in m1 and f not in m2:
820 ui.warn("%s in state %s, but not in either manifest\n" %
820 ui.warn(_("%s in state %s, but not in either manifest\n") %
821 821 (f, state))
822 822 errors += 1
823 823 for f in m1:
824 824 state = repo.dirstate.state(f)
825 825 if state not in "nrm":
826 ui.warn("%s in manifest1, but listed as state %s" % (f, state))
826 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
827 827 errors += 1
828 828 if errors:
829 raise util.Abort(".hg/dirstate inconsistent with current parent's manifest")
829 raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
830 830
831 831 def debugconfig(ui):
832 832 """show combined config settings from all hgrc files"""
833 833 try:
834 834 repo = hg.repository(ui)
835 835 except hg.RepoError:
836 836 pass
837 837 for section, name, value in ui.walkconfig():
838 838 ui.write('%s.%s=%s\n' % (section, name, value))
839 839
840 840 def debugsetparents(ui, repo, rev1, rev2=None):
841 841 """
842 842 manually set the parents of the current working directory
843 843
844 844 This is useful for writing repository conversion tools, but should
845 845 be used with care.
846 846 """
847 847
848 848 if not rev2:
849 849 rev2 = hex(nullid)
850 850
851 851 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
852 852
853 853 def debugstate(ui, repo):
854 854 """show the contents of the current dirstate"""
855 855 repo.dirstate.read()
856 856 dc = repo.dirstate.map
857 857 keys = dc.keys()
858 858 keys.sort()
859 859 for file_ in keys:
860 860 ui.write("%c %3o %10d %s %s\n"
861 861 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
862 862 time.strftime("%x %X",
863 863 time.localtime(dc[file_][3])), file_))
864 864 for f in repo.dirstate.copies:
865 ui.write("copy: %s -> %s\n" % (repo.dirstate.copies[f], f))
865 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
866 866
867 867 def debugdata(ui, file_, rev):
868 868 """dump the contents of an data file revision"""
869 869 r = revlog.revlog(file, file_[:-2] + ".i", file_)
870 870 try:
871 871 ui.write(r.revision(r.lookup(rev)))
872 872 except KeyError:
873 raise util.Abort('invalid revision identifier %s', rev)
873 raise util.Abort(_('invalid revision identifier %s'), rev)
874 874
875 875 def debugindex(ui, file_):
876 876 """dump the contents of an index file"""
877 877 r = revlog.revlog(file, file_, "")
878 878 ui.write(" rev offset length base linkrev" +
879 879 " nodeid p1 p2\n")
880 880 for i in range(r.count()):
881 881 e = r.index[i]
882 882 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
883 883 i, e[0], e[1], e[2], e[3],
884 884 short(e[6]), short(e[4]), short(e[5])))
885 885
886 886 def debugindexdot(ui, file_):
887 887 """dump an index DAG as a .dot file"""
888 888 r = revlog.revlog(file, file_, "")
889 889 ui.write("digraph G {\n")
890 890 for i in range(r.count()):
891 891 e = r.index[i]
892 892 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
893 893 if e[5] != nullid:
894 894 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
895 895 ui.write("}\n")
896 896
897 897 def debugrename(ui, repo, file, rev=None):
898 898 """dump rename information"""
899 899 r = repo.file(relpath(repo, [file])[0])
900 900 if rev:
901 901 try:
902 902 # assume all revision numbers are for changesets
903 903 n = repo.lookup(rev)
904 904 change = repo.changelog.read(n)
905 905 m = repo.manifest.read(change[0])
906 906 n = m[relpath(repo, [file])[0]]
907 907 except hg.RepoError, KeyError:
908 908 n = r.lookup(rev)
909 909 else:
910 910 n = r.tip()
911 911 m = r.renamed(n)
912 912 if m:
913 ui.write("renamed from %s:%s\n" % (m[0], hex(m[1])))
913 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
914 914 else:
915 ui.write("not renamed\n")
915 ui.write(_("not renamed\n"))
916 916
917 917 def debugwalk(ui, repo, *pats, **opts):
918 918 """show how files match on given patterns"""
919 919 items = list(walk(repo, pats, opts))
920 920 if not items:
921 921 return
922 922 fmt = '%%s %%-%ds %%-%ds %%s' % (
923 923 max([len(abs) for (src, abs, rel, exact) in items]),
924 924 max([len(rel) for (src, abs, rel, exact) in items]))
925 925 for src, abs, rel, exact in items:
926 926 line = fmt % (src, abs, rel, exact and 'exact' or '')
927 927 ui.write("%s\n" % line.rstrip())
928 928
929 929 def diff(ui, repo, *pats, **opts):
930 930 """diff working directory (or selected files)"""
931 931 node1, node2 = None, None
932 932 revs = [repo.lookup(x) for x in opts['rev']]
933 933
934 934 if len(revs) > 0:
935 935 node1 = revs[0]
936 936 if len(revs) > 1:
937 937 node2 = revs[1]
938 938 if len(revs) > 2:
939 raise util.Abort("too many revisions to diff")
939 raise util.Abort(_("too many revisions to diff"))
940 940
941 941 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
942 942
943 943 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
944 944 text=opts['text'])
945 945
946 946 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
947 947 node = repo.lookup(changeset)
948 948 prev, other = repo.changelog.parents(node)
949 949 change = repo.changelog.read(node)
950 950
951 951 fp = make_file(repo, repo.changelog, opts['output'],
952 952 node=node, total=total, seqno=seqno,
953 953 revwidth=revwidth)
954 954 if fp != sys.stdout:
955 955 ui.note("%s\n" % fp.name)
956 956
957 957 fp.write("# HG changeset patch\n")
958 958 fp.write("# User %s\n" % change[1])
959 959 fp.write("# Node ID %s\n" % hex(node))
960 960 fp.write("# Parent %s\n" % hex(prev))
961 961 if other != nullid:
962 962 fp.write("# Parent %s\n" % hex(other))
963 963 fp.write(change[4].rstrip())
964 964 fp.write("\n\n")
965 965
966 966 dodiff(fp, ui, repo, prev, node, text=opts['text'])
967 967 if fp != sys.stdout:
968 968 fp.close()
969 969
970 970 def export(ui, repo, *changesets, **opts):
971 971 """dump the header and diffs for one or more changesets"""
972 972 if not changesets:
973 raise util.Abort("export requires at least one changeset")
973 raise util.Abort(_("export requires at least one changeset"))
974 974 seqno = 0
975 975 revs = list(revrange(ui, repo, changesets))
976 976 total = len(revs)
977 977 revwidth = max(map(len, revs))
978 ui.note(len(revs) > 1 and "Exporting patches:\n" or "Exporting patch:\n")
978 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
979 979 for cset in revs:
980 980 seqno += 1
981 981 doexport(ui, repo, cset, seqno, total, revwidth, opts)
982 982
983 983 def forget(ui, repo, *pats, **opts):
984 984 """don't add the specified files on the next commit"""
985 985 forget = []
986 986 for src, abs, rel, exact in walk(repo, pats, opts):
987 987 if repo.dirstate.state(abs) == 'a':
988 988 forget.append(abs)
989 989 if ui.verbose or not exact:
990 ui.status('forgetting ', rel, '\n')
990 ui.status(_('forgetting %s\n') % rel)
991 991 repo.forget(forget)
992 992
993 993 def grep(ui, repo, pattern, *pats, **opts):
994 994 """search for a pattern in specified files and revisions"""
995 995 reflags = 0
996 996 if opts['ignore_case']:
997 997 reflags |= re.I
998 998 regexp = re.compile(pattern, reflags)
999 999 sep, eol = ':', '\n'
1000 1000 if opts['print0']:
1001 1001 sep = eol = '\0'
1002 1002
1003 1003 fcache = {}
1004 1004 def getfile(fn):
1005 1005 if fn not in fcache:
1006 1006 fcache[fn] = repo.file(fn)
1007 1007 return fcache[fn]
1008 1008
1009 1009 def matchlines(body):
1010 1010 begin = 0
1011 1011 linenum = 0
1012 1012 while True:
1013 1013 match = regexp.search(body, begin)
1014 1014 if not match:
1015 1015 break
1016 1016 mstart, mend = match.span()
1017 1017 linenum += body.count('\n', begin, mstart) + 1
1018 1018 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1019 1019 lend = body.find('\n', mend)
1020 1020 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1021 1021 begin = lend + 1
1022 1022
1023 1023 class linestate:
1024 1024 def __init__(self, line, linenum, colstart, colend):
1025 1025 self.line = line
1026 1026 self.linenum = linenum
1027 1027 self.colstart = colstart
1028 1028 self.colend = colend
1029 1029 def __eq__(self, other):
1030 1030 return self.line == other.line
1031 1031 def __hash__(self):
1032 1032 return hash(self.line)
1033 1033
1034 1034 matches = {}
1035 1035 def grepbody(fn, rev, body):
1036 1036 matches[rev].setdefault(fn, {})
1037 1037 m = matches[rev][fn]
1038 1038 for lnum, cstart, cend, line in matchlines(body):
1039 1039 s = linestate(line, lnum, cstart, cend)
1040 1040 m[s] = s
1041 1041
1042 1042 prev = {}
1043 1043 ucache = {}
1044 1044 def display(fn, rev, states, prevstates):
1045 1045 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1046 1046 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1047 1047 counts = {'-': 0, '+': 0}
1048 1048 filerevmatches = {}
1049 1049 for l in diff:
1050 1050 if incrementing or not opts['all']:
1051 1051 change = ((l in prevstates) and '-') or '+'
1052 1052 r = rev
1053 1053 else:
1054 1054 change = ((l in states) and '-') or '+'
1055 1055 r = prev[fn]
1056 1056 cols = [fn, str(rev)]
1057 1057 if opts['line_number']: cols.append(str(l.linenum))
1058 1058 if opts['all']: cols.append(change)
1059 1059 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1060 1060 ucache))
1061 1061 if opts['files_with_matches']:
1062 1062 c = (fn, rev)
1063 1063 if c in filerevmatches: continue
1064 1064 filerevmatches[c] = 1
1065 1065 else:
1066 1066 cols.append(l.line)
1067 1067 ui.write(sep.join(cols), eol)
1068 1068 counts[change] += 1
1069 1069 return counts['+'], counts['-']
1070 1070
1071 1071 fstate = {}
1072 1072 skip = {}
1073 1073 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1074 1074 count = 0
1075 1075 incrementing = False
1076 1076 for st, rev, fns in changeiter:
1077 1077 if st == 'window':
1078 1078 incrementing = rev
1079 1079 matches.clear()
1080 1080 elif st == 'add':
1081 1081 change = repo.changelog.read(repo.lookup(str(rev)))
1082 1082 mf = repo.manifest.read(change[0])
1083 1083 matches[rev] = {}
1084 1084 for fn in fns:
1085 1085 if fn in skip: continue
1086 1086 fstate.setdefault(fn, {})
1087 1087 try:
1088 1088 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1089 1089 except KeyError:
1090 1090 pass
1091 1091 elif st == 'iter':
1092 1092 states = matches[rev].items()
1093 1093 states.sort()
1094 1094 for fn, m in states:
1095 1095 if fn in skip: continue
1096 1096 if incrementing or not opts['all'] or fstate[fn]:
1097 1097 pos, neg = display(fn, rev, m, fstate[fn])
1098 1098 count += pos + neg
1099 1099 if pos and not opts['all']:
1100 1100 skip[fn] = True
1101 1101 fstate[fn] = m
1102 1102 prev[fn] = rev
1103 1103
1104 1104 if not incrementing:
1105 1105 fstate = fstate.items()
1106 1106 fstate.sort()
1107 1107 for fn, state in fstate:
1108 1108 if fn in skip: continue
1109 1109 display(fn, rev, {}, state)
1110 1110 return (count == 0 and 1) or 0
1111 1111
1112 1112 def heads(ui, repo, **opts):
1113 1113 """show current repository heads"""
1114 1114 heads = repo.changelog.heads()
1115 1115 br = None
1116 1116 if opts['branches']:
1117 1117 br = repo.branchlookup(heads)
1118 1118 for n in repo.changelog.heads():
1119 1119 show_changeset(ui, repo, changenode=n, brinfo=br)
1120 1120
1121 1121 def identify(ui, repo):
1122 1122 """print information about the working copy"""
1123 1123 parents = [p for p in repo.dirstate.parents() if p != nullid]
1124 1124 if not parents:
1125 ui.write("unknown\n")
1125 ui.write(_("unknown\n"))
1126 1126 return
1127 1127
1128 1128 hexfunc = ui.verbose and hex or short
1129 1129 (c, a, d, u) = repo.changes()
1130 1130 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1131 1131 (c or a or d) and "+" or "")]
1132 1132
1133 1133 if not ui.quiet:
1134 1134 # multiple tags for a single parent separated by '/'
1135 1135 parenttags = ['/'.join(tags)
1136 1136 for tags in map(repo.nodetags, parents) if tags]
1137 1137 # tags for multiple parents separated by ' + '
1138 1138 if parenttags:
1139 1139 output.append(' + '.join(parenttags))
1140 1140
1141 1141 ui.write("%s\n" % ' '.join(output))
1142 1142
1143 1143 def import_(ui, repo, patch1, *patches, **opts):
1144 1144 """import an ordered set of patches"""
1145 1145 patches = (patch1,) + patches
1146 1146
1147 1147 if not opts['force']:
1148 1148 (c, a, d, u) = repo.changes()
1149 1149 if c or a or d:
1150 raise util.Abort("outstanding uncommitted changes")
1150 raise util.Abort(_("outstanding uncommitted changes"))
1151 1151
1152 1152 d = opts["base"]
1153 1153 strip = opts["strip"]
1154 1154
1155 1155 mailre = re.compile(r'(?:From |[\w-]+:)')
1156 1156
1157 1157 # attempt to detect the start of a patch
1158 1158 # (this heuristic is borrowed from quilt)
1159 1159 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1160 1160 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1161 1161 '(---|\*\*\*)[ \t])')
1162 1162
1163 1163 for patch in patches:
1164 ui.status("applying %s\n" % patch)
1164 ui.status(_("applying %s\n") % patch)
1165 1165 pf = os.path.join(d, patch)
1166 1166
1167 1167 message = []
1168 1168 user = None
1169 1169 hgpatch = False
1170 1170 for line in file(pf):
1171 1171 line = line.rstrip()
1172 1172 if (not message and not hgpatch and
1173 1173 mailre.match(line) and not opts['force']):
1174 1174 if len(line) > 35: line = line[:32] + '...'
1175 raise util.Abort('first line looks like a '
1176 'mail header: ' + line)
1175 raise util.Abort(_('first line looks like a '
1176 'mail header: ') + line)
1177 1177 if diffre.match(line):
1178 1178 break
1179 1179 elif hgpatch:
1180 1180 # parse values when importing the result of an hg export
1181 1181 if line.startswith("# User "):
1182 1182 user = line[7:]
1183 ui.debug('User: %s\n' % user)
1183 ui.debug(_('User: %s\n') % user)
1184 1184 elif not line.startswith("# ") and line:
1185 1185 message.append(line)
1186 1186 hgpatch = False
1187 1187 elif line == '# HG changeset patch':
1188 1188 hgpatch = True
1189 1189 message = [] # We may have collected garbage
1190 1190 else:
1191 1191 message.append(line)
1192 1192
1193 1193 # make sure message isn't empty
1194 1194 if not message:
1195 message = "imported patch %s\n" % patch
1195 message = _("imported patch %s\n") % patch
1196 1196 else:
1197 1197 message = "%s\n" % '\n'.join(message)
1198 ui.debug('message:\n%s\n' % message)
1198 ui.debug(_('message:\n%s\n') % message)
1199 1199
1200 1200 files = util.patch(strip, pf, ui)
1201 1201
1202 1202 if len(files) > 0:
1203 1203 addremove(ui, repo, *files)
1204 1204 repo.commit(files, message, user)
1205 1205
1206 1206 def incoming(ui, repo, source="default", **opts):
1207 1207 """show new changesets found in source"""
1208 1208 source = ui.expandpath(source)
1209 1209 other = hg.repository(ui, source)
1210 1210 if not other.local():
1211 raise util.Abort("incoming doesn't work for remote repositories yet")
1211 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1212 1212 o = repo.findincoming(other)
1213 1213 if not o:
1214 1214 return
1215 1215 o = other.newer(o)
1216 1216 for n in o:
1217 1217 show_changeset(ui, other, changenode=n)
1218 1218 if opts['patch']:
1219 1219 prev = other.changelog.parents(n)[0]
1220 1220 dodiff(ui, ui, other, prev, n)
1221 1221 ui.write("\n")
1222 1222
1223 1223 def init(ui, dest="."):
1224 1224 """create a new repository in the given directory"""
1225 1225 if not os.path.exists(dest):
1226 1226 os.mkdir(dest)
1227 1227 hg.repository(ui, dest, create=1)
1228 1228
1229 1229 def locate(ui, repo, *pats, **opts):
1230 1230 """locate files matching specific patterns"""
1231 1231 end = opts['print0'] and '\0' or '\n'
1232 1232
1233 1233 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1234 1234 if repo.dirstate.state(abs) == '?':
1235 1235 continue
1236 1236 if opts['fullpath']:
1237 1237 ui.write(os.path.join(repo.root, abs), end)
1238 1238 else:
1239 1239 ui.write(rel, end)
1240 1240
1241 1241 def log(ui, repo, *pats, **opts):
1242 1242 """show revision history of entire repository or files"""
1243 1243 class dui:
1244 1244 # Implement and delegate some ui protocol. Save hunks of
1245 1245 # output for later display in the desired order.
1246 1246 def __init__(self, ui):
1247 1247 self.ui = ui
1248 1248 self.hunk = {}
1249 1249 def bump(self, rev):
1250 1250 self.rev = rev
1251 1251 self.hunk[rev] = []
1252 1252 def note(self, *args):
1253 1253 if self.verbose:
1254 1254 self.write(*args)
1255 1255 def status(self, *args):
1256 1256 if not self.quiet:
1257 1257 self.write(*args)
1258 1258 def write(self, *args):
1259 1259 self.hunk[self.rev].append(args)
1260 1260 def debug(self, *args):
1261 1261 if self.debugflag:
1262 1262 self.write(*args)
1263 1263 def __getattr__(self, key):
1264 1264 return getattr(self.ui, key)
1265 1265 cwd = repo.getcwd()
1266 1266 if not pats and cwd:
1267 1267 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1268 1268 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1269 1269 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1270 1270 pats, opts)
1271 1271 for st, rev, fns in changeiter:
1272 1272 if st == 'window':
1273 1273 du = dui(ui)
1274 1274 elif st == 'add':
1275 1275 du.bump(rev)
1276 1276 br = None
1277 1277 if opts['branch']:
1278 1278 br = repo.branchlookup([repo.changelog.node(rev)])
1279 1279
1280 1280 if opts['keyword']:
1281 1281 changes = repo.changelog.read(repo.changelog.node(rev))
1282 1282 miss = 0
1283 1283 for k in opts['keyword']:
1284 1284 if not (k in changes[1].lower() or
1285 1285 k in changes[4].lower() or
1286 1286 k in " ".join(changes[3][:20]).lower()):
1287 1287 miss = 1
1288 1288 break
1289 1289 if miss:
1290 1290 continue
1291 1291
1292 1292 show_changeset(du, repo, rev, brinfo=br)
1293 1293 if opts['patch']:
1294 1294 changenode = repo.changelog.node(rev)
1295 1295 prev, other = repo.changelog.parents(changenode)
1296 1296 dodiff(du, du, repo, prev, changenode, fns)
1297 1297 du.write("\n\n")
1298 1298 elif st == 'iter':
1299 1299 for args in du.hunk[rev]:
1300 1300 ui.write(*args)
1301 1301
1302 1302 def manifest(ui, repo, rev=None):
1303 1303 """output the latest or given revision of the project manifest"""
1304 1304 if rev:
1305 1305 try:
1306 1306 # assume all revision numbers are for changesets
1307 1307 n = repo.lookup(rev)
1308 1308 change = repo.changelog.read(n)
1309 1309 n = change[0]
1310 1310 except hg.RepoError:
1311 1311 n = repo.manifest.lookup(rev)
1312 1312 else:
1313 1313 n = repo.manifest.tip()
1314 1314 m = repo.manifest.read(n)
1315 1315 mf = repo.manifest.readflags(n)
1316 1316 files = m.keys()
1317 1317 files.sort()
1318 1318
1319 1319 for f in files:
1320 1320 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1321 1321
1322 1322 def outgoing(ui, repo, dest="default-push", **opts):
1323 1323 """show changesets not found in destination"""
1324 1324 dest = ui.expandpath(dest)
1325 1325 other = hg.repository(ui, dest)
1326 1326 o = repo.findoutgoing(other)
1327 1327 o = repo.newer(o)
1328 1328 for n in o:
1329 1329 show_changeset(ui, repo, changenode=n)
1330 1330 if opts['patch']:
1331 1331 prev = repo.changelog.parents(n)[0]
1332 1332 dodiff(ui, ui, repo, prev, n)
1333 1333 ui.write("\n")
1334 1334
1335 1335 def parents(ui, repo, rev=None):
1336 1336 """show the parents of the working dir or revision"""
1337 1337 if rev:
1338 1338 p = repo.changelog.parents(repo.lookup(rev))
1339 1339 else:
1340 1340 p = repo.dirstate.parents()
1341 1341
1342 1342 for n in p:
1343 1343 if n != nullid:
1344 1344 show_changeset(ui, repo, changenode=n)
1345 1345
1346 1346 def paths(ui, search=None):
1347 1347 """show definition of symbolic path names"""
1348 1348 try:
1349 1349 repo = hg.repository(ui=ui)
1350 1350 except hg.RepoError:
1351 1351 pass
1352 1352
1353 1353 if search:
1354 1354 for name, path in ui.configitems("paths"):
1355 1355 if name == search:
1356 1356 ui.write("%s\n" % path)
1357 1357 return
1358 ui.warn("not found!\n")
1358 ui.warn(_("not found!\n"))
1359 1359 return 1
1360 1360 else:
1361 1361 for name, path in ui.configitems("paths"):
1362 1362 ui.write("%s = %s\n" % (name, path))
1363 1363
1364 1364 def pull(ui, repo, source="default", **opts):
1365 1365 """pull changes from the specified source"""
1366 1366 source = ui.expandpath(source)
1367 ui.status('pulling from %s\n' % (source))
1367 ui.status(_('pulling from %s\n') % (source))
1368 1368
1369 1369 if opts['ssh']:
1370 1370 ui.setconfig("ui", "ssh", opts['ssh'])
1371 1371 if opts['remotecmd']:
1372 1372 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1373 1373
1374 1374 other = hg.repository(ui, source)
1375 1375 r = repo.pull(other)
1376 1376 if not r:
1377 1377 if opts['update']:
1378 1378 return update(ui, repo)
1379 1379 else:
1380 ui.status("(run 'hg update' to get a working copy)\n")
1380 ui.status(_("(run 'hg update' to get a working copy)\n"))
1381 1381
1382 1382 return r
1383 1383
1384 1384 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1385 1385 """push changes to the specified destination"""
1386 1386 dest = ui.expandpath(dest)
1387 1387 ui.status('pushing to %s\n' % (dest))
1388 1388
1389 1389 if ssh:
1390 1390 ui.setconfig("ui", "ssh", ssh)
1391 1391 if remotecmd:
1392 1392 ui.setconfig("ui", "remotecmd", remotecmd)
1393 1393
1394 1394 other = hg.repository(ui, dest)
1395 1395 r = repo.push(other, force)
1396 1396 return r
1397 1397
1398 1398 def rawcommit(ui, repo, *flist, **rc):
1399 1399 "raw commit interface"
1400 1400 if rc['text']:
1401 ui.warn("Warning: -t and --text is deprecated,"
1402 " please use -m or --message instead.\n")
1401 ui.warn(_("Warning: -t and --text is deprecated,"
1402 " please use -m or --message instead.\n"))
1403 1403 message = rc['message'] or rc['text']
1404 1404 if not message and rc['logfile']:
1405 1405 try:
1406 1406 message = open(rc['logfile']).read()
1407 1407 except IOError:
1408 1408 pass
1409 1409 if not message and not rc['logfile']:
1410 raise util.Abort("missing commit message")
1410 raise util.Abort(_("missing commit message"))
1411 1411
1412 1412 files = relpath(repo, list(flist))
1413 1413 if rc['files']:
1414 1414 files += open(rc['files']).read().splitlines()
1415 1415
1416 1416 rc['parent'] = map(repo.lookup, rc['parent'])
1417 1417
1418 1418 try:
1419 1419 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1420 1420 except ValueError, inst:
1421 1421 raise util.Abort(str(inst))
1422 1422
1423 1423 def recover(ui, repo):
1424 1424 """roll back an interrupted transaction"""
1425 1425 repo.recover()
1426 1426
1427 1427 def remove(ui, repo, pat, *pats, **opts):
1428 1428 """remove the specified files on the next commit"""
1429 1429 names = []
1430 1430 def okaytoremove(abs, rel, exact):
1431 1431 c, a, d, u = repo.changes(files = [abs])
1432 1432 reason = None
1433 if c: reason = 'is modified'
1434 elif a: reason = 'has been marked for add'
1435 elif u: reason = 'is not managed'
1433 if c: reason = _('is modified')
1434 elif a: reason = _('has been marked for add')
1435 elif u: reason = _('is not managed')
1436 1436 if reason:
1437 if exact: ui.warn('not removing %s: file %s\n' % (rel, reason))
1437 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1438 1438 else:
1439 1439 return True
1440 1440 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1441 1441 if okaytoremove(abs, rel, exact):
1442 if ui.verbose or not exact: ui.status('removing %s\n' % rel)
1442 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1443 1443 names.append(abs)
1444 1444 for name in names:
1445 1445 try:
1446 1446 os.unlink(name)
1447 1447 except OSError, inst:
1448 1448 if inst.errno != errno.ENOENT: raise
1449 1449 repo.remove(names)
1450 1450
1451 1451 def rename(ui, repo, *pats, **opts):
1452 1452 """rename files; equivalent of copy + remove"""
1453 1453 errs, copied = docopy(ui, repo, pats, opts)
1454 1454 names = []
1455 1455 for abs, rel, exact in copied:
1456 if ui.verbose or not exact: ui.status('removing %s\n' % rel)
1456 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1457 1457 try:
1458 1458 os.unlink(rel)
1459 1459 except OSError, inst:
1460 1460 if inst.errno != errno.ENOENT: raise
1461 1461 names.append(abs)
1462 1462 repo.remove(names)
1463 1463 return errs
1464 1464
1465 1465 def revert(ui, repo, *names, **opts):
1466 1466 """revert modified files or dirs back to their unmodified states"""
1467 1467 node = opts['rev'] and repo.lookup(opts['rev']) or \
1468 1468 repo.dirstate.parents()[0]
1469 1469 root = os.path.realpath(repo.root)
1470 1470
1471 1471 def trimpath(p):
1472 1472 p = os.path.realpath(p)
1473 1473 if p.startswith(root):
1474 1474 rest = p[len(root):]
1475 1475 if not rest:
1476 1476 return rest
1477 1477 if p.startswith(os.sep):
1478 1478 return rest[1:]
1479 1479 return p
1480 1480
1481 1481 relnames = map(trimpath, names or [os.getcwd()])
1482 1482 chosen = {}
1483 1483
1484 1484 def choose(name):
1485 1485 def body(name):
1486 1486 for r in relnames:
1487 1487 if not name.startswith(r):
1488 1488 continue
1489 1489 rest = name[len(r):]
1490 1490 if not rest:
1491 1491 return r, True
1492 1492 depth = rest.count(os.sep)
1493 1493 if not r:
1494 1494 if depth == 0 or not opts['nonrecursive']:
1495 1495 return r, True
1496 1496 elif rest[0] == os.sep:
1497 1497 if depth == 1 or not opts['nonrecursive']:
1498 1498 return r, True
1499 1499 return None, False
1500 1500 relname, ret = body(name)
1501 1501 if ret:
1502 1502 chosen[relname] = 1
1503 1503 return ret
1504 1504
1505 1505 r = repo.update(node, False, True, choose, False)
1506 1506 for n in relnames:
1507 1507 if n not in chosen:
1508 ui.warn('error: no matches for %s\n' % n)
1508 ui.warn(_('error: no matches for %s\n') % n)
1509 1509 r = 1
1510 1510 sys.stdout.flush()
1511 1511 return r
1512 1512
1513 1513 def root(ui, repo):
1514 1514 """print the root (top) of the current working dir"""
1515 1515 ui.write(repo.root + "\n")
1516 1516
1517 1517 def serve(ui, repo, **opts):
1518 1518 """export the repository via HTTP"""
1519 1519
1520 1520 if opts["stdio"]:
1521 1521 fin, fout = sys.stdin, sys.stdout
1522 1522 sys.stdout = sys.stderr
1523 1523
1524 1524 def getarg():
1525 1525 argline = fin.readline()[:-1]
1526 1526 arg, l = argline.split()
1527 1527 val = fin.read(int(l))
1528 1528 return arg, val
1529 1529 def respond(v):
1530 1530 fout.write("%d\n" % len(v))
1531 1531 fout.write(v)
1532 1532 fout.flush()
1533 1533
1534 1534 lock = None
1535 1535
1536 1536 while 1:
1537 1537 cmd = fin.readline()[:-1]
1538 1538 if cmd == '':
1539 1539 return
1540 1540 if cmd == "heads":
1541 1541 h = repo.heads()
1542 1542 respond(" ".join(map(hex, h)) + "\n")
1543 1543 if cmd == "lock":
1544 1544 lock = repo.lock()
1545 1545 respond("")
1546 1546 if cmd == "unlock":
1547 1547 if lock:
1548 1548 lock.release()
1549 1549 lock = None
1550 1550 respond("")
1551 1551 elif cmd == "branches":
1552 1552 arg, nodes = getarg()
1553 1553 nodes = map(bin, nodes.split(" "))
1554 1554 r = []
1555 1555 for b in repo.branches(nodes):
1556 1556 r.append(" ".join(map(hex, b)) + "\n")
1557 1557 respond("".join(r))
1558 1558 elif cmd == "between":
1559 1559 arg, pairs = getarg()
1560 1560 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1561 1561 r = []
1562 1562 for b in repo.between(pairs):
1563 1563 r.append(" ".join(map(hex, b)) + "\n")
1564 1564 respond("".join(r))
1565 1565 elif cmd == "changegroup":
1566 1566 nodes = []
1567 1567 arg, roots = getarg()
1568 1568 nodes = map(bin, roots.split(" "))
1569 1569
1570 1570 cg = repo.changegroup(nodes)
1571 1571 while 1:
1572 1572 d = cg.read(4096)
1573 1573 if not d:
1574 1574 break
1575 1575 fout.write(d)
1576 1576
1577 1577 fout.flush()
1578 1578
1579 1579 elif cmd == "addchangegroup":
1580 1580 if not lock:
1581 1581 respond("not locked")
1582 1582 continue
1583 1583 respond("")
1584 1584
1585 1585 r = repo.addchangegroup(fin)
1586 1586 respond("")
1587 1587
1588 1588 optlist = "name templates style address port ipv6 accesslog errorlog"
1589 1589 for o in optlist.split():
1590 1590 if opts[o]:
1591 1591 ui.setconfig("web", o, opts[o])
1592 1592
1593 1593 try:
1594 1594 httpd = hgweb.create_server(repo)
1595 1595 except socket.error, inst:
1596 1596 raise util.Abort('cannot start server: ' + inst.args[1])
1597 1597
1598 1598 if ui.verbose:
1599 1599 addr, port = httpd.socket.getsockname()
1600 1600 if addr == '0.0.0.0':
1601 1601 addr = socket.gethostname()
1602 1602 else:
1603 1603 try:
1604 1604 addr = socket.gethostbyaddr(addr)[0]
1605 1605 except socket.error:
1606 1606 pass
1607 1607 if port != 80:
1608 ui.status('listening at http://%s:%d/\n' % (addr, port))
1608 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1609 1609 else:
1610 ui.status('listening at http://%s/\n' % addr)
1610 ui.status(_('listening at http://%s/\n') % addr)
1611 1611 httpd.serve_forever()
1612 1612
1613 1613 def status(ui, repo, *pats, **opts):
1614 1614 '''show changed files in the working directory
1615 1615
1616 1616 M = modified
1617 1617 A = added
1618 1618 R = removed
1619 1619 ? = not tracked
1620 1620 '''
1621 1621
1622 1622 cwd = repo.getcwd()
1623 1623 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1624 1624 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1625 1625 for n in repo.changes(files=files, match=matchfn)]
1626 1626
1627 changetypes = [('modified', 'M', c),
1628 ('added', 'A', a),
1629 ('removed', 'R', d),
1630 ('unknown', '?', u)]
1627 changetypes = [(_('modified'), 'M', c),
1628 (_('added'), 'A', a),
1629 (_('removed'), 'R', d),
1630 (_('unknown'), '?', u)]
1631 1631
1632 1632 end = opts['print0'] and '\0' or '\n'
1633 1633
1634 1634 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1635 1635 or changetypes):
1636 1636 if opts['no_status']:
1637 1637 format = "%%s%s" % end
1638 1638 else:
1639 1639 format = "%s %%s%s" % (char, end);
1640 1640
1641 1641 for f in changes:
1642 1642 ui.write(format % f)
1643 1643
1644 1644 def tag(ui, repo, name, rev=None, **opts):
1645 1645 """add a tag for the current tip or a given revision"""
1646 1646 if opts['text']:
1647 ui.warn("Warning: -t and --text is deprecated,"
1648 " please use -m or --message instead.\n")
1647 ui.warn(_("Warning: -t and --text is deprecated,"
1648 " please use -m or --message instead.\n"))
1649 1649 if name == "tip":
1650 raise util.Abort("the name 'tip' is reserved")
1650 raise util.Abort(_("the name 'tip' is reserved"))
1651 1651 if rev:
1652 1652 r = hex(repo.lookup(rev))
1653 1653 else:
1654 1654 r = hex(repo.changelog.tip())
1655 1655
1656 1656 if name.find(revrangesep) >= 0:
1657 raise util.Abort("'%s' cannot be used in a tag name" % revrangesep)
1657 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
1658 1658
1659 1659 if opts['local']:
1660 1660 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
1661 1661 return
1662 1662
1663 1663 (c, a, d, u) = repo.changes()
1664 1664 for x in (c, a, d, u):
1665 1665 if ".hgtags" in x:
1666 raise util.Abort("working copy of .hgtags is changed "
1667 "(please commit .hgtags manually)")
1666 raise util.Abort(_("working copy of .hgtags is changed "
1667 "(please commit .hgtags manually)"))
1668 1668
1669 1669 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
1670 1670 if repo.dirstate.state(".hgtags") == '?':
1671 1671 repo.add([".hgtags"])
1672 1672
1673 1673 message = (opts['message'] or opts['text'] or
1674 "Added tag %s for changeset %s" % (name, r))
1674 _("Added tag %s for changeset %s") % (name, r))
1675 1675 try:
1676 1676 repo.commit([".hgtags"], message, opts['user'], opts['date'])
1677 1677 except ValueError, inst:
1678 1678 raise util.Abort(str(inst))
1679 1679
1680 1680 def tags(ui, repo):
1681 1681 """list repository tags"""
1682 1682
1683 1683 l = repo.tagslist()
1684 1684 l.reverse()
1685 1685 for t, n in l:
1686 1686 try:
1687 1687 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
1688 1688 except KeyError:
1689 1689 r = " ?:?"
1690 1690 ui.write("%-30s %s\n" % (t, r))
1691 1691
1692 1692 def tip(ui, repo):
1693 1693 """show the tip revision"""
1694 1694 n = repo.changelog.tip()
1695 1695 show_changeset(ui, repo, changenode=n)
1696 1696
1697 1697 def unbundle(ui, repo, fname):
1698 1698 """apply a changegroup file"""
1699 1699 f = urllib.urlopen(fname)
1700 1700
1701 1701 if f.read(4) != "HG10":
1702 raise util.Abort("%s: not a Mercurial bundle file" % fname)
1702 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
1703 1703
1704 1704 def bzgenerator(f):
1705 1705 zd = bz2.BZ2Decompressor()
1706 1706 for chunk in f:
1707 1707 yield zd.decompress(chunk)
1708 1708 yield zd.flush()
1709 1709
1710 1710 bzgen = bzgenerator(util.filechunkiter(f, 4096))
1711 1711 repo.addchangegroup(util.chunkbuffer(bzgen))
1712 1712
1713 1713 def undo(ui, repo):
1714 1714 """undo the last commit or pull
1715 1715
1716 1716 Roll back the last pull or commit transaction on the
1717 1717 repository, restoring the project to its earlier state.
1718 1718
1719 1719 This command should be used with care. There is only one level of
1720 1720 undo and there is no redo.
1721 1721
1722 1722 This command is not intended for use on public repositories. Once
1723 1723 a change is visible for pull by other users, undoing it locally is
1724 1724 ineffective.
1725 1725 """
1726 1726 repo.undo()
1727 1727
1728 1728 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
1729 1729 '''update or merge working directory
1730 1730
1731 1731 If there are no outstanding changes in the working directory and
1732 1732 there is a linear relationship between the current version and the
1733 1733 requested version, the result is the requested version.
1734 1734
1735 1735 Otherwise the result is a merge between the contents of the
1736 1736 current working directory and the requested version. Files that
1737 1737 changed between either parent are marked as changed for the next
1738 1738 commit and a commit must be performed before any further updates
1739 1739 are allowed.
1740 1740 '''
1741 1741 if branch:
1742 1742 br = repo.branchlookup(branch=branch)
1743 1743 found = []
1744 1744 for x in br:
1745 1745 if branch in br[x]:
1746 1746 found.append(x)
1747 1747 if len(found) > 1:
1748 ui.warn("Found multiple heads for %s\n" % branch)
1748 ui.warn(_("Found multiple heads for %s\n") % branch)
1749 1749 for x in found:
1750 1750 show_changeset(ui, repo, changenode=x, brinfo=br)
1751 1751 return 1
1752 1752 if len(found) == 1:
1753 1753 node = found[0]
1754 ui.warn("Using head %s for branch %s\n" % (short(node), branch))
1754 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
1755 1755 else:
1756 ui.warn("branch %s not found\n" % (branch))
1756 ui.warn(_("branch %s not found\n") % (branch))
1757 1757 return 1
1758 1758 else:
1759 1759 node = node and repo.lookup(node) or repo.changelog.tip()
1760 1760 return repo.update(node, allow=merge, force=clean)
1761 1761
1762 1762 def verify(ui, repo):
1763 1763 """verify the integrity of the repository"""
1764 1764 return repo.verify()
1765 1765
1766 1766 # Command options and aliases are listed here, alphabetically
1767 1767
1768 1768 table = {
1769 1769 "^add":
1770 1770 (add,
1771 [('I', 'include', [], 'include path in search'),
1772 ('X', 'exclude', [], 'exclude path from search')],
1771 [('I', 'include', [], _('include path in search')),
1772 ('X', 'exclude', [], _('exclude path from search'))],
1773 1773 "hg add [OPTION]... [FILE]..."),
1774 1774 "addremove":
1775 1775 (addremove,
1776 [('I', 'include', [], 'include path in search'),
1777 ('X', 'exclude', [], 'exclude path from search')],
1778 "hg addremove [OPTION]... [FILE]..."),
1776 [('I', 'include', [], _('include path in search')),
1777 ('X', 'exclude', [], _('exclude path from search'))],
1778 _("hg addremove [OPTION]... [FILE]...")),
1779 1779 "^annotate":
1780 1780 (annotate,
1781 [('r', 'rev', '', 'revision'),
1782 ('a', 'text', None, 'treat all files as text'),
1783 ('u', 'user', None, 'show user'),
1784 ('n', 'number', None, 'show revision number'),
1785 ('c', 'changeset', None, 'show changeset'),
1786 ('I', 'include', [], 'include path in search'),
1787 ('X', 'exclude', [], 'exclude path from search')],
1788 'hg annotate [OPTION]... FILE...'),
1781 [('r', 'rev', '', _('revision')),
1782 ('a', 'text', None, _('treat all files as text')),
1783 ('u', 'user', None, _('show user')),
1784 ('n', 'number', None, _('show revision number')),
1785 ('c', 'changeset', None, _('show changeset')),
1786 ('I', 'include', [], _('include path in search')),
1787 ('X', 'exclude', [], _('exclude path from search'))],
1788 _('hg annotate [OPTION]... FILE...')),
1789 1789 "bundle":
1790 1790 (bundle,
1791 1791 [],
1792 'hg bundle FILE DEST'),
1792 _('hg bundle FILE DEST')),
1793 1793 "cat":
1794 1794 (cat,
1795 [('I', 'include', [], 'include path in search'),
1796 ('X', 'exclude', [], 'exclude path from search'),
1797 ('o', 'output', "", 'output to file'),
1798 ('r', 'rev', '', 'revision')],
1799 'hg cat [OPTION]... FILE...'),
1795 [('I', 'include', [], _('include path in search')),
1796 ('X', 'exclude', [], _('exclude path from search')),
1797 ('o', 'output', "", _('output to file')),
1798 ('r', 'rev', '', _('revision'))],
1799 _('hg cat [OPTION]... FILE...')),
1800 1800 "^clone":
1801 1801 (clone,
1802 [('U', 'noupdate', None, 'skip update after cloning'),
1803 ('e', 'ssh', "", 'ssh command'),
1804 ('', 'pull', None, 'use pull protocol to copy metadata'),
1805 ('', 'remotecmd', "", 'remote hg command')],
1806 'hg clone [OPTION]... SOURCE [DEST]'),
1802 [('U', 'noupdate', None, _('skip update after cloning')),
1803 ('e', 'ssh', "", _('ssh command')),
1804 ('', 'pull', None, _('use pull protocol to copy metadata')),
1805 ('', 'remotecmd', "", _('remote hg command'))],
1806 _('hg clone [OPTION]... SOURCE [DEST]')),
1807 1807 "^commit|ci":
1808 1808 (commit,
1809 [('A', 'addremove', None, 'run add/remove during commit'),
1810 ('I', 'include', [], 'include path in search'),
1811 ('X', 'exclude', [], 'exclude path from search'),
1812 ('m', 'message', "", 'commit message'),
1813 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1814 ('l', 'logfile', "", 'commit message file'),
1815 ('d', 'date', "", 'date code'),
1816 ('u', 'user', "", 'user')],
1817 'hg commit [OPTION]... [FILE]...'),
1809 [('A', 'addremove', None, _('run add/remove during commit')),
1810 ('I', 'include', [], _('include path in search')),
1811 ('X', 'exclude', [], _('exclude path from search')),
1812 ('m', 'message', "", _('commit message')),
1813 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1814 ('l', 'logfile', "", _('commit message file')),
1815 ('d', 'date', "", _('date code')),
1816 ('u', 'user', "", _('user'))],
1817 _('hg commit [OPTION]... [FILE]...')),
1818 1818 "copy|cp": (copy,
1819 [('I', 'include', [], 'include path in search'),
1820 ('X', 'exclude', [], 'exclude path from search'),
1821 ('A', 'after', None, 'record a copy after it has happened'),
1822 ('f', 'force', None, 'replace destination if it exists'),
1823 ('p', 'parents', None, 'append source path to dest')],
1824 'hg copy [OPTION]... [SOURCE]... DEST'),
1825 "debugancestor": (debugancestor, [], 'debugancestor INDEX REV1 REV2'),
1826 "debugcheckstate": (debugcheckstate, [], 'debugcheckstate'),
1827 "debugconfig": (debugconfig, [], 'debugconfig'),
1828 "debugsetparents": (debugsetparents, [], 'debugsetparents REV1 [REV2]'),
1829 "debugstate": (debugstate, [], 'debugstate'),
1830 "debugdata": (debugdata, [], 'debugdata FILE REV'),
1831 "debugindex": (debugindex, [], 'debugindex FILE'),
1832 "debugindexdot": (debugindexdot, [], 'debugindexdot FILE'),
1833 "debugrename": (debugrename, [], 'debugrename FILE [REV]'),
1819 [('I', 'include', [], _('include path in search')),
1820 ('X', 'exclude', [], _('exclude path from search')),
1821 ('A', 'after', None, _('record a copy after it has happened')),
1822 ('f', 'force', None, _('replace destination if it exists')),
1823 ('p', 'parents', None, _('append source path to dest'))],
1824 _('hg copy [OPTION]... [SOURCE]... DEST')),
1825 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
1826 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
1827 "debugconfig": (debugconfig, [], _('debugconfig')),
1828 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
1829 "debugstate": (debugstate, [], _('debugstate')),
1830 "debugdata": (debugdata, [], _('debugdata FILE REV')),
1831 "debugindex": (debugindex, [], _('debugindex FILE')),
1832 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
1833 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
1834 1834 "debugwalk":
1835 1835 (debugwalk,
1836 [('I', 'include', [], 'include path in search'),
1837 ('X', 'exclude', [], 'exclude path from search')],
1838 'debugwalk [OPTION]... [FILE]...'),
1836 [('I', 'include', [], _('include path in search')),
1837 ('X', 'exclude', [], _('exclude path from search'))],
1838 _('debugwalk [OPTION]... [FILE]...')),
1839 1839 "^diff":
1840 1840 (diff,
1841 [('r', 'rev', [], 'revision'),
1842 ('a', 'text', None, 'treat all files as text'),
1843 ('I', 'include', [], 'include path in search'),
1844 ('X', 'exclude', [], 'exclude path from search')],
1845 'hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...'),
1841 [('r', 'rev', [], _('revision')),
1842 ('a', 'text', None, _('treat all files as text')),
1843 ('I', 'include', [], _('include path in search')),
1844 ('X', 'exclude', [], _('exclude path from search'))],
1845 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
1846 1846 "^export":
1847 1847 (export,
1848 [('o', 'output', "", 'output to file'),
1849 ('a', 'text', None, 'treat all files as text')],
1850 "hg export [-a] [-o OUTFILE] REV..."),
1848 [('o', 'output', "", _('output to file')),
1849 ('a', 'text', None, _('treat all files as text'))],
1850 _("hg export [-a] [-o OUTFILE] REV...")),
1851 1851 "forget":
1852 1852 (forget,
1853 [('I', 'include', [], 'include path in search'),
1854 ('X', 'exclude', [], 'exclude path from search')],
1855 "hg forget [OPTION]... FILE..."),
1853 [('I', 'include', [], _('include path in search')),
1854 ('X', 'exclude', [], _('exclude path from search'))],
1855 _("hg forget [OPTION]... FILE...")),
1856 1856 "grep":
1857 1857 (grep,
1858 [('0', 'print0', None, 'end fields with NUL'),
1859 ('I', 'include', [], 'include path in search'),
1860 ('X', 'exclude', [], 'include path in search'),
1861 ('', 'all', None, 'print all revisions with matches'),
1862 ('i', 'ignore-case', None, 'ignore case when matching'),
1863 ('l', 'files-with-matches', None, 'print names of files and revs with matches'),
1864 ('n', 'line-number', None, 'print line numbers'),
1865 ('r', 'rev', [], 'search in revision rev'),
1866 ('u', 'user', None, 'print user who made change')],
1867 "hg grep [OPTION]... PATTERN [FILE]..."),
1858 [('0', 'print0', None, _('end fields with NUL')),
1859 ('I', 'include', [], _('include path in search')),
1860 ('X', 'exclude', [], _('include path in search')),
1861 ('', 'all', None, _('print all revisions with matches')),
1862 ('i', 'ignore-case', None, _('ignore case when matching')),
1863 ('l', 'files-with-matches', None, _('print names of files and revs with matches')),
1864 ('n', 'line-number', None, _('print line numbers')),
1865 ('r', 'rev', [], _('search in revision rev')),
1866 ('u', 'user', None, _('print user who made change'))],
1867 _("hg grep [OPTION]... PATTERN [FILE]...")),
1868 1868 "heads":
1869 1869 (heads,
1870 [('b', 'branches', None, 'find branch info')],
1871 'hg heads [-b]'),
1872 "help": (help_, [], 'hg help [COMMAND]'),
1873 "identify|id": (identify, [], 'hg identify'),
1870 [('b', 'branches', None, _('find branch info'))],
1871 _('hg heads [-b]')),
1872 "help": (help_, [], _('hg help [COMMAND]')),
1873 "identify|id": (identify, [], _('hg identify')),
1874 1874 "import|patch":
1875 1875 (import_,
1876 [('p', 'strip', 1, 'path strip'),
1877 ('f', 'force', None, 'skip check for outstanding changes'),
1878 ('b', 'base', "", 'base path')],
1879 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
1876 [('p', 'strip', 1, _('path strip')),
1877 ('f', 'force', None, _('skip check for outstanding changes')),
1878 ('b', 'base', "", _('base path'))],
1879 _("hg import [-f] [-p NUM] [-b BASE] PATCH...")),
1880 1880 "incoming|in": (incoming,
1881 [('p', 'patch', None, 'show patch')],
1882 'hg incoming [-p] [SOURCE]'),
1883 "^init": (init, [], 'hg init [DEST]'),
1881 [('p', 'patch', None, _('show patch'))],
1882 _('hg incoming [-p] [SOURCE]')),
1883 "^init": (init, [], _('hg init [DEST]')),
1884 1884 "locate":
1885 1885 (locate,
1886 [('r', 'rev', '', 'revision'),
1887 ('0', 'print0', None, 'end filenames with NUL'),
1888 ('f', 'fullpath', None, 'print complete paths'),
1889 ('I', 'include', [], 'include path in search'),
1890 ('X', 'exclude', [], 'exclude path from search')],
1891 'hg locate [OPTION]... [PATTERN]...'),
1886 [('r', 'rev', '', _('revision')),
1887 ('0', 'print0', None, _('end filenames with NUL')),
1888 ('f', 'fullpath', None, _('print complete paths')),
1889 ('I', 'include', [], _('include path in search')),
1890 ('X', 'exclude', [], _('exclude path from search'))],
1891 _('hg locate [OPTION]... [PATTERN]...')),
1892 1892 "^log|history":
1893 1893 (log,
1894 [('I', 'include', [], 'include path in search'),
1895 ('X', 'exclude', [], 'exclude path from search'),
1896 ('b', 'branch', None, 'show branches'),
1897 ('k', 'keyword', [], 'search for a keyword'),
1898 ('r', 'rev', [], 'revision'),
1899 ('p', 'patch', None, 'show patch')],
1900 'hg log [-I] [-X] [-r REV]... [-p] [FILE]'),
1901 "manifest": (manifest, [], 'hg manifest [REV]'),
1894 [('I', 'include', [], _('include path in search')),
1895 ('X', 'exclude', [], _('exclude path from search')),
1896 ('b', 'branch', None, _('show branches')),
1897 ('k', 'keyword', [], _('search for a keyword')),
1898 ('r', 'rev', [], _('revision')),
1899 ('p', 'patch', None, _('show patch'))],
1900 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
1901 "manifest": (manifest, [], _('hg manifest [REV]')),
1902 1902 "outgoing|out": (outgoing,
1903 [('p', 'patch', None, 'show patch')],
1904 'hg outgoing [-p] [DEST]'),
1905 "parents": (parents, [], 'hg parents [REV]'),
1906 "paths": (paths, [], 'hg paths [NAME]'),
1903 [('p', 'patch', None, _('show patch'))],
1904 _('hg outgoing [-p] [DEST]')),
1905 "parents": (parents, [], _('hg parents [REV]')),
1906 "paths": (paths, [], _('hg paths [NAME]')),
1907 1907 "^pull":
1908 1908 (pull,
1909 [('u', 'update', None, 'update working directory'),
1910 ('e', 'ssh', "", 'ssh command'),
1911 ('', 'remotecmd', "", 'remote hg command')],
1912 'hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]'),
1909 [('u', 'update', None, _('update working directory')),
1910 ('e', 'ssh', "", _('ssh command')),
1911 ('', 'remotecmd', "", _('remote hg command'))],
1912 _('hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]')),
1913 1913 "^push":
1914 1914 (push,
1915 [('f', 'force', None, 'force push'),
1916 ('e', 'ssh', "", 'ssh command'),
1917 ('', 'remotecmd', "", 'remote hg command')],
1918 'hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]'),
1915 [('f', 'force', None, _('force push')),
1916 ('e', 'ssh', "", _('ssh command')),
1917 ('', 'remotecmd', "", _('remote hg command'))],
1918 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
1919 1919 "rawcommit":
1920 1920 (rawcommit,
1921 [('p', 'parent', [], 'parent'),
1922 ('d', 'date', "", 'date code'),
1923 ('u', 'user', "", 'user'),
1924 ('F', 'files', "", 'file list'),
1925 ('m', 'message', "", 'commit message'),
1926 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1927 ('l', 'logfile', "", 'commit message file')],
1928 'hg rawcommit [OPTION]... [FILE]...'),
1929 "recover": (recover, [], "hg recover"),
1921 [('p', 'parent', [], _('parent')),
1922 ('d', 'date', "", _('date code')),
1923 ('u', 'user', "", _('user')),
1924 ('F', 'files', "", _('file list')),
1925 ('m', 'message', "", _('commit message')),
1926 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1927 ('l', 'logfile', "", _('commit message file'))],
1928 _('hg rawcommit [OPTION]... [FILE]...')),
1929 "recover": (recover, [], _("hg recover")),
1930 1930 "^remove|rm": (remove,
1931 [('I', 'include', [], 'include path in search'),
1932 ('X', 'exclude', [], 'exclude path from search')],
1933 "hg remove [OPTION]... FILE..."),
1931 [('I', 'include', [], _('include path in search')),
1932 ('X', 'exclude', [], _('exclude path from search'))],
1933 _("hg remove [OPTION]... FILE...")),
1934 1934 "rename|mv": (rename,
1935 [('I', 'include', [], 'include path in search'),
1936 ('X', 'exclude', [], 'exclude path from search'),
1937 ('A', 'after', None, 'record a copy after it has happened'),
1938 ('f', 'force', None, 'replace destination if it exists'),
1939 ('p', 'parents', None, 'append source path to dest')],
1940 'hg rename [OPTION]... [SOURCE]... DEST'),
1935 [('I', 'include', [], _('include path in search')),
1936 ('X', 'exclude', [], _('exclude path from search')),
1937 ('A', 'after', None, _('record a copy after it has happened')),
1938 ('f', 'force', None, _('replace destination if it exists')),
1939 ('p', 'parents', None, _('append source path to dest'))],
1940 _('hg rename [OPTION]... [SOURCE]... DEST')),
1941 1941 "^revert":
1942 1942 (revert,
1943 [("n", "nonrecursive", None, "don't recurse into subdirs"),
1944 ("r", "rev", "", "revision")],
1945 "hg revert [-n] [-r REV] [NAME]..."),
1946 "root": (root, [], "hg root"),
1943 [("n", "nonrecursive", None, _("don't recurse into subdirs")),
1944 ("r", "rev", "", _("revision"))],
1945 _("hg revert [-n] [-r REV] [NAME]...")),
1946 "root": (root, [], _("hg root")),
1947 1947 "^serve":
1948 1948 (serve,
1949 [('A', 'accesslog', '', 'access log file'),
1950 ('E', 'errorlog', '', 'error log file'),
1951 ('p', 'port', 0, 'listen port'),
1952 ('a', 'address', '', 'interface address'),
1953 ('n', 'name', "", 'repository name'),
1954 ('', 'stdio', None, 'for remote clients'),
1955 ('t', 'templates', "", 'template directory'),
1956 ('', 'style', "", 'template style'),
1957 ('6', 'ipv6', None, 'use IPv6 in addition to IPv4')],
1958 "hg serve [OPTION]..."),
1949 [('A', 'accesslog', '', _('access log file')),
1950 ('E', 'errorlog', '', _('error log file')),
1951 ('p', 'port', 0, _('listen port')),
1952 ('a', 'address', '', _('interface address')),
1953 ('n', 'name', "", _('repository name')),
1954 ('', 'stdio', None, _('for remote clients')),
1955 ('t', 'templates', "", _('template directory')),
1956 ('', 'style', "", _('template style')),
1957 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
1958 _("hg serve [OPTION]...")),
1959 1959 "^status":
1960 1960 (status,
1961 [('m', 'modified', None, 'show only modified files'),
1962 ('a', 'added', None, 'show only added files'),
1963 ('r', 'removed', None, 'show only removed files'),
1964 ('u', 'unknown', None, 'show only unknown (not tracked) files'),
1965 ('n', 'no-status', None, 'hide status prefix'),
1966 ('0', 'print0', None, 'end filenames with NUL'),
1967 ('I', 'include', [], 'include path in search'),
1968 ('X', 'exclude', [], 'exclude path from search')],
1969 "hg status [OPTION]... [FILE]..."),
1961 [('m', 'modified', None, _('show only modified files')),
1962 ('a', 'added', None, _('show only added files')),
1963 ('r', 'removed', None, _('show only removed files')),
1964 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
1965 ('n', 'no-status', None, _('hide status prefix')),
1966 ('0', 'print0', None, _('end filenames with NUL')),
1967 ('I', 'include', [], _('include path in search')),
1968 ('X', 'exclude', [], _('exclude path from search'))],
1969 _("hg status [OPTION]... [FILE]...")),
1970 1970 "tag":
1971 1971 (tag,
1972 [('l', 'local', None, 'make the tag local'),
1973 ('m', 'message', "", 'commit message'),
1974 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1975 ('d', 'date', "", 'date code'),
1976 ('u', 'user', "", 'user')],
1977 'hg tag [OPTION]... NAME [REV]'),
1978 "tags": (tags, [], 'hg tags'),
1979 "tip": (tip, [], 'hg tip'),
1972 [('l', 'local', None, _('make the tag local')),
1973 ('m', 'message', "", _('commit message')),
1974 ('t', 'text', "", _('commit message (deprecated: use -m)')),
1975 ('d', 'date', "", _('date code')),
1976 ('u', 'user', "", _('user'))],
1977 _('hg tag [OPTION]... NAME [REV]')),
1978 "tags": (tags, [], _('hg tags')),
1979 "tip": (tip, [], _('hg tip')),
1980 1980 "unbundle":
1981 1981 (unbundle,
1982 1982 [],
1983 'hg unbundle FILE'),
1984 "undo": (undo, [], 'hg undo'),
1983 _('hg unbundle FILE')),
1984 "undo": (undo, [], _('hg undo')),
1985 1985 "^update|up|checkout|co":
1986 1986 (update,
1987 [('b', 'branch', "", 'checkout the head of a specific branch'),
1988 ('m', 'merge', None, 'allow merging of conflicts'),
1989 ('C', 'clean', None, 'overwrite locally modified files')],
1990 'hg update [-b TAG] [-m] [-C] [REV]'),
1991 "verify": (verify, [], 'hg verify'),
1992 "version": (show_version, [], 'hg version'),
1987 [('b', 'branch', "", _('checkout the head of a specific branch')),
1988 ('m', 'merge', None, _('allow merging of conflicts')),
1989 ('C', 'clean', None, _('overwrite locally modified files'))],
1990 _('hg update [-b TAG] [-m] [-C] [REV]')),
1991 "verify": (verify, [], _('hg verify')),
1992 "version": (show_version, [], _('hg version')),
1993 1993 }
1994 1994
1995 1995 globalopts = [
1996 ('R', 'repository', "", 'repository root directory'),
1997 ('', 'cwd', '', 'change working directory'),
1998 ('y', 'noninteractive', None, 'run non-interactively'),
1999 ('q', 'quiet', None, 'quiet mode'),
2000 ('v', 'verbose', None, 'verbose mode'),
2001 ('', 'debug', None, 'debug mode'),
2002 ('', 'debugger', None, 'start debugger'),
2003 ('', 'traceback', None, 'print traceback on exception'),
2004 ('', 'time', None, 'time how long the command takes'),
2005 ('', 'profile', None, 'profile'),
2006 ('', 'version', None, 'output version information and exit'),
2007 ('h', 'help', None, 'display help and exit'),
1996 ('R', 'repository', "", _('repository root directory')),
1997 ('', 'cwd', '', _('change working directory')),
1998 ('y', 'noninteractive', None, _('run non-interactively')),
1999 ('q', 'quiet', None, _('quiet mode')),
2000 ('v', 'verbose', None, _('verbose mode')),
2001 ('', 'debug', None, _('debug mode')),
2002 ('', 'debugger', None, _('start debugger')),
2003 ('', 'traceback', None, _('print traceback on exception')),
2004 ('', 'time', None, _('time how long the command takes')),
2005 ('', 'profile', None, _('profile')),
2006 ('', 'version', None, _('output version information and exit')),
2007 ('h', 'help', None, _('display help and exit')),
2008 2008 ]
2009 2009
2010 2010 norepo = ("clone init version help debugancestor debugconfig debugdata"
2011 2011 " debugindex debugindexdot paths")
2012 2012
2013 2013 def find(cmd):
2014 2014 for e in table.keys():
2015 2015 if re.match("(%s)$" % e, cmd):
2016 2016 return e, table[e]
2017 2017
2018 2018 raise UnknownCommand(cmd)
2019 2019
2020 2020 class SignalInterrupt(Exception):
2021 2021 """Exception raised on SIGTERM and SIGHUP."""
2022 2022
2023 2023 def catchterm(*args):
2024 2024 raise SignalInterrupt
2025 2025
2026 2026 def run():
2027 2027 sys.exit(dispatch(sys.argv[1:]))
2028 2028
2029 2029 class ParseError(Exception):
2030 2030 """Exception raised on errors in parsing the command line."""
2031 2031
2032 2032 def parse(args):
2033 2033 options = {}
2034 2034 cmdoptions = {}
2035 2035
2036 2036 try:
2037 2037 args = fancyopts.fancyopts(args, globalopts, options)
2038 2038 except fancyopts.getopt.GetoptError, inst:
2039 2039 raise ParseError(None, inst)
2040 2040
2041 2041 if args:
2042 2042 cmd, args = args[0], args[1:]
2043 2043 i = find(cmd)[1]
2044 2044 c = list(i[1])
2045 2045 else:
2046 2046 cmd = None
2047 2047 c = []
2048 2048
2049 2049 # combine global options into local
2050 2050 for o in globalopts:
2051 2051 c.append((o[0], o[1], options[o[1]], o[3]))
2052 2052
2053 2053 try:
2054 2054 args = fancyopts.fancyopts(args, c, cmdoptions)
2055 2055 except fancyopts.getopt.GetoptError, inst:
2056 2056 raise ParseError(cmd, inst)
2057 2057
2058 2058 # separate global options back out
2059 2059 for o in globalopts:
2060 2060 n = o[1]
2061 2061 options[n] = cmdoptions[n]
2062 2062 del cmdoptions[n]
2063 2063
2064 2064 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2065 2065
2066 2066 def dispatch(args):
2067 2067 signal.signal(signal.SIGTERM, catchterm)
2068 2068 try:
2069 2069 signal.signal(signal.SIGHUP, catchterm)
2070 2070 except AttributeError:
2071 2071 pass
2072 2072
2073 2073 u = ui.ui()
2074 2074 external = []
2075 2075 for x in u.extensions():
2076 2076 if x[1]:
2077 2077 try:
2078 2078 mod = imp.load_source(x[0], x[1])
2079 2079 except:
2080 u.warn("*** failed to import extension %s\n" % x[1])
2080 u.warn(_("*** failed to import extension %s\n") % x[1])
2081 2081 continue
2082 2082 else:
2083 2083 def importh(name):
2084 2084 mod = __import__(name)
2085 2085 components = name.split('.')
2086 2086 for comp in components[1:]:
2087 2087 mod = getattr(mod, comp)
2088 2088 return mod
2089 2089 try:
2090 2090 mod = importh(x[0])
2091 2091 except:
2092 u.warn("failed to import extension %s\n" % x[0])
2092 u.warn(_("failed to import extension %s\n") % x[0])
2093 2093 continue
2094 2094
2095 2095 external.append(mod)
2096 2096 for x in external:
2097 2097 cmdtable = getattr(x, 'cmdtable', {})
2098 2098 for t in cmdtable:
2099 2099 if t in table:
2100 u.warn("module %s overrides %s\n" % (x.__name__, t))
2100 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2101 2101 table.update(cmdtable)
2102 2102
2103 2103 try:
2104 2104 cmd, func, args, options, cmdoptions = parse(args)
2105 2105 except ParseError, inst:
2106 2106 if inst.args[0]:
2107 u.warn("hg %s: %s\n" % (inst.args[0], inst.args[1]))
2107 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2108 2108 help_(u, inst.args[0])
2109 2109 else:
2110 u.warn("hg: %s\n" % inst.args[1])
2110 u.warn(_("hg: %s\n") % inst.args[1])
2111 2111 help_(u, 'shortlist')
2112 2112 sys.exit(-1)
2113 2113 except UnknownCommand, inst:
2114 u.warn("hg: unknown command '%s'\n" % inst.args[0])
2114 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2115 2115 help_(u, 'shortlist')
2116 2116 sys.exit(1)
2117 2117
2118 2118 if options["time"]:
2119 2119 def get_times():
2120 2120 t = os.times()
2121 2121 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2122 2122 t = (t[0], t[1], t[2], t[3], time.clock())
2123 2123 return t
2124 2124 s = get_times()
2125 2125 def print_time():
2126 2126 t = get_times()
2127 u.warn("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n" %
2127 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2128 2128 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2129 2129 atexit.register(print_time)
2130 2130
2131 2131 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2132 2132 not options["noninteractive"])
2133 2133
2134 2134 # enter the debugger before command execution
2135 2135 if options['debugger']:
2136 2136 pdb.set_trace()
2137 2137
2138 2138 try:
2139 2139 try:
2140 2140 if options['help']:
2141 2141 help_(u, cmd, options['version'])
2142 2142 sys.exit(0)
2143 2143 elif options['version']:
2144 2144 show_version(u)
2145 2145 sys.exit(0)
2146 2146 elif not cmd:
2147 2147 help_(u, 'shortlist')
2148 2148 sys.exit(0)
2149 2149
2150 2150 if options['cwd']:
2151 2151 try:
2152 2152 os.chdir(options['cwd'])
2153 2153 except OSError, inst:
2154 2154 raise util.Abort('%s: %s' %
2155 2155 (options['cwd'], inst.strerror))
2156 2156
2157 2157 if cmd not in norepo.split():
2158 2158 path = options["repository"] or ""
2159 2159 repo = hg.repository(ui=u, path=path)
2160 2160 for x in external:
2161 2161 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2162 2162 d = lambda: func(u, repo, *args, **cmdoptions)
2163 2163 else:
2164 2164 d = lambda: func(u, *args, **cmdoptions)
2165 2165
2166 2166 if options['profile']:
2167 2167 import hotshot, hotshot.stats
2168 2168 prof = hotshot.Profile("hg.prof")
2169 2169 r = prof.runcall(d)
2170 2170 prof.close()
2171 2171 stats = hotshot.stats.load("hg.prof")
2172 2172 stats.strip_dirs()
2173 2173 stats.sort_stats('time', 'calls')
2174 2174 stats.print_stats(40)
2175 2175 return r
2176 2176 else:
2177 2177 return d()
2178 2178 except:
2179 2179 # enter the debugger when we hit an exception
2180 2180 if options['debugger']:
2181 2181 pdb.post_mortem(sys.exc_info()[2])
2182 2182 if options['traceback']:
2183 2183 traceback.print_exc()
2184 2184 raise
2185 2185 except hg.RepoError, inst:
2186 u.warn("abort: ", inst, "!\n")
2186 u.warn(_("abort: "), inst, "!\n")
2187 2187 except revlog.RevlogError, inst:
2188 u.warn("abort: ", inst, "!\n")
2188 u.warn(_("abort: "), inst, "!\n")
2189 2189 except SignalInterrupt:
2190 u.warn("killed!\n")
2190 u.warn(_("killed!\n"))
2191 2191 except KeyboardInterrupt:
2192 2192 try:
2193 u.warn("interrupted!\n")
2193 u.warn(_("interrupted!\n"))
2194 2194 except IOError, inst:
2195 2195 if inst.errno == errno.EPIPE:
2196 2196 if u.debugflag:
2197 u.warn("\nbroken pipe\n")
2197 u.warn(_("\nbroken pipe\n"))
2198 2198 else:
2199 2199 raise
2200 2200 except IOError, inst:
2201 2201 if hasattr(inst, "code"):
2202 u.warn("abort: %s\n" % inst)
2202 u.warn(_("abort: %s\n") % inst)
2203 2203 elif hasattr(inst, "reason"):
2204 u.warn("abort: error: %s\n" % inst.reason[1])
2204 u.warn(_("abort: error: %s\n") % inst.reason[1])
2205 2205 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2206 2206 if u.debugflag:
2207 u.warn("broken pipe\n")
2207 u.warn(_("broken pipe\n"))
2208 2208 elif getattr(inst, "strerror", None):
2209 2209 if getattr(inst, "filename", None):
2210 u.warn("abort: %s - %s\n" % (inst.strerror, inst.filename))
2210 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2211 2211 else:
2212 u.warn("abort: %s\n" % inst.strerror)
2212 u.warn(_("abort: %s\n") % inst.strerror)
2213 2213 else:
2214 2214 raise
2215 2215 except OSError, inst:
2216 2216 if hasattr(inst, "filename"):
2217 u.warn("abort: %s: %s\n" % (inst.strerror, inst.filename))
2217 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2218 2218 else:
2219 u.warn("abort: %s\n" % inst.strerror)
2219 u.warn(_("abort: %s\n") % inst.strerror)
2220 2220 except util.Abort, inst:
2221 u.warn('abort: ', inst.args[0] % inst.args[1:], '\n')
2221 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2222 2222 sys.exit(1)
2223 2223 except TypeError, inst:
2224 2224 # was this an argument error?
2225 2225 tb = traceback.extract_tb(sys.exc_info()[2])
2226 2226 if len(tb) > 2: # no
2227 2227 raise
2228 2228 u.debug(inst, "\n")
2229 u.warn("%s: invalid arguments\n" % cmd)
2229 u.warn(_("%s: invalid arguments\n") % cmd)
2230 2230 help_(u, cmd)
2231 2231 except UnknownCommand, inst:
2232 u.warn("hg: unknown command '%s'\n" % inst.args[0])
2232 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2233 2233 help_(u, 'shortlist')
2234 2234 except SystemExit:
2235 2235 # don't catch this in the catch-all below
2236 2236 raise
2237 2237 except:
2238 u.warn("** unknown exception encountered, details follow\n")
2239 u.warn("** report bug details to mercurial@selenic.com\n")
2238 u.warn(_("** unknown exception encountered, details follow\n"))
2239 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2240 2240 raise
2241 2241
2242 2242 sys.exit(-1)
@@ -1,426 +1,426 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 import struct, os
11 11 from node import *
12 12 from i18n import gettext as _
13 13 from demandload import *
14 14 demandload(globals(), "time bisect stat util re")
15 15
16 16 class dirstate:
17 17 def __init__(self, opener, ui, root):
18 18 self.opener = opener
19 19 self.root = root
20 20 self.dirty = 0
21 21 self.ui = ui
22 22 self.map = None
23 23 self.pl = None
24 24 self.copies = {}
25 25 self.ignorefunc = None
26 26 self.blockignore = False
27 27
28 28 def wjoin(self, f):
29 29 return os.path.join(self.root, f)
30 30
31 31 def getcwd(self):
32 32 cwd = os.getcwd()
33 33 if cwd == self.root: return ''
34 34 return cwd[len(self.root) + 1:]
35 35
36 36 def hgignore(self):
37 37 '''return the contents of .hgignore as a list of patterns.
38 38
39 39 trailing white space is dropped.
40 40 the escape character is backslash.
41 41 comments start with #.
42 42 empty lines are skipped.
43 43
44 44 lines can be of the following formats:
45 45
46 46 syntax: regexp # defaults following lines to non-rooted regexps
47 47 syntax: glob # defaults following lines to non-rooted globs
48 48 re:pattern # non-rooted regular expression
49 49 glob:pattern # non-rooted glob
50 50 pattern # pattern of the current default type'''
51 51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
52 52 def parselines(fp):
53 53 for line in fp:
54 54 escape = False
55 55 for i in xrange(len(line)):
56 56 if escape: escape = False
57 57 elif line[i] == '\\': escape = True
58 58 elif line[i] == '#': break
59 59 line = line[:i].rstrip()
60 60 if line: yield line
61 61 pats = []
62 62 try:
63 63 fp = open(self.wjoin('.hgignore'))
64 64 syntax = 'relre:'
65 65 for line in parselines(fp):
66 66 if line.startswith('syntax:'):
67 67 s = line[7:].strip()
68 68 try:
69 69 syntax = syntaxes[s]
70 70 except KeyError:
71 self.ui.warn("ignoring invalid syntax '%s'\n" % s)
71 self.ui.warn(_("ignoring invalid syntax '%s'\n") % s)
72 72 continue
73 73 pat = syntax + line
74 74 for s in syntaxes.values():
75 75 if line.startswith(s):
76 76 pat = line
77 77 break
78 78 pats.append(pat)
79 79 except IOError: pass
80 80 return pats
81 81
82 82 def ignore(self, fn):
83 83 '''default match function used by dirstate and localrepository.
84 84 this honours the .hgignore file, and nothing more.'''
85 85 if self.blockignore:
86 86 return False
87 87 if not self.ignorefunc:
88 88 ignore = self.hgignore()
89 89 if ignore:
90 90 files, self.ignorefunc, anypats = util.matcher(self.root,
91 91 inc=ignore)
92 92 else:
93 93 self.ignorefunc = util.never
94 94 return self.ignorefunc(fn)
95 95
96 96 def __del__(self):
97 97 if self.dirty:
98 98 self.write()
99 99
100 100 def __getitem__(self, key):
101 101 try:
102 102 return self.map[key]
103 103 except TypeError:
104 104 self.read()
105 105 return self[key]
106 106
107 107 def __contains__(self, key):
108 108 if not self.map: self.read()
109 109 return key in self.map
110 110
111 111 def parents(self):
112 112 if not self.pl:
113 113 self.read()
114 114 return self.pl
115 115
116 116 def markdirty(self):
117 117 if not self.dirty:
118 118 self.dirty = 1
119 119
120 120 def setparents(self, p1, p2=nullid):
121 121 if not self.pl:
122 122 self.read()
123 123 self.markdirty()
124 124 self.pl = p1, p2
125 125
126 126 def state(self, key):
127 127 try:
128 128 return self[key][0]
129 129 except KeyError:
130 130 return "?"
131 131
132 132 def read(self):
133 133 if self.map is not None: return self.map
134 134
135 135 self.map = {}
136 136 self.pl = [nullid, nullid]
137 137 try:
138 138 st = self.opener("dirstate").read()
139 139 if not st: return
140 140 except: return
141 141
142 142 self.pl = [st[:20], st[20: 40]]
143 143
144 144 pos = 40
145 145 while pos < len(st):
146 146 e = struct.unpack(">cllll", st[pos:pos+17])
147 147 l = e[4]
148 148 pos += 17
149 149 f = st[pos:pos + l]
150 150 if '\0' in f:
151 151 f, c = f.split('\0')
152 152 self.copies[f] = c
153 153 self.map[f] = e[:4]
154 154 pos += l
155 155
156 156 def copy(self, source, dest):
157 157 self.read()
158 158 self.markdirty()
159 159 self.copies[dest] = source
160 160
161 161 def copied(self, file):
162 162 return self.copies.get(file, None)
163 163
164 164 def update(self, files, state, **kw):
165 165 ''' current states:
166 166 n normal
167 167 m needs merging
168 168 r marked for removal
169 169 a marked for addition'''
170 170
171 171 if not files: return
172 172 self.read()
173 173 self.markdirty()
174 174 for f in files:
175 175 if state == "r":
176 176 self.map[f] = ('r', 0, 0, 0)
177 177 else:
178 178 s = os.lstat(os.path.join(self.root, f))
179 179 st_size = kw.get('st_size', s.st_size)
180 180 st_mtime = kw.get('st_mtime', s.st_mtime)
181 181 self.map[f] = (state, s.st_mode, st_size, st_mtime)
182 182 if self.copies.has_key(f):
183 183 del self.copies[f]
184 184
185 185 def forget(self, files):
186 186 if not files: return
187 187 self.read()
188 188 self.markdirty()
189 189 for f in files:
190 190 try:
191 191 del self.map[f]
192 192 except KeyError:
193 self.ui.warn("not in dirstate: %s!\n" % f)
193 self.ui.warn(_("not in dirstate: %s!\n") % f)
194 194 pass
195 195
196 196 def clear(self):
197 197 self.map = {}
198 198 self.markdirty()
199 199
200 200 def write(self):
201 201 st = self.opener("dirstate", "w")
202 202 st.write("".join(self.pl))
203 203 for f, e in self.map.items():
204 204 c = self.copied(f)
205 205 if c:
206 206 f = f + "\0" + c
207 207 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
208 208 st.write(e + f)
209 209 self.dirty = 0
210 210
211 211 def filterfiles(self, files):
212 212 ret = {}
213 213 unknown = []
214 214
215 215 for x in files:
216 216 if x is '.':
217 217 return self.map.copy()
218 218 if x not in self.map:
219 219 unknown.append(x)
220 220 else:
221 221 ret[x] = self.map[x]
222 222
223 223 if not unknown:
224 224 return ret
225 225
226 226 b = self.map.keys()
227 227 b.sort()
228 228 blen = len(b)
229 229
230 230 for x in unknown:
231 231 bs = bisect.bisect(b, x)
232 232 if bs != 0 and b[bs-1] == x:
233 233 ret[x] = self.map[x]
234 234 continue
235 235 while bs < blen:
236 236 s = b[bs]
237 237 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
238 238 ret[s] = self.map[s]
239 239 else:
240 240 break
241 241 bs += 1
242 242 return ret
243 243
244 244 def walk(self, files=None, match=util.always, dc=None):
245 245 self.read()
246 246
247 247 # walk all files by default
248 248 if not files:
249 249 files = [self.root]
250 250 if not dc:
251 251 dc = self.map.copy()
252 252 elif not dc:
253 253 dc = self.filterfiles(files)
254 254
255 255 def statmatch(file, stat):
256 256 file = util.pconvert(file)
257 257 if file not in dc and self.ignore(file):
258 258 return False
259 259 return match(file)
260 260
261 261 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
262 262
263 263 # walk recursively through the directory tree, finding all files
264 264 # matched by the statmatch function
265 265 #
266 266 # results are yielded in a tuple (src, filename), where src is one of:
267 267 # 'f' the file was found in the directory tree
268 268 # 'm' the file was only in the dirstate and not in the tree
269 269 #
270 270 # dc is an optional arg for the current dirstate. dc is not modified
271 271 # directly by this function, but might be modified by your statmatch call.
272 272 #
273 273 def walkhelper(self, files, statmatch, dc):
274 274 def supported_type(f, st):
275 275 if stat.S_ISREG(st.st_mode):
276 276 return True
277 277 else:
278 278 kind = 'unknown'
279 if stat.S_ISCHR(st.st_mode): kind = 'character device'
280 elif stat.S_ISBLK(st.st_mode): kind = 'block device'
281 elif stat.S_ISFIFO(st.st_mode): kind = 'fifo'
282 elif stat.S_ISLNK(st.st_mode): kind = 'symbolic link'
283 elif stat.S_ISSOCK(st.st_mode): kind = 'socket'
284 elif stat.S_ISDIR(st.st_mode): kind = 'directory'
285 self.ui.warn('%s: unsupported file type (type is %s)\n' % (
279 if stat.S_ISCHR(st.st_mode): kind = _('character device')
280 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
281 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
282 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
283 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
284 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
285 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
286 286 util.pathto(self.getcwd(), f),
287 287 kind))
288 288 return False
289 289
290 290 # recursion free walker, faster than os.walk.
291 291 def findfiles(s):
292 292 retfiles = []
293 293 work = [s]
294 294 while work:
295 295 top = work.pop()
296 296 names = os.listdir(top)
297 297 names.sort()
298 298 # nd is the top of the repository dir tree
299 299 nd = util.normpath(top[len(self.root) + 1:])
300 300 if nd == '.': nd = ''
301 301 for f in names:
302 302 np = os.path.join(nd, f)
303 303 if seen(np):
304 304 continue
305 305 p = os.path.join(top, f)
306 306 # don't trip over symlinks
307 307 st = os.lstat(p)
308 308 if stat.S_ISDIR(st.st_mode):
309 309 ds = os.path.join(nd, f +'/')
310 310 if statmatch(ds, st):
311 311 work.append(p)
312 312 elif statmatch(np, st) and supported_type(np, st):
313 313 yield util.pconvert(np)
314 314
315 315
316 316 known = {'.hg': 1}
317 317 def seen(fn):
318 318 if fn in known: return True
319 319 known[fn] = 1
320 320
321 321 # step one, find all files that match our criteria
322 322 files.sort()
323 323 for ff in util.unique(files):
324 324 f = os.path.join(self.root, ff)
325 325 try:
326 326 st = os.lstat(f)
327 327 except OSError, inst:
328 328 if ff not in dc: self.ui.warn('%s: %s\n' % (
329 329 util.pathto(self.getcwd(), ff),
330 330 inst.strerror))
331 331 continue
332 332 if stat.S_ISDIR(st.st_mode):
333 333 sorted = [ x for x in findfiles(f) ]
334 334 sorted.sort()
335 335 for fl in sorted:
336 336 yield 'f', fl
337 337 else:
338 338 ff = util.normpath(ff)
339 339 if seen(ff):
340 340 continue
341 341 found = False
342 342 self.blockignore = True
343 343 if statmatch(ff, st) and supported_type(ff, st):
344 344 found = True
345 345 self.blockignore = False
346 346 if found:
347 347 yield 'f', ff
348 348
349 349 # step two run through anything left in the dc hash and yield
350 350 # if we haven't already seen it
351 351 ks = dc.keys()
352 352 ks.sort()
353 353 for k in ks:
354 354 if not seen(k) and (statmatch(k, None)):
355 355 yield 'm', k
356 356
357 357 def changes(self, files=None, match=util.always):
358 358 self.read()
359 359 if not files:
360 360 files = [self.root]
361 361 dc = self.map.copy()
362 362 else:
363 363 dc = self.filterfiles(files)
364 364 lookup, modified, added, unknown = [], [], [], []
365 365 removed, deleted = [], []
366 366
367 367 # statmatch function to eliminate entries from the dirstate copy
368 368 # and put files into the appropriate array. This gets passed
369 369 # to the walking code
370 370 def statmatch(fn, s):
371 371 fn = util.pconvert(fn)
372 372 def checkappend(l, fn):
373 373 if match is util.always or match(fn):
374 374 l.append(fn)
375 375
376 376 if not s or stat.S_ISDIR(s.st_mode):
377 377 if self.ignore(fn): return False
378 378 return match(fn)
379 379
380 380 c = dc.pop(fn, None)
381 381 if c:
382 382 type, mode, size, time = c
383 383 # check the common case first
384 384 if type == 'n':
385 385 if size != s.st_size or (mode ^ s.st_mode) & 0100:
386 386 checkappend(modified, fn)
387 387 elif time != s.st_mtime:
388 388 checkappend(lookup, fn)
389 389 elif type == 'm':
390 390 checkappend(modified, fn)
391 391 elif type == 'a':
392 392 checkappend(added, fn)
393 393 elif type == 'r':
394 394 checkappend(unknown, fn)
395 395 elif not self.ignore(fn) and match(fn):
396 396 unknown.append(fn)
397 397 # return false because we've already handled all cases above.
398 398 # there's no need for the walking code to process the file
399 399 # any further.
400 400 return False
401 401
402 402 # because our statmatch always returns false, self.walk will only
403 403 # return files in the dirstate map that are not present in the FS.
404 404 # But, we still need to iterate through the results to force the
405 405 # walk to complete
406 406 for src, fn in self.walkhelper(files, statmatch, dc):
407 407 pass
408 408
409 409 # there may be patterns in the .hgignore file that prevent us
410 410 # from examining entire directories in the dirstate map, so we
411 411 # go back and explicitly examine any matching files we've
412 412 # ignored
413 413 unexamined = [fn for fn in dc.iterkeys()
414 414 if self.ignore(fn) and match(fn)]
415 415
416 416 for src, fn in self.walkhelper(unexamined, statmatch, dc):
417 417 pass
418 418
419 419 # anything left in dc didn't exist in the filesystem
420 420 for fn, c in dc.iteritems():
421 421 if not match(fn): continue
422 422 if c[0] == 'r':
423 423 removed.append(fn)
424 424 else:
425 425 deleted.append(fn)
426 426 return (lookup, modified, added, removed + deleted, unknown)
@@ -1,987 +1,987 b''
1 1 # hgweb.py - web interface to a mercurial repository
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os, cgi, sys
10 10 from demandload import demandload
11 11 demandload(globals(), "mdiff time re socket zlib errno ui hg ConfigParser")
12 12 demandload(globals(), "zipfile tempfile StringIO tarfile BaseHTTPServer util")
13 13 from node import *
14 14 from i18n import gettext as _
15 15
16 16 def templatepath():
17 17 for f in "templates", "../templates":
18 18 p = os.path.join(os.path.dirname(__file__), f)
19 19 if os.path.isdir(p):
20 20 return p
21 21
22 22 def age(x):
23 23 def plural(t, c):
24 24 if c == 1:
25 25 return t
26 26 return t + "s"
27 27 def fmt(t, c):
28 28 return "%d %s" % (c, plural(t, c))
29 29
30 30 now = time.time()
31 31 then = x[0]
32 32 delta = max(1, int(now - then))
33 33
34 34 scales = [["second", 1],
35 35 ["minute", 60],
36 36 ["hour", 3600],
37 37 ["day", 3600 * 24],
38 38 ["week", 3600 * 24 * 7],
39 39 ["month", 3600 * 24 * 30],
40 40 ["year", 3600 * 24 * 365]]
41 41
42 42 scales.reverse()
43 43
44 44 for t, s in scales:
45 45 n = delta / s
46 46 if n >= 2 or s == 1:
47 47 return fmt(t, n)
48 48
49 49 def nl2br(text):
50 50 return text.replace('\n', '<br/>\n')
51 51
52 52 def obfuscate(text):
53 53 return ''.join(['&#%d;' % ord(c) for c in text])
54 54
55 55 def up(p):
56 56 if p[0] != "/":
57 57 p = "/" + p
58 58 if p[-1] == "/":
59 59 p = p[:-1]
60 60 up = os.path.dirname(p)
61 61 if up == "/":
62 62 return "/"
63 63 return up + "/"
64 64
65 65 class hgrequest:
66 66 def __init__(self, inp=None, out=None, env=None):
67 67 self.inp = inp or sys.stdin
68 68 self.out = out or sys.stdout
69 69 self.env = env or os.environ
70 70 self.form = cgi.parse(self.inp, self.env)
71 71
72 72 def write(self, *things):
73 73 for thing in things:
74 74 if hasattr(thing, "__iter__"):
75 75 for part in thing:
76 76 self.write(part)
77 77 else:
78 78 try:
79 79 self.out.write(str(thing))
80 80 except socket.error, inst:
81 81 if inst[0] != errno.ECONNRESET:
82 82 raise
83 83
84 84 def header(self, headers=[('Content-type','text/html')]):
85 85 for header in headers:
86 86 self.out.write("%s: %s\r\n" % header)
87 87 self.out.write("\r\n")
88 88
89 89 def httphdr(self, type, file="", size=0):
90 90
91 91 headers = [('Content-type', type)]
92 92 if file:
93 93 headers.append(('Content-disposition', 'attachment; filename=%s' % file))
94 94 if size > 0:
95 95 headers.append(('Content-length', str(size)))
96 96 self.header(headers)
97 97
98 98 class templater:
99 99 def __init__(self, mapfile, filters={}, defaults={}):
100 100 self.cache = {}
101 101 self.map = {}
102 102 self.base = os.path.dirname(mapfile)
103 103 self.filters = filters
104 104 self.defaults = defaults
105 105
106 106 for l in file(mapfile):
107 107 m = re.match(r'(\S+)\s*=\s*"(.*)"$', l)
108 108 if m:
109 109 self.cache[m.group(1)] = m.group(2)
110 110 else:
111 111 m = re.match(r'(\S+)\s*=\s*(\S+)', l)
112 112 if m:
113 113 self.map[m.group(1)] = os.path.join(self.base, m.group(2))
114 114 else:
115 raise LookupError("unknown map entry '%s'" % l)
115 raise LookupError(_("unknown map entry '%s'") % l)
116 116
117 117 def __call__(self, t, **map):
118 118 m = self.defaults.copy()
119 119 m.update(map)
120 120 try:
121 121 tmpl = self.cache[t]
122 122 except KeyError:
123 123 tmpl = self.cache[t] = file(self.map[t]).read()
124 124 return self.template(tmpl, self.filters, **m)
125 125
126 126 def template(self, tmpl, filters={}, **map):
127 127 while tmpl:
128 128 m = re.search(r"#([a-zA-Z0-9]+)((%[a-zA-Z0-9]+)*)((\|[a-zA-Z0-9]+)*)#", tmpl)
129 129 if m:
130 130 yield tmpl[:m.start(0)]
131 131 v = map.get(m.group(1), "")
132 132 v = callable(v) and v(**map) or v
133 133
134 134 format = m.group(2)
135 135 fl = m.group(4)
136 136
137 137 if format:
138 138 q = v.__iter__
139 139 for i in q():
140 140 lm = map.copy()
141 141 lm.update(i)
142 142 yield self(format[1:], **lm)
143 143
144 144 v = ""
145 145
146 146 elif fl:
147 147 for f in fl.split("|")[1:]:
148 148 v = filters[f](v)
149 149
150 150 yield v
151 151 tmpl = tmpl[m.end(0):]
152 152 else:
153 153 yield tmpl
154 154 return
155 155
156 156 common_filters = {
157 157 "escape": cgi.escape,
158 158 "age": age,
159 159 "date": lambda x: util.datestr(x),
160 160 "addbreaks": nl2br,
161 161 "obfuscate": obfuscate,
162 162 "short": (lambda x: x[:12]),
163 163 "firstline": (lambda x: x.splitlines(1)[0]),
164 164 "permissions": (lambda x: x and "-rwxr-xr-x" or "-rw-r--r--"),
165 165 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
166 166 }
167 167
168 168 class hgweb:
169 169 def __init__(self, repo, name=None):
170 170 if type(repo) == type(""):
171 171 self.repo = hg.repository(ui.ui(), repo)
172 172 else:
173 173 self.repo = repo
174 174
175 175 self.mtime = -1
176 176 self.reponame = name
177 177 self.archives = 'zip', 'gz', 'bz2'
178 178
179 179 def refresh(self):
180 180 s = os.stat(os.path.join(self.repo.root, ".hg", "00changelog.i"))
181 181 if s.st_mtime != self.mtime:
182 182 self.mtime = s.st_mtime
183 183 self.repo = hg.repository(self.repo.ui, self.repo.root)
184 184 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
185 185 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
186 186 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
187 187
188 188 def listfiles(self, files, mf):
189 189 for f in files[:self.maxfiles]:
190 190 yield self.t("filenodelink", node=hex(mf[f]), file=f)
191 191 if len(files) > self.maxfiles:
192 192 yield self.t("fileellipses")
193 193
194 194 def listfilediffs(self, files, changeset):
195 195 for f in files[:self.maxfiles]:
196 196 yield self.t("filedifflink", node=hex(changeset), file=f)
197 197 if len(files) > self.maxfiles:
198 198 yield self.t("fileellipses")
199 199
200 200 def parents(self, t1, nodes=[], rev=None,**args):
201 201 if not rev:
202 202 rev = lambda x: ""
203 203 for node in nodes:
204 204 if node != nullid:
205 205 yield self.t(t1, node=hex(node), rev=rev(node), **args)
206 206
207 207 def showtag(self, t1, node=nullid, **args):
208 208 for t in self.repo.nodetags(node):
209 209 yield self.t(t1, tag=t, **args)
210 210
211 211 def diff(self, node1, node2, files):
212 212 def filterfiles(list, files):
213 213 l = [x for x in list if x in files]
214 214
215 215 for f in files:
216 216 if f[-1] != os.sep:
217 217 f += os.sep
218 218 l += [x for x in list if x.startswith(f)]
219 219 return l
220 220
221 221 parity = [0]
222 222 def diffblock(diff, f, fn):
223 223 yield self.t("diffblock",
224 224 lines=prettyprintlines(diff),
225 225 parity=parity[0],
226 226 file=f,
227 227 filenode=hex(fn or nullid))
228 228 parity[0] = 1 - parity[0]
229 229
230 230 def prettyprintlines(diff):
231 231 for l in diff.splitlines(1):
232 232 if l.startswith('+'):
233 233 yield self.t("difflineplus", line=l)
234 234 elif l.startswith('-'):
235 235 yield self.t("difflineminus", line=l)
236 236 elif l.startswith('@'):
237 237 yield self.t("difflineat", line=l)
238 238 else:
239 239 yield self.t("diffline", line=l)
240 240
241 241 r = self.repo
242 242 cl = r.changelog
243 243 mf = r.manifest
244 244 change1 = cl.read(node1)
245 245 change2 = cl.read(node2)
246 246 mmap1 = mf.read(change1[0])
247 247 mmap2 = mf.read(change2[0])
248 248 date1 = util.datestr(change1[2])
249 249 date2 = util.datestr(change2[2])
250 250
251 251 c, a, d, u = r.changes(node1, node2)
252 252 if files:
253 253 c, a, d = map(lambda x: filterfiles(x, files), (c, a, d))
254 254
255 255 for f in c:
256 256 to = r.file(f).read(mmap1[f])
257 257 tn = r.file(f).read(mmap2[f])
258 258 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
259 259 for f in a:
260 260 to = None
261 261 tn = r.file(f).read(mmap2[f])
262 262 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
263 263 for f in d:
264 264 to = r.file(f).read(mmap1[f])
265 265 tn = None
266 266 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f), f, tn)
267 267
268 268 def changelog(self, pos):
269 269 def changenav(**map):
270 270 def seq(factor=1):
271 271 yield 1 * factor
272 272 yield 3 * factor
273 273 #yield 5 * factor
274 274 for f in seq(factor * 10):
275 275 yield f
276 276
277 277 l = []
278 278 for f in seq():
279 279 if f < self.maxchanges / 2:
280 280 continue
281 281 if f > count:
282 282 break
283 283 r = "%d" % f
284 284 if pos + f < count:
285 285 l.append(("+" + r, pos + f))
286 286 if pos - f >= 0:
287 287 l.insert(0, ("-" + r, pos - f))
288 288
289 289 yield {"rev": 0, "label": "(0)"}
290 290
291 291 for label, rev in l:
292 292 yield {"label": label, "rev": rev}
293 293
294 294 yield {"label": "tip", "rev": ""}
295 295
296 296 def changelist(**map):
297 297 parity = (start - end) & 1
298 298 cl = self.repo.changelog
299 299 l = [] # build a list in forward order for efficiency
300 300 for i in range(start, end):
301 301 n = cl.node(i)
302 302 changes = cl.read(n)
303 303 hn = hex(n)
304 304
305 305 l.insert(0, {"parity": parity,
306 306 "author": changes[1],
307 307 "parent": self.parents("changelogparent",
308 308 cl.parents(n), cl.rev),
309 309 "changelogtag": self.showtag("changelogtag",n),
310 310 "manifest": hex(changes[0]),
311 311 "desc": changes[4],
312 312 "date": changes[2],
313 313 "files": self.listfilediffs(changes[3], n),
314 314 "rev": i,
315 315 "node": hn})
316 316 parity = 1 - parity
317 317
318 318 for e in l:
319 319 yield e
320 320
321 321 cl = self.repo.changelog
322 322 mf = cl.read(cl.tip())[0]
323 323 count = cl.count()
324 324 start = max(0, pos - self.maxchanges + 1)
325 325 end = min(count, start + self.maxchanges)
326 326 pos = end - 1
327 327
328 328 yield self.t('changelog',
329 329 changenav=changenav,
330 330 manifest=hex(mf),
331 331 rev=pos, changesets=count, entries=changelist)
332 332
333 333 def search(self, query):
334 334
335 335 def changelist(**map):
336 336 cl = self.repo.changelog
337 337 count = 0
338 338 qw = query.lower().split()
339 339
340 340 def revgen():
341 341 for i in range(cl.count() - 1, 0, -100):
342 342 l = []
343 343 for j in range(max(0, i - 100), i):
344 344 n = cl.node(j)
345 345 changes = cl.read(n)
346 346 l.append((n, j, changes))
347 347 l.reverse()
348 348 for e in l:
349 349 yield e
350 350
351 351 for n, i, changes in revgen():
352 352 miss = 0
353 353 for q in qw:
354 354 if not (q in changes[1].lower() or
355 355 q in changes[4].lower() or
356 356 q in " ".join(changes[3][:20]).lower()):
357 357 miss = 1
358 358 break
359 359 if miss:
360 360 continue
361 361
362 362 count += 1
363 363 hn = hex(n)
364 364
365 365 yield self.t('searchentry',
366 366 parity=count & 1,
367 367 author=changes[1],
368 368 parent=self.parents("changelogparent",
369 369 cl.parents(n), cl.rev),
370 370 changelogtag=self.showtag("changelogtag",n),
371 371 manifest=hex(changes[0]),
372 372 desc=changes[4],
373 373 date=changes[2],
374 374 files=self.listfilediffs(changes[3], n),
375 375 rev=i,
376 376 node=hn)
377 377
378 378 if count >= self.maxchanges:
379 379 break
380 380
381 381 cl = self.repo.changelog
382 382 mf = cl.read(cl.tip())[0]
383 383
384 384 yield self.t('search',
385 385 query=query,
386 386 manifest=hex(mf),
387 387 entries=changelist)
388 388
389 389 def changeset(self, nodeid):
390 390 cl = self.repo.changelog
391 391 n = self.repo.lookup(nodeid)
392 392 nodeid = hex(n)
393 393 changes = cl.read(n)
394 394 p1 = cl.parents(n)[0]
395 395
396 396 files = []
397 397 mf = self.repo.manifest.read(changes[0])
398 398 for f in changes[3]:
399 399 files.append(self.t("filenodelink",
400 400 filenode=hex(mf.get(f, nullid)), file=f))
401 401
402 402 def diff(**map):
403 403 yield self.diff(p1, n, None)
404 404
405 405 def archivelist():
406 406 for i in self.archives:
407 407 if self.repo.ui.configbool("web", "allow" + i, False):
408 408 yield {"type" : i, "node" : nodeid}
409 409
410 410 yield self.t('changeset',
411 411 diff=diff,
412 412 rev=cl.rev(n),
413 413 node=nodeid,
414 414 parent=self.parents("changesetparent",
415 415 cl.parents(n), cl.rev),
416 416 changesettag=self.showtag("changesettag",n),
417 417 manifest=hex(changes[0]),
418 418 author=changes[1],
419 419 desc=changes[4],
420 420 date=changes[2],
421 421 files=files,
422 422 archives=archivelist())
423 423
424 424 def filelog(self, f, filenode):
425 425 cl = self.repo.changelog
426 426 fl = self.repo.file(f)
427 427 filenode = hex(fl.lookup(filenode))
428 428 count = fl.count()
429 429
430 430 def entries(**map):
431 431 l = []
432 432 parity = (count - 1) & 1
433 433
434 434 for i in range(count):
435 435 n = fl.node(i)
436 436 lr = fl.linkrev(n)
437 437 cn = cl.node(lr)
438 438 cs = cl.read(cl.node(lr))
439 439
440 440 l.insert(0, {"parity": parity,
441 441 "filenode": hex(n),
442 442 "filerev": i,
443 443 "file": f,
444 444 "node": hex(cn),
445 445 "author": cs[1],
446 446 "date": cs[2],
447 447 "parent": self.parents("filelogparent",
448 448 fl.parents(n),
449 449 fl.rev, file=f),
450 450 "desc": cs[4]})
451 451 parity = 1 - parity
452 452
453 453 for e in l:
454 454 yield e
455 455
456 456 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
457 457
458 458 def filerevision(self, f, node):
459 459 fl = self.repo.file(f)
460 460 n = fl.lookup(node)
461 461 node = hex(n)
462 462 text = fl.read(n)
463 463 changerev = fl.linkrev(n)
464 464 cl = self.repo.changelog
465 465 cn = cl.node(changerev)
466 466 cs = cl.read(cn)
467 467 mfn = cs[0]
468 468
469 469 def lines():
470 470 for l, t in enumerate(text.splitlines(1)):
471 471 yield {"line": t,
472 472 "linenumber": "% 6d" % (l + 1),
473 473 "parity": l & 1}
474 474
475 475 yield self.t("filerevision",
476 476 file=f,
477 477 filenode=node,
478 478 path=up(f),
479 479 text=lines(),
480 480 rev=changerev,
481 481 node=hex(cn),
482 482 manifest=hex(mfn),
483 483 author=cs[1],
484 484 date=cs[2],
485 485 parent=self.parents("filerevparent",
486 486 fl.parents(n), fl.rev, file=f),
487 487 permissions=self.repo.manifest.readflags(mfn)[f])
488 488
489 489 def fileannotate(self, f, node):
490 490 bcache = {}
491 491 ncache = {}
492 492 fl = self.repo.file(f)
493 493 n = fl.lookup(node)
494 494 node = hex(n)
495 495 changerev = fl.linkrev(n)
496 496
497 497 cl = self.repo.changelog
498 498 cn = cl.node(changerev)
499 499 cs = cl.read(cn)
500 500 mfn = cs[0]
501 501
502 502 def annotate(**map):
503 503 parity = 1
504 504 last = None
505 505 for r, l in fl.annotate(n):
506 506 try:
507 507 cnode = ncache[r]
508 508 except KeyError:
509 509 cnode = ncache[r] = self.repo.changelog.node(r)
510 510
511 511 try:
512 512 name = bcache[r]
513 513 except KeyError:
514 514 cl = self.repo.changelog.read(cnode)
515 515 bcache[r] = name = self.repo.ui.shortuser(cl[1])
516 516
517 517 if last != cnode:
518 518 parity = 1 - parity
519 519 last = cnode
520 520
521 521 yield {"parity": parity,
522 522 "node": hex(cnode),
523 523 "rev": r,
524 524 "author": name,
525 525 "file": f,
526 526 "line": l}
527 527
528 528 yield self.t("fileannotate",
529 529 file=f,
530 530 filenode=node,
531 531 annotate=annotate,
532 532 path=up(f),
533 533 rev=changerev,
534 534 node=hex(cn),
535 535 manifest=hex(mfn),
536 536 author=cs[1],
537 537 date=cs[2],
538 538 parent=self.parents("fileannotateparent",
539 539 fl.parents(n), fl.rev, file=f),
540 540 permissions=self.repo.manifest.readflags(mfn)[f])
541 541
542 542 def manifest(self, mnode, path):
543 543 man = self.repo.manifest
544 544 mn = man.lookup(mnode)
545 545 mnode = hex(mn)
546 546 mf = man.read(mn)
547 547 rev = man.rev(mn)
548 548 node = self.repo.changelog.node(rev)
549 549 mff = man.readflags(mn)
550 550
551 551 files = {}
552 552
553 553 p = path[1:]
554 554 l = len(p)
555 555
556 556 for f,n in mf.items():
557 557 if f[:l] != p:
558 558 continue
559 559 remain = f[l:]
560 560 if "/" in remain:
561 561 short = remain[:remain.find("/") + 1] # bleah
562 562 files[short] = (f, None)
563 563 else:
564 564 short = os.path.basename(remain)
565 565 files[short] = (f, n)
566 566
567 567 def filelist(**map):
568 568 parity = 0
569 569 fl = files.keys()
570 570 fl.sort()
571 571 for f in fl:
572 572 full, fnode = files[f]
573 573 if not fnode:
574 574 continue
575 575
576 576 yield {"file": full,
577 577 "manifest": mnode,
578 578 "filenode": hex(fnode),
579 579 "parity": parity,
580 580 "basename": f,
581 581 "permissions": mff[full]}
582 582 parity = 1 - parity
583 583
584 584 def dirlist(**map):
585 585 parity = 0
586 586 fl = files.keys()
587 587 fl.sort()
588 588 for f in fl:
589 589 full, fnode = files[f]
590 590 if fnode:
591 591 continue
592 592
593 593 yield {"parity": parity,
594 594 "path": os.path.join(path, f),
595 595 "manifest": mnode,
596 596 "basename": f[:-1]}
597 597 parity = 1 - parity
598 598
599 599 yield self.t("manifest",
600 600 manifest=mnode,
601 601 rev=rev,
602 602 node=hex(node),
603 603 path=path,
604 604 up=up(path),
605 605 fentries=filelist,
606 606 dentries=dirlist)
607 607
608 608 def tags(self):
609 609 cl = self.repo.changelog
610 610 mf = cl.read(cl.tip())[0]
611 611
612 612 i = self.repo.tagslist()
613 613 i.reverse()
614 614
615 615 def entries(**map):
616 616 parity = 0
617 617 for k,n in i:
618 618 yield {"parity": parity,
619 619 "tag": k,
620 620 "node": hex(n)}
621 621 parity = 1 - parity
622 622
623 623 yield self.t("tags",
624 624 manifest=hex(mf),
625 625 entries=entries)
626 626
627 627 def filediff(self, file, changeset):
628 628 cl = self.repo.changelog
629 629 n = self.repo.lookup(changeset)
630 630 changeset = hex(n)
631 631 p1 = cl.parents(n)[0]
632 632 cs = cl.read(n)
633 633 mf = self.repo.manifest.read(cs[0])
634 634
635 635 def diff(**map):
636 636 yield self.diff(p1, n, file)
637 637
638 638 yield self.t("filediff",
639 639 file=file,
640 640 filenode=hex(mf.get(file, nullid)),
641 641 node=changeset,
642 642 rev=self.repo.changelog.rev(n),
643 643 parent=self.parents("filediffparent",
644 644 cl.parents(n), cl.rev),
645 645 diff=diff)
646 646
647 647 def archive(self, req, cnode, type):
648 648 cs = self.repo.changelog.read(cnode)
649 649 mnode = cs[0]
650 650 mf = self.repo.manifest.read(mnode)
651 651 rev = self.repo.manifest.rev(mnode)
652 652 reponame = re.sub(r"\W+", "-", self.reponame)
653 653 name = "%s-%s/" % (reponame, short(cnode))
654 654
655 655 files = mf.keys()
656 656 files.sort()
657 657
658 658 if type == 'zip':
659 659 tmp = tempfile.mkstemp()[1]
660 660 try:
661 661 zf = zipfile.ZipFile(tmp, "w", zipfile.ZIP_DEFLATED)
662 662
663 663 for f in files:
664 664 zf.writestr(name + f, self.repo.file(f).read(mf[f]))
665 665 zf.close()
666 666
667 667 f = open(tmp, 'r')
668 668 req.httphdr('application/zip', name[:-1] + '.zip',
669 669 os.path.getsize(tmp))
670 670 req.write(f.read())
671 671 f.close()
672 672 finally:
673 673 os.unlink(tmp)
674 674
675 675 else:
676 676 tf = tarfile.TarFile.open(mode='w|' + type, fileobj=req.out)
677 677 mff = self.repo.manifest.readflags(mnode)
678 678 mtime = int(time.time())
679 679
680 680 if type == "gz":
681 681 encoding = "gzip"
682 682 else:
683 683 encoding = "x-bzip2"
684 684 req.header([('Content-type', 'application/x-tar'),
685 685 ('Content-disposition', 'attachment; filename=%s%s%s' %
686 686 (name[:-1], '.tar.', type)),
687 687 ('Content-encoding', encoding)])
688 688 for fname in files:
689 689 rcont = self.repo.file(fname).read(mf[fname])
690 690 finfo = tarfile.TarInfo(name + fname)
691 691 finfo.mtime = mtime
692 692 finfo.size = len(rcont)
693 693 finfo.mode = mff[fname] and 0755 or 0644
694 694 tf.addfile(finfo, StringIO.StringIO(rcont))
695 695 tf.close()
696 696
697 697 # add tags to things
698 698 # tags -> list of changesets corresponding to tags
699 699 # find tag, changeset, file
700 700
701 701 def run(self, req=hgrequest()):
702 702 def header(**map):
703 703 yield self.t("header", **map)
704 704
705 705 def footer(**map):
706 706 yield self.t("footer", **map)
707 707
708 708 self.refresh()
709 709
710 710 t = self.repo.ui.config("web", "templates", templatepath())
711 711 m = os.path.join(t, "map")
712 712 style = self.repo.ui.config("web", "style", "")
713 713 if req.form.has_key('style'):
714 714 style = req.form['style'][0]
715 715 if style:
716 716 b = os.path.basename("map-" + style)
717 717 p = os.path.join(t, b)
718 718 if os.path.isfile(p):
719 719 m = p
720 720
721 721 port = req.env["SERVER_PORT"]
722 722 port = port != "80" and (":" + port) or ""
723 723 uri = req.env["REQUEST_URI"]
724 724 if "?" in uri:
725 725 uri = uri.split("?")[0]
726 726 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
727 727 if not self.reponame:
728 728 self.reponame = (self.repo.ui.config("web", "name")
729 729 or uri.strip('/') or self.repo.root)
730 730
731 731 self.t = templater(m, common_filters,
732 732 {"url": url,
733 733 "repo": self.reponame,
734 734 "header": header,
735 735 "footer": footer,
736 736 })
737 737
738 738 if not req.form.has_key('cmd'):
739 739 req.form['cmd'] = [self.t.cache['default'],]
740 740
741 741 if req.form['cmd'][0] == 'changelog':
742 742 c = self.repo.changelog.count() - 1
743 743 hi = c
744 744 if req.form.has_key('rev'):
745 745 hi = req.form['rev'][0]
746 746 try:
747 747 hi = self.repo.changelog.rev(self.repo.lookup(hi))
748 748 except hg.RepoError:
749 749 req.write(self.search(hi))
750 750 return
751 751
752 752 req.write(self.changelog(hi))
753 753
754 754 elif req.form['cmd'][0] == 'changeset':
755 755 req.write(self.changeset(req.form['node'][0]))
756 756
757 757 elif req.form['cmd'][0] == 'manifest':
758 758 req.write(self.manifest(req.form['manifest'][0], req.form['path'][0]))
759 759
760 760 elif req.form['cmd'][0] == 'tags':
761 761 req.write(self.tags())
762 762
763 763 elif req.form['cmd'][0] == 'filediff':
764 764 req.write(self.filediff(req.form['file'][0], req.form['node'][0]))
765 765
766 766 elif req.form['cmd'][0] == 'file':
767 767 req.write(self.filerevision(req.form['file'][0], req.form['filenode'][0]))
768 768
769 769 elif req.form['cmd'][0] == 'annotate':
770 770 req.write(self.fileannotate(req.form['file'][0], req.form['filenode'][0]))
771 771
772 772 elif req.form['cmd'][0] == 'filelog':
773 773 req.write(self.filelog(req.form['file'][0], req.form['filenode'][0]))
774 774
775 775 elif req.form['cmd'][0] == 'heads':
776 776 req.httphdr("application/mercurial-0.1")
777 777 h = self.repo.heads()
778 778 req.write(" ".join(map(hex, h)) + "\n")
779 779
780 780 elif req.form['cmd'][0] == 'branches':
781 781 req.httphdr("application/mercurial-0.1")
782 782 nodes = []
783 783 if req.form.has_key('nodes'):
784 784 nodes = map(bin, req.form['nodes'][0].split(" "))
785 785 for b in self.repo.branches(nodes):
786 786 req.write(" ".join(map(hex, b)) + "\n")
787 787
788 788 elif req.form['cmd'][0] == 'between':
789 789 req.httphdr("application/mercurial-0.1")
790 790 nodes = []
791 791 if req.form.has_key('pairs'):
792 792 pairs = [map(bin, p.split("-"))
793 793 for p in req.form['pairs'][0].split(" ")]
794 794 for b in self.repo.between(pairs):
795 795 req.write(" ".join(map(hex, b)) + "\n")
796 796
797 797 elif req.form['cmd'][0] == 'changegroup':
798 798 req.httphdr("application/mercurial-0.1")
799 799 nodes = []
800 800 if not self.allowpull:
801 801 return
802 802
803 803 if req.form.has_key('roots'):
804 804 nodes = map(bin, req.form['roots'][0].split(" "))
805 805
806 806 z = zlib.compressobj()
807 807 f = self.repo.changegroup(nodes)
808 808 while 1:
809 809 chunk = f.read(4096)
810 810 if not chunk:
811 811 break
812 812 req.write(z.compress(chunk))
813 813
814 814 req.write(z.flush())
815 815
816 816 elif req.form['cmd'][0] == 'archive':
817 817 changeset = self.repo.lookup(req.form['node'][0])
818 818 type = req.form['type'][0]
819 819 if (type in self.archives and
820 820 self.repo.ui.configbool("web", "allow" + type, False)):
821 821 self.archive(req, changeset, type)
822 822 return
823 823
824 824 req.write(self.t("error"))
825 825
826 826 else:
827 827 req.write(self.t("error"))
828 828
829 829 def create_server(repo):
830 830
831 831 def openlog(opt, default):
832 832 if opt and opt != '-':
833 833 return open(opt, 'w')
834 834 return default
835 835
836 836 address = repo.ui.config("web", "address", "")
837 837 port = int(repo.ui.config("web", "port", 8000))
838 838 use_ipv6 = repo.ui.configbool("web", "ipv6")
839 839 accesslog = openlog(repo.ui.config("web", "accesslog", "-"), sys.stdout)
840 840 errorlog = openlog(repo.ui.config("web", "errorlog", "-"), sys.stderr)
841 841
842 842 class IPv6HTTPServer(BaseHTTPServer.HTTPServer):
843 843 address_family = getattr(socket, 'AF_INET6', None)
844 844
845 845 def __init__(self, *args, **kwargs):
846 846 if self.address_family is None:
847 raise hg.RepoError('IPv6 not available on this system')
847 raise hg.RepoError(_('IPv6 not available on this system'))
848 848 BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
849 849
850 850 class hgwebhandler(BaseHTTPServer.BaseHTTPRequestHandler):
851 851 def log_error(self, format, *args):
852 852 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
853 853 self.log_date_time_string(),
854 854 format % args))
855 855
856 856 def log_message(self, format, *args):
857 857 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
858 858 self.log_date_time_string(),
859 859 format % args))
860 860
861 861 def do_POST(self):
862 862 try:
863 863 self.do_hgweb()
864 864 except socket.error, inst:
865 865 if inst[0] != errno.EPIPE:
866 866 raise
867 867
868 868 def do_GET(self):
869 869 self.do_POST()
870 870
871 871 def do_hgweb(self):
872 872 query = ""
873 873 p = self.path.find("?")
874 874 if p:
875 875 query = self.path[p + 1:]
876 876 query = query.replace('+', ' ')
877 877
878 878 env = {}
879 879 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
880 880 env['REQUEST_METHOD'] = self.command
881 881 env['SERVER_NAME'] = self.server.server_name
882 882 env['SERVER_PORT'] = str(self.server.server_port)
883 883 env['REQUEST_URI'] = "/"
884 884 if query:
885 885 env['QUERY_STRING'] = query
886 886 host = self.address_string()
887 887 if host != self.client_address[0]:
888 888 env['REMOTE_HOST'] = host
889 889 env['REMOTE_ADDR'] = self.client_address[0]
890 890
891 891 if self.headers.typeheader is None:
892 892 env['CONTENT_TYPE'] = self.headers.type
893 893 else:
894 894 env['CONTENT_TYPE'] = self.headers.typeheader
895 895 length = self.headers.getheader('content-length')
896 896 if length:
897 897 env['CONTENT_LENGTH'] = length
898 898 accept = []
899 899 for line in self.headers.getallmatchingheaders('accept'):
900 900 if line[:1] in "\t\n\r ":
901 901 accept.append(line.strip())
902 902 else:
903 903 accept = accept + line[7:].split(',')
904 904 env['HTTP_ACCEPT'] = ','.join(accept)
905 905
906 906 req = hgrequest(self.rfile, self.wfile, env)
907 907 self.send_response(200, "Script output follows")
908 908 hg.run(req)
909 909
910 910 hg = hgweb(repo)
911 911 if use_ipv6:
912 912 return IPv6HTTPServer((address, port), hgwebhandler)
913 913 else:
914 914 return BaseHTTPServer.HTTPServer((address, port), hgwebhandler)
915 915
916 916 def server(path, name, templates, address, port, use_ipv6=False,
917 917 accesslog=sys.stdout, errorlog=sys.stderr):
918 918 httpd = create_server(path, name, templates, address, port, use_ipv6,
919 919 accesslog, errorlog)
920 920 httpd.serve_forever()
921 921
922 922 # This is a stopgap
923 923 class hgwebdir:
924 924 def __init__(self, config):
925 925 def cleannames(items):
926 926 return [(name.strip('/'), path) for name, path in items]
927 927
928 928 if type(config) == type([]):
929 929 self.repos = cleannames(config)
930 930 elif type(config) == type({}):
931 931 self.repos = cleannames(config.items())
932 932 self.repos.sort()
933 933 else:
934 934 cp = ConfigParser.SafeConfigParser()
935 935 cp.read(config)
936 936 self.repos = cleannames(cp.items("paths"))
937 937 self.repos.sort()
938 938
939 939 def run(self, req=hgrequest()):
940 940 def header(**map):
941 941 yield tmpl("header", **map)
942 942
943 943 def footer(**map):
944 944 yield tmpl("footer", **map)
945 945
946 946 m = os.path.join(templatepath(), "map")
947 947 tmpl = templater(m, common_filters,
948 948 {"header": header, "footer": footer})
949 949
950 950 def entries(**map):
951 951 parity = 0
952 952 for name, path in self.repos:
953 953 u = ui.ui()
954 954 try:
955 955 u.readconfig(file(os.path.join(path, '.hg', 'hgrc')))
956 956 except IOError:
957 957 pass
958 958 get = u.config
959 959
960 960 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
961 961 .replace("//", "/"))
962 962
963 963 # update time with local timezone
964 964 d = (os.stat(os.path.join(path,
965 965 ".hg", "00changelog.d")).st_mtime,
966 966 util.makedate()[1])
967 967
968 968 yield dict(contact=(get("ui", "username") or # preferred
969 969 get("web", "contact") or # deprecated
970 970 get("web", "author", "unknown")), # also
971 971 name=get("web", "name", name),
972 972 url=url,
973 973 parity=parity,
974 974 shortdesc=get("web", "description", "unknown"),
975 975 lastupdate=d)
976 976
977 977 parity = 1 - parity
978 978
979 979 virtual = req.env.get("PATH_INFO", "").strip('/')
980 980 if virtual:
981 981 real = dict(self.repos).get(virtual)
982 982 if real:
983 983 hgweb(real).run(req)
984 984 else:
985 985 req.write(tmpl("notfound", repo=virtual))
986 986 else:
987 987 req.write(tmpl("index", entries=entries))
@@ -1,136 +1,136 b''
1 1 # httprepo.py - HTTP repository proxy classes for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from remoterepo import *
10 10 from i18n import gettext as _
11 11 from demandload import *
12 12 demandload(globals(), "hg os urllib urllib2 urlparse zlib util")
13 13
14 14 class httprepository(remoterepository):
15 15 def __init__(self, ui, path):
16 16 # fix missing / after hostname
17 17 s = urlparse.urlsplit(path)
18 18 partial = s[2]
19 19 if not partial: partial = "/"
20 20 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
21 21 self.ui = ui
22 22 no_list = [ "localhost", "127.0.0.1" ]
23 23 host = ui.config("http_proxy", "host")
24 24 if host is None:
25 25 host = os.environ.get("http_proxy")
26 26 if host and host.startswith('http://'):
27 27 host = host[7:]
28 28 user = ui.config("http_proxy", "user")
29 29 passwd = ui.config("http_proxy", "passwd")
30 30 no = ui.config("http_proxy", "no")
31 31 if no is None:
32 32 no = os.environ.get("no_proxy")
33 33 if no:
34 34 no_list = no_list + no.split(",")
35 35
36 36 no_proxy = 0
37 37 for h in no_list:
38 38 if (path.startswith("http://" + h + "/") or
39 39 path.startswith("http://" + h + ":") or
40 40 path == "http://" + h):
41 41 no_proxy = 1
42 42
43 43 # Note: urllib2 takes proxy values from the environment and those will
44 44 # take precedence
45 45 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
46 46 try:
47 47 if os.environ.has_key(env):
48 48 del os.environ[env]
49 49 except OSError:
50 50 pass
51 51
52 52 proxy_handler = urllib2.BaseHandler()
53 53 if host and not no_proxy:
54 54 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
55 55
56 56 authinfo = None
57 57 if user and passwd:
58 58 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
59 59 passmgr.add_password(None, host, user, passwd)
60 60 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
61 61
62 62 opener = urllib2.build_opener(proxy_handler, authinfo)
63 63 # 1.0 here is the _protocol_ version
64 64 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
65 65 urllib2.install_opener(opener)
66 66
67 67 def dev(self):
68 68 return -1
69 69
70 70 def do_cmd(self, cmd, **args):
71 self.ui.debug("sending %s command\n" % cmd)
71 self.ui.debug(_("sending %s command\n") % cmd)
72 72 q = {"cmd": cmd}
73 73 q.update(args)
74 74 qs = urllib.urlencode(q)
75 75 cu = "%s?%s" % (self.url, qs)
76 76 resp = urllib2.urlopen(cu)
77 77 proto = resp.headers['content-type']
78 78
79 79 # accept old "text/plain" and "application/hg-changegroup" for now
80 80 if not proto.startswith('application/mercurial') and \
81 81 not proto.startswith('text/plain') and \
82 82 not proto.startswith('application/hg-changegroup'):
83 raise hg.RepoError("'%s' does not appear to be an hg repository" %
83 raise hg.RepoError(_("'%s' does not appear to be an hg repository") %
84 84 self.url)
85 85
86 86 if proto.startswith('application/mercurial'):
87 87 version = proto[22:]
88 88 if float(version) > 0.1:
89 raise hg.RepoError("'%s' uses newer protocol %s" %
89 raise hg.RepoError(_("'%s' uses newer protocol %s") %
90 90 (self.url, version))
91 91
92 92 return resp
93 93
94 94 def heads(self):
95 95 d = self.do_cmd("heads").read()
96 96 try:
97 97 return map(bin, d[:-1].split(" "))
98 98 except:
99 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
99 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
100 100 raise
101 101
102 102 def branches(self, nodes):
103 103 n = " ".join(map(hex, nodes))
104 104 d = self.do_cmd("branches", nodes=n).read()
105 105 try:
106 106 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
107 107 return br
108 108 except:
109 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
109 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
110 110 raise
111 111
112 112 def between(self, pairs):
113 113 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
114 114 d = self.do_cmd("between", pairs=n).read()
115 115 try:
116 116 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
117 117 return p
118 118 except:
119 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
119 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
120 120 raise
121 121
122 122 def changegroup(self, nodes):
123 123 n = " ".join(map(hex, nodes))
124 124 f = self.do_cmd("changegroup", roots=n)
125 125 bytes = 0
126 126
127 127 def zgenerator(f):
128 128 zd = zlib.decompressobj()
129 129 for chnk in f:
130 130 yield zd.decompress(chnk)
131 131 yield zd.flush()
132 132
133 133 return util.chunkbuffer(zgenerator(util.filechunkiter(f)))
134 134
135 135 class httpsrepository(httprepository):
136 136 pass
@@ -1,1449 +1,1449 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import struct, os, util
9 9 import filelog, manifest, changelog, dirstate, repo
10 10 from node import *
11 11 from i18n import gettext as _
12 12 from demandload import *
13 13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14 14
15 15 class localrepository:
16 16 def __init__(self, ui, path=None, create=0):
17 17 if not path:
18 18 p = os.getcwd()
19 19 while not os.path.isdir(os.path.join(p, ".hg")):
20 20 oldp = p
21 21 p = os.path.dirname(p)
22 if p == oldp: raise repo.RepoError("no repo found")
22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 23 path = p
24 24 self.path = os.path.join(path, ".hg")
25 25
26 26 if not create and not os.path.isdir(self.path):
27 raise repo.RepoError("repository %s not found" % self.path)
27 raise repo.RepoError(_("repository %s not found") % self.path)
28 28
29 29 self.root = os.path.abspath(path)
30 30 self.ui = ui
31 31 self.opener = util.opener(self.path)
32 32 self.wopener = util.opener(self.root)
33 33 self.manifest = manifest.manifest(self.opener)
34 34 self.changelog = changelog.changelog(self.opener)
35 35 self.tagscache = None
36 36 self.nodetagscache = None
37 37 self.encodepats = None
38 38 self.decodepats = None
39 39
40 40 if create:
41 41 os.mkdir(self.path)
42 42 os.mkdir(self.join("data"))
43 43
44 44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 45 try:
46 46 self.ui.readconfig(self.opener("hgrc"))
47 47 except IOError: pass
48 48
49 49 def hook(self, name, **args):
50 50 s = self.ui.config("hooks", name)
51 51 if s:
52 self.ui.note("running hook %s: %s\n" % (name, s))
52 self.ui.note(_("running hook %s: %s\n") % (name, s))
53 53 old = {}
54 54 for k, v in args.items():
55 55 k = k.upper()
56 56 old[k] = os.environ.get(k, None)
57 57 os.environ[k] = v
58 58
59 59 # Hooks run in the repository root
60 60 olddir = os.getcwd()
61 61 os.chdir(self.root)
62 62 r = os.system(s)
63 63 os.chdir(olddir)
64 64
65 65 for k, v in old.items():
66 66 if v != None:
67 67 os.environ[k] = v
68 68 else:
69 69 del os.environ[k]
70 70
71 71 if r:
72 self.ui.warn("abort: %s hook failed with status %d!\n" %
72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
73 73 (name, r))
74 74 return False
75 75 return True
76 76
77 77 def tags(self):
78 78 '''return a mapping of tag to node'''
79 79 if not self.tagscache:
80 80 self.tagscache = {}
81 81 def addtag(self, k, n):
82 82 try:
83 83 bin_n = bin(n)
84 84 except TypeError:
85 85 bin_n = ''
86 86 self.tagscache[k.strip()] = bin_n
87 87
88 88 try:
89 89 # read each head of the tags file, ending with the tip
90 90 # and add each tag found to the map, with "newer" ones
91 91 # taking precedence
92 92 fl = self.file(".hgtags")
93 93 h = fl.heads()
94 94 h.reverse()
95 95 for r in h:
96 96 for l in fl.read(r).splitlines():
97 97 if l:
98 98 n, k = l.split(" ", 1)
99 99 addtag(self, k, n)
100 100 except KeyError:
101 101 pass
102 102
103 103 try:
104 104 f = self.opener("localtags")
105 105 for l in f:
106 106 n, k = l.split(" ", 1)
107 107 addtag(self, k, n)
108 108 except IOError:
109 109 pass
110 110
111 111 self.tagscache['tip'] = self.changelog.tip()
112 112
113 113 return self.tagscache
114 114
115 115 def tagslist(self):
116 116 '''return a list of tags ordered by revision'''
117 117 l = []
118 118 for t, n in self.tags().items():
119 119 try:
120 120 r = self.changelog.rev(n)
121 121 except:
122 122 r = -2 # sort to the beginning of the list if unknown
123 123 l.append((r,t,n))
124 124 l.sort()
125 125 return [(t,n) for r,t,n in l]
126 126
127 127 def nodetags(self, node):
128 128 '''return the tags associated with a node'''
129 129 if not self.nodetagscache:
130 130 self.nodetagscache = {}
131 131 for t,n in self.tags().items():
132 132 self.nodetagscache.setdefault(n,[]).append(t)
133 133 return self.nodetagscache.get(node, [])
134 134
135 135 def lookup(self, key):
136 136 try:
137 137 return self.tags()[key]
138 138 except KeyError:
139 139 try:
140 140 return self.changelog.lookup(key)
141 141 except:
142 raise repo.RepoError("unknown revision '%s'" % key)
142 raise repo.RepoError(_("unknown revision '%s'") % key)
143 143
144 144 def dev(self):
145 145 return os.stat(self.path).st_dev
146 146
147 147 def local(self):
148 148 return True
149 149
150 150 def join(self, f):
151 151 return os.path.join(self.path, f)
152 152
153 153 def wjoin(self, f):
154 154 return os.path.join(self.root, f)
155 155
156 156 def file(self, f):
157 157 if f[0] == '/': f = f[1:]
158 158 return filelog.filelog(self.opener, f)
159 159
160 160 def getcwd(self):
161 161 return self.dirstate.getcwd()
162 162
163 163 def wfile(self, f, mode='r'):
164 164 return self.wopener(f, mode)
165 165
166 166 def wread(self, filename):
167 167 if self.encodepats == None:
168 168 l = []
169 169 for pat, cmd in self.ui.configitems("encode"):
170 170 mf = util.matcher("", "/", [pat], [], [])[1]
171 171 l.append((mf, cmd))
172 172 self.encodepats = l
173 173
174 174 data = self.wopener(filename, 'r').read()
175 175
176 176 for mf, cmd in self.encodepats:
177 177 if mf(filename):
178 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 179 data = util.filter(data, cmd)
180 180 break
181 181
182 182 return data
183 183
184 184 def wwrite(self, filename, data, fd=None):
185 185 if self.decodepats == None:
186 186 l = []
187 187 for pat, cmd in self.ui.configitems("decode"):
188 188 mf = util.matcher("", "/", [pat], [], [])[1]
189 189 l.append((mf, cmd))
190 190 self.decodepats = l
191 191
192 192 for mf, cmd in self.decodepats:
193 193 if mf(filename):
194 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 195 data = util.filter(data, cmd)
196 196 break
197 197
198 198 if fd:
199 199 return fd.write(data)
200 200 return self.wopener(filename, 'w').write(data)
201 201
202 202 def transaction(self):
203 203 # save dirstate for undo
204 204 try:
205 205 ds = self.opener("dirstate").read()
206 206 except IOError:
207 207 ds = ""
208 208 self.opener("journal.dirstate", "w").write(ds)
209 209
210 210 def after():
211 211 util.rename(self.join("journal"), self.join("undo"))
212 212 util.rename(self.join("journal.dirstate"),
213 213 self.join("undo.dirstate"))
214 214
215 215 return transaction.transaction(self.ui.warn, self.opener,
216 216 self.join("journal"), after)
217 217
218 218 def recover(self):
219 219 lock = self.lock()
220 220 if os.path.exists(self.join("journal")):
221 self.ui.status("rolling back interrupted transaction\n")
221 self.ui.status(_("rolling back interrupted transaction\n"))
222 222 return transaction.rollback(self.opener, self.join("journal"))
223 223 else:
224 self.ui.warn("no interrupted transaction available\n")
224 self.ui.warn(_("no interrupted transaction available\n"))
225 225
226 226 def undo(self):
227 227 lock = self.lock()
228 228 if os.path.exists(self.join("undo")):
229 self.ui.status("rolling back last transaction\n")
229 self.ui.status(_("rolling back last transaction\n"))
230 230 transaction.rollback(self.opener, self.join("undo"))
231 231 self.dirstate = None
232 232 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
233 233 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
234 234 else:
235 self.ui.warn("no undo information available\n")
235 self.ui.warn(_("no undo information available\n"))
236 236
237 237 def lock(self, wait=1):
238 238 try:
239 239 return lock.lock(self.join("lock"), 0)
240 240 except lock.LockHeld, inst:
241 241 if wait:
242 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
242 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
243 243 return lock.lock(self.join("lock"), wait)
244 244 raise inst
245 245
246 246 def rawcommit(self, files, text, user, date, p1=None, p2=None):
247 247 orig_parent = self.dirstate.parents()[0] or nullid
248 248 p1 = p1 or self.dirstate.parents()[0] or nullid
249 249 p2 = p2 or self.dirstate.parents()[1] or nullid
250 250 c1 = self.changelog.read(p1)
251 251 c2 = self.changelog.read(p2)
252 252 m1 = self.manifest.read(c1[0])
253 253 mf1 = self.manifest.readflags(c1[0])
254 254 m2 = self.manifest.read(c2[0])
255 255 changed = []
256 256
257 257 if orig_parent == p1:
258 258 update_dirstate = 1
259 259 else:
260 260 update_dirstate = 0
261 261
262 262 tr = self.transaction()
263 263 mm = m1.copy()
264 264 mfm = mf1.copy()
265 265 linkrev = self.changelog.count()
266 266 for f in files:
267 267 try:
268 268 t = self.wread(f)
269 269 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
270 270 r = self.file(f)
271 271 mfm[f] = tm
272 272
273 273 fp1 = m1.get(f, nullid)
274 274 fp2 = m2.get(f, nullid)
275 275
276 276 # is the same revision on two branches of a merge?
277 277 if fp2 == fp1:
278 278 fp2 = nullid
279 279
280 280 if fp2 != nullid:
281 281 # is one parent an ancestor of the other?
282 282 fpa = r.ancestor(fp1, fp2)
283 283 if fpa == fp1:
284 284 fp1, fp2 = fp2, nullid
285 285 elif fpa == fp2:
286 286 fp2 = nullid
287 287
288 288 # is the file unmodified from the parent?
289 289 if t == r.read(fp1):
290 290 # record the proper existing parent in manifest
291 291 # no need to add a revision
292 292 mm[f] = fp1
293 293 continue
294 294
295 295 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
296 296 changed.append(f)
297 297 if update_dirstate:
298 298 self.dirstate.update([f], "n")
299 299 except IOError:
300 300 try:
301 301 del mm[f]
302 302 del mfm[f]
303 303 if update_dirstate:
304 304 self.dirstate.forget([f])
305 305 except:
306 306 # deleted from p2?
307 307 pass
308 308
309 309 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
310 310 user = user or self.ui.username()
311 311 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
312 312 tr.close()
313 313 if update_dirstate:
314 314 self.dirstate.setparents(n, nullid)
315 315
316 316 def commit(self, files = None, text = "", user = None, date = None,
317 317 match = util.always, force=False):
318 318 commit = []
319 319 remove = []
320 320 changed = []
321 321
322 322 if files:
323 323 for f in files:
324 324 s = self.dirstate.state(f)
325 325 if s in 'nmai':
326 326 commit.append(f)
327 327 elif s == 'r':
328 328 remove.append(f)
329 329 else:
330 self.ui.warn("%s not tracked!\n" % f)
330 self.ui.warn(_("%s not tracked!\n") % f)
331 331 else:
332 332 (c, a, d, u) = self.changes(match=match)
333 333 commit = c + a
334 334 remove = d
335 335
336 336 p1, p2 = self.dirstate.parents()
337 337 c1 = self.changelog.read(p1)
338 338 c2 = self.changelog.read(p2)
339 339 m1 = self.manifest.read(c1[0])
340 340 mf1 = self.manifest.readflags(c1[0])
341 341 m2 = self.manifest.read(c2[0])
342 342
343 343 if not commit and not remove and not force and p2 == nullid:
344 self.ui.status("nothing changed\n")
344 self.ui.status(_("nothing changed\n"))
345 345 return None
346 346
347 347 if not self.hook("precommit"):
348 348 return None
349 349
350 350 lock = self.lock()
351 351 tr = self.transaction()
352 352
353 353 # check in files
354 354 new = {}
355 355 linkrev = self.changelog.count()
356 356 commit.sort()
357 357 for f in commit:
358 358 self.ui.note(f + "\n")
359 359 try:
360 360 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
361 361 t = self.wread(f)
362 362 except IOError:
363 self.ui.warn("trouble committing %s!\n" % f)
363 self.ui.warn(_("trouble committing %s!\n") % f)
364 364 raise
365 365
366 366 r = self.file(f)
367 367
368 368 meta = {}
369 369 cp = self.dirstate.copied(f)
370 370 if cp:
371 371 meta["copy"] = cp
372 372 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
373 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
373 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
374 374 fp1, fp2 = nullid, nullid
375 375 else:
376 376 fp1 = m1.get(f, nullid)
377 377 fp2 = m2.get(f, nullid)
378 378
379 379 # is the same revision on two branches of a merge?
380 380 if fp2 == fp1:
381 381 fp2 = nullid
382 382
383 383 if fp2 != nullid:
384 384 # is one parent an ancestor of the other?
385 385 fpa = r.ancestor(fp1, fp2)
386 386 if fpa == fp1:
387 387 fp1, fp2 = fp2, nullid
388 388 elif fpa == fp2:
389 389 fp2 = nullid
390 390
391 391 # is the file unmodified from the parent?
392 392 if not meta and t == r.read(fp1):
393 393 # record the proper existing parent in manifest
394 394 # no need to add a revision
395 395 new[f] = fp1
396 396 continue
397 397
398 398 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
399 399 # remember what we've added so that we can later calculate
400 400 # the files to pull from a set of changesets
401 401 changed.append(f)
402 402
403 403 # update manifest
404 404 m1.update(new)
405 405 for f in remove:
406 406 if f in m1:
407 407 del m1[f]
408 408 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
409 409 (new, remove))
410 410
411 411 # add changeset
412 412 new = new.keys()
413 413 new.sort()
414 414
415 415 if not text:
416 416 edittext = ""
417 417 if p2 != nullid:
418 418 edittext += "HG: branch merge\n"
419 419 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
420 420 edittext += "".join(["HG: changed %s\n" % f for f in changed])
421 421 edittext += "".join(["HG: removed %s\n" % f for f in remove])
422 422 if not changed and not remove:
423 423 edittext += "HG: no files changed\n"
424 424 edittext = self.ui.edit(edittext)
425 425 if not edittext.rstrip():
426 426 return None
427 427 text = edittext
428 428
429 429 user = user or self.ui.username()
430 430 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
431 431 tr.close()
432 432
433 433 self.dirstate.setparents(n)
434 434 self.dirstate.update(new, "n")
435 435 self.dirstate.forget(remove)
436 436
437 437 if not self.hook("commit", node=hex(n)):
438 438 return None
439 439 return n
440 440
441 441 def walk(self, node=None, files=[], match=util.always):
442 442 if node:
443 443 for fn in self.manifest.read(self.changelog.read(node)[0]):
444 444 if match(fn): yield 'm', fn
445 445 else:
446 446 for src, fn in self.dirstate.walk(files, match):
447 447 yield src, fn
448 448
449 449 def changes(self, node1 = None, node2 = None, files = [],
450 450 match = util.always):
451 451 mf2, u = None, []
452 452
453 453 def fcmp(fn, mf):
454 454 t1 = self.wread(fn)
455 455 t2 = self.file(fn).read(mf.get(fn, nullid))
456 456 return cmp(t1, t2)
457 457
458 458 def mfmatches(node):
459 459 mf = dict(self.manifest.read(node))
460 460 for fn in mf.keys():
461 461 if not match(fn):
462 462 del mf[fn]
463 463 return mf
464 464
465 465 # are we comparing the working directory?
466 466 if not node2:
467 467 l, c, a, d, u = self.dirstate.changes(files, match)
468 468
469 469 # are we comparing working dir against its parent?
470 470 if not node1:
471 471 if l:
472 472 # do a full compare of any files that might have changed
473 473 change = self.changelog.read(self.dirstate.parents()[0])
474 474 mf2 = mfmatches(change[0])
475 475 for f in l:
476 476 if fcmp(f, mf2):
477 477 c.append(f)
478 478
479 479 for l in c, a, d, u:
480 480 l.sort()
481 481
482 482 return (c, a, d, u)
483 483
484 484 # are we comparing working dir against non-tip?
485 485 # generate a pseudo-manifest for the working dir
486 486 if not node2:
487 487 if not mf2:
488 488 change = self.changelog.read(self.dirstate.parents()[0])
489 489 mf2 = mfmatches(change[0])
490 490 for f in a + c + l:
491 491 mf2[f] = ""
492 492 for f in d:
493 493 if f in mf2: del mf2[f]
494 494 else:
495 495 change = self.changelog.read(node2)
496 496 mf2 = mfmatches(change[0])
497 497
498 498 # flush lists from dirstate before comparing manifests
499 499 c, a = [], []
500 500
501 501 change = self.changelog.read(node1)
502 502 mf1 = mfmatches(change[0])
503 503
504 504 for fn in mf2:
505 505 if mf1.has_key(fn):
506 506 if mf1[fn] != mf2[fn]:
507 507 if mf2[fn] != "" or fcmp(fn, mf1):
508 508 c.append(fn)
509 509 del mf1[fn]
510 510 else:
511 511 a.append(fn)
512 512
513 513 d = mf1.keys()
514 514
515 515 for l in c, a, d, u:
516 516 l.sort()
517 517
518 518 return (c, a, d, u)
519 519
520 520 def add(self, list):
521 521 for f in list:
522 522 p = self.wjoin(f)
523 523 if not os.path.exists(p):
524 self.ui.warn("%s does not exist!\n" % f)
524 self.ui.warn(_("%s does not exist!\n") % f)
525 525 elif not os.path.isfile(p):
526 self.ui.warn("%s not added: only files supported currently\n" % f)
526 self.ui.warn(_("%s not added: only files supported currently\n") % f)
527 527 elif self.dirstate.state(f) in 'an':
528 self.ui.warn("%s already tracked!\n" % f)
528 self.ui.warn(_("%s already tracked!\n") % f)
529 529 else:
530 530 self.dirstate.update([f], "a")
531 531
532 532 def forget(self, list):
533 533 for f in list:
534 534 if self.dirstate.state(f) not in 'ai':
535 self.ui.warn("%s not added!\n" % f)
535 self.ui.warn(_("%s not added!\n") % f)
536 536 else:
537 537 self.dirstate.forget([f])
538 538
539 539 def remove(self, list):
540 540 for f in list:
541 541 p = self.wjoin(f)
542 542 if os.path.exists(p):
543 self.ui.warn("%s still exists!\n" % f)
543 self.ui.warn(_("%s still exists!\n") % f)
544 544 elif self.dirstate.state(f) == 'a':
545 self.ui.warn("%s never committed!\n" % f)
545 self.ui.warn(_("%s never committed!\n") % f)
546 546 self.dirstate.forget([f])
547 547 elif f not in self.dirstate:
548 self.ui.warn("%s not tracked!\n" % f)
548 self.ui.warn(_("%s not tracked!\n") % f)
549 549 else:
550 550 self.dirstate.update([f], "r")
551 551
552 552 def copy(self, source, dest):
553 553 p = self.wjoin(dest)
554 554 if not os.path.exists(p):
555 self.ui.warn("%s does not exist!\n" % dest)
555 self.ui.warn(_("%s does not exist!\n") % dest)
556 556 elif not os.path.isfile(p):
557 self.ui.warn("copy failed: %s is not a file\n" % dest)
557 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
558 558 else:
559 559 if self.dirstate.state(dest) == '?':
560 560 self.dirstate.update([dest], "a")
561 561 self.dirstate.copy(source, dest)
562 562
563 563 def heads(self):
564 564 return self.changelog.heads()
565 565
566 566 # branchlookup returns a dict giving a list of branches for
567 567 # each head. A branch is defined as the tag of a node or
568 568 # the branch of the node's parents. If a node has multiple
569 569 # branch tags, tags are eliminated if they are visible from other
570 570 # branch tags.
571 571 #
572 572 # So, for this graph: a->b->c->d->e
573 573 # \ /
574 574 # aa -----/
575 575 # a has tag 2.6.12
576 576 # d has tag 2.6.13
577 577 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
578 578 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
579 579 # from the list.
580 580 #
581 581 # It is possible that more than one head will have the same branch tag.
582 582 # callers need to check the result for multiple heads under the same
583 583 # branch tag if that is a problem for them (ie checkout of a specific
584 584 # branch).
585 585 #
586 586 # passing in a specific branch will limit the depth of the search
587 587 # through the parents. It won't limit the branches returned in the
588 588 # result though.
589 589 def branchlookup(self, heads=None, branch=None):
590 590 if not heads:
591 591 heads = self.heads()
592 592 headt = [ h for h in heads ]
593 593 chlog = self.changelog
594 594 branches = {}
595 595 merges = []
596 596 seenmerge = {}
597 597
598 598 # traverse the tree once for each head, recording in the branches
599 599 # dict which tags are visible from this head. The branches
600 600 # dict also records which tags are visible from each tag
601 601 # while we traverse.
602 602 while headt or merges:
603 603 if merges:
604 604 n, found = merges.pop()
605 605 visit = [n]
606 606 else:
607 607 h = headt.pop()
608 608 visit = [h]
609 609 found = [h]
610 610 seen = {}
611 611 while visit:
612 612 n = visit.pop()
613 613 if n in seen:
614 614 continue
615 615 pp = chlog.parents(n)
616 616 tags = self.nodetags(n)
617 617 if tags:
618 618 for x in tags:
619 619 if x == 'tip':
620 620 continue
621 621 for f in found:
622 622 branches.setdefault(f, {})[n] = 1
623 623 branches.setdefault(n, {})[n] = 1
624 624 break
625 625 if n not in found:
626 626 found.append(n)
627 627 if branch in tags:
628 628 continue
629 629 seen[n] = 1
630 630 if pp[1] != nullid and n not in seenmerge:
631 631 merges.append((pp[1], [x for x in found]))
632 632 seenmerge[n] = 1
633 633 if pp[0] != nullid:
634 634 visit.append(pp[0])
635 635 # traverse the branches dict, eliminating branch tags from each
636 636 # head that are visible from another branch tag for that head.
637 637 out = {}
638 638 viscache = {}
639 639 for h in heads:
640 640 def visible(node):
641 641 if node in viscache:
642 642 return viscache[node]
643 643 ret = {}
644 644 visit = [node]
645 645 while visit:
646 646 x = visit.pop()
647 647 if x in viscache:
648 648 ret.update(viscache[x])
649 649 elif x not in ret:
650 650 ret[x] = 1
651 651 if x in branches:
652 652 visit[len(visit):] = branches[x].keys()
653 653 viscache[node] = ret
654 654 return ret
655 655 if h not in branches:
656 656 continue
657 657 # O(n^2), but somewhat limited. This only searches the
658 658 # tags visible from a specific head, not all the tags in the
659 659 # whole repo.
660 660 for b in branches[h]:
661 661 vis = False
662 662 for bb in branches[h].keys():
663 663 if b != bb:
664 664 if b in visible(bb):
665 665 vis = True
666 666 break
667 667 if not vis:
668 668 l = out.setdefault(h, [])
669 669 l[len(l):] = self.nodetags(b)
670 670 return out
671 671
672 672 def branches(self, nodes):
673 673 if not nodes: nodes = [self.changelog.tip()]
674 674 b = []
675 675 for n in nodes:
676 676 t = n
677 677 while n:
678 678 p = self.changelog.parents(n)
679 679 if p[1] != nullid or p[0] == nullid:
680 680 b.append((t, n, p[0], p[1]))
681 681 break
682 682 n = p[0]
683 683 return b
684 684
685 685 def between(self, pairs):
686 686 r = []
687 687
688 688 for top, bottom in pairs:
689 689 n, l, i = top, [], 0
690 690 f = 1
691 691
692 692 while n != bottom:
693 693 p = self.changelog.parents(n)[0]
694 694 if i == f:
695 695 l.append(n)
696 696 f = f * 2
697 697 n = p
698 698 i += 1
699 699
700 700 r.append(l)
701 701
702 702 return r
703 703
704 704 def newer(self, nodes):
705 705 m = {}
706 706 nl = []
707 707 pm = {}
708 708 cl = self.changelog
709 709 t = l = cl.count()
710 710
711 711 # find the lowest numbered node
712 712 for n in nodes:
713 713 l = min(l, cl.rev(n))
714 714 m[n] = 1
715 715
716 716 for i in xrange(l, t):
717 717 n = cl.node(i)
718 718 if n in m: # explicitly listed
719 719 pm[n] = 1
720 720 nl.append(n)
721 721 continue
722 722 for p in cl.parents(n):
723 723 if p in pm: # parent listed
724 724 pm[n] = 1
725 725 nl.append(n)
726 726 break
727 727
728 728 return nl
729 729
730 730 def findincoming(self, remote, base=None, heads=None):
731 731 m = self.changelog.nodemap
732 732 search = []
733 733 fetch = {}
734 734 seen = {}
735 735 seenbranch = {}
736 736 if base == None:
737 737 base = {}
738 738
739 739 # assume we're closer to the tip than the root
740 740 # and start by examining the heads
741 self.ui.status("searching for changes\n")
741 self.ui.status(_("searching for changes\n"))
742 742
743 743 if not heads:
744 744 heads = remote.heads()
745 745
746 746 unknown = []
747 747 for h in heads:
748 748 if h not in m:
749 749 unknown.append(h)
750 750 else:
751 751 base[h] = 1
752 752
753 753 if not unknown:
754 754 return None
755 755
756 756 rep = {}
757 757 reqcnt = 0
758 758
759 759 # search through remote branches
760 760 # a 'branch' here is a linear segment of history, with four parts:
761 761 # head, root, first parent, second parent
762 762 # (a branch always has two parents (or none) by definition)
763 763 unknown = remote.branches(unknown)
764 764 while unknown:
765 765 r = []
766 766 while unknown:
767 767 n = unknown.pop(0)
768 768 if n[0] in seen:
769 769 continue
770 770
771 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
771 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
772 772 if n[0] == nullid:
773 773 break
774 774 if n in seenbranch:
775 self.ui.debug("branch already found\n")
775 self.ui.debug(_("branch already found\n"))
776 776 continue
777 777 if n[1] and n[1] in m: # do we know the base?
778 self.ui.debug("found incomplete branch %s:%s\n"
778 self.ui.debug(_("found incomplete branch %s:%s\n")
779 779 % (short(n[0]), short(n[1])))
780 780 search.append(n) # schedule branch range for scanning
781 781 seenbranch[n] = 1
782 782 else:
783 783 if n[1] not in seen and n[1] not in fetch:
784 784 if n[2] in m and n[3] in m:
785 self.ui.debug("found new changeset %s\n" %
785 self.ui.debug(_("found new changeset %s\n") %
786 786 short(n[1]))
787 787 fetch[n[1]] = 1 # earliest unknown
788 788 base[n[2]] = 1 # latest known
789 789 continue
790 790
791 791 for a in n[2:4]:
792 792 if a not in rep:
793 793 r.append(a)
794 794 rep[a] = 1
795 795
796 796 seen[n[0]] = 1
797 797
798 798 if r:
799 799 reqcnt += 1
800 self.ui.debug("request %d: %s\n" %
800 self.ui.debug(_("request %d: %s\n") %
801 801 (reqcnt, " ".join(map(short, r))))
802 802 for p in range(0, len(r), 10):
803 803 for b in remote.branches(r[p:p+10]):
804 self.ui.debug("received %s:%s\n" %
804 self.ui.debug(_("received %s:%s\n") %
805 805 (short(b[0]), short(b[1])))
806 806 if b[0] in m:
807 self.ui.debug("found base node %s\n" % short(b[0]))
807 self.ui.debug(_("found base node %s\n") % short(b[0]))
808 808 base[b[0]] = 1
809 809 elif b[0] not in seen:
810 810 unknown.append(b)
811 811
812 812 # do binary search on the branches we found
813 813 while search:
814 814 n = search.pop(0)
815 815 reqcnt += 1
816 816 l = remote.between([(n[0], n[1])])[0]
817 817 l.append(n[1])
818 818 p = n[0]
819 819 f = 1
820 820 for i in l:
821 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
821 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
822 822 if i in m:
823 823 if f <= 2:
824 self.ui.debug("found new branch changeset %s\n" %
824 self.ui.debug(_("found new branch changeset %s\n") %
825 825 short(p))
826 826 fetch[p] = 1
827 827 base[i] = 1
828 828 else:
829 self.ui.debug("narrowed branch search to %s:%s\n"
829 self.ui.debug(_("narrowed branch search to %s:%s\n")
830 830 % (short(p), short(i)))
831 831 search.append((p, i))
832 832 break
833 833 p, f = i, f * 2
834 834
835 835 # sanity check our fetch list
836 836 for f in fetch.keys():
837 837 if f in m:
838 raise repo.RepoError("already have changeset " + short(f[:4]))
838 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
839 839
840 840 if base.keys() == [nullid]:
841 self.ui.warn("warning: pulling from an unrelated repository!\n")
841 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
842 842
843 self.ui.note("found new changesets starting at " +
843 self.ui.note(_("found new changesets starting at ") +
844 844 " ".join([short(f) for f in fetch]) + "\n")
845 845
846 self.ui.debug("%d total queries\n" % reqcnt)
846 self.ui.debug(_("%d total queries\n") % reqcnt)
847 847
848 848 return fetch.keys()
849 849
850 850 def findoutgoing(self, remote, base=None, heads=None):
851 851 if base == None:
852 852 base = {}
853 853 self.findincoming(remote, base, heads)
854 854
855 self.ui.debug("common changesets up to "
855 self.ui.debug(_("common changesets up to ")
856 856 + " ".join(map(short, base.keys())) + "\n")
857 857
858 858 remain = dict.fromkeys(self.changelog.nodemap)
859 859
860 860 # prune everything remote has from the tree
861 861 del remain[nullid]
862 862 remove = base.keys()
863 863 while remove:
864 864 n = remove.pop(0)
865 865 if n in remain:
866 866 del remain[n]
867 867 for p in self.changelog.parents(n):
868 868 remove.append(p)
869 869
870 870 # find every node whose parents have been pruned
871 871 subset = []
872 872 for n in remain:
873 873 p1, p2 = self.changelog.parents(n)
874 874 if p1 not in remain and p2 not in remain:
875 875 subset.append(n)
876 876
877 877 # this is the set of all roots we have to push
878 878 return subset
879 879
880 880 def pull(self, remote):
881 881 lock = self.lock()
882 882
883 883 # if we have an empty repo, fetch everything
884 884 if self.changelog.tip() == nullid:
885 self.ui.status("requesting all changes\n")
885 self.ui.status(_("requesting all changes\n"))
886 886 fetch = [nullid]
887 887 else:
888 888 fetch = self.findincoming(remote)
889 889
890 890 if not fetch:
891 self.ui.status("no changes found\n")
891 self.ui.status(_("no changes found\n"))
892 892 return 1
893 893
894 894 cg = remote.changegroup(fetch)
895 895 return self.addchangegroup(cg)
896 896
897 897 def push(self, remote, force=False):
898 898 lock = remote.lock()
899 899
900 900 base = {}
901 901 heads = remote.heads()
902 902 inc = self.findincoming(remote, base, heads)
903 903 if not force and inc:
904 self.ui.warn("abort: unsynced remote changes!\n")
905 self.ui.status("(did you forget to sync? use push -f to force)\n")
904 self.ui.warn(_("abort: unsynced remote changes!\n"))
905 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
906 906 return 1
907 907
908 908 update = self.findoutgoing(remote, base)
909 909 if not update:
910 self.ui.status("no changes found\n")
910 self.ui.status(_("no changes found\n"))
911 911 return 1
912 912 elif not force:
913 913 if len(heads) < len(self.changelog.heads()):
914 self.ui.warn("abort: push creates new remote branches!\n")
915 self.ui.status("(did you forget to merge?" +
916 " use push -f to force)\n")
914 self.ui.warn(_("abort: push creates new remote branches!\n"))
915 self.ui.status(_("(did you forget to merge?"
916 " use push -f to force)\n"))
917 917 return 1
918 918
919 919 cg = self.changegroup(update)
920 920 return remote.addchangegroup(cg)
921 921
922 922 def changegroup(self, basenodes):
923 923 genread = util.chunkbuffer
924 924
925 925 def gengroup():
926 926 nodes = self.newer(basenodes)
927 927
928 928 # construct the link map
929 929 linkmap = {}
930 930 for n in nodes:
931 931 linkmap[self.changelog.rev(n)] = n
932 932
933 933 # construct a list of all changed files
934 934 changed = {}
935 935 for n in nodes:
936 936 c = self.changelog.read(n)
937 937 for f in c[3]:
938 938 changed[f] = 1
939 939 changed = changed.keys()
940 940 changed.sort()
941 941
942 942 # the changegroup is changesets + manifests + all file revs
943 943 revs = [ self.changelog.rev(n) for n in nodes ]
944 944
945 945 for y in self.changelog.group(linkmap): yield y
946 946 for y in self.manifest.group(linkmap): yield y
947 947 for f in changed:
948 948 yield struct.pack(">l", len(f) + 4) + f
949 949 g = self.file(f).group(linkmap)
950 950 for y in g:
951 951 yield y
952 952
953 953 yield struct.pack(">l", 0)
954 954
955 955 return genread(gengroup())
956 956
957 957 def addchangegroup(self, source):
958 958
959 959 def getchunk():
960 960 d = source.read(4)
961 961 if not d: return ""
962 962 l = struct.unpack(">l", d)[0]
963 963 if l <= 4: return ""
964 964 d = source.read(l - 4)
965 965 if len(d) < l - 4:
966 raise repo.RepoError("premature EOF reading chunk" +
967 " (got %d bytes, expected %d)"
966 raise repo.RepoError(_("premature EOF reading chunk"
967 " (got %d bytes, expected %d)")
968 968 % (len(d), l - 4))
969 969 return d
970 970
971 971 def getgroup():
972 972 while 1:
973 973 c = getchunk()
974 974 if not c: break
975 975 yield c
976 976
977 977 def csmap(x):
978 self.ui.debug("add changeset %s\n" % short(x))
978 self.ui.debug(_("add changeset %s\n") % short(x))
979 979 return self.changelog.count()
980 980
981 981 def revmap(x):
982 982 return self.changelog.rev(x)
983 983
984 984 if not source: return
985 985 changesets = files = revisions = 0
986 986
987 987 tr = self.transaction()
988 988
989 989 oldheads = len(self.changelog.heads())
990 990
991 991 # pull off the changeset group
992 self.ui.status("adding changesets\n")
992 self.ui.status(_("adding changesets\n"))
993 993 co = self.changelog.tip()
994 994 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
995 995 cnr, cor = map(self.changelog.rev, (cn, co))
996 996 if cn == nullid:
997 997 cnr = cor
998 998 changesets = cnr - cor
999 999
1000 1000 # pull off the manifest group
1001 self.ui.status("adding manifests\n")
1001 self.ui.status(_("adding manifests\n"))
1002 1002 mm = self.manifest.tip()
1003 1003 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1004 1004
1005 1005 # process the files
1006 self.ui.status("adding file changes\n")
1006 self.ui.status(_("adding file changes\n"))
1007 1007 while 1:
1008 1008 f = getchunk()
1009 1009 if not f: break
1010 self.ui.debug("adding %s revisions\n" % f)
1010 self.ui.debug(_("adding %s revisions\n") % f)
1011 1011 fl = self.file(f)
1012 1012 o = fl.count()
1013 1013 n = fl.addgroup(getgroup(), revmap, tr)
1014 1014 revisions += fl.count() - o
1015 1015 files += 1
1016 1016
1017 1017 newheads = len(self.changelog.heads())
1018 1018 heads = ""
1019 1019 if oldheads and newheads > oldheads:
1020 heads = " (+%d heads)" % (newheads - oldheads)
1020 heads = _(" (+%d heads)") % (newheads - oldheads)
1021 1021
1022 self.ui.status(("added %d changesets" +
1023 " with %d changes to %d files%s\n")
1024 % (changesets, revisions, files, heads))
1022 self.ui.status(_("added %d changesets"
1023 " with %d changes to %d files%s\n")
1024 % (changesets, revisions, files, heads))
1025 1025
1026 1026 tr.close()
1027 1027
1028 1028 if changesets > 0:
1029 1029 if not self.hook("changegroup",
1030 1030 node=hex(self.changelog.node(cor+1))):
1031 self.ui.warn("abort: changegroup hook returned failure!\n")
1031 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1032 1032 return 1
1033 1033
1034 1034 for i in range(cor + 1, cnr + 1):
1035 1035 self.hook("commit", node=hex(self.changelog.node(i)))
1036 1036
1037 1037 return
1038 1038
1039 1039 def update(self, node, allow=False, force=False, choose=None,
1040 1040 moddirstate=True):
1041 1041 pl = self.dirstate.parents()
1042 1042 if not force and pl[1] != nullid:
1043 self.ui.warn("aborting: outstanding uncommitted merges\n")
1043 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1044 1044 return 1
1045 1045
1046 1046 p1, p2 = pl[0], node
1047 1047 pa = self.changelog.ancestor(p1, p2)
1048 1048 m1n = self.changelog.read(p1)[0]
1049 1049 m2n = self.changelog.read(p2)[0]
1050 1050 man = self.manifest.ancestor(m1n, m2n)
1051 1051 m1 = self.manifest.read(m1n)
1052 1052 mf1 = self.manifest.readflags(m1n)
1053 1053 m2 = self.manifest.read(m2n)
1054 1054 mf2 = self.manifest.readflags(m2n)
1055 1055 ma = self.manifest.read(man)
1056 1056 mfa = self.manifest.readflags(man)
1057 1057
1058 1058 (c, a, d, u) = self.changes()
1059 1059
1060 1060 # is this a jump, or a merge? i.e. is there a linear path
1061 1061 # from p1 to p2?
1062 1062 linear_path = (pa == p1 or pa == p2)
1063 1063
1064 1064 # resolve the manifest to determine which files
1065 1065 # we care about merging
1066 self.ui.note("resolving manifests\n")
1067 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1066 self.ui.note(_("resolving manifests\n"))
1067 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1068 1068 (force, allow, moddirstate, linear_path))
1069 self.ui.debug(" ancestor %s local %s remote %s\n" %
1069 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1070 1070 (short(man), short(m1n), short(m2n)))
1071 1071
1072 1072 merge = {}
1073 1073 get = {}
1074 1074 remove = []
1075 1075
1076 1076 # construct a working dir manifest
1077 1077 mw = m1.copy()
1078 1078 mfw = mf1.copy()
1079 1079 umap = dict.fromkeys(u)
1080 1080
1081 1081 for f in a + c + u:
1082 1082 mw[f] = ""
1083 1083 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1084 1084
1085 1085 for f in d:
1086 1086 if f in mw: del mw[f]
1087 1087
1088 1088 # If we're jumping between revisions (as opposed to merging),
1089 1089 # and if neither the working directory nor the target rev has
1090 1090 # the file, then we need to remove it from the dirstate, to
1091 1091 # prevent the dirstate from listing the file when it is no
1092 1092 # longer in the manifest.
1093 1093 if moddirstate and linear_path and f not in m2:
1094 1094 self.dirstate.forget((f,))
1095 1095
1096 1096 # Compare manifests
1097 1097 for f, n in mw.iteritems():
1098 1098 if choose and not choose(f): continue
1099 1099 if f in m2:
1100 1100 s = 0
1101 1101
1102 1102 # is the wfile new since m1, and match m2?
1103 1103 if f not in m1:
1104 1104 t1 = self.wread(f)
1105 1105 t2 = self.file(f).read(m2[f])
1106 1106 if cmp(t1, t2) == 0:
1107 1107 n = m2[f]
1108 1108 del t1, t2
1109 1109
1110 1110 # are files different?
1111 1111 if n != m2[f]:
1112 1112 a = ma.get(f, nullid)
1113 1113 # are both different from the ancestor?
1114 1114 if n != a and m2[f] != a:
1115 self.ui.debug(" %s versions differ, resolve\n" % f)
1115 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1116 1116 # merge executable bits
1117 1117 # "if we changed or they changed, change in merge"
1118 1118 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1119 1119 mode = ((a^b) | (a^c)) ^ a
1120 1120 merge[f] = (m1.get(f, nullid), m2[f], mode)
1121 1121 s = 1
1122 1122 # are we clobbering?
1123 1123 # is remote's version newer?
1124 1124 # or are we going back in time?
1125 1125 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1126 self.ui.debug(" remote %s is newer, get\n" % f)
1126 self.ui.debug(_(" remote %s is newer, get\n") % f)
1127 1127 get[f] = m2[f]
1128 1128 s = 1
1129 1129 elif f in umap:
1130 1130 # this unknown file is the same as the checkout
1131 1131 get[f] = m2[f]
1132 1132
1133 1133 if not s and mfw[f] != mf2[f]:
1134 1134 if force:
1135 self.ui.debug(" updating permissions for %s\n" % f)
1135 self.ui.debug(_(" updating permissions for %s\n") % f)
1136 1136 util.set_exec(self.wjoin(f), mf2[f])
1137 1137 else:
1138 1138 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1139 1139 mode = ((a^b) | (a^c)) ^ a
1140 1140 if mode != b:
1141 self.ui.debug(" updating permissions for %s\n" % f)
1141 self.ui.debug(_(" updating permissions for %s\n") % f)
1142 1142 util.set_exec(self.wjoin(f), mode)
1143 1143 del m2[f]
1144 1144 elif f in ma:
1145 1145 if n != ma[f]:
1146 r = "d"
1146 r = _("d")
1147 1147 if not force and (linear_path or allow):
1148 1148 r = self.ui.prompt(
1149 (" local changed %s which remote deleted\n" % f) +
1150 "(k)eep or (d)elete?", "[kd]", "k")
1151 if r == "d":
1149 (_(" local changed %s which remote deleted\n") % f) +
1150 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1151 if r == _("d"):
1152 1152 remove.append(f)
1153 1153 else:
1154 self.ui.debug("other deleted %s\n" % f)
1154 self.ui.debug(_("other deleted %s\n") % f)
1155 1155 remove.append(f) # other deleted it
1156 1156 else:
1157 1157 # file is created on branch or in working directory
1158 1158 if force and f not in umap:
1159 self.ui.debug("remote deleted %s, clobbering\n" % f)
1159 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1160 1160 remove.append(f)
1161 1161 elif n == m1.get(f, nullid): # same as parent
1162 1162 if p2 == pa: # going backwards?
1163 self.ui.debug("remote deleted %s\n" % f)
1163 self.ui.debug(_("remote deleted %s\n") % f)
1164 1164 remove.append(f)
1165 1165 else:
1166 self.ui.debug("local modified %s, keeping\n" % f)
1166 self.ui.debug(_("local modified %s, keeping\n") % f)
1167 1167 else:
1168 self.ui.debug("working dir created %s, keeping\n" % f)
1168 self.ui.debug(_("working dir created %s, keeping\n") % f)
1169 1169
1170 1170 for f, n in m2.iteritems():
1171 1171 if choose and not choose(f): continue
1172 1172 if f[0] == "/": continue
1173 1173 if f in ma and n != ma[f]:
1174 r = "k"
1174 r = _("k")
1175 1175 if not force and (linear_path or allow):
1176 1176 r = self.ui.prompt(
1177 ("remote changed %s which local deleted\n" % f) +
1178 "(k)eep or (d)elete?", "[kd]", "k")
1179 if r == "k": get[f] = n
1177 (_("remote changed %s which local deleted\n") % f) +
1178 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1179 if r == _("k"): get[f] = n
1180 1180 elif f not in ma:
1181 self.ui.debug("remote created %s\n" % f)
1181 self.ui.debug(_("remote created %s\n") % f)
1182 1182 get[f] = n
1183 1183 else:
1184 1184 if force or p2 == pa: # going backwards?
1185 self.ui.debug("local deleted %s, recreating\n" % f)
1185 self.ui.debug(_("local deleted %s, recreating\n") % f)
1186 1186 get[f] = n
1187 1187 else:
1188 self.ui.debug("local deleted %s\n" % f)
1188 self.ui.debug(_("local deleted %s\n") % f)
1189 1189
1190 1190 del mw, m1, m2, ma
1191 1191
1192 1192 if force:
1193 1193 for f in merge:
1194 1194 get[f] = merge[f][1]
1195 1195 merge = {}
1196 1196
1197 1197 if linear_path or force:
1198 1198 # we don't need to do any magic, just jump to the new rev
1199 1199 branch_merge = False
1200 1200 p1, p2 = p2, nullid
1201 1201 else:
1202 1202 if not allow:
1203 self.ui.status("this update spans a branch" +
1204 " affecting the following files:\n")
1203 self.ui.status(_("this update spans a branch"
1204 " affecting the following files:\n"))
1205 1205 fl = merge.keys() + get.keys()
1206 1206 fl.sort()
1207 1207 for f in fl:
1208 1208 cf = ""
1209 if f in merge: cf = " (resolve)"
1209 if f in merge: cf = _(" (resolve)")
1210 1210 self.ui.status(" %s%s\n" % (f, cf))
1211 self.ui.warn("aborting update spanning branches!\n")
1212 self.ui.status("(use update -m to merge across branches" +
1213 " or -C to lose changes)\n")
1211 self.ui.warn(_("aborting update spanning branches!\n"))
1212 self.ui.status(_("(use update -m to merge across branches"
1213 " or -C to lose changes)\n"))
1214 1214 return 1
1215 1215 branch_merge = True
1216 1216
1217 1217 if moddirstate:
1218 1218 self.dirstate.setparents(p1, p2)
1219 1219
1220 1220 # get the files we don't need to change
1221 1221 files = get.keys()
1222 1222 files.sort()
1223 1223 for f in files:
1224 1224 if f[0] == "/": continue
1225 self.ui.note("getting %s\n" % f)
1225 self.ui.note(_("getting %s\n") % f)
1226 1226 t = self.file(f).read(get[f])
1227 1227 try:
1228 1228 self.wwrite(f, t)
1229 1229 except IOError, e:
1230 1230 if e.errno != errno.ENOENT:
1231 1231 raise
1232 1232 os.makedirs(os.path.dirname(self.wjoin(f)))
1233 1233 self.wwrite(f, t)
1234 1234 util.set_exec(self.wjoin(f), mf2[f])
1235 1235 if moddirstate:
1236 1236 if branch_merge:
1237 1237 self.dirstate.update([f], 'n', st_mtime=-1)
1238 1238 else:
1239 1239 self.dirstate.update([f], 'n')
1240 1240
1241 1241 # merge the tricky bits
1242 1242 files = merge.keys()
1243 1243 files.sort()
1244 1244 for f in files:
1245 self.ui.status("merging %s\n" % f)
1245 self.ui.status(_("merging %s\n") % f)
1246 1246 my, other, flag = merge[f]
1247 1247 self.merge3(f, my, other)
1248 1248 util.set_exec(self.wjoin(f), flag)
1249 1249 if moddirstate:
1250 1250 if branch_merge:
1251 1251 # We've done a branch merge, mark this file as merged
1252 1252 # so that we properly record the merger later
1253 1253 self.dirstate.update([f], 'm')
1254 1254 else:
1255 1255 # We've update-merged a locally modified file, so
1256 1256 # we set the dirstate to emulate a normal checkout
1257 1257 # of that file some time in the past. Thus our
1258 1258 # merge will appear as a normal local file
1259 1259 # modification.
1260 1260 f_len = len(self.file(f).read(other))
1261 1261 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1262 1262
1263 1263 remove.sort()
1264 1264 for f in remove:
1265 self.ui.note("removing %s\n" % f)
1265 self.ui.note(_("removing %s\n") % f)
1266 1266 try:
1267 1267 os.unlink(self.wjoin(f))
1268 1268 except OSError, inst:
1269 1269 if inst.errno != errno.ENOENT:
1270 self.ui.warn("update failed to remove %s: %s!\n" %
1270 self.ui.warn(_("update failed to remove %s: %s!\n") %
1271 1271 (f, inst.strerror))
1272 1272 # try removing directories that might now be empty
1273 1273 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1274 1274 except: pass
1275 1275 if moddirstate:
1276 1276 if branch_merge:
1277 1277 self.dirstate.update(remove, 'r')
1278 1278 else:
1279 1279 self.dirstate.forget(remove)
1280 1280
1281 1281 def merge3(self, fn, my, other):
1282 1282 """perform a 3-way merge in the working directory"""
1283 1283
1284 1284 def temp(prefix, node):
1285 1285 pre = "%s~%s." % (os.path.basename(fn), prefix)
1286 1286 (fd, name) = tempfile.mkstemp("", pre)
1287 1287 f = os.fdopen(fd, "wb")
1288 1288 self.wwrite(fn, fl.read(node), f)
1289 1289 f.close()
1290 1290 return name
1291 1291
1292 1292 fl = self.file(fn)
1293 1293 base = fl.ancestor(my, other)
1294 1294 a = self.wjoin(fn)
1295 1295 b = temp("base", base)
1296 1296 c = temp("other", other)
1297 1297
1298 self.ui.note("resolving %s\n" % fn)
1299 self.ui.debug("file %s: my %s other %s ancestor %s\n" %
1298 self.ui.note(_("resolving %s\n") % fn)
1299 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1300 1300 (fn, short(my), short(other), short(base)))
1301 1301
1302 1302 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1303 1303 or "hgmerge")
1304 1304 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1305 1305 if r:
1306 self.ui.warn("merging %s failed!\n" % fn)
1306 self.ui.warn(_("merging %s failed!\n") % fn)
1307 1307
1308 1308 os.unlink(b)
1309 1309 os.unlink(c)
1310 1310
1311 1311 def verify(self):
1312 1312 filelinkrevs = {}
1313 1313 filenodes = {}
1314 1314 changesets = revisions = files = 0
1315 1315 errors = [0]
1316 1316 neededmanifests = {}
1317 1317
1318 1318 def err(msg):
1319 1319 self.ui.warn(msg + "\n")
1320 1320 errors[0] += 1
1321 1321
1322 1322 seen = {}
1323 self.ui.status("checking changesets\n")
1323 self.ui.status(_("checking changesets\n"))
1324 1324 for i in range(self.changelog.count()):
1325 1325 changesets += 1
1326 1326 n = self.changelog.node(i)
1327 1327 l = self.changelog.linkrev(n)
1328 1328 if l != i:
1329 err("incorrect link (%d) for changeset revision %d" % (l, i))
1329 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1330 1330 if n in seen:
1331 err("duplicate changeset at revision %d" % i)
1331 err(_("duplicate changeset at revision %d") % i)
1332 1332 seen[n] = 1
1333 1333
1334 1334 for p in self.changelog.parents(n):
1335 1335 if p not in self.changelog.nodemap:
1336 err("changeset %s has unknown parent %s" %
1336 err(_("changeset %s has unknown parent %s") %
1337 1337 (short(n), short(p)))
1338 1338 try:
1339 1339 changes = self.changelog.read(n)
1340 1340 except Exception, inst:
1341 err("unpacking changeset %s: %s" % (short(n), inst))
1341 err(_("unpacking changeset %s: %s") % (short(n), inst))
1342 1342
1343 1343 neededmanifests[changes[0]] = n
1344 1344
1345 1345 for f in changes[3]:
1346 1346 filelinkrevs.setdefault(f, []).append(i)
1347 1347
1348 1348 seen = {}
1349 self.ui.status("checking manifests\n")
1349 self.ui.status(_("checking manifests\n"))
1350 1350 for i in range(self.manifest.count()):
1351 1351 n = self.manifest.node(i)
1352 1352 l = self.manifest.linkrev(n)
1353 1353
1354 1354 if l < 0 or l >= self.changelog.count():
1355 err("bad manifest link (%d) at revision %d" % (l, i))
1355 err(_("bad manifest link (%d) at revision %d") % (l, i))
1356 1356
1357 1357 if n in neededmanifests:
1358 1358 del neededmanifests[n]
1359 1359
1360 1360 if n in seen:
1361 err("duplicate manifest at revision %d" % i)
1361 err(_("duplicate manifest at revision %d") % i)
1362 1362
1363 1363 seen[n] = 1
1364 1364
1365 1365 for p in self.manifest.parents(n):
1366 1366 if p not in self.manifest.nodemap:
1367 err("manifest %s has unknown parent %s" %
1367 err(_("manifest %s has unknown parent %s") %
1368 1368 (short(n), short(p)))
1369 1369
1370 1370 try:
1371 1371 delta = mdiff.patchtext(self.manifest.delta(n))
1372 1372 except KeyboardInterrupt:
1373 self.ui.warn("interrupted")
1373 self.ui.warn(_("interrupted"))
1374 1374 raise
1375 1375 except Exception, inst:
1376 err("unpacking manifest %s: %s" % (short(n), inst))
1376 err(_("unpacking manifest %s: %s") % (short(n), inst))
1377 1377
1378 1378 ff = [ l.split('\0') for l in delta.splitlines() ]
1379 1379 for f, fn in ff:
1380 1380 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1381 1381
1382 self.ui.status("crosschecking files in changesets and manifests\n")
1382 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1383 1383
1384 1384 for m,c in neededmanifests.items():
1385 err("Changeset %s refers to unknown manifest %s" %
1385 err(_("Changeset %s refers to unknown manifest %s") %
1386 1386 (short(m), short(c)))
1387 1387 del neededmanifests
1388 1388
1389 1389 for f in filenodes:
1390 1390 if f not in filelinkrevs:
1391 err("file %s in manifest but not in changesets" % f)
1391 err(_("file %s in manifest but not in changesets") % f)
1392 1392
1393 1393 for f in filelinkrevs:
1394 1394 if f not in filenodes:
1395 err("file %s in changeset but not in manifest" % f)
1395 err(_("file %s in changeset but not in manifest") % f)
1396 1396
1397 self.ui.status("checking files\n")
1397 self.ui.status(_("checking files\n"))
1398 1398 ff = filenodes.keys()
1399 1399 ff.sort()
1400 1400 for f in ff:
1401 1401 if f == "/dev/null": continue
1402 1402 files += 1
1403 1403 fl = self.file(f)
1404 1404 nodes = { nullid: 1 }
1405 1405 seen = {}
1406 1406 for i in range(fl.count()):
1407 1407 revisions += 1
1408 1408 n = fl.node(i)
1409 1409
1410 1410 if n in seen:
1411 err("%s: duplicate revision %d" % (f, i))
1411 err(_("%s: duplicate revision %d") % (f, i))
1412 1412 if n not in filenodes[f]:
1413 err("%s: %d:%s not in manifests" % (f, i, short(n)))
1413 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1414 1414 else:
1415 1415 del filenodes[f][n]
1416 1416
1417 1417 flr = fl.linkrev(n)
1418 1418 if flr not in filelinkrevs[f]:
1419 err("%s:%s points to unexpected changeset %d"
1419 err(_("%s:%s points to unexpected changeset %d")
1420 1420 % (f, short(n), flr))
1421 1421 else:
1422 1422 filelinkrevs[f].remove(flr)
1423 1423
1424 1424 # verify contents
1425 1425 try:
1426 1426 t = fl.read(n)
1427 1427 except Exception, inst:
1428 err("unpacking file %s %s: %s" % (f, short(n), inst))
1428 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1429 1429
1430 1430 # verify parents
1431 1431 (p1, p2) = fl.parents(n)
1432 1432 if p1 not in nodes:
1433 err("file %s:%s unknown parent 1 %s" %
1433 err(_("file %s:%s unknown parent 1 %s") %
1434 1434 (f, short(n), short(p1)))
1435 1435 if p2 not in nodes:
1436 err("file %s:%s unknown parent 2 %s" %
1436 err(_("file %s:%s unknown parent 2 %s") %
1437 1437 (f, short(n), short(p1)))
1438 1438 nodes[n] = 1
1439 1439
1440 1440 # cross-check
1441 1441 for node in filenodes[f]:
1442 err("node %s in manifests not in %s" % (hex(node), f))
1442 err(_("node %s in manifests not in %s") % (hex(node), f))
1443 1443
1444 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1444 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1445 1445 (files, changesets, revisions))
1446 1446
1447 1447 if errors[0]:
1448 self.ui.warn("%d integrity errors encountered!\n" % errors[0])
1448 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1449 1449 return 1
@@ -1,168 +1,168 b''
1 1 # manifest.py - manifest revision class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import sys, struct
9 9 from revlog import *
10 10 from i18n import gettext as _
11 11 from demandload import *
12 12 demandload(globals(), "bisect")
13 13
14 14 class manifest(revlog):
15 15 def __init__(self, opener):
16 16 self.mapcache = None
17 17 self.listcache = None
18 18 self.addlist = None
19 19 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
20 20
21 21 def read(self, node):
22 22 if node == nullid: return {} # don't upset local cache
23 23 if self.mapcache and self.mapcache[0] == node:
24 24 return self.mapcache[1]
25 25 text = self.revision(node)
26 26 map = {}
27 27 flag = {}
28 28 self.listcache = (text, text.splitlines(1))
29 29 for l in self.listcache[1]:
30 30 (f, n) = l.split('\0')
31 31 map[f] = bin(n[:40])
32 32 flag[f] = (n[40:-1] == "x")
33 33 self.mapcache = (node, map, flag)
34 34 return map
35 35
36 36 def readflags(self, node):
37 37 if node == nullid: return {} # don't upset local cache
38 38 if not self.mapcache or self.mapcache[0] != node:
39 39 self.read(node)
40 40 return self.mapcache[2]
41 41
42 42 def diff(self, a, b):
43 43 # this is sneaky, as we're not actually using a and b
44 44 if self.listcache and self.addlist and self.listcache[0] == a:
45 45 d = mdiff.diff(self.listcache[1], self.addlist, 1)
46 46 if mdiff.patch(a, d) != b:
47 raise AssertionError("sortdiff failed!")
47 raise AssertionError(_("sortdiff failed!"))
48 48 return d
49 49 else:
50 50 return mdiff.textdiff(a, b)
51 51
52 52 def add(self, map, flags, transaction, link, p1=None, p2=None,
53 53 changed=None):
54 54 # directly generate the mdiff delta from the data collected during
55 55 # the bisect loop below
56 56 def gendelta(delta):
57 57 i = 0
58 58 result = []
59 59 while i < len(delta):
60 60 start = delta[i][2]
61 61 end = delta[i][3]
62 62 l = delta[i][4]
63 63 if l == None:
64 64 l = ""
65 65 while i < len(delta) - 1 and start <= delta[i+1][2] \
66 66 and end >= delta[i+1][2]:
67 67 if delta[i+1][3] > end:
68 68 end = delta[i+1][3]
69 69 if delta[i+1][4]:
70 70 l += delta[i+1][4]
71 71 i += 1
72 72 result.append(struct.pack(">lll", start, end, len(l)) + l)
73 73 i += 1
74 74 return result
75 75
76 76 # apply the changes collected during the bisect loop to our addlist
77 77 def addlistdelta(addlist, delta):
78 78 # apply the deltas to the addlist. start from the bottom up
79 79 # so changes to the offsets don't mess things up.
80 80 i = len(delta)
81 81 while i > 0:
82 82 i -= 1
83 83 start = delta[i][0]
84 84 end = delta[i][1]
85 85 if delta[i][4]:
86 86 addlist[start:end] = [delta[i][4]]
87 87 else:
88 88 del addlist[start:end]
89 89 return addlist
90 90
91 91 # calculate the byte offset of the start of each line in the
92 92 # manifest
93 93 def calcoffsets(addlist):
94 94 offsets = [0] * (len(addlist) + 1)
95 95 offset = 0
96 96 i = 0
97 97 while i < len(addlist):
98 98 offsets[i] = offset
99 99 offset += len(addlist[i])
100 100 i += 1
101 101 offsets[i] = offset
102 102 return offsets
103 103
104 104 # if we're using the listcache, make sure it is valid and
105 105 # parented by the same node we're diffing against
106 106 if not changed or not self.listcache or not p1 or \
107 107 self.mapcache[0] != p1:
108 108 files = map.keys()
109 109 files.sort()
110 110
111 111 self.addlist = ["%s\000%s%s\n" %
112 112 (f, hex(map[f]), flags[f] and "x" or '')
113 113 for f in files]
114 114 cachedelta = None
115 115 else:
116 116 addlist = self.listcache[1]
117 117
118 118 # find the starting offset for each line in the add list
119 119 offsets = calcoffsets(addlist)
120 120
121 121 # combine the changed lists into one list for sorting
122 122 work = [[x, 0] for x in changed[0]]
123 123 work[len(work):] = [[x, 1] for x in changed[1]]
124 124 work.sort()
125 125
126 126 delta = []
127 127 bs = 0
128 128
129 129 for w in work:
130 130 f = w[0]
131 131 # bs will either be the index of the item or the insert point
132 132 bs = bisect.bisect(addlist, f, bs)
133 133 if bs < len(addlist):
134 134 fn = addlist[bs][:addlist[bs].index('\0')]
135 135 else:
136 136 fn = None
137 137 if w[1] == 0:
138 138 l = "%s\000%s%s\n" % (f, hex(map[f]),
139 139 flags[f] and "x" or '')
140 140 else:
141 141 l = None
142 142 start = bs
143 143 if fn != f:
144 144 # item not found, insert a new one
145 145 end = bs
146 146 if w[1] == 1:
147 147 raise AssertionError(
148 "failed to remove %s from manifest\n" % f)
148 _("failed to remove %s from manifest\n") % f)
149 149 else:
150 150 # item is found, replace/delete the existing line
151 151 end = bs + 1
152 152 delta.append([start, end, offsets[start], offsets[end], l])
153 153
154 154 self.addlist = addlistdelta(addlist, delta)
155 155 if self.mapcache[0] == self.tip():
156 156 cachedelta = "".join(gendelta(delta))
157 157 else:
158 158 cachedelta = None
159 159
160 160 text = "".join(self.addlist)
161 161 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
162 raise AssertionError("manifest delta failure\n")
162 raise AssertionError(_("manifest delta failure\n"))
163 163 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
164 164 self.mapcache = (n, map, flags)
165 165 self.listcache = (text, self.addlist)
166 166 self.addlist = None
167 167
168 168 return n
@@ -1,675 +1,675 b''
1 1 """
2 2 revlog.py - storage back-end for mercurial
3 3
4 4 This provides efficient delta storage with O(1) retrieve and append
5 5 and O(changes) merge between branches
6 6
7 7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import *
14 14 from i18n import gettext as _
15 15 from demandload import demandload
16 16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
17 17
18 18 def hash(text, p1, p2):
19 19 """generate a hash from the given text and its parent hashes
20 20
21 21 This hash combines both the current file contents and its history
22 22 in a manner that makes it easy to distinguish nodes with the same
23 23 content in the revision graph.
24 24 """
25 25 l = [p1, p2]
26 26 l.sort()
27 27 s = sha.new(l[0])
28 28 s.update(l[1])
29 29 s.update(text)
30 30 return s.digest()
31 31
32 32 def compress(text):
33 33 """ generate a possibly-compressed representation of text """
34 34 if not text: return text
35 35 if len(text) < 44:
36 36 if text[0] == '\0': return text
37 37 return 'u' + text
38 38 bin = zlib.compress(text)
39 39 if len(bin) > len(text):
40 40 if text[0] == '\0': return text
41 41 return 'u' + text
42 42 return bin
43 43
44 44 def decompress(bin):
45 45 """ decompress the given input """
46 46 if not bin: return bin
47 47 t = bin[0]
48 48 if t == '\0': return bin
49 49 if t == 'x': return zlib.decompress(bin)
50 50 if t == 'u': return bin[1:]
51 raise RevlogError("unknown compression type %s" % t)
51 raise RevlogError(_("unknown compression type %s") % t)
52 52
53 53 indexformat = ">4l20s20s20s"
54 54
55 55 class lazyparser:
56 56 """
57 57 this class avoids the need to parse the entirety of large indices
58 58
59 59 By default we parse and load 1000 entries at a time.
60 60
61 61 If no position is specified, we load the whole index, and replace
62 62 the lazy objects in revlog with the underlying objects for
63 63 efficiency in cases where we look at most of the nodes.
64 64 """
65 65 def __init__(self, data, revlog):
66 66 self.data = data
67 67 self.s = struct.calcsize(indexformat)
68 68 self.l = len(data)/self.s
69 69 self.index = [None] * self.l
70 70 self.map = {nullid: -1}
71 71 self.all = 0
72 72 self.revlog = revlog
73 73
74 74 def load(self, pos=None):
75 75 if self.all: return
76 76 if pos is not None:
77 77 block = pos / 1000
78 78 i = block * 1000
79 79 end = min(self.l, i + 1000)
80 80 else:
81 81 self.all = 1
82 82 i = 0
83 83 end = self.l
84 84 self.revlog.index = self.index
85 85 self.revlog.nodemap = self.map
86 86
87 87 while i < end:
88 88 d = self.data[i * self.s: (i + 1) * self.s]
89 89 e = struct.unpack(indexformat, d)
90 90 self.index[i] = e
91 91 self.map[e[6]] = i
92 92 i += 1
93 93
94 94 class lazyindex:
95 95 """a lazy version of the index array"""
96 96 def __init__(self, parser):
97 97 self.p = parser
98 98 def __len__(self):
99 99 return len(self.p.index)
100 100 def load(self, pos):
101 101 self.p.load(pos)
102 102 return self.p.index[pos]
103 103 def __getitem__(self, pos):
104 104 return self.p.index[pos] or self.load(pos)
105 105 def append(self, e):
106 106 self.p.index.append(e)
107 107
108 108 class lazymap:
109 109 """a lazy version of the node map"""
110 110 def __init__(self, parser):
111 111 self.p = parser
112 112 def load(self, key):
113 113 if self.p.all: return
114 114 n = self.p.data.find(key)
115 115 if n < 0:
116 116 raise KeyError(key)
117 117 pos = n / self.p.s
118 118 self.p.load(pos)
119 119 def __contains__(self, key):
120 120 self.p.load()
121 121 return key in self.p.map
122 122 def __iter__(self):
123 123 yield nullid
124 124 for i in xrange(self.p.l):
125 125 try:
126 126 yield self.p.index[i][6]
127 127 except:
128 128 self.p.load(i)
129 129 yield self.p.index[i][6]
130 130 def __getitem__(self, key):
131 131 try:
132 132 return self.p.map[key]
133 133 except KeyError:
134 134 try:
135 135 self.load(key)
136 136 return self.p.map[key]
137 137 except KeyError:
138 138 raise KeyError("node " + hex(key))
139 139 def __setitem__(self, key, val):
140 140 self.p.map[key] = val
141 141
142 142 class RevlogError(Exception): pass
143 143
144 144 class revlog:
145 145 """
146 146 the underlying revision storage object
147 147
148 148 A revlog consists of two parts, an index and the revision data.
149 149
150 150 The index is a file with a fixed record size containing
151 151 information on each revision, includings its nodeid (hash), the
152 152 nodeids of its parents, the position and offset of its data within
153 153 the data file, and the revision it's based on. Finally, each entry
154 154 contains a linkrev entry that can serve as a pointer to external
155 155 data.
156 156
157 157 The revision data itself is a linear collection of data chunks.
158 158 Each chunk represents a revision and is usually represented as a
159 159 delta against the previous chunk. To bound lookup time, runs of
160 160 deltas are limited to about 2 times the length of the original
161 161 version data. This makes retrieval of a version proportional to
162 162 its size, or O(1) relative to the number of revisions.
163 163
164 164 Both pieces of the revlog are written to in an append-only
165 165 fashion, which means we never need to rewrite a file to insert or
166 166 remove data, and can use some simple techniques to avoid the need
167 167 for locking while reading.
168 168 """
169 169 def __init__(self, opener, indexfile, datafile):
170 170 """
171 171 create a revlog object
172 172
173 173 opener is a function that abstracts the file opening operation
174 174 and can be used to implement COW semantics or the like.
175 175 """
176 176 self.indexfile = indexfile
177 177 self.datafile = datafile
178 178 self.opener = opener
179 179 self.cache = None
180 180
181 181 try:
182 182 i = self.opener(self.indexfile).read()
183 183 except IOError, inst:
184 184 if inst.errno != errno.ENOENT:
185 185 raise
186 186 i = ""
187 187
188 188 if len(i) > 10000:
189 189 # big index, let's parse it on demand
190 190 parser = lazyparser(i, self)
191 191 self.index = lazyindex(parser)
192 192 self.nodemap = lazymap(parser)
193 193 else:
194 194 s = struct.calcsize(indexformat)
195 195 l = len(i) / s
196 196 self.index = [None] * l
197 197 m = [None] * l
198 198
199 199 n = 0
200 200 for f in xrange(0, len(i), s):
201 201 # offset, size, base, linkrev, p1, p2, nodeid
202 202 e = struct.unpack(indexformat, i[f:f + s])
203 203 m[n] = (e[6], n)
204 204 self.index[n] = e
205 205 n += 1
206 206
207 207 self.nodemap = dict(m)
208 208 self.nodemap[nullid] = -1
209 209
210 210 def tip(self): return self.node(len(self.index) - 1)
211 211 def count(self): return len(self.index)
212 212 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
213 213 def rev(self, node):
214 214 try:
215 215 return self.nodemap[node]
216 216 except KeyError:
217 raise RevlogError('%s: no node %s' % (self.indexfile, hex(node)))
217 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
218 218 def linkrev(self, node): return self.index[self.rev(node)][3]
219 219 def parents(self, node):
220 220 if node == nullid: return (nullid, nullid)
221 221 return self.index[self.rev(node)][4:6]
222 222
223 223 def start(self, rev): return self.index[rev][0]
224 224 def length(self, rev): return self.index[rev][1]
225 225 def end(self, rev): return self.start(rev) + self.length(rev)
226 226 def base(self, rev): return self.index[rev][2]
227 227
228 228 def reachable(self, rev, stop=None):
229 229 reachable = {}
230 230 visit = [rev]
231 231 reachable[rev] = 1
232 232 if stop:
233 233 stopn = self.rev(stop)
234 234 else:
235 235 stopn = 0
236 236 while visit:
237 237 n = visit.pop(0)
238 238 if n == stop:
239 239 continue
240 240 if n == nullid:
241 241 continue
242 242 for p in self.parents(n):
243 243 if self.rev(p) < stopn:
244 244 continue
245 245 if p not in reachable:
246 246 reachable[p] = 1
247 247 visit.append(p)
248 248 return reachable
249 249
250 250 def heads(self, stop=None):
251 251 """return the list of all nodes that have no children"""
252 252 p = {}
253 253 h = []
254 254 stoprev = 0
255 255 if stop and stop in self.nodemap:
256 256 stoprev = self.rev(stop)
257 257
258 258 for r in range(self.count() - 1, -1, -1):
259 259 n = self.node(r)
260 260 if n not in p:
261 261 h.append(n)
262 262 if n == stop:
263 263 break
264 264 if r < stoprev:
265 265 break
266 266 for pn in self.parents(n):
267 267 p[pn] = 1
268 268 return h
269 269
270 270 def children(self, node):
271 271 """find the children of a given node"""
272 272 c = []
273 273 p = self.rev(node)
274 274 for r in range(p + 1, self.count()):
275 275 n = self.node(r)
276 276 for pn in self.parents(n):
277 277 if pn == node:
278 278 c.append(n)
279 279 continue
280 280 elif pn == nullid:
281 281 continue
282 282 return c
283 283
284 284 def lookup(self, id):
285 285 """locate a node based on revision number or subset of hex nodeid"""
286 286 try:
287 287 rev = int(id)
288 288 if str(rev) != id: raise ValueError
289 289 if rev < 0: rev = self.count() + rev
290 290 if rev < 0 or rev >= self.count(): raise ValueError
291 291 return self.node(rev)
292 292 except (ValueError, OverflowError):
293 293 c = []
294 294 for n in self.nodemap:
295 295 if hex(n).startswith(id):
296 296 c.append(n)
297 if len(c) > 1: raise RevlogError("Ambiguous identifier")
298 if len(c) < 1: raise RevlogError("No match found")
297 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
298 if len(c) < 1: raise RevlogError(_("No match found"))
299 299 return c[0]
300 300
301 301 return None
302 302
303 303 def diff(self, a, b):
304 304 """return a delta between two revisions"""
305 305 return mdiff.textdiff(a, b)
306 306
307 307 def patches(self, t, pl):
308 308 """apply a list of patches to a string"""
309 309 return mdiff.patches(t, pl)
310 310
311 311 def delta(self, node):
312 312 """return or calculate a delta between a node and its predecessor"""
313 313 r = self.rev(node)
314 314 b = self.base(r)
315 315 if r == b:
316 316 return self.diff(self.revision(self.node(r - 1)),
317 317 self.revision(node))
318 318 else:
319 319 f = self.opener(self.datafile)
320 320 f.seek(self.start(r))
321 321 data = f.read(self.length(r))
322 322 return decompress(data)
323 323
324 324 def revision(self, node):
325 325 """return an uncompressed revision of a given"""
326 326 if node == nullid: return ""
327 327 if self.cache and self.cache[0] == node: return self.cache[2]
328 328
329 329 # look up what we need to read
330 330 text = None
331 331 rev = self.rev(node)
332 332 start, length, base, link, p1, p2, node = self.index[rev]
333 333 end = start + length
334 334 if base != rev: start = self.start(base)
335 335
336 336 # do we have useful data cached?
337 337 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
338 338 base = self.cache[1]
339 339 start = self.start(base + 1)
340 340 text = self.cache[2]
341 341 last = 0
342 342
343 343 f = self.opener(self.datafile)
344 344 f.seek(start)
345 345 data = f.read(end - start)
346 346
347 347 if text is None:
348 348 last = self.length(base)
349 349 text = decompress(data[:last])
350 350
351 351 bins = []
352 352 for r in xrange(base + 1, rev + 1):
353 353 s = self.length(r)
354 354 bins.append(decompress(data[last:last + s]))
355 355 last = last + s
356 356
357 357 text = mdiff.patches(text, bins)
358 358
359 359 if node != hash(text, p1, p2):
360 raise RevlogError("integrity check failed on %s:%d"
360 raise RevlogError(_("integrity check failed on %s:%d")
361 361 % (self.datafile, rev))
362 362
363 363 self.cache = (node, rev, text)
364 364 return text
365 365
366 366 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
367 367 """add a revision to the log
368 368
369 369 text - the revision data to add
370 370 transaction - the transaction object used for rollback
371 371 link - the linkrev data to add
372 372 p1, p2 - the parent nodeids of the revision
373 373 d - an optional precomputed delta
374 374 """
375 375 if text is None: text = ""
376 376 if p1 is None: p1 = self.tip()
377 377 if p2 is None: p2 = nullid
378 378
379 379 node = hash(text, p1, p2)
380 380
381 381 if node in self.nodemap:
382 382 return node
383 383
384 384 n = self.count()
385 385 t = n - 1
386 386
387 387 if n:
388 388 base = self.base(t)
389 389 start = self.start(base)
390 390 end = self.end(t)
391 391 if not d:
392 392 prev = self.revision(self.tip())
393 393 d = self.diff(prev, text)
394 394 data = compress(d)
395 395 dist = end - start + len(data)
396 396
397 397 # full versions are inserted when the needed deltas
398 398 # become comparable to the uncompressed text
399 399 if not n or dist > len(text) * 2:
400 400 data = compress(text)
401 401 base = n
402 402 else:
403 403 base = self.base(t)
404 404
405 405 offset = 0
406 406 if t >= 0:
407 407 offset = self.end(t)
408 408
409 409 e = (offset, len(data), base, link, p1, p2, node)
410 410
411 411 self.index.append(e)
412 412 self.nodemap[node] = n
413 413 entry = struct.pack(indexformat, *e)
414 414
415 415 transaction.add(self.datafile, e[0])
416 416 self.opener(self.datafile, "a").write(data)
417 417 transaction.add(self.indexfile, n * len(entry))
418 418 self.opener(self.indexfile, "a").write(entry)
419 419
420 420 self.cache = (node, n, text)
421 421 return node
422 422
423 423 def ancestor(self, a, b):
424 424 """calculate the least common ancestor of nodes a and b"""
425 425 # calculate the distance of every node from root
426 426 dist = {nullid: 0}
427 427 for i in xrange(self.count()):
428 428 n = self.node(i)
429 429 p1, p2 = self.parents(n)
430 430 dist[n] = max(dist[p1], dist[p2]) + 1
431 431
432 432 # traverse ancestors in order of decreasing distance from root
433 433 def ancestors(node):
434 434 # we store negative distances because heap returns smallest member
435 435 h = [(-dist[node], node)]
436 436 seen = {}
437 437 earliest = self.count()
438 438 while h:
439 439 d, n = heapq.heappop(h)
440 440 if n not in seen:
441 441 seen[n] = 1
442 442 r = self.rev(n)
443 443 yield (-d, n)
444 444 for p in self.parents(n):
445 445 heapq.heappush(h, (-dist[p], p))
446 446
447 447 def generations(node):
448 448 sg, s = None, {}
449 449 for g,n in ancestors(node):
450 450 if g != sg:
451 451 if sg:
452 452 yield sg, s
453 453 sg, s = g, {n:1}
454 454 else:
455 455 s[n] = 1
456 456 yield sg, s
457 457
458 458 x = generations(a)
459 459 y = generations(b)
460 460 gx = x.next()
461 461 gy = y.next()
462 462
463 463 # increment each ancestor list until it is closer to root than
464 464 # the other, or they match
465 465 while 1:
466 466 #print "ancestor gen %s %s" % (gx[0], gy[0])
467 467 if gx[0] == gy[0]:
468 468 # find the intersection
469 469 i = [ n for n in gx[1] if n in gy[1] ]
470 470 if i:
471 471 return i[0]
472 472 else:
473 473 #print "next"
474 474 gy = y.next()
475 475 gx = x.next()
476 476 elif gx[0] < gy[0]:
477 477 #print "next y"
478 478 gy = y.next()
479 479 else:
480 480 #print "next x"
481 481 gx = x.next()
482 482
483 483 def group(self, linkmap):
484 484 """calculate a delta group
485 485
486 486 Given a list of changeset revs, return a set of deltas and
487 487 metadata corresponding to nodes. the first delta is
488 488 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
489 489 have this parent as it has all history before these
490 490 changesets. parent is parent[0]
491 491 """
492 492 revs = []
493 493 needed = {}
494 494
495 495 # find file nodes/revs that match changeset revs
496 496 for i in xrange(0, self.count()):
497 497 if self.index[i][3] in linkmap:
498 498 revs.append(i)
499 499 needed[i] = 1
500 500
501 501 # if we don't have any revisions touched by these changesets, bail
502 502 if not revs:
503 503 yield struct.pack(">l", 0)
504 504 return
505 505
506 506 # add the parent of the first rev
507 507 p = self.parents(self.node(revs[0]))[0]
508 508 revs.insert(0, self.rev(p))
509 509
510 510 # for each delta that isn't contiguous in the log, we need to
511 511 # reconstruct the base, reconstruct the result, and then
512 512 # calculate the delta. We also need to do this where we've
513 513 # stored a full version and not a delta
514 514 for i in xrange(0, len(revs) - 1):
515 515 a, b = revs[i], revs[i + 1]
516 516 if a + 1 != b or self.base(b) == b:
517 517 for j in xrange(self.base(a), a + 1):
518 518 needed[j] = 1
519 519 for j in xrange(self.base(b), b + 1):
520 520 needed[j] = 1
521 521
522 522 # calculate spans to retrieve from datafile
523 523 needed = needed.keys()
524 524 needed.sort()
525 525 spans = []
526 526 oo = -1
527 527 ol = 0
528 528 for n in needed:
529 529 if n < 0: continue
530 530 o = self.start(n)
531 531 l = self.length(n)
532 532 if oo + ol == o: # can we merge with the previous?
533 533 nl = spans[-1][2]
534 534 nl.append((n, l))
535 535 ol += l
536 536 spans[-1] = (oo, ol, nl)
537 537 else:
538 538 oo = o
539 539 ol = l
540 540 spans.append((oo, ol, [(n, l)]))
541 541
542 542 # read spans in, divide up chunks
543 543 chunks = {}
544 544 for span in spans:
545 545 # we reopen the file for each span to make http happy for now
546 546 f = self.opener(self.datafile)
547 547 f.seek(span[0])
548 548 data = f.read(span[1])
549 549
550 550 # divide up the span
551 551 pos = 0
552 552 for r, l in span[2]:
553 553 chunks[r] = decompress(data[pos: pos + l])
554 554 pos += l
555 555
556 556 # helper to reconstruct intermediate versions
557 557 def construct(text, base, rev):
558 558 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
559 559 return mdiff.patches(text, bins)
560 560
561 561 # build deltas
562 562 deltas = []
563 563 for d in xrange(0, len(revs) - 1):
564 564 a, b = revs[d], revs[d + 1]
565 565 n = self.node(b)
566 566
567 567 # do we need to construct a new delta?
568 568 if a + 1 != b or self.base(b) == b:
569 569 if a >= 0:
570 570 base = self.base(a)
571 571 ta = chunks[self.base(a)]
572 572 ta = construct(ta, base, a)
573 573 else:
574 574 ta = ""
575 575
576 576 base = self.base(b)
577 577 if a > base:
578 578 base = a
579 579 tb = ta
580 580 else:
581 581 tb = chunks[self.base(b)]
582 582 tb = construct(tb, base, b)
583 583 d = self.diff(ta, tb)
584 584 else:
585 585 d = chunks[b]
586 586
587 587 p = self.parents(n)
588 588 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
589 589 l = struct.pack(">l", len(meta) + len(d) + 4)
590 590 yield l
591 591 yield meta
592 592 yield d
593 593
594 594 yield struct.pack(">l", 0)
595 595
596 596 def addgroup(self, revs, linkmapper, transaction, unique=0):
597 597 """
598 598 add a delta group
599 599
600 600 given a set of deltas, add them to the revision log. the
601 601 first delta is against its parent, which should be in our
602 602 log, the rest are against the previous delta.
603 603 """
604 604
605 605 #track the base of the current delta log
606 606 r = self.count()
607 607 t = r - 1
608 608 node = nullid
609 609
610 610 base = prev = -1
611 611 start = end = measure = 0
612 612 if r:
613 613 start = self.start(self.base(t))
614 614 end = self.end(t)
615 615 measure = self.length(self.base(t))
616 616 base = self.base(t)
617 617 prev = self.tip()
618 618
619 619 transaction.add(self.datafile, end)
620 620 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
621 621 dfh = self.opener(self.datafile, "a")
622 622 ifh = self.opener(self.indexfile, "a")
623 623
624 624 # loop through our set of deltas
625 625 chain = None
626 626 for chunk in revs:
627 627 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
628 628 link = linkmapper(cs)
629 629 if node in self.nodemap:
630 630 # this can happen if two branches make the same change
631 631 # if unique:
632 # raise RevlogError("already have %s" % hex(node[:4]))
632 # raise RevlogError(_("already have %s") % hex(node[:4]))
633 633 chain = node
634 634 continue
635 635 delta = chunk[80:]
636 636
637 637 if not chain:
638 638 # retrieve the parent revision of the delta chain
639 639 chain = p1
640 640 if not chain in self.nodemap:
641 raise RevlogError("unknown base %s" % short(chain[:4]))
641 raise RevlogError(_("unknown base %s") % short(chain[:4]))
642 642
643 643 # full versions are inserted when the needed deltas become
644 644 # comparable to the uncompressed text or when the previous
645 645 # version is not the one we have a delta against. We use
646 646 # the size of the previous full rev as a proxy for the
647 647 # current size.
648 648
649 649 if chain == prev:
650 650 cdelta = compress(delta)
651 651
652 652 if chain != prev or (end - start + len(cdelta)) > measure * 2:
653 653 # flush our writes here so we can read it in revision
654 654 dfh.flush()
655 655 ifh.flush()
656 656 text = self.revision(chain)
657 657 text = self.patches(text, [delta])
658 658 chk = self.addrevision(text, transaction, link, p1, p2)
659 659 if chk != node:
660 raise RevlogError("consistency error adding group")
660 raise RevlogError(_("consistency error adding group"))
661 661 measure = len(text)
662 662 else:
663 663 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
664 664 self.index.append(e)
665 665 self.nodemap[node] = r
666 666 dfh.write(cdelta)
667 667 ifh.write(struct.pack(indexformat, *e))
668 668
669 669 t, r, chain, prev = r, r + 1, node, node
670 670 start = self.start(self.base(t))
671 671 end = self.end(t)
672 672
673 673 dfh.close()
674 674 ifh.close()
675 675 return node
@@ -1,133 +1,133 b''
1 1 # sshrepo.py - ssh repository proxy class for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from remoterepo import *
10 10 from i18n import gettext as _
11 11 from demandload import *
12 12 demandload(globals(), "hg os re stat")
13 13
14 14 class sshrepository(remoterepository):
15 15 def __init__(self, ui, path):
16 16 self.url = path
17 17 self.ui = ui
18 18
19 19 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
20 20 if not m:
21 raise hg.RepoError("couldn't parse destination %s" % path)
21 raise hg.RepoError(_("couldn't parse destination %s") % path)
22 22
23 23 self.user = m.group(2)
24 24 self.host = m.group(3)
25 25 self.port = m.group(5)
26 26 self.path = m.group(7) or "."
27 27
28 28 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
29 29 args = self.port and ("%s -p %s") % (args, self.port) or args
30 30
31 31 sshcmd = self.ui.config("ui", "ssh", "ssh")
32 32 remotecmd = self.ui.config("ui", "remotecmd", "hg")
33 33 cmd = '%s %s "%s -R %s serve --stdio"'
34 34 cmd = cmd % (sshcmd, args, remotecmd, self.path)
35 35
36 36 ui.note('running %s\n' % cmd)
37 37 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
38 38
39 39 def readerr(self):
40 40 while 1:
41 41 size = os.fstat(self.pipee.fileno())[stat.ST_SIZE]
42 42 if size == 0: break
43 43 l = self.pipee.readline()
44 44 if not l: break
45 self.ui.status("remote: ", l)
45 self.ui.status(_("remote: "), l)
46 46
47 47 def __del__(self):
48 48 try:
49 49 self.pipeo.close()
50 50 self.pipei.close()
51 51 # read the error descriptor until EOF
52 52 for l in self.pipee:
53 self.ui.status("remote: ", l)
53 self.ui.status(_("remote: "), l)
54 54 self.pipee.close()
55 55 except:
56 56 pass
57 57
58 58 def dev(self):
59 59 return -1
60 60
61 61 def do_cmd(self, cmd, **args):
62 self.ui.debug("sending %s command\n" % cmd)
62 self.ui.debug(_("sending %s command\n") % cmd)
63 63 self.pipeo.write("%s\n" % cmd)
64 64 for k, v in args.items():
65 65 self.pipeo.write("%s %d\n" % (k, len(v)))
66 66 self.pipeo.write(v)
67 67 self.pipeo.flush()
68 68
69 69 return self.pipei
70 70
71 71 def call(self, cmd, **args):
72 72 r = self.do_cmd(cmd, **args)
73 73 l = r.readline()
74 74 self.readerr()
75 75 try:
76 76 l = int(l)
77 77 except:
78 raise hg.RepoError("unexpected response '%s'" % l)
78 raise hg.RepoError(_("unexpected response '%s'") % l)
79 79 return r.read(l)
80 80
81 81 def lock(self):
82 82 self.call("lock")
83 83 return remotelock(self)
84 84
85 85 def unlock(self):
86 86 self.call("unlock")
87 87
88 88 def heads(self):
89 89 d = self.call("heads")
90 90 try:
91 91 return map(bin, d[:-1].split(" "))
92 92 except:
93 raise hg.RepoError("unexpected response '%s'" % (d[:400] + "..."))
93 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
94 94
95 95 def branches(self, nodes):
96 96 n = " ".join(map(hex, nodes))
97 97 d = self.call("branches", nodes=n)
98 98 try:
99 99 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
100 100 return br
101 101 except:
102 raise hg.RepoError("unexpected response '%s'" % (d[:400] + "..."))
102 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
103 103
104 104 def between(self, pairs):
105 105 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
106 106 d = self.call("between", pairs=n)
107 107 try:
108 108 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
109 109 return p
110 110 except:
111 raise hg.RepoError("unexpected response '%s'" % (d[:400] + "..."))
111 raise hg.RepoError(_("unexpected response '%s'") % (d[:400] + "..."))
112 112
113 113 def changegroup(self, nodes):
114 114 n = " ".join(map(hex, nodes))
115 115 f = self.do_cmd("changegroup", roots=n)
116 116 return self.pipei
117 117
118 118 def addchangegroup(self, cg):
119 119 d = self.call("addchangegroup")
120 120 if d:
121 raise hg.RepoError("push refused: %s", d)
121 raise hg.RepoError(_("push refused: %s"), d)
122 122
123 123 while 1:
124 124 d = cg.read(4096)
125 125 if not d: break
126 126 self.pipeo.write(d)
127 127 self.readerr()
128 128
129 129 self.pipeo.flush()
130 130
131 131 self.readerr()
132 132 l = int(self.pipei.readline())
133 133 return self.pipei.read(l) != ""
@@ -1,79 +1,79 b''
1 1 # transaction.py - simple journalling scheme for mercurial
2 2 #
3 3 # This transaction scheme is intended to gracefully handle program
4 4 # errors and interruptions. More serious failures like system crashes
5 5 # can be recovered with an fsck-like tool. As the whole repository is
6 6 # effectively log-structured, this should amount to simply truncating
7 7 # anything that isn't referenced in the changelog.
8 8 #
9 9 # Copyright 2005 Matt Mackall <mpm@selenic.com>
10 10 #
11 11 # This software may be used and distributed according to the terms
12 12 # of the GNU General Public License, incorporated herein by reference.
13 13
14 14 import os
15 15 import util
16 16 from i18n import gettext as _
17 17
18 18 class transaction:
19 19 def __init__(self, report, opener, journal, after=None):
20 20 self.journal = None
21 21
22 22 # abort here if the journal already exists
23 23 if os.path.exists(journal):
24 raise AssertionError("journal already exists - run hg recover")
24 raise AssertionError(_("journal already exists - run hg recover"))
25 25
26 26 self.report = report
27 27 self.opener = opener
28 28 self.after = after
29 29 self.entries = []
30 30 self.map = {}
31 31 self.journal = journal
32 32
33 33 self.file = open(self.journal, "w")
34 34
35 35 def __del__(self):
36 36 if self.journal:
37 37 if self.entries: self.abort()
38 38 self.file.close()
39 39 try: os.unlink(self.journal)
40 40 except: pass
41 41
42 42 def add(self, file, offset):
43 43 if file in self.map: return
44 44 self.entries.append((file, offset))
45 45 self.map[file] = 1
46 46 # add enough data to the journal to do the truncate
47 47 self.file.write("%s\0%d\n" % (file, offset))
48 48 self.file.flush()
49 49
50 50 def close(self):
51 51 self.file.close()
52 52 self.entries = []
53 53 if self.after:
54 54 self.after()
55 55 else:
56 56 os.unlink(self.journal)
57 57 self.journal = None
58 58
59 59 def abort(self):
60 60 if not self.entries: return
61 61
62 self.report("transaction abort!\n")
62 self.report(_("transaction abort!\n"))
63 63
64 64 for f, o in self.entries:
65 65 try:
66 66 self.opener(f, "a").truncate(o)
67 67 except:
68 self.report("failed to truncate %s\n" % f)
68 self.report(_("failed to truncate %s\n") % f)
69 69
70 70 self.entries = []
71 71
72 self.report("rollback completed\n")
72 self.report(_("rollback completed\n"))
73 73
74 74 def rollback(opener, file):
75 75 for l in open(file).readlines():
76 76 f, o = l.split('\0')
77 77 opener(f, "a").truncate(int(o))
78 78 os.unlink(file)
79 79
@@ -1,146 +1,146 b''
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import os, ConfigParser
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "re socket sys util")
12 12
13 13 class ui:
14 14 def __init__(self, verbose=False, debug=False, quiet=False,
15 15 interactive=True):
16 16 self.overlay = {}
17 17 self.cdata = ConfigParser.SafeConfigParser()
18 18 self.cdata.read(util.rcpath)
19 19
20 20 self.quiet = self.configbool("ui", "quiet")
21 21 self.verbose = self.configbool("ui", "verbose")
22 22 self.debugflag = self.configbool("ui", "debug")
23 23 self.interactive = self.configbool("ui", "interactive", True)
24 24
25 25 self.updateopts(verbose, debug, quiet, interactive)
26 26
27 27 def updateopts(self, verbose=False, debug=False, quiet=False,
28 28 interactive=True):
29 29 self.quiet = (self.quiet or quiet) and not verbose and not debug
30 30 self.verbose = (self.verbose or verbose) or debug
31 31 self.debugflag = (self.debugflag or debug)
32 32 self.interactive = (self.interactive and interactive)
33 33
34 34 def readconfig(self, fp):
35 35 self.cdata.readfp(fp)
36 36
37 37 def setconfig(self, section, name, val):
38 38 self.overlay[(section, name)] = val
39 39
40 40 def config(self, section, name, default=None):
41 41 if self.overlay.has_key((section, name)):
42 42 return self.overlay[(section, name)]
43 43 if self.cdata.has_option(section, name):
44 44 return self.cdata.get(section, name)
45 45 return default
46 46
47 47 def configbool(self, section, name, default=False):
48 48 if self.overlay.has_key((section, name)):
49 49 return self.overlay[(section, name)]
50 50 if self.cdata.has_option(section, name):
51 51 return self.cdata.getboolean(section, name)
52 52 return default
53 53
54 54 def configitems(self, section):
55 55 if self.cdata.has_section(section):
56 56 return self.cdata.items(section)
57 57 return []
58 58
59 59 def walkconfig(self):
60 60 seen = {}
61 61 for (section, name), value in self.overlay.iteritems():
62 62 yield section, name, value
63 63 seen[section, name] = 1
64 64 for section in self.cdata.sections():
65 65 for name, value in self.cdata.items(section):
66 66 if (section, name) in seen: continue
67 67 yield section, name, value.replace('\n', '\\n')
68 68 seen[section, name] = 1
69 69
70 70 def extensions(self):
71 71 return self.configitems("extensions")
72 72
73 73 def username(self):
74 74 return (os.environ.get("HGUSER") or
75 75 self.config("ui", "username") or
76 76 os.environ.get("EMAIL") or
77 77 (os.environ.get("LOGNAME",
78 78 os.environ.get("USERNAME", "unknown"))
79 79 + '@' + socket.getfqdn()))
80 80
81 81 def shortuser(self, user):
82 82 """Return a short representation of a user name or email address."""
83 83 if not self.verbose:
84 84 f = user.find('@')
85 85 if f >= 0:
86 86 user = user[:f]
87 87 f = user.find('<')
88 88 if f >= 0:
89 89 user = user[f+1:]
90 90 return user
91 91
92 92 def expandpath(self, loc):
93 93 paths = {}
94 94 for name, path in self.configitems("paths"):
95 95 paths[name] = path
96 96
97 97 return paths.get(loc, loc)
98 98
99 99 def write(self, *args):
100 100 for a in args:
101 101 sys.stdout.write(str(a))
102 102
103 103 def write_err(self, *args):
104 104 sys.stdout.flush()
105 105 for a in args:
106 106 sys.stderr.write(str(a))
107 107
108 108 def readline(self):
109 109 return sys.stdin.readline()[:-1]
110 110 def prompt(self, msg, pat, default="y"):
111 111 if not self.interactive: return default
112 112 while 1:
113 113 self.write(msg, " ")
114 114 r = self.readline()
115 115 if re.match(pat, r):
116 116 return r
117 117 else:
118 self.write("unrecognized response\n")
118 self.write(_("unrecognized response\n"))
119 119 def status(self, *msg):
120 120 if not self.quiet: self.write(*msg)
121 121 def warn(self, *msg):
122 122 self.write_err(*msg)
123 123 def note(self, *msg):
124 124 if self.verbose: self.write(*msg)
125 125 def debug(self, *msg):
126 126 if self.debugflag: self.write(*msg)
127 127 def edit(self, text):
128 128 import tempfile
129 129 (fd, name) = tempfile.mkstemp("hg")
130 130 f = os.fdopen(fd, "w")
131 131 f.write(text)
132 132 f.close()
133 133
134 134 editor = (os.environ.get("HGEDITOR") or
135 135 self.config("ui", "editor") or
136 136 os.environ.get("EDITOR", "vi"))
137 137
138 138 os.environ["HGUSER"] = self.username()
139 util.system("%s %s" % (editor, name), errprefix="edit failed")
139 util.system("%s %s" % (editor, name), errprefix=_("edit failed"))
140 140
141 141 t = open(name).read()
142 142 t = re.sub("(?m)^HG:.*\n", "", t)
143 143
144 144 os.unlink(name)
145 145
146 146 return t
@@ -1,576 +1,576 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specfic implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8
9 9 This contains helper routines that are independent of the SCM core and hide
10 10 platform-specific details from the core.
11 11 """
12 12
13 13 import os, errno
14 14 from i18n import gettext as _
15 15 from demandload import *
16 16 demandload(globals(), "re cStringIO shutil popen2 tempfile threading time")
17 17
18 18 def pipefilter(s, cmd):
19 19 '''filter string S through command CMD, returning its output'''
20 20 (pout, pin) = popen2.popen2(cmd, -1, 'b')
21 21 def writer():
22 22 pin.write(s)
23 23 pin.close()
24 24
25 25 # we should use select instead on UNIX, but this will work on most
26 26 # systems, including Windows
27 27 w = threading.Thread(target=writer)
28 28 w.start()
29 29 f = pout.read()
30 30 pout.close()
31 31 w.join()
32 32 return f
33 33
34 34 def tempfilter(s, cmd):
35 35 '''filter string S through a pair of temporary files with CMD.
36 36 CMD is used as a template to create the real command to be run,
37 37 with the strings INFILE and OUTFILE replaced by the real names of
38 38 the temporary files generated.'''
39 39 inname, outname = None, None
40 40 try:
41 41 infd, inname = tempfile.mkstemp(prefix='hgfin')
42 42 fp = os.fdopen(infd, 'wb')
43 43 fp.write(s)
44 44 fp.close()
45 45 outfd, outname = tempfile.mkstemp(prefix='hgfout')
46 46 os.close(outfd)
47 47 cmd = cmd.replace('INFILE', inname)
48 48 cmd = cmd.replace('OUTFILE', outname)
49 49 code = os.system(cmd)
50 if code: raise Abort("command '%s' failed: %s" %
50 if code: raise Abort(_("command '%s' failed: %s") %
51 51 (cmd, explain_exit(code)))
52 52 return open(outname, 'rb').read()
53 53 finally:
54 54 try:
55 55 if inname: os.unlink(inname)
56 56 except: pass
57 57 try:
58 58 if outname: os.unlink(outname)
59 59 except: pass
60 60
61 61 filtertable = {
62 62 'tempfile:': tempfilter,
63 63 'pipe:': pipefilter,
64 64 }
65 65
66 66 def filter(s, cmd):
67 67 "filter a string through a command that transforms its input to its output"
68 68 for name, fn in filtertable.iteritems():
69 69 if cmd.startswith(name):
70 70 return fn(s, cmd[len(name):].lstrip())
71 71 return pipefilter(s, cmd)
72 72
73 73 def patch(strip, patchname, ui):
74 74 """apply the patch <patchname> to the working directory.
75 75 a list of patched files is returned"""
76 76 fp = os.popen('patch -p%d < "%s"' % (strip, patchname))
77 77 files = {}
78 78 for line in fp:
79 79 line = line.rstrip()
80 80 ui.status("%s\n" % line)
81 81 if line.startswith('patching file '):
82 82 pf = parse_patch_output(line)
83 83 files.setdefault(pf, 1)
84 84 code = fp.close()
85 85 if code:
86 raise Abort("patch command failed: %s" % explain_exit(code)[0])
86 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
87 87 return files.keys()
88 88
89 89 def binary(s):
90 90 """return true if a string is binary data using diff's heuristic"""
91 91 if s and '\0' in s[:4096]:
92 92 return True
93 93 return False
94 94
95 95 def unique(g):
96 96 """return the uniq elements of iterable g"""
97 97 seen = {}
98 98 for f in g:
99 99 if f not in seen:
100 100 seen[f] = 1
101 101 yield f
102 102
103 103 class Abort(Exception):
104 104 """Raised if a command needs to print an error and exit."""
105 105
106 106 def always(fn): return True
107 107 def never(fn): return False
108 108
109 109 def globre(pat, head='^', tail='$'):
110 110 "convert a glob pattern into a regexp"
111 111 i, n = 0, len(pat)
112 112 res = ''
113 113 group = False
114 114 def peek(): return i < n and pat[i]
115 115 while i < n:
116 116 c = pat[i]
117 117 i = i+1
118 118 if c == '*':
119 119 if peek() == '*':
120 120 i += 1
121 121 res += '.*'
122 122 else:
123 123 res += '[^/]*'
124 124 elif c == '?':
125 125 res += '.'
126 126 elif c == '[':
127 127 j = i
128 128 if j < n and pat[j] in '!]':
129 129 j += 1
130 130 while j < n and pat[j] != ']':
131 131 j += 1
132 132 if j >= n:
133 133 res += '\\['
134 134 else:
135 135 stuff = pat[i:j].replace('\\','\\\\')
136 136 i = j + 1
137 137 if stuff[0] == '!':
138 138 stuff = '^' + stuff[1:]
139 139 elif stuff[0] == '^':
140 140 stuff = '\\' + stuff
141 141 res = '%s[%s]' % (res, stuff)
142 142 elif c == '{':
143 143 group = True
144 144 res += '(?:'
145 145 elif c == '}' and group:
146 146 res += ')'
147 147 group = False
148 148 elif c == ',' and group:
149 149 res += '|'
150 150 else:
151 151 res += re.escape(c)
152 152 return head + res + tail
153 153
154 154 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
155 155
156 156 def pathto(n1, n2):
157 157 '''return the relative path from one place to another.
158 158 this returns a path in the form used by the local filesystem, not hg.'''
159 159 if not n1: return localpath(n2)
160 160 a, b = n1.split('/'), n2.split('/')
161 161 a.reverse(), b.reverse()
162 162 while a and b and a[-1] == b[-1]:
163 163 a.pop(), b.pop()
164 164 b.reverse()
165 165 return os.sep.join((['..'] * len(a)) + b)
166 166
167 167 def canonpath(root, cwd, myname):
168 168 """return the canonical path of myname, given cwd and root"""
169 169 rootsep = root + os.sep
170 170 name = myname
171 171 if not name.startswith(os.sep):
172 172 name = os.path.join(root, cwd, name)
173 173 name = os.path.normpath(name)
174 174 if name.startswith(rootsep):
175 175 return pconvert(name[len(rootsep):])
176 176 elif name == root:
177 177 return ''
178 178 else:
179 179 raise Abort('%s not under root' % myname)
180 180
181 181 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head=''):
182 182 """build a function to match a set of file patterns
183 183
184 184 arguments:
185 185 canonroot - the canonical root of the tree you're matching against
186 186 cwd - the current working directory, if relevant
187 187 names - patterns to find
188 188 inc - patterns to include
189 189 exc - patterns to exclude
190 190 head - a regex to prepend to patterns to control whether a match is rooted
191 191
192 192 a pattern is one of:
193 193 'glob:<rooted glob>'
194 194 're:<rooted regexp>'
195 195 'path:<rooted path>'
196 196 'relglob:<relative glob>'
197 197 'relpath:<relative path>'
198 198 'relre:<relative regexp>'
199 199 '<rooted path or regexp>'
200 200
201 201 returns:
202 202 a 3-tuple containing
203 203 - list of explicit non-pattern names passed in
204 204 - a bool match(filename) function
205 205 - a bool indicating if any patterns were passed in
206 206
207 207 todo:
208 208 make head regex a rooted bool
209 209 """
210 210
211 211 def patkind(name):
212 212 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
213 213 if name.startswith(prefix + ':'): return name.split(':', 1)
214 214 for c in name:
215 215 if c in _globchars: return 'glob', name
216 216 return 'relpath', name
217 217
218 218 def regex(kind, name, tail):
219 219 '''convert a pattern into a regular expression'''
220 220 if kind == 're':
221 221 return name
222 222 elif kind == 'path':
223 223 return '^' + re.escape(name) + '(?:/|$)'
224 224 elif kind == 'relglob':
225 225 return head + globre(name, '(?:|.*/)', tail)
226 226 elif kind == 'relpath':
227 227 return head + re.escape(name) + tail
228 228 elif kind == 'relre':
229 229 if name.startswith('^'):
230 230 return name
231 231 return '.*' + name
232 232 return head + globre(name, '', tail)
233 233
234 234 def matchfn(pats, tail):
235 235 """build a matching function from a set of patterns"""
236 236 if pats:
237 237 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
238 238 return re.compile(pat).match
239 239
240 240 def globprefix(pat):
241 241 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
242 242 root = []
243 243 for p in pat.split(os.sep):
244 244 if patkind(p)[0] == 'glob': break
245 245 root.append(p)
246 246 return '/'.join(root)
247 247
248 248 pats = []
249 249 files = []
250 250 roots = []
251 251 for kind, name in map(patkind, names):
252 252 if kind in ('glob', 'relpath'):
253 253 name = canonpath(canonroot, cwd, name)
254 254 if name == '':
255 255 kind, name = 'glob', '**'
256 256 if kind in ('glob', 'path', 're'):
257 257 pats.append((kind, name))
258 258 if kind == 'glob':
259 259 root = globprefix(name)
260 260 if root: roots.append(root)
261 261 elif kind == 'relpath':
262 262 files.append((kind, name))
263 263 roots.append(name)
264 264
265 265 patmatch = matchfn(pats, '$') or always
266 266 filematch = matchfn(files, '(?:/|$)') or always
267 267 incmatch = always
268 268 if inc:
269 269 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
270 270 excmatch = lambda fn: False
271 271 if exc:
272 272 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
273 273
274 274 return (roots,
275 275 lambda fn: (incmatch(fn) and not excmatch(fn) and
276 276 (fn.endswith('/') or
277 277 (not pats and not files) or
278 278 (pats and patmatch(fn)) or
279 279 (files and filematch(fn)))),
280 280 (inc or exc or (pats and pats != [('glob', '**')])) and True)
281 281
282 282 def system(cmd, errprefix=None):
283 283 """execute a shell command that must succeed"""
284 284 rc = os.system(cmd)
285 285 if rc:
286 286 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
287 287 explain_exit(rc)[0])
288 288 if errprefix:
289 289 errmsg = "%s: %s" % (errprefix, errmsg)
290 290 raise Abort(errmsg)
291 291
292 292 def rename(src, dst):
293 293 """forcibly rename a file"""
294 294 try:
295 295 os.rename(src, dst)
296 296 except:
297 297 os.unlink(dst)
298 298 os.rename(src, dst)
299 299
300 300 def copyfiles(src, dst, hardlink=None):
301 301 """Copy a directory tree using hardlinks if possible"""
302 302
303 303 if hardlink is None:
304 304 hardlink = (os.stat(src).st_dev ==
305 305 os.stat(os.path.dirname(dst)).st_dev)
306 306
307 307 if os.path.isdir(src):
308 308 os.mkdir(dst)
309 309 for name in os.listdir(src):
310 310 srcname = os.path.join(src, name)
311 311 dstname = os.path.join(dst, name)
312 312 copyfiles(srcname, dstname, hardlink)
313 313 else:
314 314 if hardlink:
315 315 try:
316 316 os_link(src, dst)
317 317 except:
318 318 hardlink = False
319 319 shutil.copy2(src, dst)
320 320 else:
321 321 shutil.copy2(src, dst)
322 322
323 323 def opener(base):
324 324 """
325 325 return a function that opens files relative to base
326 326
327 327 this function is used to hide the details of COW semantics and
328 328 remote file access from higher level code.
329 329 """
330 330 p = base
331 331 def o(path, mode="r", text=False):
332 332 f = os.path.join(p, path)
333 333
334 334 if not text:
335 335 mode += "b" # for that other OS
336 336
337 337 if mode[0] != "r":
338 338 try:
339 339 nlink = nlinks(f)
340 340 except OSError:
341 341 d = os.path.dirname(f)
342 342 if not os.path.isdir(d):
343 343 os.makedirs(d)
344 344 else:
345 345 if nlink > 1:
346 346 file(f + ".tmp", "wb").write(file(f, "rb").read())
347 347 rename(f+".tmp", f)
348 348
349 349 return file(f, mode)
350 350
351 351 return o
352 352
353 353 def _makelock_file(info, pathname):
354 354 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
355 355 os.write(ld, info)
356 356 os.close(ld)
357 357
358 358 def _readlock_file(pathname):
359 359 return file(pathname).read()
360 360
361 361 def nlinks(pathname):
362 362 """Return number of hardlinks for the given file."""
363 363 return os.stat(pathname).st_nlink
364 364
365 365 if hasattr(os, 'link'):
366 366 os_link = os.link
367 367 else:
368 368 def os_link(src, dst):
369 raise OSError(0, "Hardlinks not supported")
369 raise OSError(0, _("Hardlinks not supported"))
370 370
371 371 # Platform specific variants
372 372 if os.name == 'nt':
373 373 nulldev = 'NUL:'
374 374
375 375 try:
376 376 import win32api, win32process
377 377 filename = win32process.GetModuleFileNameEx(win32api.GetCurrentProcess(), 0)
378 378 systemrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
379 379
380 380 except ImportError:
381 381 systemrc = r'c:\mercurial\mercurial.ini'
382 382 pass
383 383
384 384 rcpath = (systemrc,
385 385 os.path.join(os.path.expanduser('~'), 'mercurial.ini'))
386 386
387 387 def parse_patch_output(output_line):
388 388 """parses the output produced by patch and returns the file name"""
389 389 pf = output_line[14:]
390 390 if pf[0] == '`':
391 391 pf = pf[1:-1] # Remove the quotes
392 392 return pf
393 393
394 394 try: # ActivePython can create hard links using win32file module
395 395 import win32file
396 396
397 397 def os_link(src, dst): # NB will only succeed on NTFS
398 398 win32file.CreateHardLink(dst, src)
399 399
400 400 def nlinks(pathname):
401 401 """Return number of hardlinks for the given file."""
402 402 try:
403 403 fh = win32file.CreateFile(pathname,
404 404 win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
405 405 None, win32file.OPEN_EXISTING, 0, None)
406 406 res = win32file.GetFileInformationByHandle(fh)
407 407 fh.Close()
408 408 return res[7]
409 409 except:
410 410 return os.stat(pathname).st_nlink
411 411
412 412 except ImportError:
413 413 pass
414 414
415 415 def is_exec(f, last):
416 416 return last
417 417
418 418 def set_exec(f, mode):
419 419 pass
420 420
421 421 def pconvert(path):
422 422 return path.replace("\\", "/")
423 423
424 424 def localpath(path):
425 425 return path.replace('/', '\\')
426 426
427 427 def normpath(path):
428 428 return pconvert(os.path.normpath(path))
429 429
430 430 makelock = _makelock_file
431 431 readlock = _readlock_file
432 432
433 433 def explain_exit(code):
434 return "exited with status %d" % code, code
434 return _("exited with status %d") % code, code
435 435
436 436 else:
437 437 nulldev = '/dev/null'
438 438
439 439 hgrcd = '/etc/mercurial/hgrc.d'
440 440 hgrcs = []
441 441 if os.path.isdir(hgrcd):
442 442 hgrcs = [f for f in os.listdir(hgrcd) if f.endswith(".rc")]
443 443 rcpath = map(os.path.normpath, hgrcs +
444 444 ['/etc/mercurial/hgrc', os.path.expanduser('~/.hgrc')])
445 445
446 446 def parse_patch_output(output_line):
447 447 """parses the output produced by patch and returns the file name"""
448 448 return output_line[14:]
449 449
450 450 def is_exec(f, last):
451 451 """check whether a file is executable"""
452 452 return (os.stat(f).st_mode & 0100 != 0)
453 453
454 454 def set_exec(f, mode):
455 455 s = os.stat(f).st_mode
456 456 if (s & 0100 != 0) == mode:
457 457 return
458 458 if mode:
459 459 # Turn on +x for every +r bit when making a file executable
460 460 # and obey umask.
461 461 umask = os.umask(0)
462 462 os.umask(umask)
463 463 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
464 464 else:
465 465 os.chmod(f, s & 0666)
466 466
467 467 def pconvert(path):
468 468 return path
469 469
470 470 def localpath(path):
471 471 return path
472 472
473 473 normpath = os.path.normpath
474 474
475 475 def makelock(info, pathname):
476 476 try:
477 477 os.symlink(info, pathname)
478 478 except OSError, why:
479 479 if why.errno == errno.EEXIST:
480 480 raise
481 481 else:
482 482 _makelock_file(info, pathname)
483 483
484 484 def readlock(pathname):
485 485 try:
486 486 return os.readlink(pathname)
487 487 except OSError, why:
488 488 if why.errno == errno.EINVAL:
489 489 return _readlock_file(pathname)
490 490 else:
491 491 raise
492 492
493 493 def explain_exit(code):
494 494 """return a 2-tuple (desc, code) describing a process's status"""
495 495 if os.WIFEXITED(code):
496 496 val = os.WEXITSTATUS(code)
497 return "exited with status %d" % val, val
497 return _("exited with status %d") % val, val
498 498 elif os.WIFSIGNALED(code):
499 499 val = os.WTERMSIG(code)
500 return "killed by signal %d" % val, val
500 return _("killed by signal %d") % val, val
501 501 elif os.WIFSTOPPED(code):
502 502 val = os.WSTOPSIG(code)
503 return "stopped by signal %d" % val, val
504 raise ValueError("invalid exit code")
503 return _("stopped by signal %d") % val, val
504 raise ValueError(_("invalid exit code"))
505 505
506 506 class chunkbuffer(object):
507 507 """Allow arbitrary sized chunks of data to be efficiently read from an
508 508 iterator over chunks of arbitrary size."""
509 509
510 510 def __init__(self, in_iter, targetsize = 2**16):
511 511 """in_iter is the iterator that's iterating over the input chunks.
512 512 targetsize is how big a buffer to try to maintain."""
513 513 self.in_iter = iter(in_iter)
514 514 self.buf = ''
515 515 self.targetsize = int(targetsize)
516 516 if self.targetsize <= 0:
517 raise ValueError("targetsize must be greater than 0, was %d" %
517 raise ValueError(_("targetsize must be greater than 0, was %d") %
518 518 targetsize)
519 519 self.iterempty = False
520 520
521 521 def fillbuf(self):
522 522 """Ignore target size; read every chunk from iterator until empty."""
523 523 if not self.iterempty:
524 524 collector = cStringIO.StringIO()
525 525 collector.write(self.buf)
526 526 for ch in self.in_iter:
527 527 collector.write(ch)
528 528 self.buf = collector.getvalue()
529 529 self.iterempty = True
530 530
531 531 def read(self, l):
532 532 """Read L bytes of data from the iterator of chunks of data.
533 533 Returns less than L bytes if the iterator runs dry."""
534 534 if l > len(self.buf) and not self.iterempty:
535 535 # Clamp to a multiple of self.targetsize
536 536 targetsize = self.targetsize * ((l // self.targetsize) + 1)
537 537 collector = cStringIO.StringIO()
538 538 collector.write(self.buf)
539 539 collected = len(self.buf)
540 540 for chunk in self.in_iter:
541 541 collector.write(chunk)
542 542 collected += len(chunk)
543 543 if collected >= targetsize:
544 544 break
545 545 if collected < targetsize:
546 546 self.iterempty = True
547 547 self.buf = collector.getvalue()
548 548 s, self.buf = self.buf[:l], buffer(self.buf, l)
549 549 return s
550 550
551 551 def filechunkiter(f, size = 65536):
552 552 """Create a generator that produces all the data in the file size
553 553 (default 65536) bytes at a time. Chunks may be less than size
554 554 bytes if the chunk is the last chunk in the file, or the file is a
555 555 socket or some other type of file that sometimes reads less data
556 556 than is requested."""
557 557 s = f.read(size)
558 558 while len(s) > 0:
559 559 yield s
560 560 s = f.read(size)
561 561
562 562 def makedate():
563 563 t = time.time()
564 564 if time.daylight: tz = time.altzone
565 565 else: tz = time.timezone
566 566 return t, tz
567 567
568 568 def datestr(date=None, format='%c'):
569 569 """represent a (unixtime, offset) tuple as a localized time.
570 570 unixtime is seconds since the epoch, and offset is the time zone's
571 571 number of seconds away from UTC."""
572 572 t, tz = date or makedate()
573 573 return ("%s %+03d%02d" %
574 574 (time.strftime(format, time.gmtime(float(t) - tz)),
575 575 -tz / 3600,
576 576 ((-tz % 3600) / 60)))
General Comments 0
You need to be logged in to leave comments. Login now