##// END OF EJS Templates
encoding: pull fallbackencoding out of localrepo into early parsing
Matt Mackall -
r4619:5fd7cc89 default
parent child Browse files
Show More
@@ -1,1202 +1,1206
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
12 import fancyopts, revlog, version, extensions
12 import fancyopts, revlog, version, extensions
13
13
# separator character used in revision range specifications ("start:end")
revrangesep = ':'
15
15
class UnknownCommand(Exception):
    """Raised when a command name matches nothing in the command table."""
class AmbiguousCommand(Exception):
    """Raised when a command shortcut matches more than one command.

    args are (shortcut, list of matching command names).
    """
class ParseError(Exception):
    """Raised on errors in parsing the command line.

    args are (command name or None, error detail).
    """
22
22
def runcatch(ui, args):
    """Run dispatch(ui, args), translating known exceptions into warnings.

    Installs termination-signal handlers first, optionally drops into pdb
    around the command (--debugger), and maps every expected failure mode
    to a user-facing "abort:" message.  Returns the command's return code,
    or -1 when an error was handled here.  Truly unexpected exceptions are
    reported with bug-tracker instructions and re-raised.
    """
    def catchterm(*args):
        # convert termination signals into a catchable exception
        raise util.SignalInterrupt

    for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
        num = getattr(signal, name, None)
        if num: signal.signal(num, catchterm)

    try:
        try:
            # enter the debugger before command execution
            if '--debugger' in args:
                pdb.set_trace()
            try:
                return dispatch(ui, args)
            finally:
                ui.flush()
        except:
            # enter the debugger when we hit an exception
            if '--debugger' in args:
                pdb.post_mortem(sys.exc_info()[2])
            ui.print_exc()
            raise

    except ParseError, inst:
        # inst.args is (command or None, detail); show command help if known
        if inst.args[0]:
            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
            commands.help_(ui, inst.args[0])
        else:
            ui.warn(_("hg: %s\n") % inst.args[1])
            commands.help_(ui, 'shortlist')
    except AmbiguousCommand, inst:
        ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
                (inst.args[0], " ".join(inst.args[1])))
    except UnknownCommand, inst:
        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
        commands.help_(ui, 'shortlist')
    except hg.RepoError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except lock.LockHeld, inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except lock.LockUnavailable, inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except revlog.RevlogError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except util.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except KeyboardInterrupt:
        try:
            ui.warn(_("interrupted!\n"))
        except IOError, inst:
            # writing the warning itself can fail on a closed pipe
            if inst.errno == errno.EPIPE:
                if ui.debugflag:
                    ui.warn(_("\nbroken pipe\n"))
            else:
                raise
    except socket.error, inst:
        ui.warn(_("abort: %s\n") % inst[1])
    except IOError, inst:
        # IOError covers several shapes: urllib HTTP errors (code),
        # URL errors (reason), plain OS errors (strerror/filename)
        if hasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif hasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except: # it might be anything, for example a string
                reason = inst.reason
            ui.warn(_("abort: error: %s\n") % reason)
        elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
            if ui.debugflag:
                ui.warn(_("broken pipe\n"))
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError, inst:
        if getattr(inst, "filename", None):
            ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except util.UnexpectedOutput, inst:
        ui.warn(_("abort: %s") % inst[0])
        if not isinstance(inst[1], basestring):
            ui.warn(" %r\n" % (inst[1],))
        elif not inst[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst[1]))
    except util.Abort, inst:
        ui.warn(_("abort: %s\n") % inst)
    except TypeError, inst:
        # was this an argument error?
        tb = traceback.extract_tb(sys.exc_info()[2])
        if len(tb) > 2: # no
            raise
        ui.debug(inst, "\n")
        # NOTE(review): 'cmd' is not defined anywhere in this function's
        # scope, so this branch would itself raise NameError -- verify
        # against the dispatch() call site.
        ui.warn(_("%s: invalid arguments\n") % cmd)
        commands.help_(ui, cmd)
    except SystemExit, inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except:
        ui.warn(_("** unknown exception encountered, details follow\n"))
        ui.warn(_("** report bug details to "
                  "http://www.selenic.com/mercurial/bts\n"))
        ui.warn(_("** or mercurial@selenic.com\n"))
        ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
                % version.get_version())
        raise

    return -1
142
142
def findpossible(ui, cmd):
    """Map each matching command name to (aliases, command table entry).

    An exact alias match always counts; unless ui.strict is configured,
    any alias having cmd as a prefix matches as well.  Debug commands
    (or their aliases) are returned only when no normal command matched.
    """
    normal = {}
    debug = {}
    strict = ui.config("ui", "strict")
    for key in commands.table.keys():
        aliases = key.lstrip("^").split("|")
        match = None
        if cmd in aliases:
            match = cmd
        elif not strict:
            prefixed = [a for a in aliases if a.startswith(cmd)]
            if prefixed:
                match = prefixed[0]
        if match is None:
            continue
        entry = (aliases, commands.table[key])
        # debug commands are segregated so they never shadow normal ones
        if aliases[0].startswith("debug") or match.startswith("debug"):
            debug[match] = entry
        else:
            normal[match] = entry

    return normal or debug
171
171
def findcmd(ui, cmd):
    """Resolve cmd to its (aliases, command table entry) pair.

    Raises AmbiguousCommand when several commands match the shortcut and
    UnknownCommand when nothing matches at all.
    """
    matches = findpossible(ui, cmd)

    # an exact match always wins over prefix matches
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        names = matches.keys()
        names.sort()
        raise AmbiguousCommand(cmd, names)

    if matches:
        return matches.values()[0]

    raise UnknownCommand(cmd)
188
188
def findrepo():
    """Walk upward from the cwd looking for a '.hg' directory.

    Returns the repository root path, or None when the filesystem root
    is reached without finding one.
    """
    current = os.getcwd()
    while not os.path.isdir(os.path.join(current, ".hg")):
        parent = os.path.dirname(current)
        if parent == current:
            # reached the filesystem root
            return None
        current = parent

    return current
197
197
def parse(ui, args):
    """Parse a full hg command line.

    Returns (cmd, func, args, options, cmdoptions) where cmd is the
    canonical command name (None when no command was given), func its
    implementation (or None), args the remaining positional arguments,
    options the global option values and cmdoptions the command-specific
    option values.

    Raises ParseError on bad global or command options; findcmd() may
    propagate AmbiguousCommand/UnknownCommand.
    """
    options = {}
    cmdoptions = {}

    try:
        # first pass: global options only
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, i = findcmd(ui, cmd)
        cmd = aliases[0]
        # prepend configured per-command default arguments ([defaults])
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = shlex.split(defaults) + args
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        # second pass: command options plus globals, so either order works
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
235
235
def parseconfig(config):
    """Turn --config 'section.name=value' strings into tuples.

    Returns a list of (section, name, value) triples; raises util.Abort
    for any entry that does not have the section.name=value shape.
    """
    results = []
    for item in config:
        try:
            key, value = item.split('=', 1)
            section, name = key.split('.', 1)
            if not (section and name):
                raise IndexError
        except (IndexError, ValueError):
            raise util.Abort(_('malformed --config option: %s') % item)
        results.append((section, name, value))
    return results
249
249
def earlygetopt(aliases, args):
    """Scan args for the first option named in aliases and return its value.

    Arguments after a bare "--" terminator are ignored.  Returns None
    when none of the aliases is present, or when the alias appears as
    the very last argument and therefore has no value to return
    (previously that case raised IndexError).
    """
    if "--" in args:
        # nothing after "--" is an option
        args = args[:args.index("--")]
    for opt in aliases:
        if opt in args:
            pos = args.index(opt) + 1
            # guard: the option may appear with no following value
            if pos < len(args):
                return args[pos]
            return None
    return None
257
257
def dispatch(ui, args):
    """Set up the environment requested by early/global options, then run
    the command.

    Order matters here: --cwd is honoured first, then the repository
    path (-R/--repository/--repo or auto-detected) is used to read the
    local .hgrc into a child ui, extensions are loaded from that ui, the
    fallback encoding is applied, and only then is the command line
    fully parsed and executed via runcommand().
    """
    # check for cwd first
    cwd = earlygetopt(['--cwd'], args)
    if cwd:
        os.chdir(cwd)

    # read the local repository .hgrc into a local ui object
    # this will trigger its extensions to load
    path = earlygetopt(["-R", "--repository", "--repo"], args)
    if not path:
        path = findrepo() or ""
    lui = ui
    if path:
        try:
            lui = commands.ui.ui(parentui=ui)
            lui.readconfig(os.path.join(path, ".hg", "hgrc"))
        except IOError:
            # no readable local hgrc; keep using the parent ui
            pass

    extensions.loadall(lui)
    # check for fallback encoding
    fallback = lui.config('ui', 'fallbackencoding')
    if fallback:
        util._fallbackencoding = fallback

    cmd, func, args, options, cmdoptions = parse(ui, args)

    if options["encoding"]:
        util._encoding = options["encoding"]
    if options["encodingmode"]:
        util._encodingmode = options["encodingmode"]
    if options["time"]:
        def get_times():
            t = os.times()
            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                t = (t[0], t[1], t[2], t[3], time.clock())
            return t
        s = get_times()
        def print_time():
            t = get_times()
            ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
        # report timing when the process exits, whatever the exit path
        atexit.register(print_time)

    ui.updateopts(options["verbose"], options["debug"], options["quiet"],
                  not options["noninteractive"], options["traceback"],
                  parseconfig(options["config"]))

    if options['help']:
        return commands.help_(ui, cmd, options['version'])
    elif options['version']:
        return commands.version_(ui)
    elif not cmd:
        return commands.help_(ui, 'shortlist')

    if cmd not in commands.norepo.split():
        repo = None
        try:
            repo = hg.repository(ui, path=path)
            ui = repo.ui
            if not repo.local():
                raise util.Abort(_("repository '%s' is not local") % path)
        except hg.RepoError:
            # commands in optionalrepo may run with repo=None
            if cmd not in commands.optionalrepo.split():
                raise
        d = lambda: func(ui, repo, *args, **cmdoptions)
    else:
        d = lambda: func(ui, *args, **cmdoptions)

    return runcommand(ui, options, d)
324
328
def runcommand(ui, options, cmdfunc):
    """Execute cmdfunc(), optionally under a profiler.

    --profile runs it under hotshot and prints the top 40 entries;
    --lsprof uses the external lsprof module instead (aborting if it is
    not installed).  Returns whatever cmdfunc returns.
    """
    if options['profile']:
        # imported lazily: hotshot is only needed when profiling
        import hotshot, hotshot.stats
        prof = hotshot.Profile("hg.prof")
        try:
            try:
                return prof.runcall(cmdfunc)
            except:
                try:
                    ui.warn(_('exception raised - generating '
                              'profile anyway\n'))
                except:
                    # even the warning may fail (e.g. broken pipe)
                    pass
                raise
        finally:
            # always close and report, even when cmdfunc raised
            prof.close()
            stats = hotshot.stats.load("hg.prof")
            stats.strip_dirs()
            stats.sort_stats('time', 'calls')
            stats.print_stats(40)
    elif options['lsprof']:
        try:
            from mercurial import lsprof
        except ImportError:
            raise util.Abort(_(
                'lsprof not available - install from '
                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
        p = lsprof.Profiler()
        p.enable(subcalls=True)
        try:
            return cmdfunc()
        finally:
            p.disable()
            stats = lsprof.Stats(p.getstats())
            stats.sort()
            stats.pprint(top=10, file=sys.stderr, climit=5)
    else:
        return cmdfunc()
363
367
def bail_if_changed(repo):
    """Raise util.Abort when the working directory has uncommitted changes.

    Only the first four status fields (modified, added, removed,
    deleted) are considered.
    """
    for changes in repo.status()[:4]:
        if changes:
            raise util.Abort(_("outstanding uncommitted changes"))
368
372
369 def logmessage(opts):
373 def logmessage(opts):
370 """ get the log message according to -m and -l option """
374 """ get the log message according to -m and -l option """
371 message = opts['message']
375 message = opts['message']
372 logfile = opts['logfile']
376 logfile = opts['logfile']
373
377
374 if message and logfile:
378 if message and logfile:
375 raise util.Abort(_('options --message and --logfile are mutually '
379 raise util.Abort(_('options --message and --logfile are mutually '
376 'exclusive'))
380 'exclusive'))
377 if not message and logfile:
381 if not message and logfile:
378 try:
382 try:
379 if logfile == '-':
383 if logfile == '-':
380 message = sys.stdin.read()
384 message = sys.stdin.read()
381 else:
385 else:
382 message = open(logfile).read()
386 message = open(logfile).read()
383 except IOError, inst:
387 except IOError, inst:
384 raise util.Abort(_("can't read commit message '%s': %s") %
388 raise util.Abort(_("can't read commit message '%s': %s") %
385 (logfile, inst.strerror))
389 (logfile, inst.strerror))
386 return message
390 return message
387
391
def setremoteconfig(ui, opts):
    """Copy the --ssh/--remotecmd command line options into the ui config."""
    for key in ('ssh', 'remotecmd'):
        value = opts.get(key)
        if value:
            ui.setconfig("ui", key, value)
394
398
def parseurl(url, revs):
    """Split a 'url#branch' spec into (url, revs).

    Without a '#' fragment the revision list is returned unchanged (or
    None when it is empty); with one, the fragment is appended to revs.
    """
    if '#' in url:
        base, fragment = url.split('#', 1)
        return base, revs + [fragment]

    return url, (revs or None)
403
407
def revpair(repo, revs):
    """Return a (start, end) pair of nodes for a list of revision specs.

    end may be None, meaning "use the working directory".  With no specs
    the first working-directory parent is returned.  Accepts either one
    "start:end" range spec or up to two plain revision specs.
    """
    def lookup(val, defval):
        # an empty spec falls back to defval when one is provided
        if defval is not None and not val and val != 0:
            val = defval
        return repo.lookup(val)

    if not revs:
        return repo.dirstate.parents()[0], None

    if len(revs) == 1:
        if revrangesep in revs[0]:
            first, last = revs[0].split(revrangesep, 1)
            return (lookup(first, 0),
                    lookup(last, repo.changelog.count() - 1))
        return lookup(revs[0], None), None

    if len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        return lookup(revs[0], None), lookup(revs[1], None)

    raise util.Abort(_('too many revisions specified'))
431
435
def revrange(repo, revs):
    """Return a list of revision numbers for a list of revision specs.

    Each spec is either a single revision or an inclusive "start:end"
    range (which may run backwards).  Duplicates are dropped while
    preserving first-seen order.
    """
    def tonum(val, defval):
        # an empty spec falls back to defval when one is provided
        if defval is not None and not val and val != 0:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen = {}
    result = []
    for spec in revs:
        if revrangesep in spec:
            first, last = spec.split(revrangesep, 1)
            first = tonum(first, 0)
            last = tonum(last, repo.changelog.count() - 1)
            step = first > last and -1 or 1
            candidates = xrange(first, last + step, step)
        else:
            candidates = [tonum(spec, None)]
        for rev in candidates:
            if rev not in seen:
                seen[rev] = 1
                result.append(rev)

    return result
460
464
461 def make_filename(repo, pat, node,
465 def make_filename(repo, pat, node,
462 total=None, seqno=None, revwidth=None, pathname=None):
466 total=None, seqno=None, revwidth=None, pathname=None):
463 node_expander = {
467 node_expander = {
464 'H': lambda: hex(node),
468 'H': lambda: hex(node),
465 'R': lambda: str(repo.changelog.rev(node)),
469 'R': lambda: str(repo.changelog.rev(node)),
466 'h': lambda: short(node),
470 'h': lambda: short(node),
467 }
471 }
468 expander = {
472 expander = {
469 '%': lambda: '%',
473 '%': lambda: '%',
470 'b': lambda: os.path.basename(repo.root),
474 'b': lambda: os.path.basename(repo.root),
471 }
475 }
472
476
473 try:
477 try:
474 if node:
478 if node:
475 expander.update(node_expander)
479 expander.update(node_expander)
476 if node and revwidth is not None:
480 if node and revwidth is not None:
477 expander['r'] = (lambda:
481 expander['r'] = (lambda:
478 str(repo.changelog.rev(node)).zfill(revwidth))
482 str(repo.changelog.rev(node)).zfill(revwidth))
479 if total is not None:
483 if total is not None:
480 expander['N'] = lambda: str(total)
484 expander['N'] = lambda: str(total)
481 if seqno is not None:
485 if seqno is not None:
482 expander['n'] = lambda: str(seqno)
486 expander['n'] = lambda: str(seqno)
483 if total is not None and seqno is not None:
487 if total is not None and seqno is not None:
484 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
488 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
485 if pathname is not None:
489 if pathname is not None:
486 expander['s'] = lambda: os.path.basename(pathname)
490 expander['s'] = lambda: os.path.basename(pathname)
487 expander['d'] = lambda: os.path.dirname(pathname) or '.'
491 expander['d'] = lambda: os.path.dirname(pathname) or '.'
488 expander['p'] = lambda: pathname
492 expander['p'] = lambda: pathname
489
493
490 newname = []
494 newname = []
491 patlen = len(pat)
495 patlen = len(pat)
492 i = 0
496 i = 0
493 while i < patlen:
497 while i < patlen:
494 c = pat[i]
498 c = pat[i]
495 if c == '%':
499 if c == '%':
496 i += 1
500 i += 1
497 c = pat[i]
501 c = pat[i]
498 c = expander[c]()
502 c = expander[c]()
499 newname.append(c)
503 newname.append(c)
500 i += 1
504 i += 1
501 return ''.join(newname)
505 return ''.join(newname)
502 except KeyError, inst:
506 except KeyError, inst:
503 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
507 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
504 inst.args[0])
508 inst.args[0])
505
509
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Open the output/input target described by a %-pattern.

    '-' (or an empty pat) maps to stdout/stdin depending on mode; an
    object that already looks like a file for the requested mode is
    passed through unchanged; anything else is expanded with
    make_filename() and opened with mode.
    """
    if not pat or pat == '-':
        if 'w' in mode:
            return sys.stdout
        return sys.stdin
    if 'w' in mode and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    name = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(name, mode)
517
521
def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
    """Build a cmdmatcher for pats relative to the repository's cwd.

    Returns whatever util.cmdmatcher returns for the given include/
    exclude options.
    """
    cwd = repo.getcwd()
    include = opts.get('include')
    exclude = opts.get('exclude')
    return util.cmdmatcher(repo.root, cwd, pats or [], include, exclude,
                           globbed=globbed, default=default)
523
527
def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
         default=None):
    """Yield (src, filename, relative display path, exact?) for each file
    matched by pats/opts in the repository walk."""
    matched, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
                                          default=default)
    exact = dict.fromkeys(matched)
    cwd = repo.getcwd()
    walker = repo.walk(node=node, files=matched, match=matchfn,
                       badmatch=badmatch)
    for src, fn in walker:
        yield src, fn, repo.pathto(fn, cwd), fn in exact
533
537
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples'''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo.changectx()
    for a in added:
        aa = repo.wread(a)
        # the line list of the added file is invariant over the inner
        # loop; compute it once instead of once per removed candidate
        alines = mdiff.splitnewlines(aa)
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            matches = bdiff.blocks(aa, rr)
            for x1, x2, y1, y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            # score: fraction of combined content shared between the two
            lengths = len(aa) + len(rr)
            if lengths:
                myscore = equal * 2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
561
565
562 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
566 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
563 similarity=None):
567 similarity=None):
564 if dry_run is None:
568 if dry_run is None:
565 dry_run = opts.get('dry_run')
569 dry_run = opts.get('dry_run')
566 if similarity is None:
570 if similarity is None:
567 similarity = float(opts.get('similarity') or 0)
571 similarity = float(opts.get('similarity') or 0)
568 add, remove = [], []
572 add, remove = [], []
569 mapping = {}
573 mapping = {}
570 for src, abs, rel, exact in walk(repo, pats, opts):
574 for src, abs, rel, exact in walk(repo, pats, opts):
571 target = repo.wjoin(abs)
575 target = repo.wjoin(abs)
572 if src == 'f' and repo.dirstate.state(abs) == '?':
576 if src == 'f' and repo.dirstate.state(abs) == '?':
573 add.append(abs)
577 add.append(abs)
574 mapping[abs] = rel, exact
578 mapping[abs] = rel, exact
575 if repo.ui.verbose or not exact:
579 if repo.ui.verbose or not exact:
576 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
580 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
577 if repo.dirstate.state(abs) != 'r' and not util.lexists(target):
581 if repo.dirstate.state(abs) != 'r' and not util.lexists(target):
578 remove.append(abs)
582 remove.append(abs)
579 mapping[abs] = rel, exact
583 mapping[abs] = rel, exact
580 if repo.ui.verbose or not exact:
584 if repo.ui.verbose or not exact:
581 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
585 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
582 if not dry_run:
586 if not dry_run:
583 repo.add(add, wlock=wlock)
587 repo.add(add, wlock=wlock)
584 repo.remove(remove, wlock=wlock)
588 repo.remove(remove, wlock=wlock)
585 if similarity > 0:
589 if similarity > 0:
586 for old, new, score in findrenames(repo, add, remove, similarity):
590 for old, new, score in findrenames(repo, add, remove, similarity):
587 oldrel, oldexact = mapping[old]
591 oldrel, oldexact = mapping[old]
588 newrel, newexact = mapping[new]
592 newrel, newexact = mapping[new]
589 if repo.ui.verbose or not oldexact or not newexact:
593 if repo.ui.verbose or not oldexact or not newexact:
590 repo.ui.status(_('recording removal of %s as rename to %s '
594 repo.ui.status(_('recording removal of %s as rename to %s '
591 '(%d%% similar)\n') %
595 '(%d%% similar)\n') %
592 (oldrel, newrel, score * 100))
596 (oldrel, newrel, score * 100))
593 if not dry_run:
597 if not dry_run:
594 repo.copy(old, new, wlock=wlock)
598 repo.copy(old, new, wlock=wlock)
595
599
596 def service(opts, parentfn=None, initfn=None, runfn=None):
600 def service(opts, parentfn=None, initfn=None, runfn=None):
597 '''Run a command as a service.'''
601 '''Run a command as a service.'''
598
602
599 if opts['daemon'] and not opts['daemon_pipefds']:
603 if opts['daemon'] and not opts['daemon_pipefds']:
600 rfd, wfd = os.pipe()
604 rfd, wfd = os.pipe()
601 args = sys.argv[:]
605 args = sys.argv[:]
602 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
606 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
603 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
607 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
604 args[0], args)
608 args[0], args)
605 os.close(wfd)
609 os.close(wfd)
606 os.read(rfd, 1)
610 os.read(rfd, 1)
607 if parentfn:
611 if parentfn:
608 return parentfn(pid)
612 return parentfn(pid)
609 else:
613 else:
610 os._exit(0)
614 os._exit(0)
611
615
612 if initfn:
616 if initfn:
613 initfn()
617 initfn()
614
618
615 if opts['pid_file']:
619 if opts['pid_file']:
616 fp = open(opts['pid_file'], 'w')
620 fp = open(opts['pid_file'], 'w')
617 fp.write(str(os.getpid()) + '\n')
621 fp.write(str(os.getpid()) + '\n')
618 fp.close()
622 fp.close()
619
623
620 if opts['daemon_pipefds']:
624 if opts['daemon_pipefds']:
621 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
625 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
622 os.close(rfd)
626 os.close(rfd)
623 try:
627 try:
624 os.setsid()
628 os.setsid()
625 except AttributeError:
629 except AttributeError:
626 pass
630 pass
627 os.write(wfd, 'y')
631 os.write(wfd, 'y')
628 os.close(wfd)
632 os.close(wfd)
629 sys.stdout.flush()
633 sys.stdout.flush()
630 sys.stderr.flush()
634 sys.stderr.flush()
631 fd = os.open(util.nulldev, os.O_RDWR)
635 fd = os.open(util.nulldev, os.O_RDWR)
632 if fd != 0: os.dup2(fd, 0)
636 if fd != 0: os.dup2(fd, 0)
633 if fd != 1: os.dup2(fd, 1)
637 if fd != 1: os.dup2(fd, 1)
634 if fd != 2: os.dup2(fd, 2)
638 if fd != 2: os.dup2(fd, 2)
635 if fd not in (0, 1, 2): os.close(fd)
639 if fd not in (0, 1, 2): os.close(fd)
636
640
637 if runfn:
641 if runfn:
638 return runfn()
642 return runfn()
639
643
640 class changeset_printer(object):
644 class changeset_printer(object):
641 '''show changeset information when templating not requested.'''
645 '''show changeset information when templating not requested.'''
642
646
643 def __init__(self, ui, repo, patch, buffered):
647 def __init__(self, ui, repo, patch, buffered):
644 self.ui = ui
648 self.ui = ui
645 self.repo = repo
649 self.repo = repo
646 self.buffered = buffered
650 self.buffered = buffered
647 self.patch = patch
651 self.patch = patch
648 self.header = {}
652 self.header = {}
649 self.hunk = {}
653 self.hunk = {}
650 self.lastheader = None
654 self.lastheader = None
651
655
652 def flush(self, rev):
656 def flush(self, rev):
653 if rev in self.header:
657 if rev in self.header:
654 h = self.header[rev]
658 h = self.header[rev]
655 if h != self.lastheader:
659 if h != self.lastheader:
656 self.lastheader = h
660 self.lastheader = h
657 self.ui.write(h)
661 self.ui.write(h)
658 del self.header[rev]
662 del self.header[rev]
659 if rev in self.hunk:
663 if rev in self.hunk:
660 self.ui.write(self.hunk[rev])
664 self.ui.write(self.hunk[rev])
661 del self.hunk[rev]
665 del self.hunk[rev]
662 return 1
666 return 1
663 return 0
667 return 0
664
668
665 def show(self, rev=0, changenode=None, copies=(), **props):
669 def show(self, rev=0, changenode=None, copies=(), **props):
666 if self.buffered:
670 if self.buffered:
667 self.ui.pushbuffer()
671 self.ui.pushbuffer()
668 self._show(rev, changenode, copies, props)
672 self._show(rev, changenode, copies, props)
669 self.hunk[rev] = self.ui.popbuffer()
673 self.hunk[rev] = self.ui.popbuffer()
670 else:
674 else:
671 self._show(rev, changenode, copies, props)
675 self._show(rev, changenode, copies, props)
672
676
673 def _show(self, rev, changenode, copies, props):
677 def _show(self, rev, changenode, copies, props):
674 '''show a single changeset or file revision'''
678 '''show a single changeset or file revision'''
675 log = self.repo.changelog
679 log = self.repo.changelog
676 if changenode is None:
680 if changenode is None:
677 changenode = log.node(rev)
681 changenode = log.node(rev)
678 elif not rev:
682 elif not rev:
679 rev = log.rev(changenode)
683 rev = log.rev(changenode)
680
684
681 if self.ui.quiet:
685 if self.ui.quiet:
682 self.ui.write("%d:%s\n" % (rev, short(changenode)))
686 self.ui.write("%d:%s\n" % (rev, short(changenode)))
683 return
687 return
684
688
685 changes = log.read(changenode)
689 changes = log.read(changenode)
686 date = util.datestr(changes[2])
690 date = util.datestr(changes[2])
687 extra = changes[5]
691 extra = changes[5]
688 branch = extra.get("branch")
692 branch = extra.get("branch")
689
693
690 hexfunc = self.ui.debugflag and hex or short
694 hexfunc = self.ui.debugflag and hex or short
691
695
692 parents = log.parentrevs(rev)
696 parents = log.parentrevs(rev)
693 if not self.ui.debugflag:
697 if not self.ui.debugflag:
694 if parents[1] == nullrev:
698 if parents[1] == nullrev:
695 if parents[0] >= rev - 1:
699 if parents[0] >= rev - 1:
696 parents = []
700 parents = []
697 else:
701 else:
698 parents = [parents[0]]
702 parents = [parents[0]]
699 parents = [(p, hexfunc(log.node(p))) for p in parents]
703 parents = [(p, hexfunc(log.node(p))) for p in parents]
700
704
701 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
705 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
702
706
703 # don't show the default branch name
707 # don't show the default branch name
704 if branch != 'default':
708 if branch != 'default':
705 branch = util.tolocal(branch)
709 branch = util.tolocal(branch)
706 self.ui.write(_("branch: %s\n") % branch)
710 self.ui.write(_("branch: %s\n") % branch)
707 for tag in self.repo.nodetags(changenode):
711 for tag in self.repo.nodetags(changenode):
708 self.ui.write(_("tag: %s\n") % tag)
712 self.ui.write(_("tag: %s\n") % tag)
709 for parent in parents:
713 for parent in parents:
710 self.ui.write(_("parent: %d:%s\n") % parent)
714 self.ui.write(_("parent: %d:%s\n") % parent)
711
715
712 if self.ui.debugflag:
716 if self.ui.debugflag:
713 self.ui.write(_("manifest: %d:%s\n") %
717 self.ui.write(_("manifest: %d:%s\n") %
714 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
718 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
715 self.ui.write(_("user: %s\n") % changes[1])
719 self.ui.write(_("user: %s\n") % changes[1])
716 self.ui.write(_("date: %s\n") % date)
720 self.ui.write(_("date: %s\n") % date)
717
721
718 if self.ui.debugflag:
722 if self.ui.debugflag:
719 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
723 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
720 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
724 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
721 files):
725 files):
722 if value:
726 if value:
723 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
727 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
724 elif changes[3] and self.ui.verbose:
728 elif changes[3] and self.ui.verbose:
725 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
729 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
726 if copies and self.ui.verbose:
730 if copies and self.ui.verbose:
727 copies = ['%s (%s)' % c for c in copies]
731 copies = ['%s (%s)' % c for c in copies]
728 self.ui.write(_("copies: %s\n") % ' '.join(copies))
732 self.ui.write(_("copies: %s\n") % ' '.join(copies))
729
733
730 if extra and self.ui.debugflag:
734 if extra and self.ui.debugflag:
731 extraitems = extra.items()
735 extraitems = extra.items()
732 extraitems.sort()
736 extraitems.sort()
733 for key, value in extraitems:
737 for key, value in extraitems:
734 self.ui.write(_("extra: %s=%s\n")
738 self.ui.write(_("extra: %s=%s\n")
735 % (key, value.encode('string_escape')))
739 % (key, value.encode('string_escape')))
736
740
737 description = changes[4].strip()
741 description = changes[4].strip()
738 if description:
742 if description:
739 if self.ui.verbose:
743 if self.ui.verbose:
740 self.ui.write(_("description:\n"))
744 self.ui.write(_("description:\n"))
741 self.ui.write(description)
745 self.ui.write(description)
742 self.ui.write("\n\n")
746 self.ui.write("\n\n")
743 else:
747 else:
744 self.ui.write(_("summary: %s\n") %
748 self.ui.write(_("summary: %s\n") %
745 description.splitlines()[0])
749 description.splitlines()[0])
746 self.ui.write("\n")
750 self.ui.write("\n")
747
751
748 self.showpatch(changenode)
752 self.showpatch(changenode)
749
753
750 def showpatch(self, node):
754 def showpatch(self, node):
751 if self.patch:
755 if self.patch:
752 prev = self.repo.changelog.parents(node)[0]
756 prev = self.repo.changelog.parents(node)[0]
753 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
757 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
754 self.ui.write("\n")
758 self.ui.write("\n")
755
759
756 class changeset_templater(changeset_printer):
760 class changeset_templater(changeset_printer):
757 '''format changeset information.'''
761 '''format changeset information.'''
758
762
759 def __init__(self, ui, repo, patch, mapfile, buffered):
763 def __init__(self, ui, repo, patch, mapfile, buffered):
760 changeset_printer.__init__(self, ui, repo, patch, buffered)
764 changeset_printer.__init__(self, ui, repo, patch, buffered)
761 filters = templater.common_filters.copy()
765 filters = templater.common_filters.copy()
762 filters['formatnode'] = (ui.debugflag and (lambda x: x)
766 filters['formatnode'] = (ui.debugflag and (lambda x: x)
763 or (lambda x: x[:12]))
767 or (lambda x: x[:12]))
764 self.t = templater.templater(mapfile, filters,
768 self.t = templater.templater(mapfile, filters,
765 cache={
769 cache={
766 'parent': '{rev}:{node|formatnode} ',
770 'parent': '{rev}:{node|formatnode} ',
767 'manifest': '{rev}:{node|formatnode}',
771 'manifest': '{rev}:{node|formatnode}',
768 'filecopy': '{name} ({source})'})
772 'filecopy': '{name} ({source})'})
769
773
770 def use_template(self, t):
774 def use_template(self, t):
771 '''set template string to use'''
775 '''set template string to use'''
772 self.t.cache['changeset'] = t
776 self.t.cache['changeset'] = t
773
777
774 def _show(self, rev, changenode, copies, props):
778 def _show(self, rev, changenode, copies, props):
775 '''show a single changeset or file revision'''
779 '''show a single changeset or file revision'''
776 log = self.repo.changelog
780 log = self.repo.changelog
777 if changenode is None:
781 if changenode is None:
778 changenode = log.node(rev)
782 changenode = log.node(rev)
779 elif not rev:
783 elif not rev:
780 rev = log.rev(changenode)
784 rev = log.rev(changenode)
781
785
782 changes = log.read(changenode)
786 changes = log.read(changenode)
783
787
784 def showlist(name, values, plural=None, **args):
788 def showlist(name, values, plural=None, **args):
785 '''expand set of values.
789 '''expand set of values.
786 name is name of key in template map.
790 name is name of key in template map.
787 values is list of strings or dicts.
791 values is list of strings or dicts.
788 plural is plural of name, if not simply name + 's'.
792 plural is plural of name, if not simply name + 's'.
789
793
790 expansion works like this, given name 'foo'.
794 expansion works like this, given name 'foo'.
791
795
792 if values is empty, expand 'no_foos'.
796 if values is empty, expand 'no_foos'.
793
797
794 if 'foo' not in template map, return values as a string,
798 if 'foo' not in template map, return values as a string,
795 joined by space.
799 joined by space.
796
800
797 expand 'start_foos'.
801 expand 'start_foos'.
798
802
799 for each value, expand 'foo'. if 'last_foo' in template
803 for each value, expand 'foo'. if 'last_foo' in template
800 map, expand it instead of 'foo' for last key.
804 map, expand it instead of 'foo' for last key.
801
805
802 expand 'end_foos'.
806 expand 'end_foos'.
803 '''
807 '''
804 if plural: names = plural
808 if plural: names = plural
805 else: names = name + 's'
809 else: names = name + 's'
806 if not values:
810 if not values:
807 noname = 'no_' + names
811 noname = 'no_' + names
808 if noname in self.t:
812 if noname in self.t:
809 yield self.t(noname, **args)
813 yield self.t(noname, **args)
810 return
814 return
811 if name not in self.t:
815 if name not in self.t:
812 if isinstance(values[0], str):
816 if isinstance(values[0], str):
813 yield ' '.join(values)
817 yield ' '.join(values)
814 else:
818 else:
815 for v in values:
819 for v in values:
816 yield dict(v, **args)
820 yield dict(v, **args)
817 return
821 return
818 startname = 'start_' + names
822 startname = 'start_' + names
819 if startname in self.t:
823 if startname in self.t:
820 yield self.t(startname, **args)
824 yield self.t(startname, **args)
821 vargs = args.copy()
825 vargs = args.copy()
822 def one(v, tag=name):
826 def one(v, tag=name):
823 try:
827 try:
824 vargs.update(v)
828 vargs.update(v)
825 except (AttributeError, ValueError):
829 except (AttributeError, ValueError):
826 try:
830 try:
827 for a, b in v:
831 for a, b in v:
828 vargs[a] = b
832 vargs[a] = b
829 except ValueError:
833 except ValueError:
830 vargs[name] = v
834 vargs[name] = v
831 return self.t(tag, **vargs)
835 return self.t(tag, **vargs)
832 lastname = 'last_' + name
836 lastname = 'last_' + name
833 if lastname in self.t:
837 if lastname in self.t:
834 last = values.pop()
838 last = values.pop()
835 else:
839 else:
836 last = None
840 last = None
837 for v in values:
841 for v in values:
838 yield one(v)
842 yield one(v)
839 if last is not None:
843 if last is not None:
840 yield one(last, tag=lastname)
844 yield one(last, tag=lastname)
841 endname = 'end_' + names
845 endname = 'end_' + names
842 if endname in self.t:
846 if endname in self.t:
843 yield self.t(endname, **args)
847 yield self.t(endname, **args)
844
848
845 def showbranches(**args):
849 def showbranches(**args):
846 branch = changes[5].get("branch")
850 branch = changes[5].get("branch")
847 if branch != 'default':
851 if branch != 'default':
848 branch = util.tolocal(branch)
852 branch = util.tolocal(branch)
849 return showlist('branch', [branch], plural='branches', **args)
853 return showlist('branch', [branch], plural='branches', **args)
850
854
851 def showparents(**args):
855 def showparents(**args):
852 parents = [[('rev', log.rev(p)), ('node', hex(p))]
856 parents = [[('rev', log.rev(p)), ('node', hex(p))]
853 for p in log.parents(changenode)
857 for p in log.parents(changenode)
854 if self.ui.debugflag or p != nullid]
858 if self.ui.debugflag or p != nullid]
855 if (not self.ui.debugflag and len(parents) == 1 and
859 if (not self.ui.debugflag and len(parents) == 1 and
856 parents[0][0][1] == rev - 1):
860 parents[0][0][1] == rev - 1):
857 return
861 return
858 return showlist('parent', parents, **args)
862 return showlist('parent', parents, **args)
859
863
860 def showtags(**args):
864 def showtags(**args):
861 return showlist('tag', self.repo.nodetags(changenode), **args)
865 return showlist('tag', self.repo.nodetags(changenode), **args)
862
866
863 def showextras(**args):
867 def showextras(**args):
864 extras = changes[5].items()
868 extras = changes[5].items()
865 extras.sort()
869 extras.sort()
866 for key, value in extras:
870 for key, value in extras:
867 args = args.copy()
871 args = args.copy()
868 args.update(dict(key=key, value=value))
872 args.update(dict(key=key, value=value))
869 yield self.t('extra', **args)
873 yield self.t('extra', **args)
870
874
871 def showcopies(**args):
875 def showcopies(**args):
872 c = [{'name': x[0], 'source': x[1]} for x in copies]
876 c = [{'name': x[0], 'source': x[1]} for x in copies]
873 return showlist('file_copy', c, plural='file_copies', **args)
877 return showlist('file_copy', c, plural='file_copies', **args)
874
878
875 if self.ui.debugflag:
879 if self.ui.debugflag:
876 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
880 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
877 def showfiles(**args):
881 def showfiles(**args):
878 return showlist('file', files[0], **args)
882 return showlist('file', files[0], **args)
879 def showadds(**args):
883 def showadds(**args):
880 return showlist('file_add', files[1], **args)
884 return showlist('file_add', files[1], **args)
881 def showdels(**args):
885 def showdels(**args):
882 return showlist('file_del', files[2], **args)
886 return showlist('file_del', files[2], **args)
883 def showmanifest(**args):
887 def showmanifest(**args):
884 args = args.copy()
888 args = args.copy()
885 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
889 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
886 node=hex(changes[0])))
890 node=hex(changes[0])))
887 return self.t('manifest', **args)
891 return self.t('manifest', **args)
888 else:
892 else:
889 def showfiles(**args):
893 def showfiles(**args):
890 return showlist('file', changes[3], **args)
894 return showlist('file', changes[3], **args)
891 showadds = ''
895 showadds = ''
892 showdels = ''
896 showdels = ''
893 showmanifest = ''
897 showmanifest = ''
894
898
895 defprops = {
899 defprops = {
896 'author': changes[1],
900 'author': changes[1],
897 'branches': showbranches,
901 'branches': showbranches,
898 'date': changes[2],
902 'date': changes[2],
899 'desc': changes[4],
903 'desc': changes[4],
900 'file_adds': showadds,
904 'file_adds': showadds,
901 'file_dels': showdels,
905 'file_dels': showdels,
902 'files': showfiles,
906 'files': showfiles,
903 'file_copies': showcopies,
907 'file_copies': showcopies,
904 'manifest': showmanifest,
908 'manifest': showmanifest,
905 'node': hex(changenode),
909 'node': hex(changenode),
906 'parents': showparents,
910 'parents': showparents,
907 'rev': rev,
911 'rev': rev,
908 'tags': showtags,
912 'tags': showtags,
909 'extras': showextras,
913 'extras': showextras,
910 }
914 }
911 props = props.copy()
915 props = props.copy()
912 props.update(defprops)
916 props.update(defprops)
913
917
914 try:
918 try:
915 if self.ui.debugflag and 'header_debug' in self.t:
919 if self.ui.debugflag and 'header_debug' in self.t:
916 key = 'header_debug'
920 key = 'header_debug'
917 elif self.ui.quiet and 'header_quiet' in self.t:
921 elif self.ui.quiet and 'header_quiet' in self.t:
918 key = 'header_quiet'
922 key = 'header_quiet'
919 elif self.ui.verbose and 'header_verbose' in self.t:
923 elif self.ui.verbose and 'header_verbose' in self.t:
920 key = 'header_verbose'
924 key = 'header_verbose'
921 elif 'header' in self.t:
925 elif 'header' in self.t:
922 key = 'header'
926 key = 'header'
923 else:
927 else:
924 key = ''
928 key = ''
925 if key:
929 if key:
926 h = templater.stringify(self.t(key, **props))
930 h = templater.stringify(self.t(key, **props))
927 if self.buffered:
931 if self.buffered:
928 self.header[rev] = h
932 self.header[rev] = h
929 else:
933 else:
930 self.ui.write(h)
934 self.ui.write(h)
931 if self.ui.debugflag and 'changeset_debug' in self.t:
935 if self.ui.debugflag and 'changeset_debug' in self.t:
932 key = 'changeset_debug'
936 key = 'changeset_debug'
933 elif self.ui.quiet and 'changeset_quiet' in self.t:
937 elif self.ui.quiet and 'changeset_quiet' in self.t:
934 key = 'changeset_quiet'
938 key = 'changeset_quiet'
935 elif self.ui.verbose and 'changeset_verbose' in self.t:
939 elif self.ui.verbose and 'changeset_verbose' in self.t:
936 key = 'changeset_verbose'
940 key = 'changeset_verbose'
937 else:
941 else:
938 key = 'changeset'
942 key = 'changeset'
939 self.ui.write(templater.stringify(self.t(key, **props)))
943 self.ui.write(templater.stringify(self.t(key, **props)))
940 self.showpatch(changenode)
944 self.showpatch(changenode)
941 except KeyError, inst:
945 except KeyError, inst:
942 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
946 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
943 inst.args[0]))
947 inst.args[0]))
944 except SyntaxError, inst:
948 except SyntaxError, inst:
945 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
949 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
946
950
947 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
951 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
948 """show one changeset using template or regular display.
952 """show one changeset using template or regular display.
949
953
950 Display format will be the first non-empty hit of:
954 Display format will be the first non-empty hit of:
951 1. option 'template'
955 1. option 'template'
952 2. option 'style'
956 2. option 'style'
953 3. [ui] setting 'logtemplate'
957 3. [ui] setting 'logtemplate'
954 4. [ui] setting 'style'
958 4. [ui] setting 'style'
955 If all of these values are either the unset or the empty string,
959 If all of these values are either the unset or the empty string,
956 regular display via changeset_printer() is done.
960 regular display via changeset_printer() is done.
957 """
961 """
958 # options
962 # options
959 patch = False
963 patch = False
960 if opts.get('patch'):
964 if opts.get('patch'):
961 patch = matchfn or util.always
965 patch = matchfn or util.always
962
966
963 tmpl = opts.get('template')
967 tmpl = opts.get('template')
964 mapfile = None
968 mapfile = None
965 if tmpl:
969 if tmpl:
966 tmpl = templater.parsestring(tmpl, quoted=False)
970 tmpl = templater.parsestring(tmpl, quoted=False)
967 else:
971 else:
968 mapfile = opts.get('style')
972 mapfile = opts.get('style')
969 # ui settings
973 # ui settings
970 if not mapfile:
974 if not mapfile:
971 tmpl = ui.config('ui', 'logtemplate')
975 tmpl = ui.config('ui', 'logtemplate')
972 if tmpl:
976 if tmpl:
973 tmpl = templater.parsestring(tmpl)
977 tmpl = templater.parsestring(tmpl)
974 else:
978 else:
975 mapfile = ui.config('ui', 'style')
979 mapfile = ui.config('ui', 'style')
976
980
977 if tmpl or mapfile:
981 if tmpl or mapfile:
978 if mapfile:
982 if mapfile:
979 if not os.path.split(mapfile)[0]:
983 if not os.path.split(mapfile)[0]:
980 mapname = (templater.templatepath('map-cmdline.' + mapfile)
984 mapname = (templater.templatepath('map-cmdline.' + mapfile)
981 or templater.templatepath(mapfile))
985 or templater.templatepath(mapfile))
982 if mapname: mapfile = mapname
986 if mapname: mapfile = mapname
983 try:
987 try:
984 t = changeset_templater(ui, repo, patch, mapfile, buffered)
988 t = changeset_templater(ui, repo, patch, mapfile, buffered)
985 except SyntaxError, inst:
989 except SyntaxError, inst:
986 raise util.Abort(inst.args[0])
990 raise util.Abort(inst.args[0])
987 if tmpl: t.use_template(tmpl)
991 if tmpl: t.use_template(tmpl)
988 return t
992 return t
989 return changeset_printer(ui, repo, patch, buffered)
993 return changeset_printer(ui, repo, patch, buffered)
990
994
991 def finddate(ui, repo, date):
995 def finddate(ui, repo, date):
992 """Find the tipmost changeset that matches the given date spec"""
996 """Find the tipmost changeset that matches the given date spec"""
993 df = util.matchdate(date + " to " + date)
997 df = util.matchdate(date + " to " + date)
994 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
998 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
995 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
999 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
996 results = {}
1000 results = {}
997 for st, rev, fns in changeiter:
1001 for st, rev, fns in changeiter:
998 if st == 'add':
1002 if st == 'add':
999 d = get(rev)[2]
1003 d = get(rev)[2]
1000 if df(d[0]):
1004 if df(d[0]):
1001 results[rev] = d
1005 results[rev] = d
1002 elif st == 'iter':
1006 elif st == 'iter':
1003 if rev in results:
1007 if rev in results:
1004 ui.status("Found revision %s from %s\n" %
1008 ui.status("Found revision %s from %s\n" %
1005 (rev, util.datestr(results[rev])))
1009 (rev, util.datestr(results[rev])))
1006 return str(rev)
1010 return str(rev)
1007
1011
1008 raise util.Abort(_("revision matching date not found"))
1012 raise util.Abort(_("revision matching date not found"))
1009
1013
1010 def walkchangerevs(ui, repo, pats, change, opts):
1014 def walkchangerevs(ui, repo, pats, change, opts):
1011 '''Iterate over files and the revs they changed in.
1015 '''Iterate over files and the revs they changed in.
1012
1016
1013 Callers most commonly need to iterate backwards over the history
1017 Callers most commonly need to iterate backwards over the history
1014 it is interested in. Doing so has awful (quadratic-looking)
1018 it is interested in. Doing so has awful (quadratic-looking)
1015 performance, so we use iterators in a "windowed" way.
1019 performance, so we use iterators in a "windowed" way.
1016
1020
1017 We walk a window of revisions in the desired order. Within the
1021 We walk a window of revisions in the desired order. Within the
1018 window, we first walk forwards to gather data, then in the desired
1022 window, we first walk forwards to gather data, then in the desired
1019 order (usually backwards) to display it.
1023 order (usually backwards) to display it.
1020
1024
1021 This function returns an (iterator, matchfn) tuple. The iterator
1025 This function returns an (iterator, matchfn) tuple. The iterator
1022 yields 3-tuples. They will be of one of the following forms:
1026 yields 3-tuples. They will be of one of the following forms:
1023
1027
1024 "window", incrementing, lastrev: stepping through a window,
1028 "window", incrementing, lastrev: stepping through a window,
1025 positive if walking forwards through revs, last rev in the
1029 positive if walking forwards through revs, last rev in the
1026 sequence iterated over - use to reset state for the current window
1030 sequence iterated over - use to reset state for the current window
1027
1031
1028 "add", rev, fns: out-of-order traversal of the given file names
1032 "add", rev, fns: out-of-order traversal of the given file names
1029 fns, which changed during revision rev - use to gather data for
1033 fns, which changed during revision rev - use to gather data for
1030 possible display
1034 possible display
1031
1035
1032 "iter", rev, None: in-order traversal of the revs earlier iterated
1036 "iter", rev, None: in-order traversal of the revs earlier iterated
1033 over with "add" - use to display data'''
1037 over with "add" - use to display data'''
1034
1038
1035 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1039 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1036 if start < end:
1040 if start < end:
1037 while start < end:
1041 while start < end:
1038 yield start, min(windowsize, end-start)
1042 yield start, min(windowsize, end-start)
1039 start += windowsize
1043 start += windowsize
1040 if windowsize < sizelimit:
1044 if windowsize < sizelimit:
1041 windowsize *= 2
1045 windowsize *= 2
1042 else:
1046 else:
1043 while start > end:
1047 while start > end:
1044 yield start, min(windowsize, start-end-1)
1048 yield start, min(windowsize, start-end-1)
1045 start -= windowsize
1049 start -= windowsize
1046 if windowsize < sizelimit:
1050 if windowsize < sizelimit:
1047 windowsize *= 2
1051 windowsize *= 2
1048
1052
1049 files, matchfn, anypats = matchpats(repo, pats, opts)
1053 files, matchfn, anypats = matchpats(repo, pats, opts)
1050 follow = opts.get('follow') or opts.get('follow_first')
1054 follow = opts.get('follow') or opts.get('follow_first')
1051
1055
1052 if repo.changelog.count() == 0:
1056 if repo.changelog.count() == 0:
1053 return [], matchfn
1057 return [], matchfn
1054
1058
1055 if follow:
1059 if follow:
1056 defrange = '%s:0' % repo.changectx().rev()
1060 defrange = '%s:0' % repo.changectx().rev()
1057 else:
1061 else:
1058 defrange = 'tip:0'
1062 defrange = 'tip:0'
1059 revs = revrange(repo, opts['rev'] or [defrange])
1063 revs = revrange(repo, opts['rev'] or [defrange])
1060 wanted = {}
1064 wanted = {}
1061 slowpath = anypats or opts.get('removed')
1065 slowpath = anypats or opts.get('removed')
1062 fncache = {}
1066 fncache = {}
1063
1067
1064 if not slowpath and not files:
1068 if not slowpath and not files:
1065 # No files, no patterns. Display all revs.
1069 # No files, no patterns. Display all revs.
1066 wanted = dict.fromkeys(revs)
1070 wanted = dict.fromkeys(revs)
1067 copies = []
1071 copies = []
1068 if not slowpath:
1072 if not slowpath:
1069 # Only files, no patterns. Check the history of each file.
1073 # Only files, no patterns. Check the history of each file.
1070 def filerevgen(filelog, node):
1074 def filerevgen(filelog, node):
1071 cl_count = repo.changelog.count()
1075 cl_count = repo.changelog.count()
1072 if node is None:
1076 if node is None:
1073 last = filelog.count() - 1
1077 last = filelog.count() - 1
1074 else:
1078 else:
1075 last = filelog.rev(node)
1079 last = filelog.rev(node)
1076 for i, window in increasing_windows(last, nullrev):
1080 for i, window in increasing_windows(last, nullrev):
1077 revs = []
1081 revs = []
1078 for j in xrange(i - window, i + 1):
1082 for j in xrange(i - window, i + 1):
1079 n = filelog.node(j)
1083 n = filelog.node(j)
1080 revs.append((filelog.linkrev(n),
1084 revs.append((filelog.linkrev(n),
1081 follow and filelog.renamed(n)))
1085 follow and filelog.renamed(n)))
1082 revs.reverse()
1086 revs.reverse()
1083 for rev in revs:
1087 for rev in revs:
1084 # only yield rev for which we have the changelog, it can
1088 # only yield rev for which we have the changelog, it can
1085 # happen while doing "hg log" during a pull or commit
1089 # happen while doing "hg log" during a pull or commit
1086 if rev[0] < cl_count:
1090 if rev[0] < cl_count:
1087 yield rev
1091 yield rev
1088 def iterfiles():
1092 def iterfiles():
1089 for filename in files:
1093 for filename in files:
1090 yield filename, None
1094 yield filename, None
1091 for filename_node in copies:
1095 for filename_node in copies:
1092 yield filename_node
1096 yield filename_node
1093 minrev, maxrev = min(revs), max(revs)
1097 minrev, maxrev = min(revs), max(revs)
1094 for file_, node in iterfiles():
1098 for file_, node in iterfiles():
1095 filelog = repo.file(file_)
1099 filelog = repo.file(file_)
1096 # A zero count may be a directory or deleted file, so
1100 # A zero count may be a directory or deleted file, so
1097 # try to find matching entries on the slow path.
1101 # try to find matching entries on the slow path.
1098 if filelog.count() == 0:
1102 if filelog.count() == 0:
1099 slowpath = True
1103 slowpath = True
1100 break
1104 break
1101 for rev, copied in filerevgen(filelog, node):
1105 for rev, copied in filerevgen(filelog, node):
1102 if rev <= maxrev:
1106 if rev <= maxrev:
1103 if rev < minrev:
1107 if rev < minrev:
1104 break
1108 break
1105 fncache.setdefault(rev, [])
1109 fncache.setdefault(rev, [])
1106 fncache[rev].append(file_)
1110 fncache[rev].append(file_)
1107 wanted[rev] = 1
1111 wanted[rev] = 1
1108 if follow and copied:
1112 if follow and copied:
1109 copies.append(copied)
1113 copies.append(copied)
1110 if slowpath:
1114 if slowpath:
1111 if follow:
1115 if follow:
1112 raise util.Abort(_('can only follow copies/renames for explicit '
1116 raise util.Abort(_('can only follow copies/renames for explicit '
1113 'file names'))
1117 'file names'))
1114
1118
1115 # The slow path checks files modified in every changeset.
1119 # The slow path checks files modified in every changeset.
1116 def changerevgen():
1120 def changerevgen():
1117 for i, window in increasing_windows(repo.changelog.count()-1,
1121 for i, window in increasing_windows(repo.changelog.count()-1,
1118 nullrev):
1122 nullrev):
1119 for j in xrange(i - window, i + 1):
1123 for j in xrange(i - window, i + 1):
1120 yield j, change(j)[3]
1124 yield j, change(j)[3]
1121
1125
1122 for rev, changefiles in changerevgen():
1126 for rev, changefiles in changerevgen():
1123 matches = filter(matchfn, changefiles)
1127 matches = filter(matchfn, changefiles)
1124 if matches:
1128 if matches:
1125 fncache[rev] = matches
1129 fncache[rev] = matches
1126 wanted[rev] = 1
1130 wanted[rev] = 1
1127
1131
1128 class followfilter:
1132 class followfilter:
1129 def __init__(self, onlyfirst=False):
1133 def __init__(self, onlyfirst=False):
1130 self.startrev = nullrev
1134 self.startrev = nullrev
1131 self.roots = []
1135 self.roots = []
1132 self.onlyfirst = onlyfirst
1136 self.onlyfirst = onlyfirst
1133
1137
1134 def match(self, rev):
1138 def match(self, rev):
1135 def realparents(rev):
1139 def realparents(rev):
1136 if self.onlyfirst:
1140 if self.onlyfirst:
1137 return repo.changelog.parentrevs(rev)[0:1]
1141 return repo.changelog.parentrevs(rev)[0:1]
1138 else:
1142 else:
1139 return filter(lambda x: x != nullrev,
1143 return filter(lambda x: x != nullrev,
1140 repo.changelog.parentrevs(rev))
1144 repo.changelog.parentrevs(rev))
1141
1145
1142 if self.startrev == nullrev:
1146 if self.startrev == nullrev:
1143 self.startrev = rev
1147 self.startrev = rev
1144 return True
1148 return True
1145
1149
1146 if rev > self.startrev:
1150 if rev > self.startrev:
1147 # forward: all descendants
1151 # forward: all descendants
1148 if not self.roots:
1152 if not self.roots:
1149 self.roots.append(self.startrev)
1153 self.roots.append(self.startrev)
1150 for parent in realparents(rev):
1154 for parent in realparents(rev):
1151 if parent in self.roots:
1155 if parent in self.roots:
1152 self.roots.append(rev)
1156 self.roots.append(rev)
1153 return True
1157 return True
1154 else:
1158 else:
1155 # backwards: all parents
1159 # backwards: all parents
1156 if not self.roots:
1160 if not self.roots:
1157 self.roots.extend(realparents(self.startrev))
1161 self.roots.extend(realparents(self.startrev))
1158 if rev in self.roots:
1162 if rev in self.roots:
1159 self.roots.remove(rev)
1163 self.roots.remove(rev)
1160 self.roots.extend(realparents(rev))
1164 self.roots.extend(realparents(rev))
1161 return True
1165 return True
1162
1166
1163 return False
1167 return False
1164
1168
1165 # it might be worthwhile to do this in the iterator if the rev range
1169 # it might be worthwhile to do this in the iterator if the rev range
1166 # is descending and the prune args are all within that range
1170 # is descending and the prune args are all within that range
1167 for rev in opts.get('prune', ()):
1171 for rev in opts.get('prune', ()):
1168 rev = repo.changelog.rev(repo.lookup(rev))
1172 rev = repo.changelog.rev(repo.lookup(rev))
1169 ff = followfilter()
1173 ff = followfilter()
1170 stop = min(revs[0], revs[-1])
1174 stop = min(revs[0], revs[-1])
1171 for x in xrange(rev, stop-1, -1):
1175 for x in xrange(rev, stop-1, -1):
1172 if ff.match(x) and x in wanted:
1176 if ff.match(x) and x in wanted:
1173 del wanted[x]
1177 del wanted[x]
1174
1178
1175 def iterate():
1179 def iterate():
1176 if follow and not files:
1180 if follow and not files:
1177 ff = followfilter(onlyfirst=opts.get('follow_first'))
1181 ff = followfilter(onlyfirst=opts.get('follow_first'))
1178 def want(rev):
1182 def want(rev):
1179 if ff.match(rev) and rev in wanted:
1183 if ff.match(rev) and rev in wanted:
1180 return True
1184 return True
1181 return False
1185 return False
1182 else:
1186 else:
1183 def want(rev):
1187 def want(rev):
1184 return rev in wanted
1188 return rev in wanted
1185
1189
1186 for i, window in increasing_windows(0, len(revs)):
1190 for i, window in increasing_windows(0, len(revs)):
1187 yield 'window', revs[0] < revs[-1], revs[-1]
1191 yield 'window', revs[0] < revs[-1], revs[-1]
1188 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1192 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1189 srevs = list(nrevs)
1193 srevs = list(nrevs)
1190 srevs.sort()
1194 srevs.sort()
1191 for rev in srevs:
1195 for rev in srevs:
1192 fns = fncache.get(rev)
1196 fns = fncache.get(rev)
1193 if not fns:
1197 if not fns:
1194 def fns_generator():
1198 def fns_generator():
1195 for f in change(rev)[3]:
1199 for f in change(rev)[3]:
1196 if matchfn(f):
1200 if matchfn(f):
1197 yield f
1201 yield f
1198 fns = fns_generator()
1202 fns = fns_generator()
1199 yield 'add', rev, fns
1203 yield 'add', rev, fns
1200 for rev in nrevs:
1204 for rev in nrevs:
1201 yield 'iter', rev, None
1205 yield 'iter', rev, None
1202 return iterate(), matchfn
1206 return iterate(), matchfn
@@ -1,1969 +1,1965
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util, extensions
13 import os, revlog, time, util, extensions
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 self.path = path
23 self.path = path
24 self.root = os.path.realpath(path)
24 self.root = os.path.realpath(path)
25 self.path = os.path.join(self.root, ".hg")
25 self.path = os.path.join(self.root, ".hg")
26 self.origroot = path
26 self.origroot = path
27 self.opener = util.opener(self.path)
27 self.opener = util.opener(self.path)
28 self.wopener = util.opener(self.root)
28 self.wopener = util.opener(self.root)
29
29
30 if not os.path.isdir(self.path):
30 if not os.path.isdir(self.path):
31 if create:
31 if create:
32 if not os.path.exists(path):
32 if not os.path.exists(path):
33 os.mkdir(path)
33 os.mkdir(path)
34 os.mkdir(self.path)
34 os.mkdir(self.path)
35 requirements = ["revlogv1"]
35 requirements = ["revlogv1"]
36 if parentui.configbool('format', 'usestore', True):
36 if parentui.configbool('format', 'usestore', True):
37 os.mkdir(os.path.join(self.path, "store"))
37 os.mkdir(os.path.join(self.path, "store"))
38 requirements.append("store")
38 requirements.append("store")
39 # create an invalid changelog
39 # create an invalid changelog
40 self.opener("00changelog.i", "a").write(
40 self.opener("00changelog.i", "a").write(
41 '\0\0\0\2' # represents revlogv2
41 '\0\0\0\2' # represents revlogv2
42 ' dummy changelog to prevent using the old repo layout'
42 ' dummy changelog to prevent using the old repo layout'
43 )
43 )
44 reqfile = self.opener("requires", "w")
44 reqfile = self.opener("requires", "w")
45 for r in requirements:
45 for r in requirements:
46 reqfile.write("%s\n" % r)
46 reqfile.write("%s\n" % r)
47 reqfile.close()
47 reqfile.close()
48 else:
48 else:
49 raise repo.RepoError(_("repository %s not found") % path)
49 raise repo.RepoError(_("repository %s not found") % path)
50 elif create:
50 elif create:
51 raise repo.RepoError(_("repository %s already exists") % path)
51 raise repo.RepoError(_("repository %s already exists") % path)
52 else:
52 else:
53 # find requirements
53 # find requirements
54 try:
54 try:
55 requirements = self.opener("requires").read().splitlines()
55 requirements = self.opener("requires").read().splitlines()
56 except IOError, inst:
56 except IOError, inst:
57 if inst.errno != errno.ENOENT:
57 if inst.errno != errno.ENOENT:
58 raise
58 raise
59 requirements = []
59 requirements = []
60 # check them
60 # check them
61 for r in requirements:
61 for r in requirements:
62 if r not in self.supported:
62 if r not in self.supported:
63 raise repo.RepoError(_("requirement '%s' not supported") % r)
63 raise repo.RepoError(_("requirement '%s' not supported") % r)
64
64
65 # setup store
65 # setup store
66 if "store" in requirements:
66 if "store" in requirements:
67 self.encodefn = util.encodefilename
67 self.encodefn = util.encodefilename
68 self.decodefn = util.decodefilename
68 self.decodefn = util.decodefilename
69 self.spath = os.path.join(self.path, "store")
69 self.spath = os.path.join(self.path, "store")
70 else:
70 else:
71 self.encodefn = lambda x: x
71 self.encodefn = lambda x: x
72 self.decodefn = lambda x: x
72 self.decodefn = lambda x: x
73 self.spath = self.path
73 self.spath = self.path
74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
75
75
76 self.ui = ui.ui(parentui=parentui)
76 self.ui = ui.ui(parentui=parentui)
77 try:
77 try:
78 self.ui.readconfig(self.join("hgrc"), self.root)
78 self.ui.readconfig(self.join("hgrc"), self.root)
79 extensions.loadall(self.ui)
79 extensions.loadall(self.ui)
80 except IOError:
80 except IOError:
81 pass
81 pass
82
82
83 fallback = self.ui.config('ui', 'fallbackencoding')
84 if fallback:
85 util._fallbackencoding = fallback
86
87 self.tagscache = None
83 self.tagscache = None
88 self.branchcache = None
84 self.branchcache = None
89 self.nodetagscache = None
85 self.nodetagscache = None
90 self.filterpats = {}
86 self.filterpats = {}
91 self.transhandle = None
87 self.transhandle = None
92
88
93 def __getattr__(self, name):
89 def __getattr__(self, name):
94 if name == 'changelog':
90 if name == 'changelog':
95 self.changelog = changelog.changelog(self.sopener)
91 self.changelog = changelog.changelog(self.sopener)
96 self.sopener.defversion = self.changelog.version
92 self.sopener.defversion = self.changelog.version
97 return self.changelog
93 return self.changelog
98 if name == 'manifest':
94 if name == 'manifest':
99 self.changelog
95 self.changelog
100 self.manifest = manifest.manifest(self.sopener)
96 self.manifest = manifest.manifest(self.sopener)
101 return self.manifest
97 return self.manifest
102 if name == 'dirstate':
98 if name == 'dirstate':
103 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
104 return self.dirstate
100 return self.dirstate
105 else:
101 else:
106 raise AttributeError, name
102 raise AttributeError, name
107
103
108 def url(self):
104 def url(self):
109 return 'file:' + self.root
105 return 'file:' + self.root
110
106
111 def hook(self, name, throw=False, **args):
107 def hook(self, name, throw=False, **args):
112 def callhook(hname, funcname):
108 def callhook(hname, funcname):
113 '''call python hook. hook is callable object, looked up as
109 '''call python hook. hook is callable object, looked up as
114 name in python module. if callable returns "true", hook
110 name in python module. if callable returns "true", hook
115 fails, else passes. if hook raises exception, treated as
111 fails, else passes. if hook raises exception, treated as
116 hook failure. exception propagates if throw is "true".
112 hook failure. exception propagates if throw is "true".
117
113
118 reason for "true" meaning "hook failed" is so that
114 reason for "true" meaning "hook failed" is so that
119 unmodified commands (e.g. mercurial.commands.update) can
115 unmodified commands (e.g. mercurial.commands.update) can
120 be run as hooks without wrappers to convert return values.'''
116 be run as hooks without wrappers to convert return values.'''
121
117
122 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
118 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
123 obj = funcname
119 obj = funcname
124 if not callable(obj):
120 if not callable(obj):
125 d = funcname.rfind('.')
121 d = funcname.rfind('.')
126 if d == -1:
122 if d == -1:
127 raise util.Abort(_('%s hook is invalid ("%s" not in '
123 raise util.Abort(_('%s hook is invalid ("%s" not in '
128 'a module)') % (hname, funcname))
124 'a module)') % (hname, funcname))
129 modname = funcname[:d]
125 modname = funcname[:d]
130 try:
126 try:
131 obj = __import__(modname)
127 obj = __import__(modname)
132 except ImportError:
128 except ImportError:
133 try:
129 try:
134 # extensions are loaded with hgext_ prefix
130 # extensions are loaded with hgext_ prefix
135 obj = __import__("hgext_%s" % modname)
131 obj = __import__("hgext_%s" % modname)
136 except ImportError:
132 except ImportError:
137 raise util.Abort(_('%s hook is invalid '
133 raise util.Abort(_('%s hook is invalid '
138 '(import of "%s" failed)') %
134 '(import of "%s" failed)') %
139 (hname, modname))
135 (hname, modname))
140 try:
136 try:
141 for p in funcname.split('.')[1:]:
137 for p in funcname.split('.')[1:]:
142 obj = getattr(obj, p)
138 obj = getattr(obj, p)
143 except AttributeError, err:
139 except AttributeError, err:
144 raise util.Abort(_('%s hook is invalid '
140 raise util.Abort(_('%s hook is invalid '
145 '("%s" is not defined)') %
141 '("%s" is not defined)') %
146 (hname, funcname))
142 (hname, funcname))
147 if not callable(obj):
143 if not callable(obj):
148 raise util.Abort(_('%s hook is invalid '
144 raise util.Abort(_('%s hook is invalid '
149 '("%s" is not callable)') %
145 '("%s" is not callable)') %
150 (hname, funcname))
146 (hname, funcname))
151 try:
147 try:
152 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
148 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
153 except (KeyboardInterrupt, util.SignalInterrupt):
149 except (KeyboardInterrupt, util.SignalInterrupt):
154 raise
150 raise
155 except Exception, exc:
151 except Exception, exc:
156 if isinstance(exc, util.Abort):
152 if isinstance(exc, util.Abort):
157 self.ui.warn(_('error: %s hook failed: %s\n') %
153 self.ui.warn(_('error: %s hook failed: %s\n') %
158 (hname, exc.args[0]))
154 (hname, exc.args[0]))
159 else:
155 else:
160 self.ui.warn(_('error: %s hook raised an exception: '
156 self.ui.warn(_('error: %s hook raised an exception: '
161 '%s\n') % (hname, exc))
157 '%s\n') % (hname, exc))
162 if throw:
158 if throw:
163 raise
159 raise
164 self.ui.print_exc()
160 self.ui.print_exc()
165 return True
161 return True
166 if r:
162 if r:
167 if throw:
163 if throw:
168 raise util.Abort(_('%s hook failed') % hname)
164 raise util.Abort(_('%s hook failed') % hname)
169 self.ui.warn(_('warning: %s hook failed\n') % hname)
165 self.ui.warn(_('warning: %s hook failed\n') % hname)
170 return r
166 return r
171
167
172 def runhook(name, cmd):
168 def runhook(name, cmd):
173 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
169 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
174 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
170 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
175 r = util.system(cmd, environ=env, cwd=self.root)
171 r = util.system(cmd, environ=env, cwd=self.root)
176 if r:
172 if r:
177 desc, r = util.explain_exit(r)
173 desc, r = util.explain_exit(r)
178 if throw:
174 if throw:
179 raise util.Abort(_('%s hook %s') % (name, desc))
175 raise util.Abort(_('%s hook %s') % (name, desc))
180 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
176 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
181 return r
177 return r
182
178
183 r = False
179 r = False
184 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
180 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
185 if hname.split(".", 1)[0] == name and cmd]
181 if hname.split(".", 1)[0] == name and cmd]
186 hooks.sort()
182 hooks.sort()
187 for hname, cmd in hooks:
183 for hname, cmd in hooks:
188 if callable(cmd):
184 if callable(cmd):
189 r = callhook(hname, cmd) or r
185 r = callhook(hname, cmd) or r
190 elif cmd.startswith('python:'):
186 elif cmd.startswith('python:'):
191 r = callhook(hname, cmd[7:].strip()) or r
187 r = callhook(hname, cmd[7:].strip()) or r
192 else:
188 else:
193 r = runhook(hname, cmd) or r
189 r = runhook(hname, cmd) or r
194 return r
190 return r
195
191
196 tag_disallowed = ':\r\n'
192 tag_disallowed = ':\r\n'
197
193
198 def _tag(self, name, node, message, local, user, date, parent=None):
194 def _tag(self, name, node, message, local, user, date, parent=None):
199 use_dirstate = parent is None
195 use_dirstate = parent is None
200
196
201 for c in self.tag_disallowed:
197 for c in self.tag_disallowed:
202 if c in name:
198 if c in name:
203 raise util.Abort(_('%r cannot be used in a tag name') % c)
199 raise util.Abort(_('%r cannot be used in a tag name') % c)
204
200
205 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
201 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
206
202
207 if local:
203 if local:
208 # local tags are stored in the current charset
204 # local tags are stored in the current charset
209 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
205 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
210 self.hook('tag', node=hex(node), tag=name, local=local)
206 self.hook('tag', node=hex(node), tag=name, local=local)
211 return
207 return
212
208
213 # committed tags are stored in UTF-8
209 # committed tags are stored in UTF-8
214 line = '%s %s\n' % (hex(node), util.fromlocal(name))
210 line = '%s %s\n' % (hex(node), util.fromlocal(name))
215 if use_dirstate:
211 if use_dirstate:
216 self.wfile('.hgtags', 'ab').write(line)
212 self.wfile('.hgtags', 'ab').write(line)
217 else:
213 else:
218 ntags = self.filectx('.hgtags', parent).data()
214 ntags = self.filectx('.hgtags', parent).data()
219 self.wfile('.hgtags', 'ab').write(ntags + line)
215 self.wfile('.hgtags', 'ab').write(ntags + line)
220 if use_dirstate and self.dirstate.state('.hgtags') == '?':
216 if use_dirstate and self.dirstate.state('.hgtags') == '?':
221 self.add(['.hgtags'])
217 self.add(['.hgtags'])
222
218
223 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
219 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
224
220
225 self.hook('tag', node=hex(node), tag=name, local=local)
221 self.hook('tag', node=hex(node), tag=name, local=local)
226
222
227 return tagnode
223 return tagnode
228
224
229 def tag(self, name, node, message, local, user, date):
225 def tag(self, name, node, message, local, user, date):
230 '''tag a revision with a symbolic name.
226 '''tag a revision with a symbolic name.
231
227
232 if local is True, the tag is stored in a per-repository file.
228 if local is True, the tag is stored in a per-repository file.
233 otherwise, it is stored in the .hgtags file, and a new
229 otherwise, it is stored in the .hgtags file, and a new
234 changeset is committed with the change.
230 changeset is committed with the change.
235
231
236 keyword arguments:
232 keyword arguments:
237
233
238 local: whether to store tag in non-version-controlled file
234 local: whether to store tag in non-version-controlled file
239 (default False)
235 (default False)
240
236
241 message: commit message to use if committing
237 message: commit message to use if committing
242
238
243 user: name of user to use if committing
239 user: name of user to use if committing
244
240
245 date: date tuple to use if committing'''
241 date: date tuple to use if committing'''
246
242
247 for x in self.status()[:5]:
243 for x in self.status()[:5]:
248 if '.hgtags' in x:
244 if '.hgtags' in x:
249 raise util.Abort(_('working copy of .hgtags is changed '
245 raise util.Abort(_('working copy of .hgtags is changed '
250 '(please commit .hgtags manually)'))
246 '(please commit .hgtags manually)'))
251
247
252
248
253 self._tag(name, node, message, local, user, date)
249 self._tag(name, node, message, local, user, date)
254
250
255 def tags(self):
251 def tags(self):
256 '''return a mapping of tag to node'''
252 '''return a mapping of tag to node'''
257 if self.tagscache:
253 if self.tagscache:
258 return self.tagscache
254 return self.tagscache
259
255
260 globaltags = {}
256 globaltags = {}
261
257
262 def readtags(lines, fn):
258 def readtags(lines, fn):
263 filetags = {}
259 filetags = {}
264 count = 0
260 count = 0
265
261
266 def warn(msg):
262 def warn(msg):
267 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
263 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
268
264
269 for l in lines:
265 for l in lines:
270 count += 1
266 count += 1
271 if not l:
267 if not l:
272 continue
268 continue
273 s = l.split(" ", 1)
269 s = l.split(" ", 1)
274 if len(s) != 2:
270 if len(s) != 2:
275 warn(_("cannot parse entry"))
271 warn(_("cannot parse entry"))
276 continue
272 continue
277 node, key = s
273 node, key = s
278 key = util.tolocal(key.strip()) # stored in UTF-8
274 key = util.tolocal(key.strip()) # stored in UTF-8
279 try:
275 try:
280 bin_n = bin(node)
276 bin_n = bin(node)
281 except TypeError:
277 except TypeError:
282 warn(_("node '%s' is not well formed") % node)
278 warn(_("node '%s' is not well formed") % node)
283 continue
279 continue
284 if bin_n not in self.changelog.nodemap:
280 if bin_n not in self.changelog.nodemap:
285 warn(_("tag '%s' refers to unknown node") % key)
281 warn(_("tag '%s' refers to unknown node") % key)
286 continue
282 continue
287
283
288 h = []
284 h = []
289 if key in filetags:
285 if key in filetags:
290 n, h = filetags[key]
286 n, h = filetags[key]
291 h.append(n)
287 h.append(n)
292 filetags[key] = (bin_n, h)
288 filetags[key] = (bin_n, h)
293
289
294 for k,nh in filetags.items():
290 for k,nh in filetags.items():
295 if k not in globaltags:
291 if k not in globaltags:
296 globaltags[k] = nh
292 globaltags[k] = nh
297 continue
293 continue
298 # we prefer the global tag if:
294 # we prefer the global tag if:
299 # it supercedes us OR
295 # it supercedes us OR
300 # mutual supercedes and it has a higher rank
296 # mutual supercedes and it has a higher rank
301 # otherwise we win because we're tip-most
297 # otherwise we win because we're tip-most
302 an, ah = nh
298 an, ah = nh
303 bn, bh = globaltags[k]
299 bn, bh = globaltags[k]
304 if bn != an and an in bh and \
300 if bn != an and an in bh and \
305 (bn not in ah or len(bh) > len(ah)):
301 (bn not in ah or len(bh) > len(ah)):
306 an = bn
302 an = bn
307 ah.extend([n for n in bh if n not in ah])
303 ah.extend([n for n in bh if n not in ah])
308 globaltags[k] = an, ah
304 globaltags[k] = an, ah
309
305
310 # read the tags file from each head, ending with the tip
306 # read the tags file from each head, ending with the tip
311 f = None
307 f = None
312 for rev, node, fnode in self._hgtagsnodes():
308 for rev, node, fnode in self._hgtagsnodes():
313 f = (f and f.filectx(fnode) or
309 f = (f and f.filectx(fnode) or
314 self.filectx('.hgtags', fileid=fnode))
310 self.filectx('.hgtags', fileid=fnode))
315 readtags(f.data().splitlines(), f)
311 readtags(f.data().splitlines(), f)
316
312
317 try:
313 try:
318 data = util.fromlocal(self.opener("localtags").read())
314 data = util.fromlocal(self.opener("localtags").read())
319 # localtags are stored in the local character set
315 # localtags are stored in the local character set
320 # while the internal tag table is stored in UTF-8
316 # while the internal tag table is stored in UTF-8
321 readtags(data.splitlines(), "localtags")
317 readtags(data.splitlines(), "localtags")
322 except IOError:
318 except IOError:
323 pass
319 pass
324
320
325 self.tagscache = {}
321 self.tagscache = {}
326 for k,nh in globaltags.items():
322 for k,nh in globaltags.items():
327 n = nh[0]
323 n = nh[0]
328 if n != nullid:
324 if n != nullid:
329 self.tagscache[k] = n
325 self.tagscache[k] = n
330 self.tagscache['tip'] = self.changelog.tip()
326 self.tagscache['tip'] = self.changelog.tip()
331
327
332 return self.tagscache
328 return self.tagscache
333
329
334 def _hgtagsnodes(self):
330 def _hgtagsnodes(self):
335 heads = self.heads()
331 heads = self.heads()
336 heads.reverse()
332 heads.reverse()
337 last = {}
333 last = {}
338 ret = []
334 ret = []
339 for node in heads:
335 for node in heads:
340 c = self.changectx(node)
336 c = self.changectx(node)
341 rev = c.rev()
337 rev = c.rev()
342 try:
338 try:
343 fnode = c.filenode('.hgtags')
339 fnode = c.filenode('.hgtags')
344 except revlog.LookupError:
340 except revlog.LookupError:
345 continue
341 continue
346 ret.append((rev, node, fnode))
342 ret.append((rev, node, fnode))
347 if fnode in last:
343 if fnode in last:
348 ret[last[fnode]] = None
344 ret[last[fnode]] = None
349 last[fnode] = len(ret) - 1
345 last[fnode] = len(ret) - 1
350 return [item for item in ret if item]
346 return [item for item in ret if item]
351
347
352 def tagslist(self):
348 def tagslist(self):
353 '''return a list of tags ordered by revision'''
349 '''return a list of tags ordered by revision'''
354 l = []
350 l = []
355 for t, n in self.tags().items():
351 for t, n in self.tags().items():
356 try:
352 try:
357 r = self.changelog.rev(n)
353 r = self.changelog.rev(n)
358 except:
354 except:
359 r = -2 # sort to the beginning of the list if unknown
355 r = -2 # sort to the beginning of the list if unknown
360 l.append((r, t, n))
356 l.append((r, t, n))
361 l.sort()
357 l.sort()
362 return [(t, n) for r, t, n in l]
358 return [(t, n) for r, t, n in l]
363
359
364 def nodetags(self, node):
360 def nodetags(self, node):
365 '''return the tags associated with a node'''
361 '''return the tags associated with a node'''
366 if not self.nodetagscache:
362 if not self.nodetagscache:
367 self.nodetagscache = {}
363 self.nodetagscache = {}
368 for t, n in self.tags().items():
364 for t, n in self.tags().items():
369 self.nodetagscache.setdefault(n, []).append(t)
365 self.nodetagscache.setdefault(n, []).append(t)
370 return self.nodetagscache.get(node, [])
366 return self.nodetagscache.get(node, [])
371
367
372 def _branchtags(self):
368 def _branchtags(self):
373 partial, last, lrev = self._readbranchcache()
369 partial, last, lrev = self._readbranchcache()
374
370
375 tiprev = self.changelog.count() - 1
371 tiprev = self.changelog.count() - 1
376 if lrev != tiprev:
372 if lrev != tiprev:
377 self._updatebranchcache(partial, lrev+1, tiprev+1)
373 self._updatebranchcache(partial, lrev+1, tiprev+1)
378 self._writebranchcache(partial, self.changelog.tip(), tiprev)
374 self._writebranchcache(partial, self.changelog.tip(), tiprev)
379
375
380 return partial
376 return partial
381
377
382 def branchtags(self):
378 def branchtags(self):
383 if self.branchcache is not None:
379 if self.branchcache is not None:
384 return self.branchcache
380 return self.branchcache
385
381
386 self.branchcache = {} # avoid recursion in changectx
382 self.branchcache = {} # avoid recursion in changectx
387 partial = self._branchtags()
383 partial = self._branchtags()
388
384
389 # the branch cache is stored on disk as UTF-8, but in the local
385 # the branch cache is stored on disk as UTF-8, but in the local
390 # charset internally
386 # charset internally
391 for k, v in partial.items():
387 for k, v in partial.items():
392 self.branchcache[util.tolocal(k)] = v
388 self.branchcache[util.tolocal(k)] = v
393 return self.branchcache
389 return self.branchcache
394
390
395 def _readbranchcache(self):
391 def _readbranchcache(self):
396 partial = {}
392 partial = {}
397 try:
393 try:
398 f = self.opener("branch.cache")
394 f = self.opener("branch.cache")
399 lines = f.read().split('\n')
395 lines = f.read().split('\n')
400 f.close()
396 f.close()
401 except (IOError, OSError):
397 except (IOError, OSError):
402 return {}, nullid, nullrev
398 return {}, nullid, nullrev
403
399
404 try:
400 try:
405 last, lrev = lines.pop(0).split(" ", 1)
401 last, lrev = lines.pop(0).split(" ", 1)
406 last, lrev = bin(last), int(lrev)
402 last, lrev = bin(last), int(lrev)
407 if not (lrev < self.changelog.count() and
403 if not (lrev < self.changelog.count() and
408 self.changelog.node(lrev) == last): # sanity check
404 self.changelog.node(lrev) == last): # sanity check
409 # invalidate the cache
405 # invalidate the cache
410 raise ValueError('Invalid branch cache: unknown tip')
406 raise ValueError('Invalid branch cache: unknown tip')
411 for l in lines:
407 for l in lines:
412 if not l: continue
408 if not l: continue
413 node, label = l.split(" ", 1)
409 node, label = l.split(" ", 1)
414 partial[label.strip()] = bin(node)
410 partial[label.strip()] = bin(node)
415 except (KeyboardInterrupt, util.SignalInterrupt):
411 except (KeyboardInterrupt, util.SignalInterrupt):
416 raise
412 raise
417 except Exception, inst:
413 except Exception, inst:
418 if self.ui.debugflag:
414 if self.ui.debugflag:
419 self.ui.warn(str(inst), '\n')
415 self.ui.warn(str(inst), '\n')
420 partial, last, lrev = {}, nullid, nullrev
416 partial, last, lrev = {}, nullid, nullrev
421 return partial, last, lrev
417 return partial, last, lrev
422
418
423 def _writebranchcache(self, branches, tip, tiprev):
419 def _writebranchcache(self, branches, tip, tiprev):
424 try:
420 try:
425 f = self.opener("branch.cache", "w", atomictemp=True)
421 f = self.opener("branch.cache", "w", atomictemp=True)
426 f.write("%s %s\n" % (hex(tip), tiprev))
422 f.write("%s %s\n" % (hex(tip), tiprev))
427 for label, node in branches.iteritems():
423 for label, node in branches.iteritems():
428 f.write("%s %s\n" % (hex(node), label))
424 f.write("%s %s\n" % (hex(node), label))
429 f.rename()
425 f.rename()
430 except (IOError, OSError):
426 except (IOError, OSError):
431 pass
427 pass
432
428
433 def _updatebranchcache(self, partial, start, end):
429 def _updatebranchcache(self, partial, start, end):
434 for r in xrange(start, end):
430 for r in xrange(start, end):
435 c = self.changectx(r)
431 c = self.changectx(r)
436 b = c.branch()
432 b = c.branch()
437 partial[b] = c.node()
433 partial[b] = c.node()
438
434
439 def lookup(self, key):
435 def lookup(self, key):
440 if key == '.':
436 if key == '.':
441 key, second = self.dirstate.parents()
437 key, second = self.dirstate.parents()
442 if key == nullid:
438 if key == nullid:
443 raise repo.RepoError(_("no revision checked out"))
439 raise repo.RepoError(_("no revision checked out"))
444 if second != nullid:
440 if second != nullid:
445 self.ui.warn(_("warning: working directory has two parents, "
441 self.ui.warn(_("warning: working directory has two parents, "
446 "tag '.' uses the first\n"))
442 "tag '.' uses the first\n"))
447 elif key == 'null':
443 elif key == 'null':
448 return nullid
444 return nullid
449 n = self.changelog._match(key)
445 n = self.changelog._match(key)
450 if n:
446 if n:
451 return n
447 return n
452 if key in self.tags():
448 if key in self.tags():
453 return self.tags()[key]
449 return self.tags()[key]
454 if key in self.branchtags():
450 if key in self.branchtags():
455 return self.branchtags()[key]
451 return self.branchtags()[key]
456 n = self.changelog._partialmatch(key)
452 n = self.changelog._partialmatch(key)
457 if n:
453 if n:
458 return n
454 return n
459 raise repo.RepoError(_("unknown revision '%s'") % key)
455 raise repo.RepoError(_("unknown revision '%s'") % key)
460
456
461 def dev(self):
457 def dev(self):
462 return os.lstat(self.path).st_dev
458 return os.lstat(self.path).st_dev
463
459
464 def local(self):
460 def local(self):
465 return True
461 return True
466
462
467 def join(self, f):
463 def join(self, f):
468 return os.path.join(self.path, f)
464 return os.path.join(self.path, f)
469
465
470 def sjoin(self, f):
466 def sjoin(self, f):
471 f = self.encodefn(f)
467 f = self.encodefn(f)
472 return os.path.join(self.spath, f)
468 return os.path.join(self.spath, f)
473
469
474 def wjoin(self, f):
470 def wjoin(self, f):
475 return os.path.join(self.root, f)
471 return os.path.join(self.root, f)
476
472
477 def file(self, f):
473 def file(self, f):
478 if f[0] == '/':
474 if f[0] == '/':
479 f = f[1:]
475 f = f[1:]
480 return filelog.filelog(self.sopener, f)
476 return filelog.filelog(self.sopener, f)
481
477
482 def changectx(self, changeid=None):
478 def changectx(self, changeid=None):
483 return context.changectx(self, changeid)
479 return context.changectx(self, changeid)
484
480
485 def workingctx(self):
481 def workingctx(self):
486 return context.workingctx(self)
482 return context.workingctx(self)
487
483
488 def parents(self, changeid=None):
484 def parents(self, changeid=None):
489 '''
485 '''
490 get list of changectxs for parents of changeid or working directory
486 get list of changectxs for parents of changeid or working directory
491 '''
487 '''
492 if changeid is None:
488 if changeid is None:
493 pl = self.dirstate.parents()
489 pl = self.dirstate.parents()
494 else:
490 else:
495 n = self.changelog.lookup(changeid)
491 n = self.changelog.lookup(changeid)
496 pl = self.changelog.parents(n)
492 pl = self.changelog.parents(n)
497 if pl[1] == nullid:
493 if pl[1] == nullid:
498 return [self.changectx(pl[0])]
494 return [self.changectx(pl[0])]
499 return [self.changectx(pl[0]), self.changectx(pl[1])]
495 return [self.changectx(pl[0]), self.changectx(pl[1])]
500
496
501 def filectx(self, path, changeid=None, fileid=None):
497 def filectx(self, path, changeid=None, fileid=None):
502 """changeid can be a changeset revision, node, or tag.
498 """changeid can be a changeset revision, node, or tag.
503 fileid can be a file revision or node."""
499 fileid can be a file revision or node."""
504 return context.filectx(self, path, changeid, fileid)
500 return context.filectx(self, path, changeid, fileid)
505
501
506 def getcwd(self):
502 def getcwd(self):
507 return self.dirstate.getcwd()
503 return self.dirstate.getcwd()
508
504
509 def pathto(self, f, cwd=None):
505 def pathto(self, f, cwd=None):
510 return self.dirstate.pathto(f, cwd)
506 return self.dirstate.pathto(f, cwd)
511
507
512 def wfile(self, f, mode='r'):
508 def wfile(self, f, mode='r'):
513 return self.wopener(f, mode)
509 return self.wopener(f, mode)
514
510
515 def _link(self, f):
511 def _link(self, f):
516 return os.path.islink(self.wjoin(f))
512 return os.path.islink(self.wjoin(f))
517
513
518 def _filter(self, filter, filename, data):
514 def _filter(self, filter, filename, data):
519 if filter not in self.filterpats:
515 if filter not in self.filterpats:
520 l = []
516 l = []
521 for pat, cmd in self.ui.configitems(filter):
517 for pat, cmd in self.ui.configitems(filter):
522 mf = util.matcher(self.root, "", [pat], [], [])[1]
518 mf = util.matcher(self.root, "", [pat], [], [])[1]
523 l.append((mf, cmd))
519 l.append((mf, cmd))
524 self.filterpats[filter] = l
520 self.filterpats[filter] = l
525
521
526 for mf, cmd in self.filterpats[filter]:
522 for mf, cmd in self.filterpats[filter]:
527 if mf(filename):
523 if mf(filename):
528 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
524 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
529 data = util.filter(data, cmd)
525 data = util.filter(data, cmd)
530 break
526 break
531
527
532 return data
528 return data
533
529
534 def wread(self, filename):
530 def wread(self, filename):
535 if self._link(filename):
531 if self._link(filename):
536 data = os.readlink(self.wjoin(filename))
532 data = os.readlink(self.wjoin(filename))
537 else:
533 else:
538 data = self.wopener(filename, 'r').read()
534 data = self.wopener(filename, 'r').read()
539 return self._filter("encode", filename, data)
535 return self._filter("encode", filename, data)
540
536
541 def wwrite(self, filename, data, flags):
537 def wwrite(self, filename, data, flags):
542 data = self._filter("decode", filename, data)
538 data = self._filter("decode", filename, data)
543 if "l" in flags:
539 if "l" in flags:
544 f = self.wjoin(filename)
540 f = self.wjoin(filename)
545 try:
541 try:
546 os.unlink(f)
542 os.unlink(f)
547 except OSError:
543 except OSError:
548 pass
544 pass
549 d = os.path.dirname(f)
545 d = os.path.dirname(f)
550 if not os.path.exists(d):
546 if not os.path.exists(d):
551 os.makedirs(d)
547 os.makedirs(d)
552 os.symlink(data, f)
548 os.symlink(data, f)
553 else:
549 else:
554 try:
550 try:
555 if self._link(filename):
551 if self._link(filename):
556 os.unlink(self.wjoin(filename))
552 os.unlink(self.wjoin(filename))
557 except OSError:
553 except OSError:
558 pass
554 pass
559 self.wopener(filename, 'w').write(data)
555 self.wopener(filename, 'w').write(data)
560 util.set_exec(self.wjoin(filename), "x" in flags)
556 util.set_exec(self.wjoin(filename), "x" in flags)
561
557
562 def wwritedata(self, filename, data):
558 def wwritedata(self, filename, data):
563 return self._filter("decode", filename, data)
559 return self._filter("decode", filename, data)
564
560
565 def transaction(self):
561 def transaction(self):
566 tr = self.transhandle
562 tr = self.transhandle
567 if tr != None and tr.running():
563 if tr != None and tr.running():
568 return tr.nest()
564 return tr.nest()
569
565
570 # save dirstate for rollback
566 # save dirstate for rollback
571 try:
567 try:
572 ds = self.opener("dirstate").read()
568 ds = self.opener("dirstate").read()
573 except IOError:
569 except IOError:
574 ds = ""
570 ds = ""
575 self.opener("journal.dirstate", "w").write(ds)
571 self.opener("journal.dirstate", "w").write(ds)
576
572
577 renames = [(self.sjoin("journal"), self.sjoin("undo")),
573 renames = [(self.sjoin("journal"), self.sjoin("undo")),
578 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
574 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
579 tr = transaction.transaction(self.ui.warn, self.sopener,
575 tr = transaction.transaction(self.ui.warn, self.sopener,
580 self.sjoin("journal"),
576 self.sjoin("journal"),
581 aftertrans(renames))
577 aftertrans(renames))
582 self.transhandle = tr
578 self.transhandle = tr
583 return tr
579 return tr
584
580
585 def recover(self):
581 def recover(self):
586 l = self.lock()
582 l = self.lock()
587 if os.path.exists(self.sjoin("journal")):
583 if os.path.exists(self.sjoin("journal")):
588 self.ui.status(_("rolling back interrupted transaction\n"))
584 self.ui.status(_("rolling back interrupted transaction\n"))
589 transaction.rollback(self.sopener, self.sjoin("journal"))
585 transaction.rollback(self.sopener, self.sjoin("journal"))
590 self.invalidate()
586 self.invalidate()
591 return True
587 return True
592 else:
588 else:
593 self.ui.warn(_("no interrupted transaction available\n"))
589 self.ui.warn(_("no interrupted transaction available\n"))
594 return False
590 return False
595
591
596 def rollback(self, wlock=None, lock=None):
592 def rollback(self, wlock=None, lock=None):
597 if not wlock:
593 if not wlock:
598 wlock = self.wlock()
594 wlock = self.wlock()
599 if not lock:
595 if not lock:
600 lock = self.lock()
596 lock = self.lock()
601 if os.path.exists(self.sjoin("undo")):
597 if os.path.exists(self.sjoin("undo")):
602 self.ui.status(_("rolling back last transaction\n"))
598 self.ui.status(_("rolling back last transaction\n"))
603 transaction.rollback(self.sopener, self.sjoin("undo"))
599 transaction.rollback(self.sopener, self.sjoin("undo"))
604 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
600 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
605 self.invalidate()
601 self.invalidate()
606 self.dirstate.invalidate()
602 self.dirstate.invalidate()
607 else:
603 else:
608 self.ui.warn(_("no rollback information available\n"))
604 self.ui.warn(_("no rollback information available\n"))
609
605
610 def invalidate(self):
606 def invalidate(self):
611 for a in "changelog manifest".split():
607 for a in "changelog manifest".split():
612 if hasattr(self, a):
608 if hasattr(self, a):
613 self.__delattr__(a)
609 self.__delattr__(a)
614 self.tagscache = None
610 self.tagscache = None
615 self.nodetagscache = None
611 self.nodetagscache = None
616
612
617 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
613 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
618 desc=None):
614 desc=None):
619 try:
615 try:
620 l = lock.lock(lockname, 0, releasefn, desc=desc)
616 l = lock.lock(lockname, 0, releasefn, desc=desc)
621 except lock.LockHeld, inst:
617 except lock.LockHeld, inst:
622 if not wait:
618 if not wait:
623 raise
619 raise
624 self.ui.warn(_("waiting for lock on %s held by %r\n") %
620 self.ui.warn(_("waiting for lock on %s held by %r\n") %
625 (desc, inst.locker))
621 (desc, inst.locker))
626 # default to 600 seconds timeout
622 # default to 600 seconds timeout
627 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
623 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
628 releasefn, desc=desc)
624 releasefn, desc=desc)
629 if acquirefn:
625 if acquirefn:
630 acquirefn()
626 acquirefn()
631 return l
627 return l
632
628
633 def lock(self, wait=1):
629 def lock(self, wait=1):
634 return self.do_lock(self.sjoin("lock"), wait,
630 return self.do_lock(self.sjoin("lock"), wait,
635 acquirefn=self.invalidate,
631 acquirefn=self.invalidate,
636 desc=_('repository %s') % self.origroot)
632 desc=_('repository %s') % self.origroot)
637
633
638 def wlock(self, wait=1):
634 def wlock(self, wait=1):
639 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
635 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
640 self.dirstate.invalidate,
636 self.dirstate.invalidate,
641 desc=_('working directory of %s') % self.origroot)
637 desc=_('working directory of %s') % self.origroot)
642
638
643 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
639 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
644 """
640 """
645 commit an individual file as part of a larger transaction
641 commit an individual file as part of a larger transaction
646 """
642 """
647
643
648 t = self.wread(fn)
644 t = self.wread(fn)
649 fl = self.file(fn)
645 fl = self.file(fn)
650 fp1 = manifest1.get(fn, nullid)
646 fp1 = manifest1.get(fn, nullid)
651 fp2 = manifest2.get(fn, nullid)
647 fp2 = manifest2.get(fn, nullid)
652
648
653 meta = {}
649 meta = {}
654 cp = self.dirstate.copied(fn)
650 cp = self.dirstate.copied(fn)
655 if cp:
651 if cp:
656 # Mark the new revision of this file as a copy of another
652 # Mark the new revision of this file as a copy of another
657 # file. This copy data will effectively act as a parent
653 # file. This copy data will effectively act as a parent
658 # of this new revision. If this is a merge, the first
654 # of this new revision. If this is a merge, the first
659 # parent will be the nullid (meaning "look up the copy data")
655 # parent will be the nullid (meaning "look up the copy data")
660 # and the second one will be the other parent. For example:
656 # and the second one will be the other parent. For example:
661 #
657 #
662 # 0 --- 1 --- 3 rev1 changes file foo
658 # 0 --- 1 --- 3 rev1 changes file foo
663 # \ / rev2 renames foo to bar and changes it
659 # \ / rev2 renames foo to bar and changes it
664 # \- 2 -/ rev3 should have bar with all changes and
660 # \- 2 -/ rev3 should have bar with all changes and
665 # should record that bar descends from
661 # should record that bar descends from
666 # bar in rev2 and foo in rev1
662 # bar in rev2 and foo in rev1
667 #
663 #
668 # this allows this merge to succeed:
664 # this allows this merge to succeed:
669 #
665 #
670 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
666 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
671 # \ / merging rev3 and rev4 should use bar@rev2
667 # \ / merging rev3 and rev4 should use bar@rev2
672 # \- 2 --- 4 as the merge base
668 # \- 2 --- 4 as the merge base
673 #
669 #
674 meta["copy"] = cp
670 meta["copy"] = cp
675 if not manifest2: # not a branch merge
671 if not manifest2: # not a branch merge
676 meta["copyrev"] = hex(manifest1.get(cp, nullid))
672 meta["copyrev"] = hex(manifest1.get(cp, nullid))
677 fp2 = nullid
673 fp2 = nullid
678 elif fp2 != nullid: # copied on remote side
674 elif fp2 != nullid: # copied on remote side
679 meta["copyrev"] = hex(manifest1.get(cp, nullid))
675 meta["copyrev"] = hex(manifest1.get(cp, nullid))
680 elif fp1 != nullid: # copied on local side, reversed
676 elif fp1 != nullid: # copied on local side, reversed
681 meta["copyrev"] = hex(manifest2.get(cp))
677 meta["copyrev"] = hex(manifest2.get(cp))
682 fp2 = fp1
678 fp2 = fp1
683 else: # directory rename
679 else: # directory rename
684 meta["copyrev"] = hex(manifest1.get(cp, nullid))
680 meta["copyrev"] = hex(manifest1.get(cp, nullid))
685 self.ui.debug(_(" %s: copy %s:%s\n") %
681 self.ui.debug(_(" %s: copy %s:%s\n") %
686 (fn, cp, meta["copyrev"]))
682 (fn, cp, meta["copyrev"]))
687 fp1 = nullid
683 fp1 = nullid
688 elif fp2 != nullid:
684 elif fp2 != nullid:
689 # is one parent an ancestor of the other?
685 # is one parent an ancestor of the other?
690 fpa = fl.ancestor(fp1, fp2)
686 fpa = fl.ancestor(fp1, fp2)
691 if fpa == fp1:
687 if fpa == fp1:
692 fp1, fp2 = fp2, nullid
688 fp1, fp2 = fp2, nullid
693 elif fpa == fp2:
689 elif fpa == fp2:
694 fp2 = nullid
690 fp2 = nullid
695
691
696 # is the file unmodified from the parent? report existing entry
692 # is the file unmodified from the parent? report existing entry
697 if fp2 == nullid and not fl.cmp(fp1, t):
693 if fp2 == nullid and not fl.cmp(fp1, t):
698 return fp1
694 return fp1
699
695
700 changelist.append(fn)
696 changelist.append(fn)
701 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
697 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
702
698
703 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
699 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
704 if p1 is None:
700 if p1 is None:
705 p1, p2 = self.dirstate.parents()
701 p1, p2 = self.dirstate.parents()
706 return self.commit(files=files, text=text, user=user, date=date,
702 return self.commit(files=files, text=text, user=user, date=date,
707 p1=p1, p2=p2, wlock=wlock, extra=extra)
703 p1=p1, p2=p2, wlock=wlock, extra=extra)
708
704
709 def commit(self, files=None, text="", user=None, date=None,
705 def commit(self, files=None, text="", user=None, date=None,
710 match=util.always, force=False, lock=None, wlock=None,
706 match=util.always, force=False, lock=None, wlock=None,
711 force_editor=False, p1=None, p2=None, extra={}):
707 force_editor=False, p1=None, p2=None, extra={}):
712
708
713 commit = []
709 commit = []
714 remove = []
710 remove = []
715 changed = []
711 changed = []
716 use_dirstate = (p1 is None) # not rawcommit
712 use_dirstate = (p1 is None) # not rawcommit
717 extra = extra.copy()
713 extra = extra.copy()
718
714
719 if use_dirstate:
715 if use_dirstate:
720 if files:
716 if files:
721 for f in files:
717 for f in files:
722 s = self.dirstate.state(f)
718 s = self.dirstate.state(f)
723 if s in 'nmai':
719 if s in 'nmai':
724 commit.append(f)
720 commit.append(f)
725 elif s == 'r':
721 elif s == 'r':
726 remove.append(f)
722 remove.append(f)
727 else:
723 else:
728 self.ui.warn(_("%s not tracked!\n") % f)
724 self.ui.warn(_("%s not tracked!\n") % f)
729 else:
725 else:
730 changes = self.status(match=match)[:5]
726 changes = self.status(match=match)[:5]
731 modified, added, removed, deleted, unknown = changes
727 modified, added, removed, deleted, unknown = changes
732 commit = modified + added
728 commit = modified + added
733 remove = removed
729 remove = removed
734 else:
730 else:
735 commit = files
731 commit = files
736
732
737 if use_dirstate:
733 if use_dirstate:
738 p1, p2 = self.dirstate.parents()
734 p1, p2 = self.dirstate.parents()
739 update_dirstate = True
735 update_dirstate = True
740 else:
736 else:
741 p1, p2 = p1, p2 or nullid
737 p1, p2 = p1, p2 or nullid
742 update_dirstate = (self.dirstate.parents()[0] == p1)
738 update_dirstate = (self.dirstate.parents()[0] == p1)
743
739
744 c1 = self.changelog.read(p1)
740 c1 = self.changelog.read(p1)
745 c2 = self.changelog.read(p2)
741 c2 = self.changelog.read(p2)
746 m1 = self.manifest.read(c1[0]).copy()
742 m1 = self.manifest.read(c1[0]).copy()
747 m2 = self.manifest.read(c2[0])
743 m2 = self.manifest.read(c2[0])
748
744
749 if use_dirstate:
745 if use_dirstate:
750 branchname = self.workingctx().branch()
746 branchname = self.workingctx().branch()
751 try:
747 try:
752 branchname = branchname.decode('UTF-8').encode('UTF-8')
748 branchname = branchname.decode('UTF-8').encode('UTF-8')
753 except UnicodeDecodeError:
749 except UnicodeDecodeError:
754 raise util.Abort(_('branch name not in UTF-8!'))
750 raise util.Abort(_('branch name not in UTF-8!'))
755 else:
751 else:
756 branchname = ""
752 branchname = ""
757
753
758 if use_dirstate:
754 if use_dirstate:
759 oldname = c1[5].get("branch") # stored in UTF-8
755 oldname = c1[5].get("branch") # stored in UTF-8
760 if not commit and not remove and not force and p2 == nullid and \
756 if not commit and not remove and not force and p2 == nullid and \
761 branchname == oldname:
757 branchname == oldname:
762 self.ui.status(_("nothing changed\n"))
758 self.ui.status(_("nothing changed\n"))
763 return None
759 return None
764
760
765 xp1 = hex(p1)
761 xp1 = hex(p1)
766 if p2 == nullid: xp2 = ''
762 if p2 == nullid: xp2 = ''
767 else: xp2 = hex(p2)
763 else: xp2 = hex(p2)
768
764
769 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
765 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
770
766
771 if not wlock:
767 if not wlock:
772 wlock = self.wlock()
768 wlock = self.wlock()
773 if not lock:
769 if not lock:
774 lock = self.lock()
770 lock = self.lock()
775 tr = self.transaction()
771 tr = self.transaction()
776
772
777 # check in files
773 # check in files
778 new = {}
774 new = {}
779 linkrev = self.changelog.count()
775 linkrev = self.changelog.count()
780 commit.sort()
776 commit.sort()
781 is_exec = util.execfunc(self.root, m1.execf)
777 is_exec = util.execfunc(self.root, m1.execf)
782 is_link = util.linkfunc(self.root, m1.linkf)
778 is_link = util.linkfunc(self.root, m1.linkf)
783 for f in commit:
779 for f in commit:
784 self.ui.note(f + "\n")
780 self.ui.note(f + "\n")
785 try:
781 try:
786 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
782 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
787 new_exec = is_exec(f)
783 new_exec = is_exec(f)
788 new_link = is_link(f)
784 new_link = is_link(f)
789 if not changed or changed[-1] != f:
785 if not changed or changed[-1] != f:
790 # mention the file in the changelog if some flag changed,
786 # mention the file in the changelog if some flag changed,
791 # even if there was no content change.
787 # even if there was no content change.
792 old_exec = m1.execf(f)
788 old_exec = m1.execf(f)
793 old_link = m1.linkf(f)
789 old_link = m1.linkf(f)
794 if old_exec != new_exec or old_link != new_link:
790 if old_exec != new_exec or old_link != new_link:
795 changed.append(f)
791 changed.append(f)
796 m1.set(f, new_exec, new_link)
792 m1.set(f, new_exec, new_link)
797 except (OSError, IOError):
793 except (OSError, IOError):
798 if use_dirstate:
794 if use_dirstate:
799 self.ui.warn(_("trouble committing %s!\n") % f)
795 self.ui.warn(_("trouble committing %s!\n") % f)
800 raise
796 raise
801 else:
797 else:
802 remove.append(f)
798 remove.append(f)
803
799
804 # update manifest
800 # update manifest
805 m1.update(new)
801 m1.update(new)
806 remove.sort()
802 remove.sort()
807 removed = []
803 removed = []
808
804
809 for f in remove:
805 for f in remove:
810 if f in m1:
806 if f in m1:
811 del m1[f]
807 del m1[f]
812 removed.append(f)
808 removed.append(f)
813 elif f in m2:
809 elif f in m2:
814 removed.append(f)
810 removed.append(f)
815 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
811 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
816
812
817 # add changeset
813 # add changeset
818 new = new.keys()
814 new = new.keys()
819 new.sort()
815 new.sort()
820
816
821 user = user or self.ui.username()
817 user = user or self.ui.username()
822 if not text or force_editor:
818 if not text or force_editor:
823 edittext = []
819 edittext = []
824 if text:
820 if text:
825 edittext.append(text)
821 edittext.append(text)
826 edittext.append("")
822 edittext.append("")
827 edittext.append("HG: user: %s" % user)
823 edittext.append("HG: user: %s" % user)
828 if p2 != nullid:
824 if p2 != nullid:
829 edittext.append("HG: branch merge")
825 edittext.append("HG: branch merge")
830 if branchname:
826 if branchname:
831 edittext.append("HG: branch %s" % util.tolocal(branchname))
827 edittext.append("HG: branch %s" % util.tolocal(branchname))
832 edittext.extend(["HG: changed %s" % f for f in changed])
828 edittext.extend(["HG: changed %s" % f for f in changed])
833 edittext.extend(["HG: removed %s" % f for f in removed])
829 edittext.extend(["HG: removed %s" % f for f in removed])
834 if not changed and not remove:
830 if not changed and not remove:
835 edittext.append("HG: no files changed")
831 edittext.append("HG: no files changed")
836 edittext.append("")
832 edittext.append("")
837 # run editor in the repository root
833 # run editor in the repository root
838 olddir = os.getcwd()
834 olddir = os.getcwd()
839 os.chdir(self.root)
835 os.chdir(self.root)
840 text = self.ui.edit("\n".join(edittext), user)
836 text = self.ui.edit("\n".join(edittext), user)
841 os.chdir(olddir)
837 os.chdir(olddir)
842
838
843 lines = [line.rstrip() for line in text.rstrip().splitlines()]
839 lines = [line.rstrip() for line in text.rstrip().splitlines()]
844 while lines and not lines[0]:
840 while lines and not lines[0]:
845 del lines[0]
841 del lines[0]
846 if not lines:
842 if not lines:
847 return None
843 return None
848 text = '\n'.join(lines)
844 text = '\n'.join(lines)
849 if branchname:
845 if branchname:
850 extra["branch"] = branchname
846 extra["branch"] = branchname
851 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
847 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
852 user, date, extra)
848 user, date, extra)
853 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
849 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
854 parent2=xp2)
850 parent2=xp2)
855 tr.close()
851 tr.close()
856
852
857 if self.branchcache and "branch" in extra:
853 if self.branchcache and "branch" in extra:
858 self.branchcache[util.tolocal(extra["branch"])] = n
854 self.branchcache[util.tolocal(extra["branch"])] = n
859
855
860 if use_dirstate or update_dirstate:
856 if use_dirstate or update_dirstate:
861 self.dirstate.setparents(n)
857 self.dirstate.setparents(n)
862 if use_dirstate:
858 if use_dirstate:
863 self.dirstate.update(new, "n")
859 self.dirstate.update(new, "n")
864 self.dirstate.forget(removed)
860 self.dirstate.forget(removed)
865
861
866 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
862 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
867 return n
863 return n
868
864
869 def walk(self, node=None, files=[], match=util.always, badmatch=None):
865 def walk(self, node=None, files=[], match=util.always, badmatch=None):
870 '''
866 '''
871 walk recursively through the directory tree or a given
867 walk recursively through the directory tree or a given
872 changeset, finding all files matched by the match
868 changeset, finding all files matched by the match
873 function
869 function
874
870
875 results are yielded in a tuple (src, filename), where src
871 results are yielded in a tuple (src, filename), where src
876 is one of:
872 is one of:
877 'f' the file was found in the directory tree
873 'f' the file was found in the directory tree
878 'm' the file was only in the dirstate and not in the tree
874 'm' the file was only in the dirstate and not in the tree
879 'b' file was not found and matched badmatch
875 'b' file was not found and matched badmatch
880 '''
876 '''
881
877
882 if node:
878 if node:
883 fdict = dict.fromkeys(files)
879 fdict = dict.fromkeys(files)
884 # for dirstate.walk, files=['.'] means "walk the whole tree".
880 # for dirstate.walk, files=['.'] means "walk the whole tree".
885 # follow that here, too
881 # follow that here, too
886 fdict.pop('.', None)
882 fdict.pop('.', None)
887 mdict = self.manifest.read(self.changelog.read(node)[0])
883 mdict = self.manifest.read(self.changelog.read(node)[0])
888 mfiles = mdict.keys()
884 mfiles = mdict.keys()
889 mfiles.sort()
885 mfiles.sort()
890 for fn in mfiles:
886 for fn in mfiles:
891 for ffn in fdict:
887 for ffn in fdict:
892 # match if the file is the exact name or a directory
888 # match if the file is the exact name or a directory
893 if ffn == fn or fn.startswith("%s/" % ffn):
889 if ffn == fn or fn.startswith("%s/" % ffn):
894 del fdict[ffn]
890 del fdict[ffn]
895 break
891 break
896 if match(fn):
892 if match(fn):
897 yield 'm', fn
893 yield 'm', fn
898 ffiles = fdict.keys()
894 ffiles = fdict.keys()
899 ffiles.sort()
895 ffiles.sort()
900 for fn in ffiles:
896 for fn in ffiles:
901 if badmatch and badmatch(fn):
897 if badmatch and badmatch(fn):
902 if match(fn):
898 if match(fn):
903 yield 'b', fn
899 yield 'b', fn
904 else:
900 else:
905 self.ui.warn(_('%s: No such file in rev %s\n')
901 self.ui.warn(_('%s: No such file in rev %s\n')
906 % (self.pathto(fn), short(node)))
902 % (self.pathto(fn), short(node)))
907 else:
903 else:
908 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
904 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
909 yield src, fn
905 yield src, fn
910
906
def status(self, node1=None, node2=None, files=[], match=util.always,
           wlock=None, list_ignored=False, list_clean=False):
    """Return the status of files between two nodes or a node and the
    working directory.

    If node1 is None, the first dirstate parent is used instead.
    If node2 is None, node1 is compared against the working directory.

    Returns a 7-tuple of sorted lists:
    (modified, added, removed, deleted, unknown, ignored, clean)
    """

    def fcmp(fn, getnode):
        # full-content comparison of the working file against the
        # revision named by getnode(fn)
        working_data = self.wread(fn)
        return self.file(fn).cmp(getnode(fn), working_data)

    def mfmatches(node):
        # manifest of `node`, restricted to names accepted by `match`
        change = self.changelog.read(node)
        mf = self.manifest.read(change[0]).copy()
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf

    modified, added, removed, deleted, unknown = [], [], [], [], []
    ignored, clean = [], []

    compareworking = False
    if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
        compareworking = True

    if not compareworking:
        # read the manifest from node1 before the manifest from node2,
        # so that we'll hit the manifest cache if we're going through
        # all the revisions in parent->child order.
        mf1 = mfmatches(node1)

    mywlock = False

    # are we comparing the working directory?
    if not node2:
        (lookup, modified, added, removed, deleted, unknown,
         ignored, clean) = self.dirstate.status(files, match,
                                                list_ignored, list_clean)

        # are we comparing working dir against its parent?
        if compareworking:
            if lookup:
                # do a full compare of any files that might have changed
                mnode = self.changelog.read(self.dirstate.parents()[0])[0]
                getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
                                      nullid)
                for f in lookup:
                    if fcmp(f, getnode):
                        modified.append(f)
                    else:
                        if list_clean:
                            clean.append(f)
                        # contents matched: opportunistically mark the
                        # file clean in the dirstate so later status
                        # calls can skip the full compare (best effort,
                        # only if we can grab the wlock without waiting)
                        if not wlock and not mywlock:
                            mywlock = True
                            try:
                                wlock = self.wlock(wait=0)
                            except lock.LockException:
                                pass
                        if wlock:
                            self.dirstate.update([f], "n")
        else:
            # we are comparing working dir against non-parent
            # generate a pseudo-manifest for the working dir
            # XXX: create it in dirstate.py ?
            mf2 = mfmatches(self.dirstate.parents()[0])
            is_exec = util.execfunc(self.root, mf2.execf)
            is_link = util.linkfunc(self.root, mf2.linkf)
            for f in lookup + modified + added:
                mf2[f] = ""
                mf2.set(f, is_exec(f), is_link(f))
            for f in removed:
                if f in mf2:
                    del mf2[f]

        if mywlock and wlock:
            wlock.release()
    else:
        # we are comparing two revisions
        mf2 = mfmatches(node2)

    if not compareworking:
        # flush lists from dirstate before comparing manifests
        modified, added, clean = [], [], []

        # make sure to sort the files so we talk to the disk in a
        # reasonable order
        mf2keys = mf2.keys()
        mf2keys.sort()
        getnode = lambda fn: mf1.get(fn, nullid)
        for fn in mf2keys:
            if fn in mf1:
                if mf1.flags(fn) != mf2.flags(fn) or \
                   (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
                                            fcmp(fn, getnode))):
                    modified.append(fn)
                elif list_clean:
                    clean.append(fn)
                # consumed: whatever is left in mf1 afterwards was removed
                del mf1[fn]
            else:
                added.append(fn)

        removed = mf1.keys()

    # sort and return results:
    for l in modified, added, removed, deleted, unknown, ignored, clean:
        l.sort()
    return (modified, added, removed, deleted, unknown, ignored, clean)
def add(self, list, wlock=None):
    """Schedule the given files for addition at the next commit.

    Warns and skips a file when it is missing, is neither a regular
    file nor a symlink, or is already tracked.  Files over 10MB are
    still added but trigger a performance warning.
    """
    if not wlock:
        wlock = self.wlock()
    for f in list:
        p = self.wjoin(f)
        try:
            st = os.lstat(p)
        except OSError:
            # fix: this was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit; only a failed stat means
            # the file is missing
            self.ui.warn(_("%s does not exist!\n") % f)
            continue
        if st.st_size > 10000000:
            self.ui.warn(_("%s: files over 10MB may cause memory and"
                           " performance problems\n"
                           "(use 'hg revert %s' to unadd the file)\n")
                         % (f, f))
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self.ui.warn(_("%s not added: only files and symlinks "
                           "supported currently\n") % f)
        elif self.dirstate.state(f) in 'an':
            self.ui.warn(_("%s already tracked!\n") % f)
        else:
            self.dirstate.update([f], "a")
def forget(self, list, wlock=None):
    """Undo a pending add: stop tracking files in 'a'/'i' state."""
    if not wlock:
        wlock = self.wlock()
    for f in list:
        if self.dirstate.state(f) in 'ai':
            self.dirstate.forget([f])
        else:
            self.ui.warn(_("%s not added!\n") % f)
1053 def remove(self, list, unlink=False, wlock=None):
1049 def remove(self, list, unlink=False, wlock=None):
1054 if unlink:
1050 if unlink:
1055 for f in list:
1051 for f in list:
1056 try:
1052 try:
1057 util.unlink(self.wjoin(f))
1053 util.unlink(self.wjoin(f))
1058 except OSError, inst:
1054 except OSError, inst:
1059 if inst.errno != errno.ENOENT:
1055 if inst.errno != errno.ENOENT:
1060 raise
1056 raise
1061 if not wlock:
1057 if not wlock:
1062 wlock = self.wlock()
1058 wlock = self.wlock()
1063 for f in list:
1059 for f in list:
1064 if unlink and os.path.exists(self.wjoin(f)):
1060 if unlink and os.path.exists(self.wjoin(f)):
1065 self.ui.warn(_("%s still exists!\n") % f)
1061 self.ui.warn(_("%s still exists!\n") % f)
1066 elif self.dirstate.state(f) == 'a':
1062 elif self.dirstate.state(f) == 'a':
1067 self.dirstate.forget([f])
1063 self.dirstate.forget([f])
1068 elif f not in self.dirstate:
1064 elif f not in self.dirstate:
1069 self.ui.warn(_("%s not tracked!\n") % f)
1065 self.ui.warn(_("%s not tracked!\n") % f)
1070 else:
1066 else:
1071 self.dirstate.update([f], "r")
1067 self.dirstate.update([f], "r")
1072
1068
def undelete(self, list, wlock=None):
    """Restore files marked removed ('r') from the first dirstate parent.

    The file content and flags are taken from the parent's manifest and
    the dirstate entry is reset to normal ('n').
    """
    p = self.dirstate.parents()[0]
    mn = self.changelog.read(p)[0]
    m = self.manifest.read(mn)
    if not wlock:
        wlock = self.wlock()
    for f in list:
        if self.dirstate.state(f) not in "r":
            # fix: message was not marked for translation with _(),
            # unlike every other warning in this file
            self.ui.warn(_("%s not removed!\n") % f)
        else:
            t = self.file(f).read(m[f])
            self.wwrite(f, t, m.flags(f))
            self.dirstate.update([f], "n")
def copy(self, source, dest, wlock=None):
    """Record in the dirstate that dest is a copy of source.

    dest must already exist in the working directory as a regular file
    or symlink; otherwise a warning is printed and nothing is recorded.
    """
    p = self.wjoin(dest)
    if not (os.path.exists(p) or os.path.islink(p)):
        self.ui.warn(_("%s does not exist!\n") % dest)
        return
    if not (os.path.isfile(p) or os.path.islink(p)):
        self.ui.warn(_("copy failed: %s is not a file or a "
                       "symbolic link\n") % dest)
        return
    if not wlock:
        wlock = self.wlock()
    if self.dirstate.state(dest) == '?':
        self.dirstate.update([dest], "a")
    self.dirstate.copy(source, dest)
def heads(self, start=None):
    """Return the repository's head nodes, most recent revision first."""
    nodes = self.changelog.heads(start)
    # decorate with the negated rev so a plain sort yields
    # descending revision order
    byrev = [(-self.changelog.rev(h), h) for h in nodes]
    byrev.sort()
    return [h for (negrev, h) in byrev]
def branches(self, nodes):
    """For each given node, follow first parents down to the first merge
    or root and return a tuple per segment:
    (segment-tip, segment-bottom, parent1, parent2).

    Defaults to the changelog tip when nodes is empty.
    """
    if not nodes:
        nodes = [self.changelog.tip()]
    segments = []
    for tip in nodes:
        n = tip
        while True:
            p = self.changelog.parents(n)
            # stop at a merge (two real parents) or at a root
            if p[1] != nullid or p[0] == nullid:
                segments.append((tip, n, p[0], p[1]))
                break
            n = p[0]
    return segments
def between(self, pairs):
    """For each (top, bottom) pair, walk first parents from top towards
    bottom and collect the nodes at exponentially growing distances
    (1, 2, 4, ...) below top.  Returns one list of nodes per pair."""
    results = []
    for top, bottom in pairs:
        picked = []
        n, dist, want = top, 0, 1
        while n != bottom:
            parent = self.changelog.parents(n)[0]
            if dist == want:
                picked.append(n)
                want *= 2
            n = parent
            dist += 1
        results.append(picked)
    return results
def findincoming(self, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and self.
    Furthermore base will be updated to include the nodes that exists
    in self and remote but no children exists in self and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in self and in remote.
    All the descendants of the list returned are missing in self.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    m = self.changelog.nodemap
    search = []
    fetch = {}
    seen = {}
    seenbranch = {}
    if base is None:
        base = {}

    if not heads:
        heads = remote.heads()

    if self.changelog.tip() == nullid:
        # local repo is empty: everything the remote has is missing
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid]
        return []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return []

    req = dict.fromkeys(unknown)
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            elif n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req[p] = 1
            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # batch the parent queries ten at a time
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            # fix: this used short(f[:4]) which truncated the binary
            # node to 4 bytes before hexing, printing a garbled id;
            # short() already abbreviates the full node
            raise repo.RepoError(_("already have changeset ") + short(f))

    if base.keys() == [nullid]:
        if force:
            self.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    self.ui.debug(_("found new changesets starting at ") +
                  " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
def findoutgoing(self, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    if base is None:
        # no common-base information supplied: compute it now
        base = {}
        self.findincoming(remote, base, heads, force=force)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    worklist = base.keys()
    while worklist:
        n = worklist.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                worklist.append(p)

    # find every node whose parents have been pruned
    subset = []
    # find every remote head that will get new children
    updated_heads = {}
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)
        if heads:
            if p1 in heads:
                updated_heads[p1] = True
            if p2 in heads:
                updated_heads[p2] = True

    # this is the set of all roots we have to push
    if heads:
        return subset, updated_heads.keys()
    return subset
def pull(self, remote, heads=None, force=False, lock=None):
    """Pull changes from remote into this repository.

    Returns the result of addchangegroup, or 0 when there was nothing
    to pull.  Takes the repo lock itself unless one is passed in.
    """
    ownlock = False
    if not lock:
        lock = self.lock()
        ownlock = True

    try:
        fetch = self.findincoming(remote, force=force)
        if fetch == [nullid]:
            self.ui.status(_("requesting all changes\n"))

        if not fetch:
            self.ui.status(_("no changes found\n"))
            return 0

        if heads is None:
            cg = remote.changegroup(fetch, 'pull')
        else:
            # partial pull needs server-side changegroupsubset support
            if 'changegroupsubset' not in remote.capabilities:
                raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
            cg = remote.changegroupsubset(fetch, heads, 'pull')
        return self.addchangegroup(cg, 'pull', remote.url())
    finally:
        if ownlock:
            lock.release()
def push(self, remote, force=False, revs=None):
    """Push local changes to remote, choosing the transfer strategy.

    unbundle is used when the remote advertises it (local user cannot
    lock the remote repo: new ssh servers, http servers); otherwise
    fall back to locking the remote and calling addchangegroup
    directly (local filesystem, old ssh servers).
    """
    if remote.capable('unbundle'):
        return self.push_unbundle(remote, force, revs)
    return self.push_addchangegroup(remote, force, revs)
def prepush(self, remote, force, revs):
    """Compute the changegroup that a push would send to remote.

    Returns (changegroup, remote_heads) when there is something safe to
    push, or (None, 1) when there is nothing to send or the push would
    create new remote heads and force is not set.
    """
    base = {}
    remote_heads = remote.heads()
    inc = self.findincoming(remote, base, remote_heads, force=force)

    update, updated_heads = self.findoutgoing(remote, base, remote_heads)
    if revs is not None:
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
    else:
        bases, heads = update, self.changelog.heads()

    if not bases:
        self.ui.status(_("no changes found\n"))
        return None, 1
    elif not force:
        # check if we're creating new remote heads
        # to be a remote head after push, node must be either
        # - unknown locally
        # - a local outgoing head descended from update
        # - a remote head that's known locally and not
        #   ancestral to an outgoing head
        warn = 0

        if remote_heads == [nullid]:
            # empty remote: anything goes
            warn = 0
        elif not revs and len(heads) > len(remote_heads):
            warn = 1
        else:
            newheads = list(heads)
            for r in remote_heads:
                if r in self.changelog.nodemap:
                    desc = self.changelog.heads(r, heads)
                    l = [h for h in heads if h in desc]
                    if not l:
                        newheads.append(r)
                else:
                    newheads.append(r)
            if len(newheads) > len(remote_heads):
                warn = 1

        if warn:
            self.ui.warn(_("abort: push creates new remote branches!\n"))
            self.ui.status(_("(did you forget to merge?"
                             " use push -f to force)\n"))
            return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        cg = self.changegroup(update, 'push')
    else:
        cg = self.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
def push_addchangegroup(self, remote, force, revs):
    """Push by locking the remote repo and feeding it the changegroup.

    Assumes the local user can lock the remote (local filesystem, old
    ssh servers).  Returns addchangegroup's result, or prepush's error
    code when there is nothing to push.
    """
    # hold the remote lock for the duration of the transfer; the lock
    # object must stay referenced until addchangegroup returns
    lock = remote.lock()

    ret = self.prepush(remote, force, revs)
    if ret[0] is None:
        return ret[1]
    cg, remote_heads = ret
    return remote.addchangegroup(cg, 'push', self.url())
def push_unbundle(self, remote, force, revs):
    """Push via the unbundle wire protocol.

    The local repo finds heads on the server and works out which revs
    it must push.  Once the revs are transferred, the server aborts if
    it meanwhile grew different heads (someone else won the
    commit/push race).
    """
    ret = self.prepush(remote, force, revs)
    if ret[0] is None:
        return ret[1]
    cg, remote_heads = ret
    if force:
        # tell the server to skip its own new-heads check
        remote_heads = ['force']
    return remote.unbundle(cg, remote_heads, 'push')
def changegroupinfo(self, nodes):
    """Report how many changesets were collected; list them in debug mode."""
    self.ui.note(_("%d changesets found\n") % len(nodes))
    if self.ui.debugflag:
        self.ui.debug(_("List of changesets:\n"))
        for node in nodes:
            self.ui.debug("%s\n" % hex(node))
    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        Returns a util.chunkbuffer wrapping the gengroup() generator
        defined below."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too. nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known. The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function. Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history. Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents. This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup. The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # the inner function.
        def filenode_collector(changedfiles):
            # next_rev is a one-element list so the closure can mutate it
            # (Python 2 has no 'nonlocal').
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to. It
            # does this by assuming the a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                    else:
                        # Otherwise we need a full manifest.
                        m = mnfst.read(mnfstnode)
                        # For every file in we care about.
                        for f in changedfiles:
                            fnode = m.get(f, None)
                            # If it's in the manifest
                            if fnode is not None:
                                # See comments above.
                                clnode = msng_mnfst_set[mnfstnode]
                                ndset = msng_filenode_set.setdefault(f, {})
                                ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we now the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up the a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all theses utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1725
1721
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        Returns a util.chunkbuffer wrapping the gengroup() generator."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        # All nodes descending from basenodes are outgoing.
        nodes = cl.nodesbetween(basenodes, None)[0]
        # Ersatz set of the outgoing revision numbers; used to decide which
        # manifest/file revlog entries belong to the changegroup (by linkrev).
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        # A changeset's owning changenode is itself.
        def identity(x):
            return x

        # Yield the nodes of *revlog* whose linkrev points at an outgoing
        # changeset, in revision order.
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # Callback factory: records every file touched by each outgoing
        # changeset into *changedfileset* (used as a set).
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # Lookup factory: map a node of *revlog* back to the changelog node
        # it was introduced by (via linkrev).
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                # Materialize so we can test emptiness before emitting the
                # filename chunk.
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1792
1788
    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        # Linkrev mapper for changelog chunks: debug-log each node and hand
        # back cl.count(), the rev number the next added changeset receives.
        # NOTE: 'cl' is bound later in this function, before addgroup runs
        # these callbacks.
        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        # Linkrev mapper for manifest/file chunks: changelog node -> rev.
        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        cor = cl.count() - 1  # rev of the last changeset before the pull
        chunkiter = changegroup.chunkiter(source)
        if cl.addgroup(chunkiter, csmap, tr, 1) is None:
            raise util.Abort(_("received changelog group is empty"))
        cnr = cl.count() - 1  # rev of the last changeset after the pull
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        chunkiter = changegroup.chunkiter(source)
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifest.addgroup(chunkiter, revmap, tr)

        # process the files
        self.ui.status(_("adding file changes\n"))
        while 1:
            f = changegroup.getchunk(source)
            if not f:
                # empty chunk marks the end of the file groups
                break
            self.ui.debug(_("adding %s revisions\n") % f)
            fl = self.file(f)
            o = fl.count()
            chunkiter = changegroup.chunkiter(source)
            if fl.addgroup(chunkiter, revmap, tr) is None:
                raise util.Abort(_("received file revlog group is empty"))
            revisions += fl.count() - o
            files += 1

        # make changelog see real files again
        cl.finalize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            # pretxnchangegroup may still veto (throw) before tr.close().
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            # post-transaction notification hooks: one 'changegroup' for the
            # batch, one 'incoming' per new changeset.
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
1885
1890
1886
1891 def stream_in(self, remote):
1887 def stream_in(self, remote):
1892 fp = remote.stream_out()
1888 fp = remote.stream_out()
1893 l = fp.readline()
1889 l = fp.readline()
1894 try:
1890 try:
1895 resp = int(l)
1891 resp = int(l)
1896 except ValueError:
1892 except ValueError:
1897 raise util.UnexpectedOutput(
1893 raise util.UnexpectedOutput(
1898 _('Unexpected response from remote server:'), l)
1894 _('Unexpected response from remote server:'), l)
1899 if resp == 1:
1895 if resp == 1:
1900 raise util.Abort(_('operation forbidden by server'))
1896 raise util.Abort(_('operation forbidden by server'))
1901 elif resp == 2:
1897 elif resp == 2:
1902 raise util.Abort(_('locking the remote repository failed'))
1898 raise util.Abort(_('locking the remote repository failed'))
1903 elif resp != 0:
1899 elif resp != 0:
1904 raise util.Abort(_('the server sent an unknown error code'))
1900 raise util.Abort(_('the server sent an unknown error code'))
1905 self.ui.status(_('streaming all changes\n'))
1901 self.ui.status(_('streaming all changes\n'))
1906 l = fp.readline()
1902 l = fp.readline()
1907 try:
1903 try:
1908 total_files, total_bytes = map(int, l.split(' ', 1))
1904 total_files, total_bytes = map(int, l.split(' ', 1))
1909 except ValueError, TypeError:
1905 except ValueError, TypeError:
1910 raise util.UnexpectedOutput(
1906 raise util.UnexpectedOutput(
1911 _('Unexpected response from remote server:'), l)
1907 _('Unexpected response from remote server:'), l)
1912 self.ui.status(_('%d files to transfer, %s of data\n') %
1908 self.ui.status(_('%d files to transfer, %s of data\n') %
1913 (total_files, util.bytecount(total_bytes)))
1909 (total_files, util.bytecount(total_bytes)))
1914 start = time.time()
1910 start = time.time()
1915 for i in xrange(total_files):
1911 for i in xrange(total_files):
1916 # XXX doesn't support '\n' or '\r' in filenames
1912 # XXX doesn't support '\n' or '\r' in filenames
1917 l = fp.readline()
1913 l = fp.readline()
1918 try:
1914 try:
1919 name, size = l.split('\0', 1)
1915 name, size = l.split('\0', 1)
1920 size = int(size)
1916 size = int(size)
1921 except ValueError, TypeError:
1917 except ValueError, TypeError:
1922 raise util.UnexpectedOutput(
1918 raise util.UnexpectedOutput(
1923 _('Unexpected response from remote server:'), l)
1919 _('Unexpected response from remote server:'), l)
1924 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1920 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1925 ofp = self.sopener(name, 'w')
1921 ofp = self.sopener(name, 'w')
1926 for chunk in util.filechunkiter(fp, limit=size):
1922 for chunk in util.filechunkiter(fp, limit=size):
1927 ofp.write(chunk)
1923 ofp.write(chunk)
1928 ofp.close()
1924 ofp.close()
1929 elapsed = time.time() - start
1925 elapsed = time.time() - start
1930 if elapsed <= 0:
1926 if elapsed <= 0:
1931 elapsed = 0.001
1927 elapsed = 0.001
1932 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1928 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1933 (util.bytecount(total_bytes), elapsed,
1929 (util.bytecount(total_bytes), elapsed,
1934 util.bytecount(total_bytes / elapsed)))
1930 util.bytecount(total_bytes / elapsed)))
1935 self.invalidate()
1931 self.invalidate()
1936 return len(self.heads()) + 1
1932 return len(self.heads()) + 1
1937
1933
1938 def clone(self, remote, heads=[], stream=False):
1934 def clone(self, remote, heads=[], stream=False):
1939 '''clone remote repository.
1935 '''clone remote repository.
1940
1936
1941 keyword arguments:
1937 keyword arguments:
1942 heads: list of revs to clone (forces use of pull)
1938 heads: list of revs to clone (forces use of pull)
1943 stream: use streaming clone if possible'''
1939 stream: use streaming clone if possible'''
1944
1940
1945 # now, all clients that can request uncompressed clones can
1941 # now, all clients that can request uncompressed clones can
1946 # read repo formats supported by all servers that can serve
1942 # read repo formats supported by all servers that can serve
1947 # them.
1943 # them.
1948
1944
1949 # if revlog format changes, client will have to check version
1945 # if revlog format changes, client will have to check version
1950 # and format flags on "stream" capability, and use
1946 # and format flags on "stream" capability, and use
1951 # uncompressed only if compatible.
1947 # uncompressed only if compatible.
1952
1948
1953 if stream and not heads and remote.capable('stream'):
1949 if stream and not heads and remote.capable('stream'):
1954 return self.stream_in(remote)
1950 return self.stream_in(remote)
1955 return self.pull(remote, heads)
1951 return self.pull(remote, heads)
1956
1952
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames each (src, dest) pair in *files*.

    The pairs are copied into fresh tuples up front so the returned
    closure does not keep the caller's sequence (or its elements) alive.
    """
    renames = [tuple(pair) for pair in files]
    def a():
        for src, dest in renames:
            util.rename(src, dest)
    return a
1964
1960
def instance(ui, path, create):
    """Open (or, if *create* is set, create) the local repository at
    *path*, after stripping any leading 'file:' scheme."""
    local_path = util.drop_scheme('file', path)
    return localrepository(ui, local_path, create)
1967
1963
def islocal(path):
    # Repositories handled by this module are file-backed, so they are
    # local regardless of what *path* contains.
    return True
@@ -1,21 +1,21
1 uisetup called
1 uisetup called
2 ui.parentui is None
2 ui.parentui isnot None
3 reposetup called for a
3 reposetup called for a
4 ui == repo.ui
4 ui == repo.ui
5 Foo
5 Foo
6 uisetup called
6 uisetup called
7 ui.parentui is None
7 ui.parentui is None
8 reposetup called for a
8 reposetup called for a
9 ui == repo.ui
9 ui == repo.ui
10 reposetup called for b
10 reposetup called for b
11 ui == repo.ui
11 ui == repo.ui
12 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
12 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
13 uisetup called
13 uisetup called
14 ui.parentui is None
14 ui.parentui is None
15 Bar
15 Bar
16 % module/__init__.py-style
16 % module/__init__.py-style
17 uisetup called
17 uisetup called
18 ui.parentui is None
18 ui.parentui isnot None
19 reposetup called for a
19 reposetup called for a
20 ui == repo.ui
20 ui == repo.ui
21 Foo
21 Foo
General Comments 0
You need to be logged in to leave comments. Login now