do not attempt to translate ui.debug output
Martin Geisler
r9467:4c041f1e default
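The change is mechanical: every ui.debug() call in the hunks below loses its gettext _() wrapper, since debug output is aimed at developers rather than end users and is not translated. A minimal, self-contained sketch of the before/after pattern (FakeUI and the sample message are illustrative stand-ins, not Mercurial code):

# Minimal sketch of the pattern applied throughout this changeset.
# FakeUI is a hypothetical stand-in for mercurial.ui.ui, for illustration only.
from gettext import gettext as _  # stand-in for mercurial.i18n._

class FakeUI(object):
    def debug(self, msg):
        # developer-oriented output; emitted verbatim when debugging is enabled
        print(msg, end='')

ui = FakeUI()
key = 'acl.allow'

# before this changeset: debug text was marked for translation
ui.debug(_('acl: %s not enabled\n') % key)

# after this changeset: debug text is passed through untranslated
ui.debug('acl: %s not enabled\n' % key)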
@@ -60,12 +60,12 @@ import getpass, urllib
60 def buildmatch(ui, repo, user, key):
60 def buildmatch(ui, repo, user, key):
61 '''return tuple of (match function, list enabled).'''
61 '''return tuple of (match function, list enabled).'''
62 if not ui.has_section(key):
62 if not ui.has_section(key):
63 ui.debug(_('acl: %s not enabled\n') % key)
63 ui.debug('acl: %s not enabled\n' % key)
64 return None
64 return None
65
65
66 pats = [pat for pat, users in ui.configitems(key)
66 pats = [pat for pat, users in ui.configitems(key)
67 if user in users.replace(',', ' ').split()]
67 if user in users.replace(',', ' ').split()]
68 ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
68 ui.debug('acl: %s enabled, %d entries for user %s\n' %
69 (key, len(pats), user))
69 (key, len(pats), user))
70 if pats:
70 if pats:
71 return match.match(repo.root, '', pats)
71 return match.match(repo.root, '', pats)
@@ -77,7 +77,7 @@ def hook(ui, repo, hooktype, node=None,
77 raise util.Abort(_('config error - hook type "%s" cannot stop '
77 raise util.Abort(_('config error - hook type "%s" cannot stop '
78 'incoming changesets') % hooktype)
78 'incoming changesets') % hooktype)
79 if source not in ui.config('acl', 'sources', 'serve').split():
79 if source not in ui.config('acl', 'sources', 'serve').split():
80 ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
80 ui.debug('acl: changes have source "%s" - skipping\n' % source)
81 return
81 return
82
82
83 user = None
83 user = None
@@ -99,9 +99,9 @@ def hook(ui, repo, hooktype, node=None,
99 ctx = repo[rev]
99 ctx = repo[rev]
100 for f in ctx.files():
100 for f in ctx.files():
101 if deny and deny(f):
101 if deny and deny(f):
102 ui.debug(_('acl: user %s denied on %s\n') % (user, f))
102 ui.debug('acl: user %s denied on %s\n' % (user, f))
103 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
103 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
104 if allow and not allow(f):
104 if allow and not allow(f):
105 ui.debug(_('acl: user %s not allowed on %s\n') % (user, f))
105 ui.debug('acl: user %s not allowed on %s\n' % (user, f))
106 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
106 raise util.Abort(_('acl: access denied for changeset %s') % ctx)
107 ui.debug(_('acl: allowing changeset %s\n') % ctx)
107 ui.debug('acl: allowing changeset %s\n' % ctx)
@@ -153,7 +153,7 @@ def churn(ui, repo, *pats, **opts):
153 maxname = max(len(k) for k, v in rate)
153 maxname = max(len(k) for k, v in rate)
154
154
155 ttywidth = util.termwidth()
155 ttywidth = util.termwidth()
156 ui.debug(_("assuming %i character terminal\n") % ttywidth)
156 ui.debug("assuming %i character terminal\n" % ttywidth)
157 width = ttywidth - maxname - 2 - 6 - 2 - 2
157 width = ttywidth - maxname - 2 - 6 - 2 - 2
158
158
159 for date, count in rate:
159 for date, count in rate:
@@ -266,7 +266,7 @@ class commandline(object):
266
266
267 def _run(self, cmd, *args, **kwargs):
267 def _run(self, cmd, *args, **kwargs):
268 cmdline = self._cmdline(cmd, *args, **kwargs)
268 cmdline = self._cmdline(cmd, *args, **kwargs)
269 self.ui.debug(_('running: %s\n') % (cmdline,))
269 self.ui.debug('running: %s\n' % (cmdline,))
270 self.prerun()
270 self.prerun()
271 try:
271 try:
272 return util.popen(cmdline)
272 return util.popen(cmdline)
@@ -199,7 +199,7 @@ def createlog(ui, directory=None, root="
199
199
200 cmd = [util.shellquote(arg) for arg in cmd]
200 cmd = [util.shellquote(arg) for arg in cmd]
201 ui.note(_("running %s\n") % (' '.join(cmd)))
201 ui.note(_("running %s\n") % (' '.join(cmd)))
202 ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
202 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
203
203
204 pfp = util.popen(' '.join(cmd))
204 pfp = util.popen(' '.join(cmd))
205 peek = pfp.readline()
205 peek = pfp.readline()
@@ -378,7 +378,7 @@ def createlog(ui, directory=None, root="
378 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
378 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
379 len(e.comment) == 1 and
379 len(e.comment) == 1 and
380 file_added_re.match(e.comment[0])):
380 file_added_re.match(e.comment[0])):
381 ui.debug(_('found synthetic revision in %s: %r\n')
381 ui.debug('found synthetic revision in %s: %r\n'
382 % (e.rcs, e.comment[0]))
382 % (e.rcs, e.comment[0]))
383 e.synthetic = True
383 e.synthetic = True
384
384
@@ -75,7 +75,7 @@ class darcs_source(converter_source, com
75 self.parents[child] = []
75 self.parents[child] = []
76
76
77 def after(self):
77 def after(self):
78 self.ui.debug(_('cleaning up %s\n') % self.tmppath)
78 self.ui.debug('cleaning up %s\n' % self.tmppath)
79 shutil.rmtree(self.tmppath, ignore_errors=True)
79 shutil.rmtree(self.tmppath, ignore_errors=True)
80
80
81 def xml(self, cmd, **kwargs):
81 def xml(self, cmd, **kwargs):
@@ -125,7 +125,7 @@ class gnuarch_source(converter_source, c
125 break
125 break
126
126
127 def after(self):
127 def after(self):
128 self.ui.debug(_('cleaning up %s\n') % self.tmppath)
128 self.ui.debug('cleaning up %s\n' % self.tmppath)
129 shutil.rmtree(self.tmppath, ignore_errors=True)
129 shutil.rmtree(self.tmppath, ignore_errors=True)
130
130
131 def getheads(self):
131 def getheads(self):
@@ -195,7 +195,7 @@ class gnuarch_source(converter_source, c
195 return os.system(cmdline)
195 return os.system(cmdline)
196
196
197 def _update(self, rev):
197 def _update(self, rev):
198 self.ui.debug(_('applying revision %s...\n') % rev)
198 self.ui.debug('applying revision %s...\n' % rev)
199 changeset, status = self.runlines('replay', '-d', self.tmppath,
199 changeset, status = self.runlines('replay', '-d', self.tmppath,
200 rev)
200 rev)
201 if status:
201 if status:
@@ -205,7 +205,7 @@ class gnuarch_source(converter_source, c
205 self._obtainrevision(rev)
205 self._obtainrevision(rev)
206 else:
206 else:
207 old_rev = self.parents[rev][0]
207 old_rev = self.parents[rev][0]
208 self.ui.debug(_('computing changeset between %s and %s...\n')
208 self.ui.debug('computing changeset between %s and %s...\n'
209 % (old_rev, rev))
209 % (old_rev, rev))
210 self._parsechangeset(changeset, rev)
210 self._parsechangeset(changeset, rev)
211
211
@@ -254,10 +254,10 @@ class gnuarch_source(converter_source, c
254 return changes, copies
254 return changes, copies
255
255
256 def _obtainrevision(self, rev):
256 def _obtainrevision(self, rev):
257 self.ui.debug(_('obtaining revision %s...\n') % rev)
257 self.ui.debug('obtaining revision %s...\n' % rev)
258 output = self._execute('get', rev, self.tmppath)
258 output = self._execute('get', rev, self.tmppath)
259 self.checkexit(output)
259 self.checkexit(output)
260 self.ui.debug(_('analyzing revision %s...\n') % rev)
260 self.ui.debug('analyzing revision %s...\n' % rev)
261 files = self._readcontents(self.tmppath)
261 files = self._readcontents(self.tmppath)
262 self.changes[rev].add_files += files
262 self.changes[rev].add_files += files
263
263
@@ -55,12 +55,12 @@ class mercurial_sink(converter_sink):
55 self.filemapmode = False
55 self.filemapmode = False
56
56
57 def before(self):
57 def before(self):
58 self.ui.debug(_('run hg sink pre-conversion action\n'))
58 self.ui.debug('run hg sink pre-conversion action\n')
59 self.wlock = self.repo.wlock()
59 self.wlock = self.repo.wlock()
60 self.lock = self.repo.lock()
60 self.lock = self.repo.lock()
61
61
62 def after(self):
62 def after(self):
63 self.ui.debug(_('run hg sink post-conversion action\n'))
63 self.ui.debug('run hg sink post-conversion action\n')
64 self.lock.release()
64 self.lock.release()
65 self.wlock.release()
65 self.wlock.release()
66
66
@@ -348,10 +348,10 @@ class mercurial_source(converter_source)
348 self.convertfp.flush()
348 self.convertfp.flush()
349
349
350 def before(self):
350 def before(self):
351 self.ui.debug(_('run hg source pre-conversion action\n'))
351 self.ui.debug('run hg source pre-conversion action\n')
352
352
353 def after(self):
353 def after(self):
354 self.ui.debug(_('run hg source post-conversion action\n'))
354 self.ui.debug('run hg source post-conversion action\n')
355
355
356 def hasnativeorder(self):
356 def hasnativeorder(self):
357 return True
357 return True
@@ -531,7 +531,7 @@ class svn_source(converter_source):
531 """
531 """
532 if not path.startswith(self.rootmodule):
532 if not path.startswith(self.rootmodule):
533 # Requests on foreign branches may be forbidden at server level
533 # Requests on foreign branches may be forbidden at server level
534 self.ui.debug(_('ignoring foreign branch %r\n') % path)
534 self.ui.debug('ignoring foreign branch %r\n' % path)
535 return None
535 return None
536
536
537 if not stop:
537 if not stop:
@@ -559,7 +559,7 @@ class svn_source(converter_source):
559 if not path.startswith(p) or not paths[p].copyfrom_path:
559 if not path.startswith(p) or not paths[p].copyfrom_path:
560 continue
560 continue
561 newpath = paths[p].copyfrom_path + path[len(p):]
561 newpath = paths[p].copyfrom_path + path[len(p):]
562 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
562 self.ui.debug("branch renamed from %s to %s at %d\n" %
563 (path, newpath, revnum))
563 (path, newpath, revnum))
564 path = newpath
564 path = newpath
565 break
565 break
@@ -567,7 +567,7 @@ class svn_source(converter_source):
567 stream.close()
567 stream.close()
568
568
569 if not path.startswith(self.rootmodule):
569 if not path.startswith(self.rootmodule):
570 self.ui.debug(_('ignoring foreign branch %r\n') % path)
570 self.ui.debug('ignoring foreign branch %r\n' % path)
571 return None
571 return None
572 return self.revid(dirent.created_rev, path)
572 return self.revid(dirent.created_rev, path)
573
573
@@ -579,7 +579,7 @@ class svn_source(converter_source):
579 prevmodule = self.prevmodule
579 prevmodule = self.prevmodule
580 if prevmodule is None:
580 if prevmodule is None:
581 prevmodule = ''
581 prevmodule = ''
582 self.ui.debug(_("reparent to %s\n") % svnurl)
582 self.ui.debug("reparent to %s\n" % svnurl)
583 svn.ra.reparent(self.ra, svnurl)
583 svn.ra.reparent(self.ra, svnurl)
584 self.prevmodule = module
584 self.prevmodule = module
585 return prevmodule
585 return prevmodule
@@ -612,14 +612,14 @@ class svn_source(converter_source):
612 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
612 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
613 if not copyfrom_path:
613 if not copyfrom_path:
614 continue
614 continue
615 self.ui.debug(_("copied to %s from %s@%s\n") %
615 self.ui.debug("copied to %s from %s@%s\n" %
616 (entrypath, copyfrom_path, ent.copyfrom_rev))
616 (entrypath, copyfrom_path, ent.copyfrom_rev))
617 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
617 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
618 elif kind == 0: # gone, but had better be a deleted *file*
618 elif kind == 0: # gone, but had better be a deleted *file*
619 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
619 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
620 pmodule, prevnum = self.revsplit(parents[0])[1:]
620 pmodule, prevnum = self.revsplit(parents[0])[1:]
621 parentpath = pmodule + "/" + entrypath
621 parentpath = pmodule + "/" + entrypath
622 self.ui.debug(_("entry %s\n") % parentpath)
622 self.ui.debug("entry %s\n" % parentpath)
623
623
624 # We can avoid the reparent calls if the module has
624 # We can avoid the reparent calls if the module has
625 # not changed but it probably does not worth the pain.
625 # not changed but it probably does not worth the pain.
@@ -646,7 +646,7 @@ class svn_source(converter_source):
646 del copies[childpath]
646 del copies[childpath]
647 entries.append(childpath)
647 entries.append(childpath)
648 else:
648 else:
649 self.ui.debug(_('unknown path in revision %d: %s\n') % \
649 self.ui.debug('unknown path in revision %d: %s\n' % \
650 (revnum, path))
650 (revnum, path))
651 elif kind == svn.core.svn_node_dir:
651 elif kind == svn.core.svn_node_dir:
652 # If the directory just had a prop change,
652 # If the directory just had a prop change,
@@ -679,7 +679,7 @@ class svn_source(converter_source):
679 if not copyfrompath:
679 if not copyfrompath:
680 continue
680 continue
681 copyfrom[path] = ent
681 copyfrom[path] = ent
682 self.ui.debug(_("mark %s came from %s:%d\n")
682 self.ui.debug("mark %s came from %s:%d\n"
683 % (path, copyfrompath, ent.copyfrom_rev))
683 % (path, copyfrompath, ent.copyfrom_rev))
684 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
684 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
685 children.sort()
685 children.sort()
@@ -703,7 +703,7 @@ class svn_source(converter_source):
703 """Return the parsed commit object or None, and True if
703 """Return the parsed commit object or None, and True if
704 the revision is a branch root.
704 the revision is a branch root.
705 """
705 """
706 self.ui.debug(_("parsing revision %d (%d changes)\n") %
706 self.ui.debug("parsing revision %d (%d changes)\n" %
707 (revnum, len(orig_paths)))
707 (revnum, len(orig_paths)))
708
708
709 branched = False
709 branched = False
@@ -732,7 +732,7 @@ class svn_source(converter_source):
732 self.ui.note(_('found parent of branch %s at %d: %s\n') %
732 self.ui.note(_('found parent of branch %s at %d: %s\n') %
733 (self.module, prevnum, prevmodule))
733 (self.module, prevnum, prevmodule))
734 else:
734 else:
735 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
735 self.ui.debug("no copyfrom path, don't know what to do.\n")
736
736
737 paths = []
737 paths = []
738 # filter out unrelated paths
738 # filter out unrelated paths
@@ -785,7 +785,7 @@ class svn_source(converter_source):
785 lastonbranch = True
785 lastonbranch = True
786 break
786 break
787 if not paths:
787 if not paths:
788 self.ui.debug(_('revision %d has no entries\n') % revnum)
788 self.ui.debug('revision %d has no entries\n' % revnum)
789 continue
789 continue
790 cset, lastonbranch = parselogentry(paths, revnum, author,
790 cset, lastonbranch = parselogentry(paths, revnum, author,
791 date, message)
791 date, message)
@@ -867,7 +867,7 @@ class svn_source(converter_source):
867 return relative
867 return relative
868
868
869 # The path is outside our tracked tree...
869 # The path is outside our tracked tree...
870 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
870 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
871 return None
871 return None
872
872
873 def _checkpath(self, path, revnum):
873 def _checkpath(self, path, revnum):
@@ -142,13 +142,13 @@ def dodiff(ui, repo, diffcmd, diffopts,
142 cmdline = ('%s %s %s %s' %
142 cmdline = ('%s %s %s %s' %
143 (util.shellquote(diffcmd), ' '.join(diffopts),
143 (util.shellquote(diffcmd), ' '.join(diffopts),
144 util.shellquote(dir1), util.shellquote(dir2)))
144 util.shellquote(dir1), util.shellquote(dir2)))
145 ui.debug(_('running %r in %s\n') % (cmdline, tmproot))
145 ui.debug('running %r in %s\n' % (cmdline, tmproot))
146 util.system(cmdline, cwd=tmproot)
146 util.system(cmdline, cwd=tmproot)
147
147
148 for copy_fn, working_fn, mtime in fns_and_mtime:
148 for copy_fn, working_fn, mtime in fns_and_mtime:
149 if os.path.getmtime(copy_fn) != mtime:
149 if os.path.getmtime(copy_fn) != mtime:
150 ui.debug(_('file changed while diffing. '
150 ui.debug('file changed while diffing. '
151 'Overwriting: %s (src: %s)\n') % (working_fn, copy_fn))
151 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
152 util.copyfile(copy_fn, working_fn)
152 util.copyfile(copy_fn, working_fn)
153
153
154 return 1
154 return 1
@@ -229,10 +229,10 @@ def hook(ui, repo, hooktype, node=None,
229 n = bin(node)
229 n = bin(node)
230 cia = hgcia(ui, repo)
230 cia = hgcia(ui, repo)
231 if not cia.user:
231 if not cia.user:
232 ui.debug(_('cia: no user specified'))
232 ui.debug('cia: no user specified')
233 return
233 return
234 if not cia.project:
234 if not cia.project:
235 ui.debug(_('cia: no project specified'))
235 ui.debug('cia: no project specified')
236 return
236 return
237 if hooktype == 'changegroup':
237 if hooktype == 'changegroup':
238 start = repo.changelog.rev(n)
238 start = repo.changelog.rev(n)
@@ -308,7 +308,7 @@ def view(ui, repo, *etc, **opts):
308 os.chdir(repo.root)
308 os.chdir(repo.root)
309 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
309 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
310 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
310 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
311 ui.debug(_("running %s\n") % cmd)
311 ui.debug("running %s\n" % cmd)
312 util.system(cmd)
312 util.system(cmd)
313
313
314 cmdtable = {
314 cmdtable = {
@@ -31,7 +31,7 @@ def start_server(function):
31 'removing it)\n'))
31 'removing it)\n'))
32 os.unlink(os.path.join(self.root, '.hg', 'inotify.sock'))
32 os.unlink(os.path.join(self.root, '.hg', 'inotify.sock'))
33 if err[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
33 if err[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
34 self.ui.debug(_('(starting inotify server)\n'))
34 self.ui.debug('(starting inotify server)\n')
35 try:
35 try:
36 try:
36 try:
37 server.start(self.ui, self.dirstate, self.root)
37 server.start(self.ui, self.dirstate, self.root)
@@ -50,7 +50,7 @@ def start_server(function):
50 'server: %s\n') % err[-1])
50 'server: %s\n') % err[-1])
51 elif err[0] in (errno.ECONNREFUSED, errno.ENOENT):
51 elif err[0] in (errno.ECONNREFUSED, errno.ENOENT):
52 # silently ignore normal errors if autostart is False
52 # silently ignore normal errors if autostart is False
53 self.ui.debug(_('(inotify server not running)\n'))
53 self.ui.debug('(inotify server not running)\n')
54 else:
54 else:
55 self.ui.warn(_('failed to contact inotify server: %s\n')
55 self.ui.warn(_('failed to contact inotify server: %s\n')
56 % err[-1])
56 % err[-1])
@@ -354,7 +354,7 @@ def demo(ui, repo, *args, **opts):
354 repo.commit(text=msg)
354 repo.commit(text=msg)
355 ui.status(_('\n\tkeywords expanded\n'))
355 ui.status(_('\n\tkeywords expanded\n'))
356 ui.write(repo.wread(fn))
356 ui.write(repo.wread(fn))
357 ui.debug(_('\nremoving temporary repository %s\n') % tmpdir)
357 ui.debug('\nremoving temporary repository %s\n' % tmpdir)
358 shutil.rmtree(tmpdir, ignore_errors=True)
358 shutil.rmtree(tmpdir, ignore_errors=True)
359
359
360 def expand(ui, repo, *pats, **opts):
360 def expand(ui, repo, *pats, **opts):
@@ -321,7 +321,7 @@ class queue(object):
321 if bad:
321 if bad:
322 raise util.Abort(bad)
322 raise util.Abort(bad)
323 guards = sorted(set(guards))
323 guards = sorted(set(guards))
324 self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
324 self.ui.debug('active guards: %s\n' % ' '.join(guards))
325 self.active_guards = guards
325 self.active_guards = guards
326 self.guards_dirty = True
326 self.guards_dirty = True
327
327
@@ -276,10 +276,10 @@ def hook(ui, repo, hooktype, node=None,
276 ctx = repo[node]
276 ctx = repo[node]
277
277
278 if not n.subs:
278 if not n.subs:
279 ui.debug(_('notify: no subscribers to repository %s\n') % n.root)
279 ui.debug('notify: no subscribers to repository %s\n' % n.root)
280 return
280 return
281 if n.skipsource(source):
281 if n.skipsource(source):
282 ui.debug(_('notify: changes have source "%s" - skipping\n') % source)
282 ui.debug('notify: changes have source "%s" - skipping\n' % source)
283 return
283 return
284
284
285 ui.pushbuffer()
285 ui.pushbuffer()
@@ -35,7 +35,7 @@ def rebasemerge(repo, rev, first=False):
35 if not first:
35 if not first:
36 ancestor.ancestor = newancestor
36 ancestor.ancestor = newancestor
37 else:
37 else:
38 repo.ui.debug(_("first revision, do not change ancestor\n"))
38 repo.ui.debug("first revision, do not change ancestor\n")
39 stats = merge.update(repo, rev, True, True, False)
39 stats = merge.update(repo, rev, True, True, False)
40 return stats
40 return stats
41
41
@@ -149,7 +149,7 @@ def concludenode(repo, rev, p1, p2, stat
149 """Skip commit if collapsing has been required and rev is not the last
149 """Skip commit if collapsing has been required and rev is not the last
150 revision, commit otherwise
150 revision, commit otherwise
151 """
151 """
152 repo.ui.debug(_(" set parents\n"))
152 repo.ui.debug(" set parents\n")
153 if collapse and not last:
153 if collapse and not last:
154 repo.dirstate.setparents(repo[p1].node())
154 repo.dirstate.setparents(repo[p1].node())
155 return None
155 return None
@@ -187,23 +187,23 @@ def concludenode(repo, rev, p1, p2, stat
187 def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse,
187 def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse,
188 extrafn):
188 extrafn):
189 'Rebase a single revision'
189 'Rebase a single revision'
190 repo.ui.debug(_("rebasing %d:%s\n") % (rev, repo[rev]))
190 repo.ui.debug("rebasing %d:%s\n" % (rev, repo[rev]))
191
191
192 p1, p2 = defineparents(repo, rev, target, state, targetancestors)
192 p1, p2 = defineparents(repo, rev, target, state, targetancestors)
193
193
194 repo.ui.debug(_(" future parents are %d and %d\n") % (repo[p1].rev(),
194 repo.ui.debug(" future parents are %d and %d\n" % (repo[p1].rev(),
195 repo[p2].rev()))
195 repo[p2].rev()))
196
196
197 # Merge phase
197 # Merge phase
198 if len(repo.parents()) != 2:
198 if len(repo.parents()) != 2:
199 # Update to target and merge it with local
199 # Update to target and merge it with local
200 if repo['.'].rev() != repo[p1].rev():
200 if repo['.'].rev() != repo[p1].rev():
201 repo.ui.debug(_(" update to %d:%s\n") % (repo[p1].rev(), repo[p1]))
201 repo.ui.debug(" update to %d:%s\n" % (repo[p1].rev(), repo[p1]))
202 merge.update(repo, p1, False, True, False)
202 merge.update(repo, p1, False, True, False)
203 else:
203 else:
204 repo.ui.debug(_(" already in target\n"))
204 repo.ui.debug(" already in target\n")
205 repo.dirstate.write()
205 repo.dirstate.write()
206 repo.ui.debug(_(" merge against %d:%s\n") % (repo[rev].rev(), repo[rev]))
206 repo.ui.debug(" merge against %d:%s\n" % (repo[rev].rev(), repo[rev]))
207 first = repo[rev].rev() == repo[min(state)].rev()
207 first = repo[rev].rev() == repo[min(state)].rev()
208 stats = rebasemerge(repo, rev, first)
208 stats = rebasemerge(repo, rev, first)
209
209
@@ -211,7 +211,7 @@ def rebasenode(repo, rev, target, state,
211 raise util.Abort(_('fix unresolved conflicts with hg resolve then '
211 raise util.Abort(_('fix unresolved conflicts with hg resolve then '
212 'run hg rebase --continue'))
212 'run hg rebase --continue'))
213 else: # we have an interrupted rebase
213 else: # we have an interrupted rebase
214 repo.ui.debug(_('resuming interrupted rebase\n'))
214 repo.ui.debug('resuming interrupted rebase\n')
215
215
216 # Keep track of renamed files in the revision that is going to be rebased
216 # Keep track of renamed files in the revision that is going to be rebased
217 # Here we simulate the copies and renames in the source changeset
217 # Here we simulate the copies and renames in the source changeset
@@ -234,7 +234,7 @@ def rebasenode(repo, rev, target, state,
234 else:
234 else:
235 if not collapse:
235 if not collapse:
236 repo.ui.note(_('no changes, revision %d skipped\n') % rev)
236 repo.ui.note(_('no changes, revision %d skipped\n') % rev)
237 repo.ui.debug(_('next revision set to %s\n') % p1)
237 repo.ui.debug('next revision set to %s\n' % p1)
238 skipped.add(rev)
238 skipped.add(rev)
239 state[rev] = p1
239 state[rev] = p1
240
240
@@ -280,7 +280,7 @@ def updatemq(repo, state, skipped, **opt
280 mqrebase = {}
280 mqrebase = {}
281 for p in repo.mq.applied:
281 for p in repo.mq.applied:
282 if repo[p.rev].rev() in state:
282 if repo[p.rev].rev() in state:
283 repo.ui.debug(_('revision %d is an mq patch (%s), finalize it.\n') %
283 repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
284 (repo[p.rev].rev(), p.name))
284 (repo[p.rev].rev(), p.name))
285 mqrebase[repo[p.rev].rev()] = (p.name, isagitpatch(repo, p.name))
285 mqrebase[repo[p.rev].rev()] = (p.name, isagitpatch(repo, p.name))
286
286
@@ -290,7 +290,7 @@ def updatemq(repo, state, skipped, **opt
290 # We must start import from the newest revision
290 # We must start import from the newest revision
291 for rev in sorted(mqrebase, reverse=True):
291 for rev in sorted(mqrebase, reverse=True):
292 if rev not in skipped:
292 if rev not in skipped:
293 repo.ui.debug(_('import mq patch %d (%s)\n')
293 repo.ui.debug('import mq patch %d (%s)\n'
294 % (state[rev], mqrebase[rev][0]))
294 % (state[rev], mqrebase[rev][0]))
295 repo.mq.qimport(repo, (), patchname=mqrebase[rev][0],
295 repo.mq.qimport(repo, (), patchname=mqrebase[rev][0],
296 git=mqrebase[rev][1],rev=[str(state[rev])])
296 git=mqrebase[rev][1],rev=[str(state[rev])])
@@ -311,7 +311,7 @@ def storestatus(repo, originalwd, target
311 newrev = repo[v].hex()
311 newrev = repo[v].hex()
312 f.write("%s:%s\n" % (oldrev, newrev))
312 f.write("%s:%s\n" % (oldrev, newrev))
313 f.close()
313 f.close()
314 repo.ui.debug(_('rebase status stored\n'))
314 repo.ui.debug('rebase status stored\n')
315
315
316 def clearstatus(repo):
316 def clearstatus(repo):
317 'Remove the status files'
317 'Remove the status files'
@@ -342,7 +342,7 @@ def restorestatus(repo):
342 else:
342 else:
343 oldrev, newrev = l.split(':')
343 oldrev, newrev = l.split(':')
344 state[repo[oldrev].rev()] = repo[newrev].rev()
344 state[repo[oldrev].rev()] = repo[newrev].rev()
345 repo.ui.debug(_('rebase status resumed\n'))
345 repo.ui.debug('rebase status resumed\n')
346 return originalwd, target, state, collapse, keep, keepbranches, external
346 return originalwd, target, state, collapse, keep, keepbranches, external
347 except IOError, err:
347 except IOError, err:
348 if err.errno != errno.ENOENT:
348 if err.errno != errno.ENOENT:
@@ -392,12 +392,12 @@ def buildstate(repo, dest, src, base, co
392 cwd = repo['.'].rev()
392 cwd = repo['.'].rev()
393
393
394 if cwd == dest:
394 if cwd == dest:
395 repo.ui.debug(_('already working on current\n'))
395 repo.ui.debug('already working on current\n')
396 return None
396 return None
397
397
398 targetancestors = set(repo.changelog.ancestors(dest))
398 targetancestors = set(repo.changelog.ancestors(dest))
399 if cwd in targetancestors:
399 if cwd in targetancestors:
400 repo.ui.debug(_('already working on the current branch\n'))
400 repo.ui.debug('already working on the current branch\n')
401 return None
401 return None
402
402
403 cwdancestors = set(repo.changelog.ancestors(cwd))
403 cwdancestors = set(repo.changelog.ancestors(cwd))
@@ -405,7 +405,7 @@ def buildstate(repo, dest, src, base, co
405 rebasingbranch = cwdancestors - targetancestors
405 rebasingbranch = cwdancestors - targetancestors
406 source = min(rebasingbranch)
406 source = min(rebasingbranch)
407
407
408 repo.ui.debug(_('rebase onto %d starting from %d\n') % (dest, source))
408 repo.ui.debug('rebase onto %d starting from %d\n' % (dest, source))
409 state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
409 state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
410 external = nullrev
410 external = nullrev
411 if collapse:
411 if collapse:
@@ -429,8 +429,8 @@ def pullrebase(orig, ui, repo, *args, **
429 if opts.get('rebase'):
429 if opts.get('rebase'):
430 if opts.get('update'):
430 if opts.get('update'):
431 del opts['update']
431 del opts['update']
432 ui.debug(_('--update and --rebase are not compatible, ignoring '
432 ui.debug('--update and --rebase are not compatible, ignoring '
433 'the update flag\n'))
433 'the update flag\n')
434
434
435 cmdutil.bail_if_changed(repo)
435 cmdutil.bail_if_changed(repo)
436 revsprepull = len(repo)
436 revsprepull = len(repo)
@@ -463,7 +463,7 @@ def dorecord(ui, repo, committer, *pats,
463 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
463 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
464 dir=backupdir)
464 dir=backupdir)
465 os.close(fd)
465 os.close(fd)
466 ui.debug(_('backup %r as %r\n') % (f, tmpname))
466 ui.debug('backup %r as %r\n' % (f, tmpname))
467 util.copyfile(repo.wjoin(f), tmpname)
467 util.copyfile(repo.wjoin(f), tmpname)
468 backups[f] = tmpname
468 backups[f] = tmpname
469
469
@@ -481,7 +481,7 @@ def dorecord(ui, repo, committer, *pats,
481 # 3b. (apply)
481 # 3b. (apply)
482 if dopatch:
482 if dopatch:
483 try:
483 try:
484 ui.debug(_('applying patch\n'))
484 ui.debug('applying patch\n')
485 ui.debug(fp.getvalue())
485 ui.debug(fp.getvalue())
486 pfiles = {}
486 pfiles = {}
487 patch.internalpatch(fp, ui, 1, repo.root, files=pfiles,
487 patch.internalpatch(fp, ui, 1, repo.root, files=pfiles,
@@ -512,7 +512,7 @@ def dorecord(ui, repo, committer, *pats,
512 # 5. finally restore backed-up files
512 # 5. finally restore backed-up files
513 try:
513 try:
514 for realname, tmpname in backups.iteritems():
514 for realname, tmpname in backups.iteritems():
515 ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
515 ui.debug('restoring %r to %r\n' % (tmpname, realname))
516 util.copyfile(tmpname, repo.wjoin(realname))
516 util.copyfile(tmpname, repo.wjoin(realname))
517 os.unlink(tmpname)
517 os.unlink(tmpname)
518 os.rmdir(backupdir)
518 os.rmdir(backupdir)
@@ -142,6 +142,6 @@ def reposetup(ui, repo):
142 for f in funcs.split():
142 for f in funcs.split():
143 wrapname(f, wrapper)
143 wrapname(f, wrapper)
144 wrapname("mercurial.osutil.listdir", wrapperforlistdir)
144 wrapname("mercurial.osutil.listdir", wrapperforlistdir)
145 ui.debug(_("[win32mbcs] activated with encoding: %s\n")
145 ui.debug("[win32mbcs] activated with encoding: %s\n"
146 % encoding.encoding)
146 % encoding.encoding)
147
147
@@ -1789,7 +1789,7 @@ def import_(ui, repo, patch1, *patches,
1789 else:
1789 else:
1790 # launch the editor
1790 # launch the editor
1791 message = None
1791 message = None
1792 ui.debug(_('message:\n%s\n') % message)
1792 ui.debug('message:\n%s\n' % message)
1793
1793
1794 wp = repo.parents()
1794 wp = repo.parents()
1795 if opts.get('exact'):
1795 if opts.get('exact'):
@@ -144,16 +144,16 @@ def copies(repo, c1, c2, ca, checkdirs=F
144 elif of in ma:
144 elif of in ma:
145 diverge.setdefault(of, []).append(f)
145 diverge.setdefault(of, []).append(f)
146
146
147 repo.ui.debug(_(" searching for copies back to rev %d\n") % limit)
147 repo.ui.debug(" searching for copies back to rev %d\n" % limit)
148
148
149 u1 = _nonoverlap(m1, m2, ma)
149 u1 = _nonoverlap(m1, m2, ma)
150 u2 = _nonoverlap(m2, m1, ma)
150 u2 = _nonoverlap(m2, m1, ma)
151
151
152 if u1:
152 if u1:
153 repo.ui.debug(_(" unmatched files in local:\n %s\n")
153 repo.ui.debug(" unmatched files in local:\n %s\n"
154 % "\n ".join(u1))
154 % "\n ".join(u1))
155 if u2:
155 if u2:
156 repo.ui.debug(_(" unmatched files in other:\n %s\n")
156 repo.ui.debug(" unmatched files in other:\n %s\n"
157 % "\n ".join(u2))
157 % "\n ".join(u2))
158
158
159 for f in u1:
159 for f in u1:
@@ -169,7 +169,7 @@ def copies(repo, c1, c2, ca, checkdirs=F
169 diverge2.update(fl) # reverse map for below
169 diverge2.update(fl) # reverse map for below
170
170
171 if fullcopy:
171 if fullcopy:
172 repo.ui.debug(_(" all copies found (* = to merge, ! = divergent):\n"))
172 repo.ui.debug(" all copies found (* = to merge, ! = divergent):\n")
173 for f in fullcopy:
173 for f in fullcopy:
174 note = ""
174 note = ""
175 if f in copy: note += "*"
175 if f in copy: note += "*"
@@ -180,7 +180,7 @@ def copies(repo, c1, c2, ca, checkdirs=F
180 if not fullcopy or not checkdirs:
180 if not fullcopy or not checkdirs:
181 return copy, diverge
181 return copy, diverge
182
182
183 repo.ui.debug(_(" checking for directory renames\n"))
183 repo.ui.debug(" checking for directory renames\n")
184
184
185 # generate a directory move map
185 # generate a directory move map
186 d1, d2 = _dirs(m1), _dirs(m2)
186 d1, d2 = _dirs(m1), _dirs(m2)
@@ -216,7 +216,7 @@ def copies(repo, c1, c2, ca, checkdirs=F
216 return copy, diverge
216 return copy, diverge
217
217
218 for d in dirmove:
218 for d in dirmove:
219 repo.ui.debug(_(" dir %s -> %s\n") % (d, dirmove[d]))
219 repo.ui.debug(" dir %s -> %s\n" % (d, dirmove[d]))
220
220
221 # check unaccounted nonoverlapping files against directory moves
221 # check unaccounted nonoverlapping files against directory moves
222 for f in u1 + u2:
222 for f in u1 + u2:
@@ -227,7 +227,7 @@ def copies(repo, c1, c2, ca, checkdirs=F
227 df = dirmove[d] + f[len(d):]
227 df = dirmove[d] + f[len(d):]
228 if df not in copy:
228 if df not in copy:
229 copy[f] = df
229 copy[f] = df
230 repo.ui.debug(_(" file %s -> %s\n") % (f, copy[f]))
230 repo.ui.debug(" file %s -> %s\n" % (f, copy[f]))
231 break
231 break
232
232
233 return copy, diverge
233 return copy, diverge
@@ -214,7 +214,7 @@ class cmdalias(object):
214
214
215 def __call__(self, ui, *args, **opts):
215 def __call__(self, ui, *args, **opts):
216 if self.shadows:
216 if self.shadows:
217 ui.debug(_("alias '%s' shadows command\n") % self.name)
217 ui.debug("alias '%s' shadows command\n" % self.name)
218
218
219 return self.fn(ui, *args, **opts)
219 return self.fn(ui, *args, **opts)
220
220
@@ -140,7 +140,7 @@ def filemerge(repo, mynode, orig, fcd, f
140 binary = isbin(fcd) or isbin(fco) or isbin(fca)
140 binary = isbin(fcd) or isbin(fco) or isbin(fca)
141 symlink = 'l' in fcd.flags() + fco.flags()
141 symlink = 'l' in fcd.flags() + fco.flags()
142 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
142 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
143 ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
143 ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
144 (tool, fd, binary, symlink))
144 (tool, fd, binary, symlink))
145
145
146 if not tool or tool == 'internal:prompt':
146 if not tool or tool == 'internal:prompt':
@@ -170,13 +170,13 @@ def filemerge(repo, mynode, orig, fcd, f
170 else:
170 else:
171 ui.status(_("merging %s\n") % fd)
171 ui.status(_("merging %s\n") % fd)
172
172
173 ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca))
173 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
174
174
175 # do we attempt to simplemerge first?
175 # do we attempt to simplemerge first?
176 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
176 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
177 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
177 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
178 if not r:
178 if not r:
179 ui.debug(_(" premerge successful\n"))
179 ui.debug(" premerge successful\n")
180 os.unlink(back)
180 os.unlink(back)
181 os.unlink(b)
181 os.unlink(b)
182 os.unlink(c)
182 os.unlink(c)
@@ -35,7 +35,7 @@ class httprepository(repo.repository):
35 self._url, authinfo = url.getauthinfo(path)
35 self._url, authinfo = url.getauthinfo(path)
36
36
37 self.ui = ui
37 self.ui = ui
38 self.ui.debug(_('using %s\n') % self._url)
38 self.ui.debug('using %s\n' % self._url)
39
39
40 self.urlopener = url.opener(ui, authinfo)
40 self.urlopener = url.opener(ui, authinfo)
41
41
@@ -56,7 +56,7 @@ class httprepository(repo.repository):
56 self.caps = set(self.do_read('capabilities').split())
56 self.caps = set(self.do_read('capabilities').split())
57 except error.RepoError:
57 except error.RepoError:
58 self.caps = set()
58 self.caps = set()
59 self.ui.debug(_('capabilities: %s\n') %
59 self.ui.debug('capabilities: %s\n' %
60 (' '.join(self.caps or ['none'])))
60 (' '.join(self.caps or ['none'])))
61 return self.caps
61 return self.caps
62
62
@@ -68,21 +68,21 @@ class httprepository(repo.repository):
68 def do_cmd(self, cmd, **args):
68 def do_cmd(self, cmd, **args):
69 data = args.pop('data', None)
69 data = args.pop('data', None)
70 headers = args.pop('headers', {})
70 headers = args.pop('headers', {})
71 self.ui.debug(_("sending %s command\n") % cmd)
71 self.ui.debug("sending %s command\n" % cmd)
72 q = {"cmd": cmd}
72 q = {"cmd": cmd}
73 q.update(args)
73 q.update(args)
74 qs = '?%s' % urllib.urlencode(q)
74 qs = '?%s' % urllib.urlencode(q)
75 cu = "%s%s" % (self._url, qs)
75 cu = "%s%s" % (self._url, qs)
76 try:
76 try:
77 if data:
77 if data:
78 self.ui.debug(_("sending %s bytes\n") % len(data))
78 self.ui.debug("sending %s bytes\n" % len(data))
79 resp = self.urlopener.open(urllib2.Request(cu, data, headers))
79 resp = self.urlopener.open(urllib2.Request(cu, data, headers))
80 except urllib2.HTTPError, inst:
80 except urllib2.HTTPError, inst:
81 if inst.code == 401:
81 if inst.code == 401:
82 raise util.Abort(_('authorization failed'))
82 raise util.Abort(_('authorization failed'))
83 raise
83 raise
84 except httplib.HTTPException, inst:
84 except httplib.HTTPException, inst:
85 self.ui.debug(_('http error while sending %s command\n') % cmd)
85 self.ui.debug('http error while sending %s command\n' % cmd)
86 self.ui.traceback()
86 self.ui.traceback()
87 raise IOError(None, inst)
87 raise IOError(None, inst)
88 except IndexError:
88 except IndexError:
@@ -105,7 +105,7 @@ class httprepository(repo.repository):
105 if not (proto.startswith('application/mercurial-') or
105 if not (proto.startswith('application/mercurial-') or
106 proto.startswith('text/plain') or
106 proto.startswith('text/plain') or
107 proto.startswith('application/hg-changegroup')):
107 proto.startswith('application/hg-changegroup')):
108 self.ui.debug(_("requested URL: '%s'\n") % url.hidepassword(cu))
108 self.ui.debug("requested URL: '%s'\n" % url.hidepassword(cu))
109 raise error.RepoError(_("'%s' does not appear to be an hg repository")
109 raise error.RepoError(_("'%s' does not appear to be an hg repository")
110 % safeurl)
110 % safeurl)
111
111
@@ -527,7 +527,7 @@ class localrepository(repo.repository):
527
527
528 for mf, fn, cmd in self.filterpats[filter]:
528 for mf, fn, cmd in self.filterpats[filter]:
529 if mf(filename):
529 if mf(filename):
530 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
530 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
531 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
531 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
532 break
532 break
533
533
@@ -724,14 +724,14 @@ class localrepository(repo.repository):
724
724
725 # find source in nearest ancestor if we've lost track
725 # find source in nearest ancestor if we've lost track
726 if not crev:
726 if not crev:
727 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
727 self.ui.debug(" %s: searching for copy revision for %s\n" %
728 (fname, cfname))
728 (fname, cfname))
729 for ancestor in self['.'].ancestors():
729 for ancestor in self['.'].ancestors():
730 if cfname in ancestor:
730 if cfname in ancestor:
731 crev = ancestor[cfname].filenode()
731 crev = ancestor[cfname].filenode()
732 break
732 break
733
733
734 self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
734 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
735 meta["copy"] = cfname
735 meta["copy"] = cfname
736 meta["copyrev"] = hex(crev)
736 meta["copyrev"] = hex(crev)
737 fparent1, fparent2 = nullid, newfparent
737 fparent1, fparent2 = nullid, newfparent
@@ -1287,22 +1287,22 @@ class localrepository(repo.repository):
1287 if n[0] in seen:
1287 if n[0] in seen:
1288 continue
1288 continue
1289
1289
1290 self.ui.debug(_("examining %s:%s\n")
1290 self.ui.debug("examining %s:%s\n"
1291 % (short(n[0]), short(n[1])))
1291 % (short(n[0]), short(n[1])))
1292 if n[0] == nullid: # found the end of the branch
1292 if n[0] == nullid: # found the end of the branch
1293 pass
1293 pass
1294 elif n in seenbranch:
1294 elif n in seenbranch:
1295 self.ui.debug(_("branch already found\n"))
1295 self.ui.debug("branch already found\n")
1296 continue
1296 continue
1297 elif n[1] and n[1] in m: # do we know the base?
1297 elif n[1] and n[1] in m: # do we know the base?
1298 self.ui.debug(_("found incomplete branch %s:%s\n")
1298 self.ui.debug("found incomplete branch %s:%s\n"
1299 % (short(n[0]), short(n[1])))
1299 % (short(n[0]), short(n[1])))
1300 search.append(n[0:2]) # schedule branch range for scanning
1300 search.append(n[0:2]) # schedule branch range for scanning
1301 seenbranch.add(n)
1301 seenbranch.add(n)
1302 else:
1302 else:
1303 if n[1] not in seen and n[1] not in fetch:
1303 if n[1] not in seen and n[1] not in fetch:
1304 if n[2] in m and n[3] in m:
1304 if n[2] in m and n[3] in m:
1305 self.ui.debug(_("found new changeset %s\n") %
1305 self.ui.debug("found new changeset %s\n" %
1306 short(n[1]))
1306 short(n[1]))
1307 fetch.add(n[1]) # earliest unknown
1307 fetch.add(n[1]) # earliest unknown
1308 for p in n[2:4]:
1308 for p in n[2:4]:
@@ -1317,11 +1317,11 @@ class localrepository(repo.repository):
1317
1317
1318 if r:
1318 if r:
1319 reqcnt += 1
1319 reqcnt += 1
1320 self.ui.debug(_("request %d: %s\n") %
1320 self.ui.debug("request %d: %s\n" %
1321 (reqcnt, " ".join(map(short, r))))
1321 (reqcnt, " ".join(map(short, r))))
1322 for p in xrange(0, len(r), 10):
1322 for p in xrange(0, len(r), 10):
1323 for b in remote.branches(r[p:p+10]):
1323 for b in remote.branches(r[p:p+10]):
1324 self.ui.debug(_("received %s:%s\n") %
1324 self.ui.debug("received %s:%s\n" %
1325 (short(b[0]), short(b[1])))
1325 (short(b[0]), short(b[1])))
1326 unknown.append(b)
1326 unknown.append(b)
1327
1327
@@ -1334,15 +1334,15 @@ class localrepository(repo.repository):
1334 p = n[0]
1334 p = n[0]
1335 f = 1
1335 f = 1
1336 for i in l:
1336 for i in l:
1337 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1337 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1338 if i in m:
1338 if i in m:
1339 if f <= 2:
1339 if f <= 2:
1340 self.ui.debug(_("found new branch changeset %s\n") %
1340 self.ui.debug("found new branch changeset %s\n" %
1341 short(p))
1341 short(p))
1342 fetch.add(p)
1342 fetch.add(p)
1343 base[i] = 1
1343 base[i] = 1
1344 else:
1344 else:
1345 self.ui.debug(_("narrowed branch search to %s:%s\n")
1345 self.ui.debug("narrowed branch search to %s:%s\n"
1346 % (short(p), short(i)))
1346 % (short(p), short(i)))
1347 newsearch.append((p, i))
1347 newsearch.append((p, i))
1348 break
1348 break
@@ -1361,10 +1361,10 @@ class localrepository(repo.repository):
1361 else:
1361 else:
1362 raise util.Abort(_("repository is unrelated"))
1362 raise util.Abort(_("repository is unrelated"))
1363
1363
1364 self.ui.debug(_("found new changesets starting at ") +
1364 self.ui.debug("found new changesets starting at " +
1365 " ".join([short(f) for f in fetch]) + "\n")
1365 " ".join([short(f) for f in fetch]) + "\n")
1366
1366
1367 self.ui.debug(_("%d total queries\n") % reqcnt)
1367 self.ui.debug("%d total queries\n" % reqcnt)
1368
1368
1369 return base.keys(), list(fetch), heads
1369 return base.keys(), list(fetch), heads
1370
1370
@@ -1381,7 +1381,7 @@ class localrepository(repo.repository):
1381 base = {}
1381 base = {}
1382 self.findincoming(remote, base, heads, force=force)
1382 self.findincoming(remote, base, heads, force=force)
1383
1383
1384 self.ui.debug(_("common changesets up to ")
1384 self.ui.debug("common changesets up to "
1385 + " ".join(map(short, base.keys())) + "\n")
1385 + " ".join(map(short, base.keys())) + "\n")
1386
1386
1387 remain = set(self.changelog.nodemap)
1387 remain = set(self.changelog.nodemap)
@@ -1602,7 +1602,7 @@ class localrepository(repo.repository):
1602 if self.ui.verbose or source == 'bundle':
1602 if self.ui.verbose or source == 'bundle':
1603 self.ui.status(_("%d changesets found\n") % len(nodes))
1603 self.ui.status(_("%d changesets found\n") % len(nodes))
1604 if self.ui.debugflag:
1604 if self.ui.debugflag:
1605 self.ui.debug(_("list of changesets:\n"))
1605 self.ui.debug("list of changesets:\n")
1606 for node in nodes:
1606 for node in nodes:
1607 self.ui.debug("%s\n" % hex(node))
1607 self.ui.debug("%s\n" % hex(node))
1608
1608
@@ -1988,7 +1988,7 @@ class localrepository(repo.repository):
1988 - number of heads stays the same: 1
1988 - number of heads stays the same: 1
1989 """
1989 """
1990 def csmap(x):
1990 def csmap(x):
1991 self.ui.debug(_("add changeset %s\n") % short(x))
1991 self.ui.debug("add changeset %s\n" % short(x))
1992 return len(cl)
1992 return len(cl)
1993
1993
1994 def revmap(x):
1994 def revmap(x):
@@ -2034,7 +2034,7 @@ class localrepository(repo.repository):
2034 f = changegroup.getchunk(source)
2034 f = changegroup.getchunk(source)
2035 if not f:
2035 if not f:
2036 break
2036 break
2037 self.ui.debug(_("adding %s revisions\n") % f)
2037 self.ui.debug("adding %s revisions\n" % f)
2038 fl = self.file(f)
2038 fl = self.file(f)
2039 o = len(fl)
2039 o = len(fl)
2040 chunkiter = changegroup.chunkiter(source)
2040 chunkiter = changegroup.chunkiter(source)
@@ -2067,7 +2067,7 @@ class localrepository(repo.repository):
2067
2067
2068 if changesets > 0:
2068 if changesets > 0:
2069 # forcefully update the on-disk branch cache
2069 # forcefully update the on-disk branch cache
2070 self.ui.debug(_("updating the branch cache\n"))
2070 self.ui.debug("updating the branch cache\n")
2071 self.branchtags()
2071 self.branchtags()
2072 self.hook("changegroup", node=hex(cl.node(clstart)),
2072 self.hook("changegroup", node=hex(cl.node(clstart)),
2073 source=srctype, url=url)
2073 source=srctype, url=url)
@@ -2116,7 +2116,7 @@ class localrepository(repo.repository):
2116 except (ValueError, TypeError):
2116 except (ValueError, TypeError):
2117 raise error.ResponseError(
2117 raise error.ResponseError(
2118 _('Unexpected response from remote server:'), l)
2118 _('Unexpected response from remote server:'), l)
2119 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2119 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2120 # for backwards compat, name was partially encoded
2120 # for backwards compat, name was partially encoded
2121 ofp = self.sopener(store.decodedir(name), 'w')
2121 ofp = self.sopener(store.decodedir(name), 'w')
2122 for chunk in util.filechunkiter(fp, limit=size):
2122 for chunk in util.filechunkiter(fp, limit=size):
@@ -161,8 +161,8 @@ def manifestmerge(repo, p1, p2, pa, over
161 act("divergent renames", "dr", of, fl)
161 act("divergent renames", "dr", of, fl)
162
162
163 repo.ui.note(_("resolving manifests\n"))
163 repo.ui.note(_("resolving manifests\n"))
164 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
164 repo.ui.debug(" overwrite %s partial %s\n" % (overwrite, bool(partial)))
165 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
165 repo.ui.debug(" ancestor %s local %s remote %s\n" % (pa, p1, p2))
166
166
167 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
167 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
168 copied = set(copy.values())
168 copied = set(copy.values())
@@ -252,7 +252,7 @@ def applyupdates(repo, action, wctx, mct
252 f2, fd, flags, move = a[2:]
252 f2, fd, flags, move = a[2:]
253 if f == '.hgsubstate': # merged internally
253 if f == '.hgsubstate': # merged internally
254 continue
254 continue
255 repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
255 repo.ui.debug("preserving %s for resolve of %s\n" % (f, fd))
256 fcl = wctx[f]
256 fcl = wctx[f]
257 fco = mctx[f2]
257 fco = mctx[f2]
258 fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
258 fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
@@ -263,7 +263,7 @@ def applyupdates(repo, action, wctx, mct
263 # remove renamed files after safely stored
263 # remove renamed files after safely stored
264 for f in moves:
264 for f in moves:
265 if util.lexists(repo.wjoin(f)):
265 if util.lexists(repo.wjoin(f)):
266 repo.ui.debug(_("removing %s\n") % f)
266 repo.ui.debug("removing %s\n" % f)
267 os.unlink(repo.wjoin(f))
267 os.unlink(repo.wjoin(f))
268
268
269 audit_path = util.path_auditor(repo.root)
269 audit_path = util.path_auditor(repo.root)
@@ -299,7 +299,7 @@ def applyupdates(repo, action, wctx, mct
299 merged += 1
299 merged += 1
300 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
300 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
301 if f != fd and move and util.lexists(repo.wjoin(f)):
301 if f != fd and move and util.lexists(repo.wjoin(f)):
302 repo.ui.debug(_("removing %s\n") % f)
302 repo.ui.debug("removing %s\n" % f)
303 os.unlink(repo.wjoin(f))
303 os.unlink(repo.wjoin(f))
304 elif m == "g": # get
304 elif m == "g": # get
305 flags = a[2]
305 flags = a[2]
@@ -93,12 +93,12 @@ def extract(ui, fileobj):
93 hgpatch = False
93 hgpatch = False
94 ignoretext = False
94 ignoretext = False
95
95
96 ui.debug(_('found patch at byte %d\n') % m.start(0))
96 ui.debug('found patch at byte %d\n' % m.start(0))
97 diffs_seen += 1
97 diffs_seen += 1
98 cfp = cStringIO.StringIO()
98 cfp = cStringIO.StringIO()
99 for line in payload[:m.start(0)].splitlines():
99 for line in payload[:m.start(0)].splitlines():
100 if line.startswith('# HG changeset patch'):
100 if line.startswith('# HG changeset patch'):
101 ui.debug(_('patch generated by hg export\n'))
101 ui.debug('patch generated by hg export\n')
102 hgpatch = True
102 hgpatch = True
103 # drop earlier commit message content
103 # drop earlier commit message content
104 cfp.seek(0)
104 cfp.seek(0)
@@ -1155,7 +1155,7 @@ def patch(patchname, ui, strip=1, cwd=No
1155 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1155 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1156 except NoHunks:
1156 except NoHunks:
1157 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1157 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1158 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1158 ui.debug('no valid hunks found; trying with %r instead\n' %
1159 patcher)
1159 patcher)
1160 if util.needbinarypatch():
1160 if util.needbinarypatch():
1161 args.append('--binary')
1161 args.append('--binary')
@@ -75,7 +75,7 @@ class sshrepository(repo.repository):
75 if lines[-1] == "1\n" and l == "\n":
75 if lines[-1] == "1\n" and l == "\n":
76 break
76 break
77 if l:
77 if l:
78 ui.debug(_("remote: "), l)
78 ui.debug("remote: ", l)
79 lines.append(l)
79 lines.append(l)
80 max_noise -= 1
80 max_noise -= 1
81 else:
81 else:
@@ -113,7 +113,7 @@ class sshrepository(repo.repository):
113 __del__ = cleanup
113 __del__ = cleanup
114
114
115 def do_cmd(self, cmd, **args):
115 def do_cmd(self, cmd, **args):
116 self.ui.debug(_("sending %s command\n") % cmd)
116 self.ui.debug("sending %s command\n" % cmd)
117 self.pipeo.write("%s\n" % cmd)
117 self.pipeo.write("%s\n" % cmd)
118 for k, v in args.iteritems():
118 for k, v in args.iteritems():
119 self.pipeo.write("%s %d\n" % (k, len(v)))
119 self.pipeo.write("%s %d\n" % (k, len(v)))
@@ -46,7 +46,7 @@ def stream_out(repo, untrusted=False):
46 # get consistent snapshot of repo, lock during scan
46 # get consistent snapshot of repo, lock during scan
47 lock = repo.lock()
47 lock = repo.lock()
48 try:
48 try:
49 repo.ui.debug(_('scanning\n'))
49 repo.ui.debug('scanning\n')
50 for name, ename, size in repo.store.walk():
50 for name, ename, size in repo.store.walk():
51 # for backwards compat, name was partially encoded
51 # for backwards compat, name was partially encoded
52 entries.append((store.encodedir(name), size))
52 entries.append((store.encodedir(name), size))
@@ -57,11 +57,11 @@ def stream_out(repo, untrusted=False):
57 raise StreamException(2)
57 raise StreamException(2)
58
58
59 yield '0\n'
59 yield '0\n'
60 repo.ui.debug(_('%d files, %d bytes to transfer\n') %
60 repo.ui.debug('%d files, %d bytes to transfer\n' %
61 (len(entries), total_bytes))
61 (len(entries), total_bytes))
62 yield '%d %d\n' % (len(entries), total_bytes)
62 yield '%d %d\n' % (len(entries), total_bytes)
63 for name, size in entries:
63 for name, size in entries:
64 repo.ui.debug(_('sending %s (%d bytes)\n') % (name, size))
64 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
65 yield '%s\0%d\n' % (name, size)
65 yield '%s\0%d\n' % (name, size)
66 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
66 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
67 yield chunk
67 yield chunk
@@ -197,7 +197,7 @@ class proxyhandler(urllib2.ProxyHandler)
197 proxyuser, proxypasswd or ''),
197 proxyuser, proxypasswd or ''),
198 proxypath, proxyquery, proxyfrag))
198 proxypath, proxyquery, proxyfrag))
199 proxies = {'http': proxyurl, 'https': proxyurl}
199 proxies = {'http': proxyurl, 'https': proxyurl}
200 ui.debug(_('proxying through http://%s:%s\n') %
200 ui.debug('proxying through http://%s:%s\n' %
201 (proxyhost, proxyport))
201 (proxyhost, proxyport))
202 else:
202 else:
203 proxies = {}
203 proxies = {}
@@ -504,7 +504,7 @@ def opener(ui, authinfo=None):
504 if authinfo is not None:
504 if authinfo is not None:
505 passmgr.add_password(*authinfo)
505 passmgr.add_password(*authinfo)
506 user, passwd = authinfo[2:4]
506 user, passwd = authinfo[2:4]
507 ui.debug(_('http auth: user %s, password %s\n') %
507 ui.debug('http auth: user %s, password %s\n' %
508 (user, passwd and '*' * len(passwd) or 'not set'))
508 (user, passwd and '*' * len(passwd) or 'not set'))
509
509
510 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
510 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
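Only ui.debug() calls change in the hunks above; user-facing messages passed to ui.status(), ui.note(), ui.warn() and util.Abort keep their _() wrappers. A small hypothetical sketch of the resulting convention (the UI stub below is illustrative, not Mercurial's API; the sample strings are taken from the diff):

# Hypothetical sketch of the convention after this changeset: user-facing
# messages stay wrapped in _(), debug messages do not.
from gettext import gettext as _  # stand-in for mercurial.i18n._

class UI(object):
    """Illustrative stub with the two kinds of output channels."""
    def status(self, msg):
        print(msg, end='')  # shown to users, so the text is translatable

    def debug(self, msg):
        print(msg, end='')  # developer output, emitted as-is

ui = UI()
fd = 'hgext/example.py'
ui.status(_("merging %s\n") % fd)                        # translated
ui.debug("picked tool 'internal:merge' for %s\n" % fd)   # not translated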